diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a74101922..ac1c24d10 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -b142b72bea6f30d8efb36dfa8c58e0d63ae5329b \ No newline at end of file +a8f547d3728fba835fbdda301e846829c5cbbef5 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 706329a62..44b3782e1 100755 --- a/.gitattributes +++ b/.gitattributes @@ -23,6 +23,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Tempora databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponse.java linguist-generated=true @@ -114,6 +126,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingU databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java linguist-generated=true @@ -202,8 +215,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousU databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -219,7 +230,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true @@ -230,12 +240,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Credentials databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java linguist-generated=true @@ -248,10 +252,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -262,12 +262,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true @@ -288,7 +287,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLoc databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java linguist-generated=true @@ -320,17 +318,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java linguist-generated=true @@ -338,7 +334,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java linguist-generated=true @@ -356,12 +351,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java linguist-generated=true @@ -385,7 +379,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumes databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java linguist-generated=true @@ -412,7 +405,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java linguist-generated=true @@ -423,7 +415,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java linguist-generated=true @@ -461,9 +452,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCred databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true @@ -495,15 +483,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true @@ -824,9 +811,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadTyp databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java linguist-generated=true @@ -837,9 +821,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSc databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true @@ -857,6 +838,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGet databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java linguist-generated=true @@ -865,8 +848,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpa databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true @@ -889,16 +870,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageE databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java 
linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true @@ -906,7 +879,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrib databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true @@ -914,6 +886,47 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Unpublis databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true @@ -1106,6 +1119,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true @@ -1118,6 +1135,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyC databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java linguist-generated=true @@ -1255,6 +1273,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDet databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationTypeType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java linguist-generated=true @@ -1435,9 +1454,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java linguist-generated=true @@ -1508,10 +1524,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExper databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true @@ -1545,8 +1557,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java linguist-generated=true @@ -1882,6 +1892,19 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Worksp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java linguist-generated=true @@ -2050,6 +2073,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablem 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java linguist-generated=true @@ -2195,7 +2220,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java linguist-generated=true @@ -2204,6 +2228,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotifi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true @@ -2228,6 +2253,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true @@ -2304,7 +2331,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAc databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true @@ -2634,6 +2661,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessMessage.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonType.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index ac1055a9c..72638ff3f 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,67 @@ ### Internal Changes ### API Changes +* Added `com.databricks.sdk.service.aibuilder`, `com.databricks.sdk.service.database` and `com.databricks.sdk.service.qualitymonitorv2` packages. +* Added `workspaceClient.customLlms()` service. +* Added `workspaceClient.database()` service. +* Added `workspaceClient.qualityMonitorV2()` service. +* Added `updatePrivateEndpointRule()` method for `accountClient.networkConnectivity()` service. +* Added `listSpaces()` method for `workspaceClient.genie()` service. +* Added `pageToken` field for `com.databricks.sdk.service.billing.ListLogDeliveryRequest`. +* Added `nextPageToken` field for `com.databricks.sdk.service.billing.WrappedLogDeliveryConfigurations`. +* Added `nextPageToken` field for `com.databricks.sdk.service.catalog.EffectivePermissionsList`. +* Added `maxResults` and `pageToken` fields for `com.databricks.sdk.service.catalog.GetEffectiveRequest`. +* Added `maxResults` and `pageToken` fields for `com.databricks.sdk.service.catalog.GetGrantRequest`. +* Added `nextPageToken` field for `com.databricks.sdk.service.catalog.ListMetastoresResponse`. +* Added `cleanRoomName` field for `com.databricks.sdk.service.cleanrooms.CleanRoomAsset`. 
+* [Breaking] Added `name` field for `com.databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`. +* [Breaking] Added `name` field for `com.databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`. +* Added `triggerState` field for `com.databricks.sdk.service.jobs.BaseJob`. +* Added `triggerState` field for `com.databricks.sdk.service.jobs.Job`. +* Added `dbtCloudOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. +* Added `dbtCloudTask` field for `com.databricks.sdk.service.jobs.RunTask`. +* Added `dbtCloudTask` field for `com.databricks.sdk.service.jobs.SubmitTask`. +* Added `dbtCloudTask` field for `com.databricks.sdk.service.jobs.Task`. +* Added `endpointService` and `resourceNames` fields for `com.databricks.sdk.service.settings.CreatePrivateEndpointRule`. +* Added `awsPrivateEndpointRules` field for `com.databricks.sdk.service.settings.NccEgressTargetRules`. +* Added `taskTimeOverTimeRange` field for `com.databricks.sdk.service.sql.QueryMetrics`. +* Added `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum`. +* Added `CLUSTER_MIGRATED` enum value for `com.databricks.sdk.service.compute.EventType`. +* Added `DRIVER_UNHEALTHY` enum value for `com.databricks.sdk.service.compute.TerminationReasonCode`. +* [Breaking] Changed `create()` method for `accountClient.logDelivery()` service with new required argument order. +* [Breaking] Changed `get()` method for `accountClient.logDelivery()` service to return `com.databricks.sdk.service.billing.GetLogDeliveryConfigurationResponse` class. +* [Breaking] Changed `createPrivateEndpointRule()`, `deletePrivateEndpointRule()` and `getPrivateEndpointRule()` methods for `accountClient.networkConnectivity()` service to return `com.databricks.sdk.service.settings.NccPrivateEndpointRule` class. 
+* [Breaking] Changed `listPrivateEndpointRules()` method for `accountClient.networkConnectivity()` service to return `com.databricks.sdk.service.settings.ListPrivateEndpointRulesResponse` class. +* [Breaking] Changed `delete()` and `get()` methods for `workspaceClient.cleanRoomAssets()` service with new required argument order. +* [Breaking] Changed `delete()` and `get()` methods for `workspaceClient.cleanRoomAssets()` service. Method path has changed. +* [Breaking] Changed `get()` method for `workspaceClient.grants()` service to return `com.databricks.sdk.service.catalog.GetPermissionsResponse` class. +* [Breaking] Changed `update()` method for `workspaceClient.grants()` service to return `com.databricks.sdk.service.catalog.UpdatePermissionsResponse` class. +* [Breaking] Changed `list()` method for `workspaceClient.metastores()` service to require request of `com.databricks.sdk.service.catalog.ListMetastoresRequest` class. +* Changed `accountId`, `credentialsId`, `logType`, `outputFormat` and `storageConfigurationId` fields for `com.databricks.sdk.service.billing.LogDeliveryConfiguration` to be required. +* Changed `message` and `status` fields for `com.databricks.sdk.service.billing.LogDeliveryStatus` to be required. +* [Breaking] Changed `logDeliveryConfiguration` field for `com.databricks.sdk.service.billing.WrappedCreateLogDeliveryConfiguration` to be required. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.GetEffectiveRequest` to type `String` class. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.GetGrantRequest` to type `String` class. +* [Breaking] Changed `deltaSharingScope` field for `com.databricks.sdk.service.catalog.GetMetastoreSummaryResponse` to type `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum` class. 
+* [Breaking] Changed `deltaSharingScope` field for `com.databricks.sdk.service.catalog.MetastoreInfo` to type `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum` class. +* [Breaking] Changed `deltaSharingScope` field for `com.databricks.sdk.service.catalog.UpdateMetastore` to type `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum` class. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.UpdatePermissions` to type `String` class. +* Changed `resourceId` field for `com.databricks.sdk.service.settings.CreatePrivateEndpointRule` to no longer be required. +* [Breaking] Changed pagination for `accountClient.networkConnectivity().listPrivateEndpointRules()` method. +* [Breaking] Removed `workspaceClient.databaseInstances()` service. +* [Breaking] Removed `workspaceClient.queryExecution()` service. +* [Breaking] Removed `updateNccAzurePrivateEndpointRulePublic()` method for `accountClient.networkConnectivity()` service. +* [Breaking] Removed `getCredentialsForTraceDataDownload()`, `getCredentialsForTraceDataUpload()` and `listLoggedModelArtifacts()` methods for `workspaceClient.experiments()` service. +* [Breaking] Removed `getPublishedDashboardEmbedded()` method for `workspaceClient.lakeviewEmbedded()` service. +* [Breaking] Removed `assetFullName` field for `com.databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`. +* [Breaking] Removed `assetFullName` field for `com.databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.ClusterAttributes`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.ClusterDetails`. 
+* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.ClusterSpec`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.CreateCluster`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.EditCluster`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.UpdateClusterResource`. +* [Breaking] Removed `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.GetMetastoreSummaryResponseDeltaSharingScope`. +* [Breaking] Removed `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.MetastoreInfoDeltaSharingScope`. +* [Breaking] Removed `CATALOG`, `CLEAN_ROOM`, `CONNECTION`, `CREDENTIAL`, `EXTERNAL_LOCATION`, `EXTERNAL_METADATA`, `FUNCTION`, `METASTORE`, `PIPELINE`, `PROVIDER`, `RECIPIENT`, `SCHEMA`, `SHARE`, `STAGING_TABLE`, `STORAGE_CREDENTIAL`, `TABLE`, `UNKNOWN_SECURABLE_TYPE` and `VOLUME` enum values for `com.databricks.sdk.service.catalog.SecurableType`. +* [Breaking] Removed `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.UpdateMetastoreDeltaSharingScope`. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index be96caf24..8e635b302 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -305,60 +305,8 @@ public AccountIpAccessListsAPI ipAccessLists() { } /** - * These APIs manage log delivery configurations for this account. The two supported log types for - * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This - * feature works with all account ID types. - * - *

Log delivery works with all account types. However, if your account is on the E2 version of - * the platform or on a select custom plan that allows multiple workspaces per account, you can - * optionally configure different storage destinations for each workspace. Log delivery status is - * also provided to know the latest status of log delivery attempts. The high-level flow of - * billable usage delivery: - * - *

1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. - * Using Databricks APIs, call the Account API to create a [storage configuration - * object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, - * create the appropriate AWS IAM role. For full details, including the required IAM role policies - * and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the - * Account API to create a [credential configuration object](:method:Credentials/Create) that uses - * the IAM role"s ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the - * Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the - * credential and storage configuration objects from previous steps. You can specify if the logs - * should include all events of that log type in your account (_Account level_ delivery) or only - * events for a specific set of workspaces (_workspace level_ delivery). Account level log - * delivery applies to all current and future workspaces plus account level logs, while workspace - * level log delivery solely delivers logs related to the specified workspaces. You can create - * multiple types of delivery configurations per account. - * - *

For billable usage delivery: * For more information about billable usage logs, see [Billable - * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is - * `//billable-usage/csv/`, where `` is the name of the optional - * delivery path prefix you set up during log delivery configuration. Files are named - * `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to - * specific workspaces (_workspace level_ logs). You can aggregate usage for your entire account - * by creating an _account level_ delivery configuration that delivers logs for all current and - * future workspaces in your account. * The files are delivered daily by overwriting the month's - * CSV file for each workspace. - * - *

For audit log delivery: * For more information about about audit log delivery, see [Audit - * log delivery], which includes information about the used JSON schema. * The delivery location - * is - * `//workspaceId=/date=/auditlogs_.json`. - * Files may get overwritten with the same content multiple times to achieve exactly-once - * delivery. * If the audit log delivery configuration included specific workspace IDs, only - * _workspace-level_ audit logs for those workspaces are delivered. If the log delivery - * configuration applies to the entire account (_account level_ delivery configuration), the audit - * log delivery includes workspace-level audit logs for all workspaces in the account as well as - * account-level audit logs. See [Audit log delivery] for details. * Auditable events are - * typically available in logs within 15 minutes. - * - *

[Audit log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable - * usage log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html - * [create a new AWS S3 bucket]: - * https://docs.databricks.com/administration-guide/account-api/aws-storage.html + * These APIs manage Log delivery configurations for this account. Log delivery configs enable you + * to configure the delivery of the specified type of logs to your storage account. */ public LogDeliveryAPI logDelivery() { return logDeliveryAPI; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index d4c066a69..bf50805dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -8,6 +8,8 @@ import com.databricks.sdk.mixin.ClustersExt; import com.databricks.sdk.mixin.DbfsExt; import com.databricks.sdk.mixin.SecretsExt; +import com.databricks.sdk.service.aibuilder.CustomLlmsAPI; +import com.databricks.sdk.service.aibuilder.CustomLlmsService; import com.databricks.sdk.service.apps.AppsAPI; import com.databricks.sdk.service.apps.AppsService; import com.databricks.sdk.service.catalog.ArtifactAllowlistsAPI; @@ -18,8 +20,6 @@ import com.databricks.sdk.service.catalog.ConnectionsService; import com.databricks.sdk.service.catalog.CredentialsAPI; import com.databricks.sdk.service.catalog.CredentialsService; -import com.databricks.sdk.service.catalog.DatabaseInstancesAPI; -import com.databricks.sdk.service.catalog.DatabaseInstancesService; import com.databricks.sdk.service.catalog.ExternalLocationsAPI; import com.databricks.sdk.service.catalog.ExternalLocationsService; import 
com.databricks.sdk.service.catalog.FunctionsAPI; @@ -83,8 +83,8 @@ import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI; import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService; import com.databricks.sdk.service.dashboards.LakeviewService; -import com.databricks.sdk.service.dashboards.QueryExecutionAPI; -import com.databricks.sdk.service.dashboards.QueryExecutionService; +import com.databricks.sdk.service.database.DatabaseAPI; +import com.databricks.sdk.service.database.DatabaseService; import com.databricks.sdk.service.files.DbfsService; import com.databricks.sdk.service.files.FilesAPI; import com.databricks.sdk.service.files.FilesService; @@ -140,6 +140,8 @@ import com.databricks.sdk.service.ml.ModelRegistryService; import com.databricks.sdk.service.pipelines.PipelinesAPI; import com.databricks.sdk.service.pipelines.PipelinesService; +import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2API; +import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2Service; import com.databricks.sdk.service.serving.ServingEndpointsAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneAPI; import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneService; @@ -240,10 +242,11 @@ public class WorkspaceClient { private CredentialsAPI credentialsAPI; private CredentialsManagerAPI credentialsManagerAPI; private CurrentUserAPI currentUserAPI; + private CustomLlmsAPI customLlmsAPI; private DashboardWidgetsAPI dashboardWidgetsAPI; private DashboardsAPI dashboardsAPI; private DataSourcesAPI dataSourcesAPI; - private DatabaseInstancesAPI databaseInstancesAPI; + private DatabaseAPI databaseAPI; private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private ExperimentsAPI experimentsAPI; @@ -281,10 +284,10 @@ public class WorkspaceClient { private ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboardsAPI; private ProviderProvidersAPI providerProvidersAPI; private ProvidersAPI 
providersAPI; + private QualityMonitorV2API qualityMonitorV2API; private QualityMonitorsAPI qualityMonitorsAPI; private QueriesAPI queriesAPI; private QueriesLegacyAPI queriesLegacyAPI; - private QueryExecutionAPI queryExecutionAPI; private QueryHistoryAPI queryHistoryAPI; private QueryVisualizationsAPI queryVisualizationsAPI; private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI; @@ -350,10 +353,11 @@ public WorkspaceClient(DatabricksConfig config) { credentialsAPI = new CredentialsAPI(apiClient); credentialsManagerAPI = new CredentialsManagerAPI(apiClient); currentUserAPI = new CurrentUserAPI(apiClient); + customLlmsAPI = new CustomLlmsAPI(apiClient); dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); dashboardsAPI = new DashboardsAPI(apiClient); dataSourcesAPI = new DataSourcesAPI(apiClient); - databaseInstancesAPI = new DatabaseInstancesAPI(apiClient); + databaseAPI = new DatabaseAPI(apiClient); dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); @@ -391,10 +395,10 @@ public WorkspaceClient(DatabricksConfig config) { providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient); providerProvidersAPI = new ProviderProvidersAPI(apiClient); providersAPI = new ProvidersAPI(apiClient); + qualityMonitorV2API = new QualityMonitorV2API(apiClient); qualityMonitorsAPI = new QualityMonitorsAPI(apiClient); queriesAPI = new QueriesAPI(apiClient); queriesLegacyAPI = new QueriesLegacyAPI(apiClient); - queryExecutionAPI = new QueryExecutionAPI(apiClient); queryHistoryAPI = new QueryHistoryAPI(apiClient); queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient); queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient); @@ -676,6 +680,11 @@ public CurrentUserAPI currentUser() { return currentUserAPI; } + /** The Custom LLMs service manages state and powers the UI for the Custom LLM product. 
*/ + public CustomLlmsAPI customLlms() { + return customLlmsAPI; + } + /** * This is an evolving API that facilitates the addition and removal of widgets from existing * dashboards within the Databricks Workspace. Data structures may change over time. @@ -714,8 +723,8 @@ public DataSourcesAPI dataSources() { } /** Database Instances provide access to a database via REST API or direct SQL. */ - public DatabaseInstancesAPI databaseInstances() { - return databaseInstancesAPI; + public DatabaseAPI database() { + return databaseAPI; } /** @@ -796,6 +805,8 @@ public ExternalLocationsAPI externalLocations() { * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ public FilesAPI files() { @@ -1211,6 +1222,11 @@ public ProvidersAPI providers() { return providersAPI; } + /** Manage data quality of UC objects (currently support `schema`) */ + public QualityMonitorV2API qualityMonitorV2() { + return qualityMonitorV2API; + } + /** * A monitor computes and monitors data or model quality metrics for a table over time. It * generates metrics tables and a dashboard that you can use to monitor table health and set @@ -1249,11 +1265,6 @@ public QueriesLegacyAPI queriesLegacy() { return queriesLegacyAPI; } - /** Query execution APIs for AI / BI Dashboards */ - public QueryExecutionAPI queryExecution() { - return queryExecutionAPI; - } - /** * A service responsible for storing and retrieving the list of queries run against SQL endpoints * and serverless compute. @@ -2035,6 +2046,17 @@ public WorkspaceClient withCurrentUserAPI(CurrentUserAPI currentUser) { return this; } + /** Replace the default CustomLlmsService with a custom implementation. */ + public WorkspaceClient withCustomLlmsImpl(CustomLlmsService customLlms) { + return this.withCustomLlmsAPI(new CustomLlmsAPI(customLlms)); + } + + /** Replace the default CustomLlmsAPI with a custom implementation. */ + public WorkspaceClient withCustomLlmsAPI(CustomLlmsAPI customLlms) { + this.customLlmsAPI = customLlms; + return this; + } + /** Replace the default DashboardWidgetsService with a custom implementation. */ public WorkspaceClient withDashboardWidgetsImpl(DashboardWidgetsService dashboardWidgets) { return this.withDashboardWidgetsAPI(new DashboardWidgetsAPI(dashboardWidgets)); @@ -2068,14 +2090,14 @@ public WorkspaceClient withDataSourcesAPI(DataSourcesAPI dataSources) { return this; } - /** Replace the default DatabaseInstancesService with a custom implementation. 
*/ - public WorkspaceClient withDatabaseInstancesImpl(DatabaseInstancesService databaseInstances) { - return this.withDatabaseInstancesAPI(new DatabaseInstancesAPI(databaseInstances)); + /** Replace the default DatabaseService with a custom implementation. */ + public WorkspaceClient withDatabaseImpl(DatabaseService database) { + return this.withDatabaseAPI(new DatabaseAPI(database)); } - /** Replace the default DatabaseInstancesAPI with a custom implementation. */ - public WorkspaceClient withDatabaseInstancesAPI(DatabaseInstancesAPI databaseInstances) { - this.databaseInstancesAPI = databaseInstances; + /** Replace the default DatabaseAPI with a custom implementation. */ + public WorkspaceClient withDatabaseAPI(DatabaseAPI database) { + this.databaseAPI = database; return this; } @@ -2507,6 +2529,17 @@ public WorkspaceClient withProvidersAPI(ProvidersAPI providers) { return this; } + /** Replace the default QualityMonitorV2Service with a custom implementation. */ + public WorkspaceClient withQualityMonitorV2Impl(QualityMonitorV2Service qualityMonitorV2) { + return this.withQualityMonitorV2API(new QualityMonitorV2API(qualityMonitorV2)); + } + + /** Replace the default QualityMonitorV2API with a custom implementation. */ + public WorkspaceClient withQualityMonitorV2API(QualityMonitorV2API qualityMonitorV2) { + this.qualityMonitorV2API = qualityMonitorV2; + return this; + } + /** Replace the default QualityMonitorsService with a custom implementation. */ public WorkspaceClient withQualityMonitorsImpl(QualityMonitorsService qualityMonitors) { return this.withQualityMonitorsAPI(new QualityMonitorsAPI(qualityMonitors)); @@ -2540,17 +2573,6 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) { return this; } - /** Replace the default QueryExecutionService with a custom implementation. 
*/ - public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) { - return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution)); - } - - /** Replace the default QueryExecutionAPI with a custom implementation. */ - public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) { - this.queryExecutionAPI = queryExecution; - return this; - } - /** Replace the default QueryHistoryService with a custom implementation. */ public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) { return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java new file mode 100755 index 000000000..905d58253 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class CancelCustomLlmOptimizationRunRequest { + /** */ + @JsonIgnore private String id; + + public CancelCustomLlmOptimizationRunRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CancelCustomLlmOptimizationRunRequest that = (CancelCustomLlmOptimizationRunRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(CancelCustomLlmOptimizationRunRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java similarity index 76% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java index 94f12df20..62f4aac5b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java @@ -1,13 +1,13 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.aibuilder; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import java.util.Objects; @Generated -public class ExecuteQueryResponse { +public class CancelResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ExecuteQueryResponse.class).toString(); + return new ToStringer(CancelResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java new file mode 100755 index 000000000..06fb1ec76 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java @@ -0,0 +1,190 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CustomLlm { + /** */ + @JsonProperty("agent_artifact_path") + private String agentArtifactPath; + + /** Creation timestamp of the custom LLM */ + @JsonProperty("creation_time") + private String creationTime; + + /** Creator of the custom LLM */ + @JsonProperty("creator") + private String creator; + + /** Datasets used for training and evaluating the model, not for inference */ + @JsonProperty("datasets") + private Collection datasets; + + /** Name of the endpoint that will be used to serve the custom LLM */ + @JsonProperty("endpoint_name") + private String endpointName; + + /** Guidelines for the custom LLM to adhere to */ + @JsonProperty("guidelines") + private Collection guidelines; + + /** */ + @JsonProperty("id") + private 
String id; + + /** Instructions for the custom LLM to follow */ + @JsonProperty("instructions") + private String instructions; + + /** Name of the custom LLM */ + @JsonProperty("name") + private String name; + + /** If optimization is kicked off, tracks the state of the custom LLM */ + @JsonProperty("optimization_state") + private State optimizationState; + + public CustomLlm setAgentArtifactPath(String agentArtifactPath) { + this.agentArtifactPath = agentArtifactPath; + return this; + } + + public String getAgentArtifactPath() { + return agentArtifactPath; + } + + public CustomLlm setCreationTime(String creationTime) { + this.creationTime = creationTime; + return this; + } + + public String getCreationTime() { + return creationTime; + } + + public CustomLlm setCreator(String creator) { + this.creator = creator; + return this; + } + + public String getCreator() { + return creator; + } + + public CustomLlm setDatasets(Collection datasets) { + this.datasets = datasets; + return this; + } + + public Collection getDatasets() { + return datasets; + } + + public CustomLlm setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public CustomLlm setGuidelines(Collection guidelines) { + this.guidelines = guidelines; + return this; + } + + public Collection getGuidelines() { + return guidelines; + } + + public CustomLlm setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public CustomLlm setInstructions(String instructions) { + this.instructions = instructions; + return this; + } + + public String getInstructions() { + return instructions; + } + + public CustomLlm setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public CustomLlm setOptimizationState(State optimizationState) { + this.optimizationState = optimizationState; + return this; + } + + public 
State getOptimizationState() { + return optimizationState; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomLlm that = (CustomLlm) o; + return Objects.equals(agentArtifactPath, that.agentArtifactPath) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(creator, that.creator) + && Objects.equals(datasets, that.datasets) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(guidelines, that.guidelines) + && Objects.equals(id, that.id) + && Objects.equals(instructions, that.instructions) + && Objects.equals(name, that.name) + && Objects.equals(optimizationState, that.optimizationState); + } + + @Override + public int hashCode() { + return Objects.hash( + agentArtifactPath, + creationTime, + creator, + datasets, + endpointName, + guidelines, + id, + instructions, + name, + optimizationState); + } + + @Override + public String toString() { + return new ToStringer(CustomLlm.class) + .add("agentArtifactPath", agentArtifactPath) + .add("creationTime", creationTime) + .add("creator", creator) + .add("datasets", datasets) + .add("endpointName", endpointName) + .add("guidelines", guidelines) + .add("id", id) + .add("instructions", instructions) + .add("name", name) + .add("optimizationState", optimizationState) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsAPI.java new file mode 100755 index 000000000..448c15449 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsAPI.java @@ -0,0 +1,66 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** The Custom LLMs service manages state and powers the UI for the Custom LLM product. */ +@Generated +public class CustomLlmsAPI { + private static final Logger LOG = LoggerFactory.getLogger(CustomLlmsAPI.class); + + private final CustomLlmsService impl; + + /** Regular-use constructor */ + public CustomLlmsAPI(ApiClient apiClient) { + impl = new CustomLlmsImpl(apiClient); + } + + /** Constructor for mocks */ + public CustomLlmsAPI(CustomLlmsService mock) { + impl = mock; + } + + public void cancel(String id) { + cancel(new CancelCustomLlmOptimizationRunRequest().setId(id)); + } + + /** Cancel a Custom LLM Optimization Run. */ + public void cancel(CancelCustomLlmOptimizationRunRequest request) { + impl.cancel(request); + } + + public CustomLlm create(String id) { + return create(new StartCustomLlmOptimizationRunRequest().setId(id)); + } + + /** Start a Custom LLM Optimization Run. */ + public CustomLlm create(StartCustomLlmOptimizationRunRequest request) { + return impl.create(request); + } + + public CustomLlm get(String id) { + return get(new GetCustomLlmRequest().setId(id)); + } + + /** Get a Custom LLM. */ + public CustomLlm get(GetCustomLlmRequest request) { + return impl.get(request); + } + + public CustomLlm update(String id, CustomLlm customLlm, String updateMask) { + return update( + new UpdateCustomLlmRequest().setId(id).setCustomLlm(customLlm).setUpdateMask(updateMask)); + } + + /** Update a Custom LLM. 
*/ + public CustomLlm update(UpdateCustomLlmRequest request) { + return impl.update(request); + } + + public CustomLlmsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java new file mode 100755 index 000000000..e954adaa6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java @@ -0,0 +1,73 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of CustomLlms */ +@Generated +class CustomLlmsImpl implements CustomLlmsService { + private final ApiClient apiClient; + + public CustomLlmsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public void cancel(CancelCustomLlmOptimizationRunRequest request) { + String path = String.format("/api/2.0/custom-llms/%s/optimize/cancel", request.getId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, CancelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public CustomLlm create(StartCustomLlmOptimizationRunRequest request) { + String path = String.format("/api/2.0/custom-llms/%s/optimize", request.getId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + 
req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CustomLlm.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public CustomLlm get(GetCustomLlmRequest request) { + String path = String.format("/api/2.0/custom-llms/%s", request.getId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CustomLlm.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public CustomLlm update(UpdateCustomLlmRequest request) { + String path = String.format("/api/2.0/custom-llms/%s", request.getId()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CustomLlm.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java new file mode 100755 index 000000000..5f4b4246c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java @@ -0,0 +1,26 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; + +/** + * The Custom LLMs service manages state and powers the UI for the Custom LLM product. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface CustomLlmsService { + /** Cancel a Custom LLM Optimization Run. */ + void cancel(CancelCustomLlmOptimizationRunRequest cancelCustomLlmOptimizationRunRequest); + + /** Start a Custom LLM Optimization Run. */ + CustomLlm create(StartCustomLlmOptimizationRunRequest startCustomLlmOptimizationRunRequest); + + /** Get a Custom LLM. */ + CustomLlm get(GetCustomLlmRequest getCustomLlmRequest); + + /** Update a Custom LLM. */ + CustomLlm update(UpdateCustomLlmRequest updateCustomLlmRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java new file mode 100755 index 000000000..c482c4eee --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Dataset { + /** */ + @JsonProperty("table") + private Table table; + + public Dataset setTable(Table table) { + this.table = table; + return this; + } + + public Table getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Dataset that = (Dataset) o; + return Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(table); + } + + @Override + public String toString() { + return new ToStringer(Dataset.class).add("table", table).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java new file mode 100755 index 000000000..981a2903a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Custom LLM */ +@Generated +public class GetCustomLlmRequest { + /** The id of the custom llm */ + @JsonIgnore private String id; + + public GetCustomLlmRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetCustomLlmRequest that = (GetCustomLlmRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetCustomLlmRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java new file mode 100755 index 000000000..b9713f495 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class StartCustomLlmOptimizationRunRequest { + /** The Id of the tile. 
*/ + @JsonIgnore private String id; + + public StartCustomLlmOptimizationRunRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StartCustomLlmOptimizationRunRequest that = (StartCustomLlmOptimizationRunRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(StartCustomLlmOptimizationRunRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java new file mode 100755 index 000000000..fbc8d5ec5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; + +/** States of Custom LLM optimization lifecycle. */ +@Generated +public enum State { + CANCELLED, + COMPLETED, + CREATED, + FAILED, + PENDING, + RUNNING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java new file mode 100755 index 000000000..a5140c9f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Table { + /** Name of the request column */ + @JsonProperty("request_col") + private String requestCol; + + /** Optional: Name of the response column if the data is labeled */ + @JsonProperty("response_col") + private String responseCol; + + /** Full UC table path in catalog.schema.table_name format */ + @JsonProperty("table_path") + private String tablePath; + + public Table setRequestCol(String requestCol) { + this.requestCol = requestCol; + return this; + } + + public String getRequestCol() { + return requestCol; + } + + public Table setResponseCol(String responseCol) { + this.responseCol = responseCol; + return this; + } + + public String getResponseCol() { + return responseCol; + } + + public Table setTablePath(String tablePath) { + this.tablePath = tablePath; + return this; + } + + public String getTablePath() { + return tablePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Table that = (Table) o; + return Objects.equals(requestCol, that.requestCol) + && Objects.equals(responseCol, that.responseCol) + && Objects.equals(tablePath, that.tablePath); + } + + @Override + public int hashCode() { + return Objects.hash(requestCol, responseCol, tablePath); + } + + @Override + public String toString() { + return new ToStringer(Table.class) + .add("requestCol", requestCol) + .add("responseCol", responseCol) + .add("tablePath", tablePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java new file mode 100755 index 000000000..3cff645de --- 
/dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.aibuilder; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateCustomLlmRequest { + /** The CustomLlm containing the fields which should be updated. */ + @JsonProperty("custom_llm") + private CustomLlm customLlm; + + /** The id of the custom llm */ + @JsonIgnore private String id; + + /** + * The list of the CustomLlm fields to update. These should correspond to the values (or lack + * thereof) present in `custom_llm`. + * + *

The field mask must be a single string, with multiple fields separated by commas (no + * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate + * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is + * not allowed, as only the entire collection field can be specified. Field names must exactly + * match the resource field names. + * + *

A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. + */ + @JsonProperty("update_mask") + private String updateMask; + + public UpdateCustomLlmRequest setCustomLlm(CustomLlm customLlm) { + this.customLlm = customLlm; + return this; + } + + public CustomLlm getCustomLlm() { + return customLlm; + } + + public UpdateCustomLlmRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public UpdateCustomLlmRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateCustomLlmRequest that = (UpdateCustomLlmRequest) o; + return Objects.equals(customLlm, that.customLlm) + && Objects.equals(id, that.id) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(customLlm, id, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateCustomLlmRequest.class) + .add("customLlm", customLlm) + .add("id", id) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java index 895258dbe..619f90f27 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** 
* Log Delivery Configuration */ @Generated public class CreateLogDeliveryConfigurationParams { /** The optional human-readable name of the log delivery configuration. Defaults to empty. */ @@ -34,21 +35,17 @@ public class CreateLogDeliveryConfigurationParams { private String deliveryPathPrefix; /** - * This field applies only if `log_type` is `BILLABLE_USAGE`. This is the optional start month and - * year for delivery, specified in `YYYY-MM` format. Defaults to current year and month. - * `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`). + * This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and + * year for delivery, specified in YYYY-MM format. Defaults to current year and month. + * BILLABLE_USAGE logs are not available for usage before March 2019 (2019-03). */ @JsonProperty("delivery_start_time") private String deliveryStartTime; /** - * Log delivery type. Supported values are: - * - *

* `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the - * [View billable usage]. - * - *

* `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit - * logging] + * Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log + * delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure [audit + * log delivery]. For the JSON schema, see [Configure audit logging] * *

[Configure audit logging]: * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View @@ -62,12 +59,11 @@ public class CreateLogDeliveryConfigurationParams { private LogType logType; /** - * The file type of log delivery. - * - *

* If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated - * values) format is supported. For the schema, see the [View billable usage] * If `log_type` is - * `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is - * supported. For the schema, see the [Configuring audit logs]. + * The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. + * Only the CSV (comma-separated values) format is supported. For the schema, see the [View + * billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON + * (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit + * logs]. * *

[Configuring audit logs]: * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java index d1aee2690..f4fae5ddc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java @@ -5,23 +5,19 @@ import com.databricks.sdk.support.Generated; /** - * The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery - * attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has - * succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of - * misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The + * * The status string for log delivery. Possible values are: `CREATED`: There were no log delivery + * attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has + * succeeded completely. `USER_FAILURE`: The latest attempt of log delivery failed because of + * misconfiguration of customer provided permissions on role or storage. `SYSTEM_FAILURE`: The * latest attempt of log delivery failed because of an Databricks internal error. Contact support if - * it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been + * it doesn't go away soon. `NOT_FOUND`: The log delivery status as the configuration has been * disabled since the release of this feature or there are no workspaces in the account. */ @Generated public enum DeliveryStatus { - CREATED, // There were no log delivery attempts since the config was created. 
- NOT_FOUND, // The log delivery status as the configuration has been disabled since the - // release of this feature or there are no workspaces in the account. - SUCCEEDED, // The latest attempt of log delivery has succeeded completely. - SYSTEM_FAILURE, // The latest attempt of log delivery failed because of an internal - // error. Contact support if it doesn't go away soon. - USER_FAILURE, // The latest attempt of log delivery failed because of misconfiguration of - // customer provided permissions on role or storage. - + CREATED, + NOT_FOUND, + SUCCEEDED, + SYSTEM_FAILURE, + USER_FAILURE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java new file mode 100755 index 000000000..0c891d100 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.billing; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetLogDeliveryConfigurationResponse { + /** The fetched log delivery configuration */ + @JsonProperty("log_delivery_configuration") + private LogDeliveryConfiguration logDeliveryConfiguration; + + public GetLogDeliveryConfigurationResponse setLogDeliveryConfiguration( + LogDeliveryConfiguration logDeliveryConfiguration) { + this.logDeliveryConfiguration = logDeliveryConfiguration; + return this; + } + + public LogDeliveryConfiguration getLogDeliveryConfiguration() { + return logDeliveryConfiguration; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLogDeliveryConfigurationResponse that = (GetLogDeliveryConfigurationResponse) o; + return Objects.equals(logDeliveryConfiguration, that.logDeliveryConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash(logDeliveryConfiguration); + } + + @Override + public String toString() { + return new ToStringer(GetLogDeliveryConfigurationResponse.class) + .add("logDeliveryConfiguration", logDeliveryConfiguration) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java index d4b457c37..2f05db001 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java @@ -10,7 +10,7 @@ /** Get log delivery configuration */ @Generated public class GetLogDeliveryRequest { - /** Databricks log delivery configuration ID */ + /** The log delivery configuration id 
of customer */ @JsonIgnore private String logDeliveryConfigurationId; public GetLogDeliveryRequest setLogDeliveryConfigurationId(String logDeliveryConfigurationId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java index c68133ca6..fedbbd71c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListLogDeliveryRequest.java @@ -11,17 +11,25 @@ /** Get all log delivery configurations */ @Generated public class ListLogDeliveryRequest { - /** Filter by credential configuration ID. */ + /** The Credentials id to filter the search results with */ @JsonIgnore @QueryParam("credentials_id") private String credentialsId; - /** Filter by status `ENABLED` or `DISABLED`. */ + /** + * A page token received from a previous get all budget configurations call. This token can be + * used to retrieve the subsequent page. Requests first page if absent. + */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** The log delivery status to filter the search results with */ @JsonIgnore @QueryParam("status") private LogDeliveryConfigStatus status; - /** Filter by storage configuration ID. 
*/ + /** The Storage Configuration id to filter the search results with */ @JsonIgnore @QueryParam("storage_configuration_id") private String storageConfigurationId; @@ -35,6 +43,15 @@ public String getCredentialsId() { return credentialsId; } + public ListLogDeliveryRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + public ListLogDeliveryRequest setStatus(LogDeliveryConfigStatus status) { this.status = status; return this; @@ -59,19 +76,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListLogDeliveryRequest that = (ListLogDeliveryRequest) o; return Objects.equals(credentialsId, that.credentialsId) + && Objects.equals(pageToken, that.pageToken) && Objects.equals(status, that.status) && Objects.equals(storageConfigurationId, that.storageConfigurationId); } @Override public int hashCode() { - return Objects.hash(credentialsId, status, storageConfigurationId); + return Objects.hash(credentialsId, pageToken, status, storageConfigurationId); } @Override public String toString() { return new ToStringer(ListLogDeliveryRequest.class) .add("credentialsId", credentialsId) + .add("pageToken", pageToken) .add("status", status) .add("storageConfigurationId", storageConfigurationId) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java index d00bca784..bab132ce3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java @@ -8,59 +8,8 @@ import org.slf4j.LoggerFactory; /** - * These APIs manage log delivery configurations for this account. The two supported log types for - * this API are _billable usage logs_ and _audit logs_. 
This feature is in Public Preview. This - * feature works with all account ID types. - * - *

Log delivery works with all account types. However, if your account is on the E2 version of - * the platform or on a select custom plan that allows multiple workspaces per account, you can - * optionally configure different storage destinations for each workspace. Log delivery status is - * also provided to know the latest status of log delivery attempts. The high-level flow of billable - * usage delivery: - * - *

1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. - * Using Databricks APIs, call the Account API to create a [storage configuration - * object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, - * create the appropriate AWS IAM role. For full details, including the required IAM role policies - * and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the - * Account API to create a [credential configuration object](:method:Credentials/Create) that uses - * the IAM role"s ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the - * Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the - * credential and storage configuration objects from previous steps. You can specify if the logs - * should include all events of that log type in your account (_Account level_ delivery) or only - * events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery - * applies to all current and future workspaces plus account level logs, while workspace level log - * delivery solely delivers logs related to the specified workspaces. You can create multiple types - * of delivery configurations per account. - * - *

For billable usage delivery: * For more information about billable usage logs, see [Billable - * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is - * `//billable-usage/csv/`, where `` is the name of the optional - * delivery path prefix you set up during log delivery configuration. Files are named - * `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific - * workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating - * an _account level_ delivery configuration that delivers logs for all current and future - * workspaces in your account. * The files are delivered daily by overwriting the month's CSV file - * for each workspace. - * - *

For audit log delivery: * For more information about about audit log delivery, see [Audit log - * delivery], which includes information about the used JSON schema. * The delivery location is - * `//workspaceId=/date=/auditlogs_.json`. - * Files may get overwritten with the same content multiple times to achieve exactly-once delivery. - * * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_ - * audit logs for those workspaces are delivered. If the log delivery configuration applies to the - * entire account (_account level_ delivery configuration), the audit log delivery includes - * workspace-level audit logs for all workspaces in the account as well as account-level audit logs. - * See [Audit log delivery] for details. * Auditable events are typically available in logs within - * 15 minutes. - * - *

[Audit log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage - * log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html - * [create a new AWS S3 bucket]: - * https://docs.databricks.com/administration-guide/account-api/aws-storage.html + * These APIs manage Log delivery configurations for this account. Log delivery configs enable you + * to configure the delivery of the specified type of logs to your storage account. */ @Generated public class LogDeliveryAPI { @@ -78,6 +27,13 @@ public LogDeliveryAPI(LogDeliveryService mock) { impl = mock; } + public WrappedLogDeliveryConfiguration create( + CreateLogDeliveryConfigurationParams logDeliveryConfiguration) { + return create( + new WrappedCreateLogDeliveryConfiguration() + .setLogDeliveryConfiguration(logDeliveryConfiguration)); + } + /** * Create a new log delivery configuration. * @@ -109,7 +65,7 @@ public WrappedLogDeliveryConfiguration create(WrappedCreateLogDeliveryConfigurat return impl.create(request); } - public WrappedLogDeliveryConfiguration get(String logDeliveryConfigurationId) { + public GetLogDeliveryConfigurationResponse get(String logDeliveryConfigurationId) { return get( new GetLogDeliveryRequest().setLogDeliveryConfigurationId(logDeliveryConfigurationId)); } @@ -119,7 +75,7 @@ public WrappedLogDeliveryConfiguration get(String logDeliveryConfigurationId) { * *

Gets a Databricks log delivery configuration object for an account, both specified by ID. */ - public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) { + public GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest request) { return impl.get(request); } @@ -133,7 +89,13 @@ public Iterable list(ListLogDeliveryRequest request) { request, impl::list, WrappedLogDeliveryConfigurations::getLogDeliveryConfigurations, - response -> null); + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); } public void patchStatus(String logDeliveryConfigurationId, LogDeliveryConfigStatus status) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigStatus.java index 402d34da2..ca18d0792 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfigStatus.java @@ -5,10 +5,10 @@ import com.databricks.sdk.support.Generated; /** - * Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). - * Defaults to `ENABLED`. You can [enable or disable the - * configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is - * not supported, so disable a log delivery configuration that is no longer needed. + * * Log Delivery Status + * + *

`ENABLED`: All dependencies have executed and succeeded `DISABLED`: At least one dependency + * has succeeded */ @Generated public enum LogDeliveryConfigStatus { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java index 6a6f6521b..1a078c1c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java @@ -8,13 +8,14 @@ import java.util.Collection; import java.util.Objects; +/** * Log Delivery Configuration */ @Generated public class LogDeliveryConfiguration { - /** The Databricks account ID that hosts the log delivery configuration. */ + /** Databricks account ID. */ @JsonProperty("account_id") private String accountId; - /** Databricks log delivery configuration ID. */ + /** The unique UUID of log delivery configuration */ @JsonProperty("config_id") private String configId; @@ -46,25 +47,21 @@ public class LogDeliveryConfiguration { private String deliveryPathPrefix; /** - * This field applies only if `log_type` is `BILLABLE_USAGE`. This is the optional start month and - * year for delivery, specified in `YYYY-MM` format. Defaults to current year and month. - * `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`). + * This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and + * year for delivery, specified in YYYY-MM format. Defaults to current year and month. + * BILLABLE_USAGE logs are not available for usage before March 2019 (2019-03). */ @JsonProperty("delivery_start_time") private String deliveryStartTime; - /** Databricks log delivery status. 
*/ + /** The LogDeliveryStatus of this log delivery configuration */ @JsonProperty("log_delivery_status") private LogDeliveryStatus logDeliveryStatus; /** - * Log delivery type. Supported values are: - * - *

* `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the - * [View billable usage]. - * - *

* `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit - * logging] + * Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log + * delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure [audit + * log delivery]. For the JSON schema, see [Configure audit logging] * *

[Configure audit logging]: * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View @@ -78,12 +75,11 @@ public class LogDeliveryConfiguration { private LogType logType; /** - * The file type of log delivery. - * - *

* If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated - * values) format is supported. For the schema, see the [View billable usage] * If `log_type` is - * `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is - * supported. For the schema, see the [Configuring audit logs]. + * The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. + * Only the CSV (comma-separated values) format is supported. For the schema, see the [View + * billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON + * (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit + * logs]. * *

[Configuring audit logs]: * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java index dd4e64dcf..187955234 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java @@ -32,7 +32,7 @@ public WrappedLogDeliveryConfiguration create(WrappedCreateLogDeliveryConfigurat } @Override - public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) { + public GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest request) { String path = String.format( "/api/2.0/accounts/%s/log-delivery/%s", @@ -41,7 +41,7 @@ public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, WrappedLogDeliveryConfiguration.class); + return apiClient.execute(req, GetLogDeliveryConfigurationResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java index 422788de9..8e66ac799 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java @@ -4,59 +4,8 @@ import com.databricks.sdk.support.Generated; /** - * These APIs manage log delivery configurations for this account. The two supported log types for - * this API are _billable usage logs_ and _audit logs_. 
This feature is in Public Preview. This - * feature works with all account ID types. - * - *

Log delivery works with all account types. However, if your account is on the E2 version of - * the platform or on a select custom plan that allows multiple workspaces per account, you can - * optionally configure different storage destinations for each workspace. Log delivery status is - * also provided to know the latest status of log delivery attempts. The high-level flow of billable - * usage delivery: - * - *

1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. - * Using Databricks APIs, call the Account API to create a [storage configuration - * object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, - * create the appropriate AWS IAM role. For full details, including the required IAM role policies - * and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the - * Account API to create a [credential configuration object](:method:Credentials/Create) that uses - * the IAM role"s ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the - * Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the - * credential and storage configuration objects from previous steps. You can specify if the logs - * should include all events of that log type in your account (_Account level_ delivery) or only - * events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery - * applies to all current and future workspaces plus account level logs, while workspace level log - * delivery solely delivers logs related to the specified workspaces. You can create multiple types - * of delivery configurations per account. - * - *

For billable usage delivery: * For more information about billable usage logs, see [Billable - * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is - * `//billable-usage/csv/`, where `` is the name of the optional - * delivery path prefix you set up during log delivery configuration. Files are named - * `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific - * workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating - * an _account level_ delivery configuration that delivers logs for all current and future - * workspaces in your account. * The files are delivered daily by overwriting the month's CSV file - * for each workspace. - * - *

For audit log delivery: * For more information about about audit log delivery, see [Audit log - * delivery], which includes information about the used JSON schema. * The delivery location is - * `//workspaceId=/date=/auditlogs_.json`. - * Files may get overwritten with the same content multiple times to achieve exactly-once delivery. - * * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_ - * audit logs for those workspaces are delivered. If the log delivery configuration applies to the - * entire account (_account level_ delivery configuration), the audit log delivery includes - * workspace-level audit logs for all workspaces in the account as well as account-level audit logs. - * See [Audit log delivery] for details. * Auditable events are typically available in logs within - * 15 minutes. - * - *

[Audit log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage - * log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html - * [create a new AWS S3 bucket]: - * https://docs.databricks.com/administration-guide/account-api/aws-storage.html + * These APIs manage Log delivery configurations for this account. Log delivery configs enable you + * to configure the delivery of the specified type of logs to your storage account. * *

This is the high-level interface, that contains generated methods. * @@ -99,7 +48,7 @@ WrappedLogDeliveryConfiguration create( * *

Gets a Databricks log delivery configuration object for an account, both specified by ID. */ - WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest getLogDeliveryRequest); + GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest getLogDeliveryRequest); /** * Get all log delivery configurations. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java index 5c37c00aa..e5d5a7ede 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java @@ -7,7 +7,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Databricks log delivery status. */ @Generated public class LogDeliveryStatus { /** The UTC time for the latest log delivery attempt. */ @@ -26,14 +25,13 @@ public class LogDeliveryStatus { private String message; /** - * The status string for log delivery. Possible values are: * `CREATED`: There were no log - * delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log - * delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed - * because of misconfiguration of customer provided permissions on role or storage. * - * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal - * error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as - * the configuration has been disabled since the release of this feature or there are no - * workspaces in the account. + * Enum that describes the status. Possible values are: * `CREATED`: There were no log delivery + * attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has + * succeeded completely. 
* `USER_FAILURE`: The latest attempt of log delivery failed because of + * misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The + * latest attempt of log delivery failed because of an Databricks internal error. Contact support + * if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has + * been disabled since the release of this feature or there are no workspaces in the account. */ @JsonProperty("status") private DeliveryStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java index 0e657964a..2df06fe30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java @@ -4,22 +4,7 @@ import com.databricks.sdk.support.Generated; -/** - * Log delivery type. Supported values are: - * - *

* `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the - * [View billable usage]. - * - *

* `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit - * logging] - * - *

[Configure audit logging]: - * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable - * usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log - * delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - * [billable usage log delivery]: - * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - */ +/** Log Delivery Type */ @Generated public enum LogType { AUDIT_LOGS, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java index 192017e22..4298a6b0f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java @@ -4,18 +4,7 @@ import com.databricks.sdk.support.Generated; -/** - * The file type of log delivery. - * - *

* If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated - * values) format is supported. For the schema, see the [View billable usage] * If `log_type` is - * `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is - * supported. For the schema, see the [Configuring audit logs]. - * - *

[Configuring audit logs]: - * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable - * usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html - */ +/** Log Delivery Output Format */ @Generated public enum OutputFormat { CSV, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java index 383fcd194..3fc98a262 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java @@ -8,9 +8,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Update Log Delivery Configuration */ @Generated public class UpdateLogDeliveryConfigurationStatusRequest { - /** Databricks log delivery configuration ID */ + /** The log delivery configuration id of customer */ @JsonIgnore private String logDeliveryConfigurationId; /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java index cb830b923..1cf2ed48e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java @@ -7,9 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Properties of the new log delivery configuration.
*/ @Generated public class WrappedCreateLogDeliveryConfiguration { - /** */ + /** Log Delivery Configuration */ @JsonProperty("log_delivery_configuration") private CreateLogDeliveryConfigurationParams logDeliveryConfiguration; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java index 15d0080e9..f35961b31 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java @@ -9,7 +9,7 @@ @Generated public class WrappedLogDeliveryConfiguration { - /** */ + /** The created log delivery configuration */ @JsonProperty("log_delivery_configuration") private LogDeliveryConfiguration logDeliveryConfiguration; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java index ddb9ba7d7..6d553b893 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java @@ -10,10 +10,17 @@ @Generated public class WrappedLogDeliveryConfigurations { - /** */ + /** Log delivery configurations were returned successfully. */ @JsonProperty("log_delivery_configurations") private Collection logDeliveryConfigurations; + /** + * Token which can be sent as `page_token` to retrieve the next page of results. If this field is + * omitted, there are no subsequent pages.
+ */ + @JsonProperty("next_page_token") + private String nextPageToken; + public WrappedLogDeliveryConfigurations setLogDeliveryConfigurations( Collection logDeliveryConfigurations) { this.logDeliveryConfigurations = logDeliveryConfigurations; @@ -24,23 +31,34 @@ public Collection getLogDeliveryConfigurations() { return logDeliveryConfigurations; } + public WrappedLogDeliveryConfigurations setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; WrappedLogDeliveryConfigurations that = (WrappedLogDeliveryConfigurations) o; - return Objects.equals(logDeliveryConfigurations, that.logDeliveryConfigurations); + return Objects.equals(logDeliveryConfigurations, that.logDeliveryConfigurations) + && Objects.equals(nextPageToken, that.nextPageToken); } @Override public int hashCode() { - return Objects.hash(logDeliveryConfigurations); + return Objects.hash(logDeliveryConfigurations, nextPageToken); } @Override public String toString() { return new ToStringer(WrappedLogDeliveryConfigurations.class) .add("logDeliveryConfigurations", logDeliveryConfigurations) + .add("nextPageToken", nextPageToken) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index c43cb89bd..b076b2ff4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 30 */ +/** Next Id: 31 */ @Generated public enum ConnectionType { BIGQUERY, diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java index 717ad49a0..c10f826ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java @@ -13,11 +13,7 @@ public class CreateMetastore { @JsonProperty("name") private String name; - /** - * Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted - * in the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is - * omitted, the region of the workspace receiving the request will be used. - */ + /** Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). */ @JsonProperty("region") private String region; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java index 779793175..01bd3ff8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java @@ -11,7 +11,7 @@ @Generated public class CreateMetastoreAssignment { /** - * The name of the default catalog in the metastore. This field is depracted. Please use "Default + * The name of the default catalog in the metastore. This field is deprecated. Please use "Default * Namespace API" to configure the default catalog for a Databricks workspace. 
*/ @JsonProperty("default_catalog_name") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java similarity index 67% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java index acfbdae1d..d862afd5b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java @@ -4,9 +4,8 @@ import com.databricks.sdk.support.Generated; -/** The scope of Delta Sharing enabled for the metastore. */ @Generated -public enum MetastoreInfoDeltaSharingScope { +public enum DeltaSharingScopeEnum { INTERNAL, INTERNAL_AND_EXTERNAL, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java index 61fc856a1..175d38ada 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java @@ -10,10 +10,26 @@ @Generated public class EffectivePermissionsList { + /** + * Opaque token to retrieve the next page of results. Absent if there are no more pages. + * __page_token__ should be set to this value for the next request (for the next page of results). 
+ */ + @JsonProperty("next_page_token") + private String nextPageToken; + /** The privileges conveyed to each principal (either directly or via inheritance) */ @JsonProperty("privilege_assignments") private Collection privilegeAssignments; + public EffectivePermissionsList setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + public EffectivePermissionsList setPrivilegeAssignments( Collection privilegeAssignments) { this.privilegeAssignments = privilegeAssignments; @@ -29,17 +45,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; EffectivePermissionsList that = (EffectivePermissionsList) o; - return Objects.equals(privilegeAssignments, that.privilegeAssignments); + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(privilegeAssignments, that.privilegeAssignments); } @Override public int hashCode() { - return Objects.hash(privilegeAssignments); + return Objects.hash(nextPageToken, privilegeAssignments); } @Override public String toString() { return new ToStringer(EffectivePermissionsList.class) + .add("nextPageToken", nextPageToken) .add("privilegeAssignments", privilegeAssignments) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java index a0603c5fa..570aaa00a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java @@ -14,6 +14,27 @@ public class GetEffectiveRequest { /** Full name of securable. */ @JsonIgnore private String fullName; + /** + * Specifies the maximum number of privileges to return (page length). 
Every + * EffectivePrivilegeAssignment present in a single page response is guaranteed to contain all the + * effective privileges granted on (or inherited by) the requested Securable for the respective + * principal. + * + *

If not set, all the effective permissions are returned. If set to - lesser than 0: invalid + * parameter error - 0: page length is set to a server configured value - lesser than 150 but + * greater than 0: invalid parameter error (this is to ensure that server is able to return at + * least one complete EffectivePrivilegeAssignment in a single page response) - greater than (or + * equal to) 150: page length is the minimum of this value and a server configured value + */ + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + /** Opaque token for the next page of results (pagination). */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + /** * If provided, only the effective permissions for the specified principal (user or group) are * returned. @@ -23,7 +44,7 @@ public class GetEffectiveRequest { private String principal; /** Type of securable. */ - @JsonIgnore private SecurableType securableType; + @JsonIgnore private String securableType; public GetEffectiveRequest setFullName(String fullName) { this.fullName = fullName; @@ -34,6 +55,24 @@ public String getFullName() { return fullName; } + public GetEffectiveRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public GetEffectiveRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + public GetEffectiveRequest setPrincipal(String principal) { this.principal = principal; return this; @@ -43,12 +82,12 @@ public String getPrincipal() { return principal; } - public GetEffectiveRequest setSecurableType(SecurableType securableType) { + public GetEffectiveRequest setSecurableType(String securableType) { this.securableType = securableType; return this; } - public SecurableType getSecurableType() { + public String getSecurableType() { return securableType; } @@ -58,19 +97,23 @@ public 
boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GetEffectiveRequest that = (GetEffectiveRequest) o; return Objects.equals(fullName, that.fullName) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) && Objects.equals(principal, that.principal) && Objects.equals(securableType, that.securableType); } @Override public int hashCode() { - return Objects.hash(fullName, principal, securableType); + return Objects.hash(fullName, maxResults, pageToken, principal, securableType); } @Override public String toString() { return new ToStringer(GetEffectiveRequest.class) .add("fullName", fullName) + .add("maxResults", maxResults) + .add("pageToken", pageToken) .add("principal", principal) .add("securableType", securableType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java index 1fdab979d..8dfca3ffc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java @@ -14,13 +14,33 @@ public class GetGrantRequest { /** Full name of securable. */ @JsonIgnore private String fullName; + /** + * Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment + * present in a single page response is guaranteed to contain all the privileges granted on the + * requested Securable for the respective principal. + * + *

If not set, all the permissions are returned. If set to - lesser than 0: invalid parameter + * error - 0: page length is set to a server configured value - lesser than 150 but greater than + * 0: invalid parameter error (this is to ensure that server is able to return at least one + * complete PrivilegeAssignment in a single page response) - greater than (or equal to) 150: page + * length is the minimum of this value and a server configured value + */ + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + /** Opaque pagination token to go to next page based on previous query. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + /** If provided, only the permissions for the specified principal (user or group) are returned. */ @JsonIgnore @QueryParam("principal") private String principal; /** Type of securable. */ - @JsonIgnore private SecurableType securableType; + @JsonIgnore private String securableType; public GetGrantRequest setFullName(String fullName) { this.fullName = fullName; @@ -31,6 +51,24 @@ public String getFullName() { return fullName; } + public GetGrantRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public GetGrantRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + public GetGrantRequest setPrincipal(String principal) { this.principal = principal; return this; @@ -40,12 +78,12 @@ public String getPrincipal() { return principal; } - public GetGrantRequest setSecurableType(SecurableType securableType) { + public GetGrantRequest setSecurableType(String securableType) { this.securableType = securableType; return this; } - public SecurableType getSecurableType() { + public String getSecurableType() { return securableType; } @@ -55,19 +93,23 @@ public boolean equals(Object o) { if (o == null || getClass() != 
o.getClass()) return false; GetGrantRequest that = (GetGrantRequest) o; return Objects.equals(fullName, that.fullName) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken) && Objects.equals(principal, that.principal) && Objects.equals(securableType, that.securableType); } @Override public int hashCode() { - return Objects.hash(fullName, principal, securableType); + return Objects.hash(fullName, maxResults, pageToken, principal, securableType); } @Override public String toString() { return new ToStringer(GetGrantRequest.class) .add("fullName", fullName) + .add("maxResults", maxResults) + .add("pageToken", pageToken) .add("principal", principal) .add("securableType", securableType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java index 34e138f12..f30d70fe7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java @@ -38,7 +38,7 @@ public class GetMetastoreSummaryResponse { /** The scope of Delta Sharing enabled for the metastore. */ @JsonProperty("delta_sharing_scope") - private GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope; + private DeltaSharingScopeEnum deltaSharingScope; /** Whether to allow non-DBR clients to directly access entities under the metastore. 
*/ @JsonProperty("external_access_enabled") @@ -148,13 +148,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { return deltaSharingRecipientTokenLifetimeInSeconds; } - public GetMetastoreSummaryResponse setDeltaSharingScope( - GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope) { + public GetMetastoreSummaryResponse setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { this.deltaSharingScope = deltaSharingScope; return this; } - public GetMetastoreSummaryResponseDeltaSharingScope getDeltaSharingScope() { + public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java deleted file mode 100755 index 336e0cc06..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; - -/** The scope of Delta Sharing enabled for the metastore. */ -@Generated -public enum GetMetastoreSummaryResponseDeltaSharingScope { - INTERNAL, - INTERNAL_AND_EXTERNAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java new file mode 100755 index 000000000..f0375ba39 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetPermissionsResponse { + /** + * Opaque token to retrieve the next page of results. Absent if there are no more pages. + * __page_token__ should be set to this value for the next request (for the next page of results). + */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** The privileges assigned to each principal */ + @JsonProperty("privilege_assignments") + private Collection privilegeAssignments; + + public GetPermissionsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetPermissionsResponse setPrivilegeAssignments( + Collection privilegeAssignments) { + this.privilegeAssignments = privilegeAssignments; + return this; + } + + public Collection getPrivilegeAssignments() { + return privilegeAssignments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPermissionsResponse that = (GetPermissionsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(privilegeAssignments, that.privilegeAssignments); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, privilegeAssignments); + } + + @Override + public String toString() { + return new ToStringer(GetPermissionsResponse.class) + .add("nextPageToken", nextPageToken) + .add("privilegeAssignments", privilegeAssignments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java 
index 4d1bfcec2..84982e379 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java @@ -33,20 +33,20 @@ public GrantsAPI(GrantsService mock) { impl = mock; } - public PermissionsList get(SecurableType securableType, String fullName) { + public GetPermissionsResponse get(String securableType, String fullName) { return get(new GetGrantRequest().setSecurableType(securableType).setFullName(fullName)); } /** * Get permissions. * - *

Gets the permissions for a securable. + *

Gets the permissions for a securable. Does not include inherited permissions. */ - public PermissionsList get(GetGrantRequest request) { + public GetPermissionsResponse get(GetGrantRequest request) { return impl.get(request); } - public EffectivePermissionsList getEffective(SecurableType securableType, String fullName) { + public EffectivePermissionsList getEffective(String securableType, String fullName) { return getEffective( new GetEffectiveRequest().setSecurableType(securableType).setFullName(fullName)); } @@ -54,13 +54,14 @@ public EffectivePermissionsList getEffective(SecurableType securableType, String /** * Get effective permissions. * - *

Gets the effective permissions for a securable. + *

Gets the effective permissions for a securable. Includes inherited permissions from any + * parent securables. */ public EffectivePermissionsList getEffective(GetEffectiveRequest request) { return impl.getEffective(request); } - public PermissionsList update(SecurableType securableType, String fullName) { + public UpdatePermissionsResponse update(String securableType, String fullName) { return update(new UpdatePermissions().setSecurableType(securableType).setFullName(fullName)); } @@ -69,7 +70,7 @@ public PermissionsList update(SecurableType securableType, String fullName) { * *

Updates the permissions for a securable. */ - public PermissionsList update(UpdatePermissions request) { + public UpdatePermissionsResponse update(UpdatePermissions request) { return impl.update(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java index 1a8219ede..cf01d91a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java @@ -17,7 +17,7 @@ public GrantsImpl(ApiClient apiClient) { } @Override - public PermissionsList get(GetGrantRequest request) { + public GetPermissionsResponse get(GetGrantRequest request) { String path = String.format( "/api/2.1/unity-catalog/permissions/%s/%s", @@ -26,7 +26,7 @@ public PermissionsList get(GetGrantRequest request) { Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, PermissionsList.class); + return apiClient.execute(req, GetPermissionsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -49,7 +49,7 @@ public EffectivePermissionsList getEffective(GetEffectiveRequest request) { } @Override - public PermissionsList update(UpdatePermissions request) { + public UpdatePermissionsResponse update(UpdatePermissions request) { String path = String.format( "/api/2.1/unity-catalog/permissions/%s/%s", @@ -59,7 +59,7 @@ public PermissionsList update(UpdatePermissions request) { ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, PermissionsList.class); + return apiClient.execute(req, UpdatePermissionsResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + 
e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java index 9176b21ce..275b60772 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java @@ -23,14 +23,15 @@ public interface GrantsService { /** * Get permissions. * - *

Gets the permissions for a securable. + *

Gets the permissions for a securable. Does not include inherited permissions. */ - PermissionsList get(GetGrantRequest getGrantRequest); + GetPermissionsResponse get(GetGrantRequest getGrantRequest); /** * Get effective permissions. * - *

Gets the effective permissions for a securable. + *

Gets the effective permissions for a securable. Includes inherited permissions from any + * parent securables. */ EffectivePermissionsList getEffective(GetEffectiveRequest getEffectiveRequest); @@ -39,5 +40,5 @@ public interface GrantsService { * *

Updates the permissions for a securable. */ - PermissionsList update(UpdatePermissions updatePermissions); + UpdatePermissionsResponse update(UpdatePermissions updatePermissions); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java new file mode 100755 index 000000000..cc722491d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List metastores */ +@Generated +public class ListMetastoresRequest { + /** + * Maximum number of metastores to return. - when set to a value greater than 0, the page length + * is the minimum of this value and a server configured value; - when set to 0, the page length is + * set to a server configured value (recommended); - when set to a value less than 0, an invalid + * parameter error is returned; - If not set, all the metastores are returned (not recommended). - + * Note: The number of returned metastores might be less than the specified max_results size, even + * zero. The only definitive indication that no further metastores can be fetched is when the + * next_page_token is unset from the response. + */ + @JsonIgnore + @QueryParam("max_results") + private Long maxResults; + + /** Opaque pagination token to go to next page based on previous query. 
*/ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListMetastoresRequest setMaxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + public Long getMaxResults() { + return maxResults; + } + + public ListMetastoresRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMetastoresRequest that = (ListMetastoresRequest) o; + return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(maxResults, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMetastoresRequest.class) + .add("maxResults", maxResults) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java index 257aa2443..e906c400f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java @@ -14,6 +14,13 @@ public class ListMetastoresResponse { @JsonProperty("metastores") private Collection metastores; + /** + * Opaque token to retrieve the next page of results. Absent if there are no more pages. + * __page_token__ should be set to this value for the next request (for the next page of results). 
+ */ + @JsonProperty("next_page_token") + private String nextPageToken; + public ListMetastoresResponse setMetastores(Collection metastores) { this.metastores = metastores; return this; @@ -23,21 +30,34 @@ public Collection getMetastores() { return metastores; } + public ListMetastoresResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListMetastoresResponse that = (ListMetastoresResponse) o; - return Objects.equals(metastores, that.metastores); + return Objects.equals(metastores, that.metastores) + && Objects.equals(nextPageToken, that.nextPageToken); } @Override public int hashCode() { - return Objects.hash(metastores); + return Objects.hash(metastores, nextPageToken); } @Override public String toString() { - return new ToStringer(ListMetastoresResponse.class).add("metastores", metastores).toString(); + return new ToStringer(ListMetastoresResponse.class) + .add("metastores", metastores) + .add("nextPageToken", nextPageToken) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java index 2eef53dfc..2a4d57758 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java @@ -38,7 +38,7 @@ public class MetastoreInfo { /** The scope of Delta Sharing enabled for the metastore. */ @JsonProperty("delta_sharing_scope") - private MetastoreInfoDeltaSharingScope deltaSharingScope; + private DeltaSharingScopeEnum deltaSharingScope; /** Whether to allow non-DBR clients to directly access entities under the metastore. 
*/ @JsonProperty("external_access_enabled") @@ -146,12 +146,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { return deltaSharingRecipientTokenLifetimeInSeconds; } - public MetastoreInfo setDeltaSharingScope(MetastoreInfoDeltaSharingScope deltaSharingScope) { + public MetastoreInfo setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { this.deltaSharingScope = deltaSharingScope; return this; } - public MetastoreInfoDeltaSharingScope getDeltaSharingScope() { + public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java index c25f726bd..f468b89cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java @@ -114,9 +114,18 @@ public MetastoreInfo get(GetMetastoreRequest request) { * an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in * the array. 
*/ - public Iterable list() { + public Iterable list(ListMetastoresRequest request) { return new Paginator<>( - null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null); + request, + impl::list, + ListMetastoresResponse::getMetastores, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java index 87e65c5ac..8cb9ee53b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java @@ -84,10 +84,11 @@ public MetastoreInfo get(GetMetastoreRequest request) { } @Override - public ListMetastoresResponse list() { + public ListMetastoresResponse list(ListMetastoresRequest request) { String path = "/api/2.1/unity-catalog/metastores"; try { Request req = new Request("GET", path); + ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); return apiClient.execute(req, ListMetastoresResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java index ab8ed1a27..bfad43db3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java @@ -70,7 +70,7 @@ public interface MetastoresService { * an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in * the array. 
*/ - ListMetastoresResponse list(); + ListMetastoresResponse list(ListMetastoresRequest listMetastoresRequest); /** * Get a metastore summary. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java index 82c622577..3b80b364d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java @@ -23,7 +23,7 @@ public class UpdateMetastore { /** The scope of Delta Sharing enabled for the metastore. */ @JsonProperty("delta_sharing_scope") - private UpdateMetastoreDeltaSharingScope deltaSharingScope; + private DeltaSharingScopeEnum deltaSharingScope; /** Unique ID of the metastore. */ @JsonIgnore private String id; @@ -63,12 +63,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() { return deltaSharingRecipientTokenLifetimeInSeconds; } - public UpdateMetastore setDeltaSharingScope(UpdateMetastoreDeltaSharingScope deltaSharingScope) { + public UpdateMetastore setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) { this.deltaSharingScope = deltaSharingScope; return this; } - public UpdateMetastoreDeltaSharingScope getDeltaSharingScope() { + public DeltaSharingScopeEnum getDeltaSharingScope() { return deltaSharingScope; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java index 79471c1ef..5f9b08158 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java @@ -11,7 +11,7 @@ @Generated public class UpdateMetastoreAssignment { /** - * The name of the default catalog in 
the metastore. This field is depracted. Please use "Default + * The name of the default catalog in the metastore. This field is deprecated. Please use "Default * Namespace API" to configure the default catalog for a Databricks workspace. */ @JsonProperty("default_catalog_name") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java deleted file mode 100755 index 13d6b6c2e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; - -/** The scope of Delta Sharing enabled for the metastore. */ -@Generated -public enum UpdateMetastoreDeltaSharingScope { - INTERNAL, - INTERNAL_AND_EXTERNAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java index c75f52e40..4f2e5568d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java @@ -19,7 +19,7 @@ public class UpdatePermissions { @JsonIgnore private String fullName; /** Type of securable. 
*/ - @JsonIgnore private SecurableType securableType; + @JsonIgnore private String securableType; public UpdatePermissions setChanges(Collection changes) { this.changes = changes; @@ -39,12 +39,12 @@ public String getFullName() { return fullName; } - public UpdatePermissions setSecurableType(SecurableType securableType) { + public UpdatePermissions setSecurableType(String securableType) { this.securableType = securableType; return this; } - public SecurableType getSecurableType() { + public String getSecurableType() { return securableType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java index ba6234b65..49fb3c47d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java @@ -9,12 +9,12 @@ import java.util.Objects; @Generated -public class PermissionsList { +public class UpdatePermissionsResponse { /** The privileges assigned to each principal */ @JsonProperty("privilege_assignments") private Collection privilegeAssignments; - public PermissionsList setPrivilegeAssignments( + public UpdatePermissionsResponse setPrivilegeAssignments( Collection privilegeAssignments) { this.privilegeAssignments = privilegeAssignments; return this; @@ -28,7 +28,7 @@ public Collection getPrivilegeAssignments() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - PermissionsList that = (PermissionsList) o; + UpdatePermissionsResponse that = (UpdatePermissionsResponse) o; return 
Objects.equals(privilegeAssignments, that.privilegeAssignments); } @@ -39,7 +39,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(PermissionsList.class) + return new ToStringer(UpdatePermissionsResponse.class) .add("privilegeAssignments", privilegeAssignments) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java index 367a7360c..0d22fac3d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAsset.java @@ -18,6 +18,13 @@ public class CleanRoomAsset { @JsonProperty("asset_type") private CleanRoomAssetAssetType assetType; + /** + * The name of the clean room this asset belongs to. This is an output-only field to ensure proper + * resource identification. + */ + @JsonProperty("clean_room_name") + private String cleanRoomName; + /** * Foreign table details available to all collaborators of the clean room. 
Present if and only if * **asset_type** is **FOREIGN_TABLE** @@ -112,6 +119,15 @@ public CleanRoomAssetAssetType getAssetType() { return assetType; } + public CleanRoomAsset setCleanRoomName(String cleanRoomName) { + this.cleanRoomName = cleanRoomName; + return this; + } + + public String getCleanRoomName() { + return cleanRoomName; + } + public CleanRoomAsset setForeignTable(CleanRoomAssetForeignTable foreignTable) { this.foreignTable = foreignTable; return this; @@ -219,6 +235,7 @@ public boolean equals(Object o) { CleanRoomAsset that = (CleanRoomAsset) o; return Objects.equals(addedAt, that.addedAt) && Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) && Objects.equals(foreignTable, that.foreignTable) && Objects.equals(foreignTableLocalDetails, that.foreignTableLocalDetails) && Objects.equals(name, that.name) @@ -237,6 +254,7 @@ public int hashCode() { return Objects.hash( addedAt, assetType, + cleanRoomName, foreignTable, foreignTableLocalDetails, name, @@ -255,6 +273,7 @@ public String toString() { return new ToStringer(CleanRoomAsset.class) .add("addedAt", addedAt) .add("assetType", assetType) + .add("cleanRoomName", cleanRoomName) .add("foreignTable", foreignTable) .add("foreignTableLocalDetails", foreignTableLocalDetails) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java index 5bd9d49f6..cdb59e7a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java @@ -45,13 +45,12 @@ public CleanRoomAsset create(CreateCleanRoomAssetRequest request) { return impl.create(request); } - public void delete( - String cleanRoomName, CleanRoomAssetAssetType assetType, String assetFullName) { + public void 
delete(String cleanRoomName, CleanRoomAssetAssetType assetType, String name) { delete( new DeleteCleanRoomAssetRequest() .setCleanRoomName(cleanRoomName) .setAssetType(assetType) - .setAssetFullName(assetFullName)); + .setName(name)); } /** @@ -63,13 +62,12 @@ public void delete(DeleteCleanRoomAssetRequest request) { impl.delete(request); } - public CleanRoomAsset get( - String cleanRoomName, CleanRoomAssetAssetType assetType, String assetFullName) { + public CleanRoomAsset get(String cleanRoomName, CleanRoomAssetAssetType assetType, String name) { return get( new GetCleanRoomAssetRequest() .setCleanRoomName(cleanRoomName) .setAssetType(assetType) - .setAssetFullName(assetFullName)); + .setName(name)); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java index 444feb55d..34d8b7b59 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java @@ -35,7 +35,7 @@ public void delete(DeleteCleanRoomAssetRequest request) { String path = String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", - request.getCleanRoomName(), request.getAssetType(), request.getAssetFullName()); + request.getCleanRoomName(), request.getAssetType(), request.getName()); try { Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); @@ -51,7 +51,7 @@ public CleanRoomAsset get(GetCleanRoomAssetRequest request) { String path = String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", - request.getCleanRoomName(), request.getAssetType(), request.getAssetFullName()); + request.getCleanRoomName(), request.getAssetType(), request.getName()); try { Request req = new Request("GET", path); ApiClient.setQuery(req, request); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java index c75e90f31..482c33464 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/DeleteCleanRoomAssetRequest.java @@ -10,23 +10,14 @@ /** Delete an asset */ @Generated public class DeleteCleanRoomAssetRequest { - /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. */ - @JsonIgnore private String assetFullName; - /** The type of the asset. */ @JsonIgnore private CleanRoomAssetAssetType assetType; /** Name of the clean room. */ @JsonIgnore private String cleanRoomName; - public DeleteCleanRoomAssetRequest setAssetFullName(String assetFullName) { - this.assetFullName = assetFullName; - return this; - } - - public String getAssetFullName() { - return assetFullName; - } + /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. 
*/ + @JsonIgnore private String name; public DeleteCleanRoomAssetRequest setAssetType(CleanRoomAssetAssetType assetType) { this.assetType = assetType; @@ -46,27 +37,36 @@ public String getCleanRoomName() { return cleanRoomName; } + public DeleteCleanRoomAssetRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteCleanRoomAssetRequest that = (DeleteCleanRoomAssetRequest) o; - return Objects.equals(assetFullName, that.assetFullName) - && Objects.equals(assetType, that.assetType) - && Objects.equals(cleanRoomName, that.cleanRoomName); + return Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(assetFullName, assetType, cleanRoomName); + return Objects.hash(assetType, cleanRoomName, name); } @Override public String toString() { return new ToStringer(DeleteCleanRoomAssetRequest.class) - .add("assetFullName", assetFullName) .add("assetType", assetType) .add("cleanRoomName", cleanRoomName) + .add("name", name) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java index 9bc47aca3..a82af23df 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/GetCleanRoomAssetRequest.java @@ -10,23 +10,14 @@ /** Get an asset */ @Generated public class GetCleanRoomAssetRequest { - /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. 
*/ - @JsonIgnore private String assetFullName; - /** The type of the asset. */ @JsonIgnore private CleanRoomAssetAssetType assetType; /** Name of the clean room. */ @JsonIgnore private String cleanRoomName; - public GetCleanRoomAssetRequest setAssetFullName(String assetFullName) { - this.assetFullName = assetFullName; - return this; - } - - public String getAssetFullName() { - return assetFullName; - } + /** The fully qualified name of the asset, it is same as the name field in CleanRoomAsset. */ + @JsonIgnore private String name; public GetCleanRoomAssetRequest setAssetType(CleanRoomAssetAssetType assetType) { this.assetType = assetType; @@ -46,27 +37,36 @@ public String getCleanRoomName() { return cleanRoomName; } + public GetCleanRoomAssetRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetCleanRoomAssetRequest that = (GetCleanRoomAssetRequest) o; - return Objects.equals(assetFullName, that.assetFullName) - && Objects.equals(assetType, that.assetType) - && Objects.equals(cleanRoomName, that.cleanRoomName); + return Objects.equals(assetType, that.assetType) + && Objects.equals(cleanRoomName, that.cleanRoomName) + && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(assetFullName, assetType, cleanRoomName); + return Objects.hash(assetType, cleanRoomName, name); } @Override public String toString() { return new ToStringer(GetCleanRoomAssetRequest.class) - .add("assetFullName", assetFullName) .add("assetType", assetType) .add("cleanRoomName", cleanRoomName) + .add("name", name) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java index c359b9f8a..ff9668106 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java @@ -191,14 +191,6 @@ public class ClusterAttributes { @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -254,10 +246,6 @@ public class ClusterAttributes { @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. 
* @@ -442,24 +430,6 @@ public String getPolicyId() { return policyId; } - public ClusterAttributes setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public ClusterAttributes setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public ClusterAttributes setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -514,16 +484,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public ClusterAttributes setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public ClusterAttributes setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -566,15 +526,12 @@ public boolean equals(Object o) { && Objects.equals(kind, that.kind) && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, 
that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -601,15 +558,12 @@ public int hashCode() { kind, nodeTypeId, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -636,15 +590,12 @@ public String toString() { .add("kind", kind) .add("nodeTypeId", nodeTypeId) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index b2309f101..8b957e3dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -287,14 +287,6 @@ public class ClusterDetails { @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. 
* @@ -394,10 +386,6 @@ public class ClusterDetails { @JsonProperty("termination_reason") private TerminationReason terminationReason; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * @@ -708,24 +696,6 @@ public String getPolicyId() { return policyId; } - public ClusterDetails setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public ClusterDetails setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public ClusterDetails setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -843,16 +813,6 @@ public TerminationReason getTerminationReason() { return terminationReason; } - public ClusterDetails setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public ClusterDetails setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -909,8 +869,6 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && 
Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) @@ -924,7 +882,6 @@ public boolean equals(Object o) { && Objects.equals(stateMessage, that.stateMessage) && Objects.equals(terminatedTime, that.terminatedTime) && Objects.equals(terminationReason, that.terminationReason) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -965,8 +922,6 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, @@ -980,7 +935,6 @@ public int hashCode() { stateMessage, terminatedTime, terminationReason, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -1021,8 +975,6 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) @@ -1036,7 +988,6 @@ public String toString() { .add("stateMessage", stateMessage) .add("terminatedTime", terminatedTime) .add("terminationReason", terminationReason) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index 78c7ddbfa..08cd8a715 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -218,14 +218,6 @@ public class ClusterSpec { @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -281,10 +273,6 @@ public class ClusterSpec { @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * @@ -496,24 +484,6 @@ public String getPolicyId() { return policyId; } - public ClusterSpec setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public ClusterSpec setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public ClusterSpec setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -568,15 +538,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public ClusterSpec setTotalInitialRemoteShuffleDiskSize(Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long 
getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public ClusterSpec setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -622,15 +583,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -660,15 +618,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -698,15 +653,12 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index 027bae1c8..79853eda0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -221,14 +221,6 @@ public class CreateCluster { @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -284,10 +276,6 @@ public class CreateCluster { @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. 
* @@ -508,24 +496,6 @@ public String getPolicyId() { return policyId; } - public CreateCluster setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public CreateCluster setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public CreateCluster setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -580,16 +550,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public CreateCluster setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public CreateCluster setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -636,15 +596,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && 
Objects.equals(workloadType, that.workloadType); } @@ -675,15 +632,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -714,15 +668,12 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java index a2765d651..57bd01c54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CustomPolicyTag.java @@ -13,19 +13,11 @@ public class CustomPolicyTag { * The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be * “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these tags are * preserved. - * - *

- Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala - * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17) */ @JsonProperty("key") private String key; - /** - * The value of the tag. - * - *

- Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala - * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24) - */ + /** The value of the tag. */ @JsonProperty("value") private String value; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index bbf12f00d..81c1b7e85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -218,14 +218,6 @@ public class EditCluster { @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -281,10 +273,6 @@ public class EditCluster { @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. 
* @@ -505,24 +493,6 @@ public String getPolicyId() { return policyId; } - public EditCluster setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public EditCluster setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public EditCluster setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -577,15 +547,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public EditCluster setTotalInitialRemoteShuffleDiskSize(Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public EditCluster setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -632,15 +593,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && 
Objects.equals(workloadType, that.workloadType); } @@ -671,15 +629,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -710,15 +665,12 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index d6dcdf38c..ecdbc682b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -15,31 +15,23 @@ */ @Generated public class Environment { - /** - * Client version used by the environment The client is the user-facing environment of the - * runtime. Each client comes with a specific set of pre-installed libraries. The version is a - * string, consisting of the major client version. - */ + /** Use `environment_version` instead. */ @JsonProperty("client") private String client; /** * List of pip dependencies, as supported by the version of pip in this environment. 
Each - * dependency is a pip requirement file line - * https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be - * , , (WSFS or Volumes in - * Databricks), E.g. dependencies: ["foo==0.0.1", "-r - * /Workspace/test/requirements.txt"] + * dependency is a valid pip requirements file line per + * https://pip.pypa.io/en/stable/reference/requirements-file-format/. Allowed dependencies include + * a requirement specifier, an archive URL, a local project path (such as WSFS or UC Volumes in + * Databricks), or a VCS project URL. */ @JsonProperty("dependencies") private Collection dependencies; /** - * We renamed `client` to `environment_version` in notebook exports. This field is meant solely so - * that imported notebooks with `environment_version` can be deserialized correctly, in a - * backwards-compatible way (i.e. if `client` is specified instead of `environment_version`, it - * will be deserialized correctly). Do NOT use this field for any other purpose, e.g. notebook - * storage. This field is not yet exposed to customers (e.g. in the jobs API). + * Required. Environment version used by the environment. Each version comes with a specific + * Python version and a set of Python packages. The version is a string, consisting of an integer. 
*/ @JsonProperty("environment_version") private String environmentVersion; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java index bc250469e..c1a501e22 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java @@ -11,6 +11,7 @@ public enum EventType { AUTOSCALING_BACKOFF, AUTOSCALING_FAILED, AUTOSCALING_STATS_REPORT, + CLUSTER_MIGRATED, CREATING, DBFS_DOWN, DID_NOT_EXPAND_DISK, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index 8eba3a564..74a38a68c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -76,6 +76,7 @@ public enum TerminationReasonCode { DRIVER_OUT_OF_MEMORY, DRIVER_POD_CREATION_FAILURE, DRIVER_UNEXPECTED_FAILURE, + DRIVER_UNHEALTHY, DRIVER_UNREACHABLE, DRIVER_UNRESPONSIVE, DYNAMIC_SPARK_CONF_SIZE_EXCEEDED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java index 7d3e13c7d..151d44359 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java @@ -207,14 +207,6 @@ public class UpdateClusterResource { @JsonProperty("policy_id") private String policyId; - /** If set, what the configurable IOPS for the remote shuffle disk is. 
*/ - @JsonProperty("remote_shuffle_disk_iops") - private Long remoteShuffleDiskIops; - - /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */ - @JsonProperty("remote_shuffle_disk_throughput") - private Long remoteShuffleDiskThroughput; - /** * Determines the cluster's runtime engine, either standard or Photon. * @@ -270,10 +262,6 @@ public class UpdateClusterResource { @JsonProperty("ssh_public_keys") private Collection sshPublicKeys; - /** If set, what the total initial volume size (in GB) of the remote shuffle disks should be. */ - @JsonProperty("total_initial_remote_shuffle_disk_size") - private Long totalInitialRemoteShuffleDiskSize; - /** * This field can only be used when `kind = CLASSIC_PREVIEW`. * @@ -476,24 +464,6 @@ public String getPolicyId() { return policyId; } - public UpdateClusterResource setRemoteShuffleDiskIops(Long remoteShuffleDiskIops) { - this.remoteShuffleDiskIops = remoteShuffleDiskIops; - return this; - } - - public Long getRemoteShuffleDiskIops() { - return remoteShuffleDiskIops; - } - - public UpdateClusterResource setRemoteShuffleDiskThroughput(Long remoteShuffleDiskThroughput) { - this.remoteShuffleDiskThroughput = remoteShuffleDiskThroughput; - return this; - } - - public Long getRemoteShuffleDiskThroughput() { - return remoteShuffleDiskThroughput; - } - public UpdateClusterResource setRuntimeEngine(RuntimeEngine runtimeEngine) { this.runtimeEngine = runtimeEngine; return this; @@ -548,16 +518,6 @@ public Collection getSshPublicKeys() { return sshPublicKeys; } - public UpdateClusterResource setTotalInitialRemoteShuffleDiskSize( - Long totalInitialRemoteShuffleDiskSize) { - this.totalInitialRemoteShuffleDiskSize = totalInitialRemoteShuffleDiskSize; - return this; - } - - public Long getTotalInitialRemoteShuffleDiskSize() { - return totalInitialRemoteShuffleDiskSize; - } - public UpdateClusterResource setUseMlRuntime(Boolean useMlRuntime) { this.useMlRuntime = useMlRuntime; return this; @@ -602,15 
+562,12 @@ public boolean equals(Object o) { && Objects.equals(nodeTypeId, that.nodeTypeId) && Objects.equals(numWorkers, that.numWorkers) && Objects.equals(policyId, that.policyId) - && Objects.equals(remoteShuffleDiskIops, that.remoteShuffleDiskIops) - && Objects.equals(remoteShuffleDiskThroughput, that.remoteShuffleDiskThroughput) && Objects.equals(runtimeEngine, that.runtimeEngine) && Objects.equals(singleUserName, that.singleUserName) && Objects.equals(sparkConf, that.sparkConf) && Objects.equals(sparkEnvVars, that.sparkEnvVars) && Objects.equals(sparkVersion, that.sparkVersion) && Objects.equals(sshPublicKeys, that.sshPublicKeys) - && Objects.equals(totalInitialRemoteShuffleDiskSize, that.totalInitialRemoteShuffleDiskSize) && Objects.equals(useMlRuntime, that.useMlRuntime) && Objects.equals(workloadType, that.workloadType); } @@ -639,15 +596,12 @@ public int hashCode() { nodeTypeId, numWorkers, policyId, - remoteShuffleDiskIops, - remoteShuffleDiskThroughput, runtimeEngine, singleUserName, sparkConf, sparkEnvVars, sparkVersion, sshPublicKeys, - totalInitialRemoteShuffleDiskSize, useMlRuntime, workloadType); } @@ -676,15 +630,12 @@ public String toString() { .add("nodeTypeId", nodeTypeId) .add("numWorkers", numWorkers) .add("policyId", policyId) - .add("remoteShuffleDiskIops", remoteShuffleDiskIops) - .add("remoteShuffleDiskThroughput", remoteShuffleDiskThroughput) .add("runtimeEngine", runtimeEngine) .add("singleUserName", singleUserName) .add("sparkConf", sparkConf) .add("sparkEnvVars", sparkEnvVars) .add("sparkVersion", sparkVersion) .add("sshPublicKeys", sshPublicKeys) - .add("totalInitialRemoteShuffleDiskSize", totalInitialRemoteShuffleDiskSize) .add("useMlRuntime", useMlRuntime) .add("workloadType", workloadType) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java deleted file mode 100755 index a2b487b40..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java +++ /dev/null @@ -1,80 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Collection; -import java.util.Objects; - -/** Cancel the results for the a query for a published, embedded dashboard */ -@Generated -public class CancelPublishedQueryExecutionRequest { - /** */ - @JsonIgnore - @QueryParam("dashboard_name") - private String dashboardName; - - /** */ - @JsonIgnore - @QueryParam("dashboard_revision_id") - private String dashboardRevisionId; - - /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */ - @JsonIgnore - @QueryParam("tokens") - private Collection tokens; - - public CancelPublishedQueryExecutionRequest setDashboardName(String dashboardName) { - this.dashboardName = dashboardName; - return this; - } - - public String getDashboardName() { - return dashboardName; - } - - public CancelPublishedQueryExecutionRequest setDashboardRevisionId(String dashboardRevisionId) { - this.dashboardRevisionId = dashboardRevisionId; - return this; - } - - public String getDashboardRevisionId() { - return dashboardRevisionId; - } - - public CancelPublishedQueryExecutionRequest setTokens(Collection tokens) { - this.tokens = tokens; - return this; - } - - public Collection getTokens() { - return tokens; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - 
CancelPublishedQueryExecutionRequest that = (CancelPublishedQueryExecutionRequest) o; - return Objects.equals(dashboardName, that.dashboardName) - && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) - && Objects.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardName, dashboardRevisionId, tokens); - } - - @Override - public String toString() { - return new ToStringer(CancelPublishedQueryExecutionRequest.class) - .add("dashboardName", dashboardName) - .add("dashboardRevisionId", dashboardRevisionId) - .add("tokens", tokens) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java deleted file mode 100755 index 3476fb9ef..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class CancelQueryExecutionResponse { - /** */ - @JsonProperty("status") - private Collection status; - - public CancelQueryExecutionResponse setStatus( - Collection status) { - this.status = status; - return this; - } - - public Collection getStatus() { - return status; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CancelQueryExecutionResponse that = (CancelQueryExecutionResponse) o; - return Objects.equals(status, that.status); - } - - @Override - public int hashCode() { - return Objects.hash(status); - } - - @Override - public String toString() { - return new ToStringer(CancelQueryExecutionResponse.class).add("status", status).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java deleted file mode 100755 index d84d7214e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java +++ /dev/null @@ -1,83 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CancelQueryExecutionResponseStatus { - /** - * The token to poll for result asynchronously Example: - * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - */ - @JsonProperty("data_token") - private String dataToken; - - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. - */ - @JsonProperty("pending") - private Empty pending; - - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. - */ - @JsonProperty("success") - private Empty success; - - public CancelQueryExecutionResponseStatus setDataToken(String dataToken) { - this.dataToken = dataToken; - return this; - } - - public String getDataToken() { - return dataToken; - } - - public CancelQueryExecutionResponseStatus setPending(Empty pending) { - this.pending = pending; - return this; - } - - public Empty getPending() { - return pending; - } - - public CancelQueryExecutionResponseStatus setSuccess(Empty success) { - this.success = success; - return this; - } - - public Empty getSuccess() { - return success; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CancelQueryExecutionResponseStatus that = (CancelQueryExecutionResponseStatus) o; - return Objects.equals(dataToken, that.dataToken) - && Objects.equals(pending, that.pending) - && Objects.equals(success, that.success); - } - - @Override - public int hashCode() { - return Objects.hash(dataToken, pending, success); - } - - @Override - public String toString() { - return new ToStringer(CancelQueryExecutionResponseStatus.class) - .add("dataToken", dataToken) 
- .add("pending", pending) - .add("success", success) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java deleted file mode 100755 index c5223007c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java +++ /dev/null @@ -1,86 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -/** - * Execute query request for published Dashboards. Since published dashboards have the option of - * running as the publisher, the datasets, warehouse_id are excluded from the request and instead - * read from the source (lakeview-config) via the additional parameters (dashboardName and - * dashboardRevisionId) - */ -@Generated -public class ExecutePublishedDashboardQueryRequest { - /** - * Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains - * the list of datasets, warehouse_id, and embedded_credentials - */ - @JsonProperty("dashboard_name") - private String dashboardName; - - /** */ - @JsonProperty("dashboard_revision_id") - private String dashboardRevisionId; - - /** - * A dashboard schedule can override the warehouse used as compute for processing the published - * dashboard queries - */ - @JsonProperty("override_warehouse_id") - private String overrideWarehouseId; - - public ExecutePublishedDashboardQueryRequest setDashboardName(String dashboardName) { - this.dashboardName = dashboardName; - return this; - } - - public String getDashboardName() { - return dashboardName; - } - - public 
ExecutePublishedDashboardQueryRequest setDashboardRevisionId(String dashboardRevisionId) { - this.dashboardRevisionId = dashboardRevisionId; - return this; - } - - public String getDashboardRevisionId() { - return dashboardRevisionId; - } - - public ExecutePublishedDashboardQueryRequest setOverrideWarehouseId(String overrideWarehouseId) { - this.overrideWarehouseId = overrideWarehouseId; - return this; - } - - public String getOverrideWarehouseId() { - return overrideWarehouseId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ExecutePublishedDashboardQueryRequest that = (ExecutePublishedDashboardQueryRequest) o; - return Objects.equals(dashboardName, that.dashboardName) - && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) - && Objects.equals(overrideWarehouseId, that.overrideWarehouseId); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardName, dashboardRevisionId, overrideWarehouseId); - } - - @Override - public String toString() { - return new ToStringer(ExecutePublishedDashboardQueryRequest.class) - .add("dashboardName", dashboardName) - .add("dashboardRevisionId", dashboardRevisionId) - .add("overrideWarehouseId", overrideWarehouseId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index dfeb33522..c06edace8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -310,6 +310,15 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { return impl.getSpace(request); } + /** + * List Genie spaces. + * + *

Get list of Genie Spaces. + */ + public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) { + return impl.listSpaces(request); + } + public Wait startConversation( String spaceId, String content) { return startConversation( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index 8396a9cff..7812c329c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -197,6 +197,19 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { } } + @Override + public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) { + String path = "/api/2.0/genie/spaces"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieListSpacesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieStartConversationResponse startConversation( GenieStartConversationMessageRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java new file mode 100755 index 000000000..d4a48245e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List Genie spaces */ +@Generated +public class GenieListSpacesRequest { + /** Maximum number of spaces to return per page */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token for getting the next page of results */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public GenieListSpacesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GenieListSpacesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListSpacesRequest that = (GenieListSpacesRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GenieListSpacesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java new file mode 100755 index 000000000..20d19bc24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GenieListSpacesResponse { + /** Token to get the next page of results */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** List of Genie spaces */ + @JsonProperty("spaces") + private Collection spaces; + + public GenieListSpacesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GenieListSpacesResponse setSpaces(Collection spaces) { + this.spaces = spaces; + return this; + } + + public Collection getSpaces() { + return spaces; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListSpacesResponse that = (GenieListSpacesResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(spaces, that.spaces); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, spaces); + } + + @Override + public String toString() { + return new ToStringer(GenieListSpacesResponse.class) + .add("nextPageToken", nextPageToken) + .add("spaces", spaces) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index b47db20e0..98dd0b4d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -110,6 +110,13 @@ GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( */ GenieSpace 
getSpace(GenieGetSpaceRequest genieGetSpaceRequest); + /** + * List Genie spaces. + * + *

Get list of Genie Spaces. + */ + GenieListSpacesResponse listSpaces(GenieListSpacesRequest genieListSpacesRequest); + /** * Start conversation. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java deleted file mode 100755 index b88922e54..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Read a published dashboard in an embedded ui. */ -@Generated -public class GetPublishedDashboardEmbeddedRequest { - /** UUID identifying the published dashboard. 
*/ - @JsonIgnore private String dashboardId; - - public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) { - this.dashboardId = dashboardId; - return this; - } - - public String getDashboardId() { - return dashboardId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o; - return Objects.equals(dashboardId, that.dashboardId); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardId); - } - - @Override - public String toString() { - return new ToStringer(GetPublishedDashboardEmbeddedRequest.class) - .add("dashboardId", dashboardId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java index ec34c7536..eb5fda3a4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java @@ -23,20 +23,6 @@ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) { impl = mock; } - public void getPublishedDashboardEmbedded(String dashboardId) { - getPublishedDashboardEmbedded( - new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId)); - } - - /** - * Read a published dashboard in an embedded ui. - * - *

Get the current published dashboard within an embedded context. - */ - public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) { - impl.getPublishedDashboardEmbedded(request); - } - public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) { return getPublishedDashboardTokenInfo( new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java index 38c982eb1..55a489702 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java @@ -16,21 +16,6 @@ public LakeviewEmbeddedImpl(ApiClient apiClient) { this.apiClient = apiClient; } - @Override - public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) { - String path = - String.format( - "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - apiClient.execute(req, GetPublishedDashboardEmbeddedResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - @Override public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo( GetPublishedDashboardTokenInfoRequest request) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java index a7fbb8cdb..cad465780 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java @@ -12,14 +12,6 @@ */ @Generated public interface LakeviewEmbeddedService { - /** - * Read a published dashboard in an embedded ui. - * - *

Get the current published dashboard within an embedded context. - */ - void getPublishedDashboardEmbedded( - GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest); - /** * Read an information of a published dashboard to mint an OAuth token. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java deleted file mode 100755 index f041070b2..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java +++ /dev/null @@ -1,45 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class PendingStatus { - /** - * The token to poll for result asynchronously Example: - * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - */ - @JsonProperty("data_token") - private String dataToken; - - public PendingStatus setDataToken(String dataToken) { - this.dataToken = dataToken; - return this; - } - - public String getDataToken() { - return dataToken; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PendingStatus that = (PendingStatus) o; - return Objects.equals(dataToken, that.dataToken); - } - - @Override - public int hashCode() { - return Objects.hash(dataToken); - } - - @Override - public String toString() { - return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java deleted file mode 100755 index 958dd8311..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java +++ /dev/null @@ -1,80 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Collection; -import java.util.Objects; - -/** Poll the results for the a query for a published, embedded dashboard */ -@Generated -public class PollPublishedQueryStatusRequest { - /** */ - @JsonIgnore - @QueryParam("dashboard_name") - private String dashboardName; - - /** */ - @JsonIgnore - @QueryParam("dashboard_revision_id") - private String dashboardRevisionId; - - /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */ - @JsonIgnore - @QueryParam("tokens") - private Collection tokens; - - public PollPublishedQueryStatusRequest setDashboardName(String dashboardName) { - this.dashboardName = dashboardName; - return this; - } - - public String getDashboardName() { - return dashboardName; - } - - public PollPublishedQueryStatusRequest setDashboardRevisionId(String dashboardRevisionId) { - this.dashboardRevisionId = dashboardRevisionId; - return this; - } - - public String getDashboardRevisionId() { - return dashboardRevisionId; - } - - public PollPublishedQueryStatusRequest setTokens(Collection tokens) { - this.tokens = tokens; - return this; - } - - public Collection getTokens() { - return tokens; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PollPublishedQueryStatusRequest that = 
(PollPublishedQueryStatusRequest) o; - return Objects.equals(dashboardName, that.dashboardName) - && Objects.equals(dashboardRevisionId, that.dashboardRevisionId) - && Objects.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - return Objects.hash(dashboardName, dashboardRevisionId, tokens); - } - - @Override - public String toString() { - return new ToStringer(PollPublishedQueryStatusRequest.class) - .add("dashboardName", dashboardName) - .add("dashboardRevisionId", dashboardRevisionId) - .add("tokens", tokens) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java deleted file mode 100755 index 778e1d961..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java +++ /dev/null @@ -1,43 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class PollQueryStatusResponse { - /** */ - @JsonProperty("data") - private Collection data; - - public PollQueryStatusResponse setData(Collection data) { - this.data = data; - return this; - } - - public Collection getData() { - return data; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PollQueryStatusResponse that = (PollQueryStatusResponse) o; - return Objects.equals(data, that.data); - } - - @Override - public int hashCode() { - return Objects.hash(data); - } - - @Override - public String toString() { - return new ToStringer(PollQueryStatusResponse.class).add("data", data).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java deleted file mode 100755 index 9de9b2743..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java +++ /dev/null @@ -1,42 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class PollQueryStatusResponseData { - /** */ - @JsonProperty("status") - private QueryResponseStatus status; - - public PollQueryStatusResponseData setStatus(QueryResponseStatus status) { - this.status = status; - return this; - } - - public QueryResponseStatus getStatus() { - return status; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PollQueryStatusResponseData that = (PollQueryStatusResponseData) o; - return Objects.equals(status, that.status); - } - - @Override - public int hashCode() { - return Objects.hash(status); - } - - @Override - public String toString() { - return new ToStringer(PollQueryStatusResponseData.class).add("status", status).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java deleted file mode 100755 index eb016a2f8..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java +++ /dev/null @@ -1,68 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.support.Generated; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Query execution APIs for AI / BI Dashboards */ -@Generated -public class QueryExecutionAPI { - private static final Logger LOG = LoggerFactory.getLogger(QueryExecutionAPI.class); - - private final QueryExecutionService impl; - - /** Regular-use constructor */ - public QueryExecutionAPI(ApiClient apiClient) { - impl = new QueryExecutionImpl(apiClient); - } - - /** Constructor for mocks */ - public QueryExecutionAPI(QueryExecutionService mock) { - impl = mock; - } - - public CancelQueryExecutionResponse cancelPublishedQueryExecution( - String dashboardName, String dashboardRevisionId) { - return cancelPublishedQueryExecution( - new CancelPublishedQueryExecutionRequest() - .setDashboardName(dashboardName) - .setDashboardRevisionId(dashboardRevisionId)); - } - - /** Cancel the results for the a query for a published, embedded dashboard. */ - public CancelQueryExecutionResponse cancelPublishedQueryExecution( - CancelPublishedQueryExecutionRequest request) { - return impl.cancelPublishedQueryExecution(request); - } - - public void executePublishedDashboardQuery(String dashboardName, String dashboardRevisionId) { - executePublishedDashboardQuery( - new ExecutePublishedDashboardQueryRequest() - .setDashboardName(dashboardName) - .setDashboardRevisionId(dashboardRevisionId)); - } - - /** Execute a query for a published dashboard. 
*/ - public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) { - impl.executePublishedDashboardQuery(request); - } - - public PollQueryStatusResponse pollPublishedQueryStatus( - String dashboardName, String dashboardRevisionId) { - return pollPublishedQueryStatus( - new PollPublishedQueryStatusRequest() - .setDashboardName(dashboardName) - .setDashboardRevisionId(dashboardRevisionId)); - } - - /** Poll the results for the a query for a published, embedded dashboard. */ - public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) { - return impl.pollPublishedQueryStatus(request); - } - - public QueryExecutionService impl() { - return impl; - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java deleted file mode 100755 index 46db1f805..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java +++ /dev/null @@ -1,59 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.core.ApiClient; -import com.databricks.sdk.core.DatabricksException; -import com.databricks.sdk.core.http.Request; -import com.databricks.sdk.support.Generated; -import java.io.IOException; - -/** Package-local implementation of QueryExecution */ -@Generated -class QueryExecutionImpl implements QueryExecutionService { - private final ApiClient apiClient; - - public QueryExecutionImpl(ApiClient apiClient) { - this.apiClient = apiClient; - } - - @Override - public CancelQueryExecutionResponse cancelPublishedQueryExecution( - CancelPublishedQueryExecutionRequest request) { - String path = "/api/2.0/lakeview-query/query/published"; - try { - Request req = new Request("DELETE", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, CancelQueryExecutionResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) { - String path = "/api/2.0/lakeview-query/query/published"; - try { - Request req = new Request("POST", path, apiClient.serialize(request)); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - req.withHeader("Content-Type", "application/json"); - apiClient.execute(req, ExecuteQueryResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) { - String path = "/api/2.0/lakeview-query/query/published"; - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, PollQueryStatusResponse.class); - } catch (IOException e) { - throw new 
DatabricksException("IO error: " + e.getMessage(), e); - } - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java deleted file mode 100755 index d30cda5b6..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java +++ /dev/null @@ -1,26 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; - -/** - * Query execution APIs for AI / BI Dashboards - * - *

This is the high-level interface, that contains generated methods. - * - *

Evolving: this interface is under development. Method signatures may change. - */ -@Generated -public interface QueryExecutionService { - /** Cancel the results for the a query for a published, embedded dashboard. */ - CancelQueryExecutionResponse cancelPublishedQueryExecution( - CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest); - - /** Execute a query for a published dashboard. */ - void executePublishedDashboardQuery( - ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest); - - /** Poll the results for the a query for a published, embedded dashboard. */ - PollQueryStatusResponse pollPublishedQueryStatus( - PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest); -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java deleted file mode 100755 index 334f3d007..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class QueryResponseStatus { - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. - */ - @JsonProperty("canceled") - private Empty canceled; - - /** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the - * firm right now. 
- */ - @JsonProperty("closed") - private Empty closed; - - /** */ - @JsonProperty("pending") - private PendingStatus pending; - - /** - * The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be - * identical to data_token in SuccessStatus and PendingStatus. This field is created for audit - * logging purpose to record the statement_id of all QueryResponseStatus. - */ - @JsonProperty("statement_id") - private String statementId; - - /** */ - @JsonProperty("success") - private SuccessStatus success; - - public QueryResponseStatus setCanceled(Empty canceled) { - this.canceled = canceled; - return this; - } - - public Empty getCanceled() { - return canceled; - } - - public QueryResponseStatus setClosed(Empty closed) { - this.closed = closed; - return this; - } - - public Empty getClosed() { - return closed; - } - - public QueryResponseStatus setPending(PendingStatus pending) { - this.pending = pending; - return this; - } - - public PendingStatus getPending() { - return pending; - } - - public QueryResponseStatus setStatementId(String statementId) { - this.statementId = statementId; - return this; - } - - public String getStatementId() { - return statementId; - } - - public QueryResponseStatus setSuccess(SuccessStatus success) { - this.success = success; - return this; - } - - public SuccessStatus getSuccess() { - return success; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - QueryResponseStatus that = (QueryResponseStatus) o; - return Objects.equals(canceled, that.canceled) - && Objects.equals(closed, that.closed) - && Objects.equals(pending, that.pending) - && Objects.equals(statementId, that.statementId) - && Objects.equals(success, that.success); - } - - @Override - public int hashCode() { - return Objects.hash(canceled, closed, pending, statementId, success); - } - - @Override - public String toString() { - return new 
ToStringer(QueryResponseStatus.class) - .add("canceled", canceled) - .add("closed", closed) - .add("pending", pending) - .add("statementId", statementId) - .add("success", success) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java deleted file mode 100755 index c54d199d3..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java +++ /dev/null @@ -1,61 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.dashboards; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class SuccessStatus { - /** - * The token to poll for result asynchronously Example: - * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - */ - @JsonProperty("data_token") - private String dataToken; - - /** Whether the query result is truncated (either by byte limit or row limit) */ - @JsonProperty("truncated") - private Boolean truncated; - - public SuccessStatus setDataToken(String dataToken) { - this.dataToken = dataToken; - return this; - } - - public String getDataToken() { - return dataToken; - } - - public SuccessStatus setTruncated(Boolean truncated) { - this.truncated = truncated; - return this; - } - - public Boolean getTruncated() { - return truncated; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SuccessStatus that = (SuccessStatus) o; - return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated); - } - - @Override - public int hashCode() { - return Objects.hash(dataToken, truncated); - } - - 
@Override - public String toString() { - return new ToStringer(SuccessStatus.class) - .add("dataToken", dataToken) - .add("truncated", truncated) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java similarity index 96% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java index d58ef38df..a05d27a4a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java similarity index 96% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java index 2aa9d2a71..d7da58737 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java new file mode 100755 index 000000000..b3eef0fb3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a Database Table */ +@Generated +public class CreateDatabaseTableRequest { + /** Next field marker: 13 */ + @JsonProperty("table") + private DatabaseTable table; + + public CreateDatabaseTableRequest setTable(DatabaseTable table) { + this.table = table; + return this; + } + + public DatabaseTable getTable() { + return table; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDatabaseTableRequest that = (CreateDatabaseTableRequest) o; + return Objects.equals(table, that.table); + } + + @Override + public int hashCode() { + return Objects.hash(table); + } + + @Override + public String toString() { + return new ToStringer(CreateDatabaseTableRequest.class).add("table", table).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java similarity index 94% rename from 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java index 2c1ea4700..02070ad39 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -10,7 +10,7 @@ /** Create a Synced Database Table */ @Generated public class CreateSyncedDatabaseTableRequest { - /** Next field marker: 10 */ + /** Next field marker: 12 */ @JsonProperty("synced_table") private SyncedDatabaseTable syncedTable; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java similarity index 77% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java index c91d638d3..1d452de83 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java @@ -1,5 +1,5 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; @@ -9,18 +9,18 @@ /** Database Instances provide access to a database via REST API or direct SQL. */ @Generated -public class DatabaseInstancesAPI { - private static final Logger LOG = LoggerFactory.getLogger(DatabaseInstancesAPI.class); +public class DatabaseAPI { + private static final Logger LOG = LoggerFactory.getLogger(DatabaseAPI.class); - private final DatabaseInstancesService impl; + private final DatabaseService impl; /** Regular-use constructor */ - public DatabaseInstancesAPI(ApiClient apiClient) { - impl = new DatabaseInstancesImpl(apiClient); + public DatabaseAPI(ApiClient apiClient) { + impl = new DatabaseImpl(apiClient); } /** Constructor for mocks */ - public DatabaseInstancesAPI(DatabaseInstancesService mock) { + public DatabaseAPI(DatabaseService mock) { impl = mock; } @@ -43,6 +43,15 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req return impl.createDatabaseInstance(request); } + public DatabaseTable createDatabaseTable(DatabaseTable table) { + return createDatabaseTable(new CreateDatabaseTableRequest().setTable(table)); + } + + /** Create a Database Table. */ + public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) { + return impl.createDatabaseTable(request); + } + public SyncedDatabaseTable createSyncedDatabaseTable(SyncedDatabaseTable syncedTable) { return createSyncedDatabaseTable( new CreateSyncedDatabaseTableRequest().setSyncedTable(syncedTable)); @@ -71,6 +80,15 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { impl.deleteDatabaseInstance(request); } + public void deleteDatabaseTable(String name) { + deleteDatabaseTable(new DeleteDatabaseTableRequest().setName(name)); + } + + /** Delete a Database Table. 
*/ + public void deleteDatabaseTable(DeleteDatabaseTableRequest request) { + impl.deleteDatabaseTable(request); + } + public void deleteSyncedDatabaseTable(String name) { deleteSyncedDatabaseTable(new DeleteSyncedDatabaseTableRequest().setName(name)); } @@ -85,6 +103,11 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque return impl.findDatabaseInstanceByUid(request); } + /** Generates a credential that can be used to access database instances. */ + public DatabaseCredential generateDatabaseCredential(GenerateDatabaseCredentialRequest request) { + return impl.generateDatabaseCredential(request); + } + public DatabaseCatalog getDatabaseCatalog(String name) { return getDatabaseCatalog(new GetDatabaseCatalogRequest().setName(name)); } @@ -103,6 +126,15 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) return impl.getDatabaseInstance(request); } + public DatabaseTable getDatabaseTable(String name) { + return getDatabaseTable(new GetDatabaseTableRequest().setName(name)); + } + + /** Get a Database Table. 
*/ + public DatabaseTable getDatabaseTable(GetDatabaseTableRequest request) { + return impl.getDatabaseTable(request); + } + public SyncedDatabaseTable getSyncedDatabaseTable(String name) { return getSyncedDatabaseTable(new GetSyncedDatabaseTableRequest().setName(name)); } @@ -141,7 +173,7 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req return impl.updateDatabaseInstance(request); } - public DatabaseInstancesService impl() { + public DatabaseService impl() { return impl; } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java similarity index 98% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java index b4d1c2d57..06049eb18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java new file mode 100755 index 000000000..088ca1d41 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DatabaseCredential { + /** */ + @JsonProperty("token") + private String token; + + public DatabaseCredential setToken(String token) { + this.token = token; + return this; + } + + public String getToken() { + return token; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseCredential that = (DatabaseCredential) o; + return Objects.equals(token, that.token); + } + + @Override + public int hashCode() { + return Objects.hash(token); + } + + @Override + public String toString() { + return new ToStringer(DatabaseCredential.class).add("token", token).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java similarity index 74% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java index bea3e0708..b32623c1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java @@ -1,5 +1,5 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.DatabricksException; @@ -7,12 +7,12 @@ import com.databricks.sdk.support.Generated; import java.io.IOException; -/** Package-local implementation of DatabaseInstances */ +/** Package-local implementation of Database */ @Generated -class DatabaseInstancesImpl implements DatabaseInstancesService { +class DatabaseImpl implements DatabaseService { private final ApiClient apiClient; - public DatabaseInstancesImpl(ApiClient apiClient) { + public DatabaseImpl(ApiClient apiClient) { this.apiClient = apiClient; } @@ -44,6 +44,20 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req } } + @Override + public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) { + String path = "/api/2.0/database/tables"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getTable())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public SyncedDatabaseTable createSyncedDatabaseTable(CreateSyncedDatabaseTableRequest request) { String path = "/api/2.0/database/synced_tables"; @@ -84,6 +98,19 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) { } } + @Override + public void deleteDatabaseTable(DeleteDatabaseTableRequest request) { + String path = String.format("/api/2.0/database/tables/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteDatabaseTableResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + 
e.getMessage(), e); + } + } + @Override public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) { String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); @@ -110,6 +137,20 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque } } + @Override + public DatabaseCredential generateDatabaseCredential(GenerateDatabaseCredentialRequest request) { + String path = "/api/2.0/database/credentials"; + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DatabaseCredential.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest request) { String path = String.format("/api/2.0/database/catalogs/%s", request.getName()); @@ -136,6 +177,19 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request) } } + @Override + public DatabaseTable getDatabaseTable(GetDatabaseTableRequest request) { + String path = String.format("/api/2.0/database/tables/%s", request.getName()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DatabaseTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest request) { String path = String.format("/api/2.0/database/synced_tables/%s", request.getName()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java similarity index 76% 
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java index ce72b3cba..045e7f04e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -12,15 +12,7 @@ */ @Generated public class DatabaseInstance { - /** Password for admin user to create. If not provided, no user will be created. */ - @JsonProperty("admin_password") - private String adminPassword; - - /** Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'. */ - @JsonProperty("admin_rolename") - private String adminRolename; - - /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4". */ + /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8". 
*/ @JsonProperty("capacity") private String capacity; @@ -56,24 +48,6 @@ public class DatabaseInstance { @JsonProperty("uid") private String uid; - public DatabaseInstance setAdminPassword(String adminPassword) { - this.adminPassword = adminPassword; - return this; - } - - public String getAdminPassword() { - return adminPassword; - } - - public DatabaseInstance setAdminRolename(String adminRolename) { - this.adminRolename = adminRolename; - return this; - } - - public String getAdminRolename() { - return adminRolename; - } - public DatabaseInstance setCapacity(String capacity) { this.capacity = capacity; return this; @@ -160,9 +134,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DatabaseInstance that = (DatabaseInstance) o; - return Objects.equals(adminPassword, that.adminPassword) - && Objects.equals(adminRolename, that.adminRolename) - && Objects.equals(capacity, that.capacity) + return Objects.equals(capacity, that.capacity) && Objects.equals(creationTime, that.creationTime) && Objects.equals(creator, that.creator) && Objects.equals(name, that.name) @@ -176,24 +148,12 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - adminPassword, - adminRolename, - capacity, - creationTime, - creator, - name, - pgVersion, - readWriteDns, - state, - stopped, - uid); + capacity, creationTime, creator, name, pgVersion, readWriteDns, state, stopped, uid); } @Override public String toString() { return new ToStringer(DatabaseInstance.class) - .add("adminPassword", adminPassword) - .add("adminRolename", adminRolename) .add("capacity", capacity) .add("creationTime", creationTime) .add("creator", creator) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java similarity index 84% rename from 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java index 909921d03..536812f91 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java similarity index 76% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java index 9bf012769..09dcbff3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java @@ -1,5 +1,5 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; @@ -11,7 +11,7 @@ *

Evolving: this interface is under development. Method signatures may change. */ @Generated -public interface DatabaseInstancesService { +public interface DatabaseService { /** Create a Database Catalog. */ DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest createDatabaseCatalogRequest); @@ -19,6 +19,9 @@ public interface DatabaseInstancesService { DatabaseInstance createDatabaseInstance( CreateDatabaseInstanceRequest createDatabaseInstanceRequest); + /** Create a Database Table. */ + DatabaseTable createDatabaseTable(CreateDatabaseTableRequest createDatabaseTableRequest); + /** Create a Synced Database Table. */ SyncedDatabaseTable createSyncedDatabaseTable( CreateSyncedDatabaseTableRequest createSyncedDatabaseTableRequest); @@ -29,6 +32,9 @@ SyncedDatabaseTable createSyncedDatabaseTable( /** Delete a Database Instance. */ void deleteDatabaseInstance(DeleteDatabaseInstanceRequest deleteDatabaseInstanceRequest); + /** Delete a Database Table. */ + void deleteDatabaseTable(DeleteDatabaseTableRequest deleteDatabaseTableRequest); + /** Delete a Synced Database Table. */ void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest); @@ -36,12 +42,19 @@ SyncedDatabaseTable createSyncedDatabaseTable( DatabaseInstance findDatabaseInstanceByUid( FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest); + /** Generates a credential that can be used to access database instances. */ + DatabaseCredential generateDatabaseCredential( + GenerateDatabaseCredentialRequest generateDatabaseCredentialRequest); + /** Get a Database Catalog. */ DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest getDatabaseCatalogRequest); /** Get a Database Instance. */ DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest getDatabaseInstanceRequest); + /** Get a Database Table. */ + DatabaseTable getDatabaseTable(GetDatabaseTableRequest getDatabaseTableRequest); + /** Get a Synced Database Table. 
*/ SyncedDatabaseTable getSyncedDatabaseTable( GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java new file mode 100755 index 000000000..5018d7b59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java @@ -0,0 +1,108 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Next field marker: 13 */ +@Generated +public class DatabaseTable { + /** + * Name of the target database instance. This is required when creating database tables in + * standard catalogs. This is optional when creating database tables in registered catalogs. If + * this field is specified when creating database tables in registered catalogs, the database + * instance name MUST match that of the registered catalog (or the request will be rejected). + */ + @JsonProperty("database_instance_name") + private String databaseInstanceName; + + /** + * Target Postgres database object (logical database) name for this table. This field is optional + * in all scenarios. + * + *

When creating a table in a registered Postgres catalog, the target Postgres database name is + * inferred to be that of the registered catalog. If this field is specified in this scenario, the + * Postgres database name MUST match that of the registered catalog (or the request will be + * rejected). + * + *

When creating a table in a standard catalog, the target database name is inferred to be that + * of the standard catalog. In this scenario, specifying this field will allow targeting an + * arbitrary postgres database. Note that this has implications for the + * `create_database_objects_is_missing` field in `spec`. + */ + @JsonProperty("logical_database_name") + private String logicalDatabaseName; + + /** Full three-part (catalog, schema, table) name of the table. */ + @JsonProperty("name") + private String name; + + /** Data serving REST API URL for this table */ + @JsonProperty("table_serving_url") + private String tableServingUrl; + + public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) { + this.databaseInstanceName = databaseInstanceName; + return this; + } + + public String getDatabaseInstanceName() { + return databaseInstanceName; + } + + public DatabaseTable setLogicalDatabaseName(String logicalDatabaseName) { + this.logicalDatabaseName = logicalDatabaseName; + return this; + } + + public String getLogicalDatabaseName() { + return logicalDatabaseName; + } + + public DatabaseTable setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DatabaseTable setTableServingUrl(String tableServingUrl) { + this.tableServingUrl = tableServingUrl; + return this; + } + + public String getTableServingUrl() { + return tableServingUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatabaseTable that = (DatabaseTable) o; + return Objects.equals(databaseInstanceName, that.databaseInstanceName) + && Objects.equals(logicalDatabaseName, that.logicalDatabaseName) + && Objects.equals(name, that.name) + && Objects.equals(tableServingUrl, that.tableServingUrl); + } + + @Override + public int hashCode() { + return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl); + } + + 
@Override + public String toString() { + return new ToStringer(DatabaseTable.class) + .add("databaseInstanceName", databaseInstanceName) + .add("logicalDatabaseName", logicalDatabaseName) + .add("name", name) + .add("tableServingUrl", tableServingUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java similarity index 95% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java index 3a455fea8..3ac34bf9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java similarity index 93% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java index 17de1764a..718037444 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java similarity index 98% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java index e043e1347..81ed118c6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java similarity index 93% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java index 4d96f2e05..09a2ec03e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java new file mode 100755 index 000000000..a372f064a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a Database Table */ +@Generated +public class DeleteDatabaseTableRequest { + /** */ + @JsonIgnore private String name; + + public DeleteDatabaseTableRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteDatabaseTableRequest that = (DeleteDatabaseTableRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteDatabaseTableRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java similarity index 72% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java index 5aefc388e..6e851748a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java @@ -1,13 +1,13 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import java.util.Objects; @Generated -public class GetPublishedDashboardEmbeddedResponse { +public class DeleteDatabaseTableResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(GetPublishedDashboardEmbeddedResponse.class).toString(); + return new ToStringer(DeleteDatabaseTableResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java similarity index 95% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java index 506ab393b..41d1a388c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java similarity index 93% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java index 147f31d48..6649e3cfc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java similarity index 96% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java index 894cb8153..180eb8971 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java new file mode 100755 index 000000000..ba727372f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Generates a credential that can be used to access database instances */ +@Generated +public class GenerateDatabaseCredentialRequest { + /** Instances to which the token will be scoped. */ + @JsonProperty("instance_names") + private Collection instanceNames; + + /** */ + @JsonProperty("request_id") + private String requestId; + + public GenerateDatabaseCredentialRequest setInstanceNames(Collection instanceNames) { + this.instanceNames = instanceNames; + return this; + } + + public Collection getInstanceNames() { + return instanceNames; + } + + public GenerateDatabaseCredentialRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenerateDatabaseCredentialRequest that = (GenerateDatabaseCredentialRequest) o; + return Objects.equals(instanceNames, that.instanceNames) + && Objects.equals(requestId, that.requestId); + } + + @Override + public int hashCode() { + return Objects.hash(instanceNames, requestId); + } + + @Override + public String toString() { + return new ToStringer(GenerateDatabaseCredentialRequest.class) + .add("instanceNames", instanceNames) + .add("requestId", requestId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java similarity index 95% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java rename to 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java index cd9b08f25..49550232b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java similarity index 95% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java index 131d51e48..2aa6924ec 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java new file mode 100755 index 000000000..81c25b174 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Get a Database Table */ +@Generated +public class GetDatabaseTableRequest { + /** */ + @JsonIgnore private String name; + + public GetDatabaseTableRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDatabaseTableRequest that = (GetDatabaseTableRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDatabaseTableRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java similarity index 95% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java rename to 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java index 4c54608d7..d15853061 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java similarity index 97% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java index dbea4c7b3..72eeb79c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java similarity index 97% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java index e797c3e3a..7163b726c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java similarity index 97% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java index 2cf1e9752..3a9141f95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java new file mode 100755 index 000000000..88dcf59fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ProvisioningInfoState { + ACTIVE, + DEGRADED, + DELETING, + FAILED, + PROVISIONING, + UPDATING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java similarity index 95% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java index dc513fb25..5a23e80d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java @@ -1,18 +1,18 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; -/** Next field marker: 10 */ +/** Next field marker: 12 */ @Generated public class SyncedDatabaseTable { /** Synced Table data synchronization status */ @JsonProperty("data_synchronization_status") - private OnlineTableStatus dataSynchronizationStatus; + private SyncedTableStatus dataSynchronizationStatus; /** * Name of the target database instance. This is required when creating synced database tables in @@ -61,12 +61,12 @@ public class SyncedDatabaseTable { private ProvisioningInfoState unityCatalogProvisioningState; public SyncedDatabaseTable setDataSynchronizationStatus( - OnlineTableStatus dataSynchronizationStatus) { + SyncedTableStatus dataSynchronizationStatus) { this.dataSynchronizationStatus = dataSynchronizationStatus; return this; } - public OnlineTableStatus getDataSynchronizationStatus() { + public SyncedTableStatus getDataSynchronizationStatus() { return dataSynchronizationStatus; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java new file mode 100755 index 000000000..b8bd29ac7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. 
Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + * or the SYNCED_UPDATING_PIPELINE_RESOURCES state. + */ +@Generated +public class SyncedTableContinuousUpdateStatus { + /** Progress of the initial data synchronization. */ + @JsonProperty("initial_pipeline_sync_progress") + private SyncedTablePipelineProgress initialPipelineSyncProgress; + + /** + * The last source table Delta version that was synced to the synced table. Note that this Delta + * version may not be completely synced to the synced table yet. + */ + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + /** + * The timestamp of the last time any data was synchronized from the source table to the synced + * table. + */ + @JsonProperty("timestamp") + private String timestamp; + + public SyncedTableContinuousUpdateStatus setInitialPipelineSyncProgress( + SyncedTablePipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public SyncedTablePipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + public SyncedTableContinuousUpdateStatus setLastProcessedCommitVersion( + Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableContinuousUpdateStatus setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableContinuousUpdateStatus that = (SyncedTableContinuousUpdateStatus) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress) + && Objects.equals(lastProcessedCommitVersion, 
that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress, lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableContinuousUpdateStatus.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java new file mode 100755 index 000000000..e79f9da4f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java @@ -0,0 +1,70 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + * SYNCED_PIPELINE_FAILED state. + */ +@Generated +public class SyncedTableFailedStatus { + /** + * The last source table Delta version that was synced to the synced table. Note that this Delta + * version may only be partially synced to the synced table. Only populated if the table is still + * synced and available for serving. + */ + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + /** + * The timestamp of the last time any data was synchronized from the source table to the synced + * table. Only populated if the table is still synced and available for serving. 
+ */ + @JsonProperty("timestamp") + private String timestamp; + + public SyncedTableFailedStatus setLastProcessedCommitVersion(Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableFailedStatus setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableFailedStatus that = (SyncedTableFailedStatus) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableFailedStatus.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java new file mode 100755 index 000000000..f0f88e83f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java @@ -0,0 +1,117 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Progress information of the Synced Table data synchronization pipeline. 
*/ +@Generated +public class SyncedTablePipelineProgress { + /** The estimated time remaining to complete this update in seconds. */ + @JsonProperty("estimated_completion_time_seconds") + private Double estimatedCompletionTimeSeconds; + + /** + * The source table Delta version that was last processed by the pipeline. The pipeline may not + * have completely processed this version yet. + */ + @JsonProperty("latest_version_currently_processing") + private Long latestVersionCurrentlyProcessing; + + /** The completion ratio of this update. This is a number between 0 and 1. */ + @JsonProperty("sync_progress_completion") + private Double syncProgressCompletion; + + /** The number of rows that have been synced in this update. */ + @JsonProperty("synced_row_count") + private Long syncedRowCount; + + /** + * The total number of rows that need to be synced in this update. This number may be an estimate. + */ + @JsonProperty("total_row_count") + private Long totalRowCount; + + public SyncedTablePipelineProgress setEstimatedCompletionTimeSeconds( + Double estimatedCompletionTimeSeconds) { + this.estimatedCompletionTimeSeconds = estimatedCompletionTimeSeconds; + return this; + } + + public Double getEstimatedCompletionTimeSeconds() { + return estimatedCompletionTimeSeconds; + } + + public SyncedTablePipelineProgress setLatestVersionCurrentlyProcessing( + Long latestVersionCurrentlyProcessing) { + this.latestVersionCurrentlyProcessing = latestVersionCurrentlyProcessing; + return this; + } + + public Long getLatestVersionCurrentlyProcessing() { + return latestVersionCurrentlyProcessing; + } + + public SyncedTablePipelineProgress setSyncProgressCompletion(Double syncProgressCompletion) { + this.syncProgressCompletion = syncProgressCompletion; + return this; + } + + public Double getSyncProgressCompletion() { + return syncProgressCompletion; + } + + public SyncedTablePipelineProgress setSyncedRowCount(Long syncedRowCount) { + this.syncedRowCount = syncedRowCount; + return this; + } 
+ + public Long getSyncedRowCount() { + return syncedRowCount; + } + + public SyncedTablePipelineProgress setTotalRowCount(Long totalRowCount) { + this.totalRowCount = totalRowCount; + return this; + } + + public Long getTotalRowCount() { + return totalRowCount; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTablePipelineProgress that = (SyncedTablePipelineProgress) o; + return Objects.equals(estimatedCompletionTimeSeconds, that.estimatedCompletionTimeSeconds) + && Objects.equals(latestVersionCurrentlyProcessing, that.latestVersionCurrentlyProcessing) + && Objects.equals(syncProgressCompletion, that.syncProgressCompletion) + && Objects.equals(syncedRowCount, that.syncedRowCount) + && Objects.equals(totalRowCount, that.totalRowCount); + } + + @Override + public int hashCode() { + return Objects.hash( + estimatedCompletionTimeSeconds, + latestVersionCurrentlyProcessing, + syncProgressCompletion, + syncedRowCount, + totalRowCount); + } + + @Override + public String toString() { + return new ToStringer(SyncedTablePipelineProgress.class) + .add("estimatedCompletionTimeSeconds", estimatedCompletionTimeSeconds) + .add("latestVersionCurrentlyProcessing", latestVersionCurrentlyProcessing) + .add("syncProgressCompletion", syncProgressCompletion) + .add("syncedRowCount", syncedRowCount) + .add("totalRowCount", totalRowCount) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java new file mode 100755 index 000000000..d6839992f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java @@ -0,0 +1,52 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the + * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. + */ +@Generated +public class SyncedTableProvisioningStatus { + /** + * Details about initial data synchronization. Only populated when in the + * PROVISIONING_INITIAL_SNAPSHOT state. + */ + @JsonProperty("initial_pipeline_sync_progress") + private SyncedTablePipelineProgress initialPipelineSyncProgress; + + public SyncedTableProvisioningStatus setInitialPipelineSyncProgress( + SyncedTablePipelineProgress initialPipelineSyncProgress) { + this.initialPipelineSyncProgress = initialPipelineSyncProgress; + return this; + } + + public SyncedTablePipelineProgress getInitialPipelineSyncProgress() { + return initialPipelineSyncProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableProvisioningStatus that = (SyncedTableProvisioningStatus) o; + return Objects.equals(initialPipelineSyncProgress, that.initialPipelineSyncProgress); + } + + @Override + public int hashCode() { + return Objects.hash(initialPipelineSyncProgress); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableProvisioningStatus.class) + .add("initialPipelineSyncProgress", initialPipelineSyncProgress) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java rename to 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java index 0f0fd271d..6dbba1818 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java similarity index 99% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java index 0f7ae97ef..c454a0d3c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java new file mode 100755 index 000000000..f0012f316 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java @@ -0,0 +1,21 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; + +/** The state of a synced table. */ +@Generated +public enum SyncedTableState { + SYNCED_TABLED_OFFLINE, + SYNCED_TABLE_OFFLINE_FAILED, + SYNCED_TABLE_ONLINE, + SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE, + SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE, + SYNCED_TABLE_ONLINE_PIPELINE_FAILED, + SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE, + SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES, + SYNCED_TABLE_PROVISIONING, + SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT, + SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java new file mode 100755 index 000000000..a62637cfe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java @@ -0,0 +1,140 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Status of a synced table. */ +@Generated +public class SyncedTableStatus { + /** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + * or the SYNCED_UPDATING_PIPELINE_RESOURCES state. + */ + @JsonProperty("continuous_update_status") + private SyncedTableContinuousUpdateStatus continuousUpdateStatus; + + /** The state of the synced table. */ + @JsonProperty("detailed_state") + private SyncedTableState detailedState; + + /** + * Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + * SYNCED_PIPELINE_FAILED state. 
+ */ + @JsonProperty("failed_status") + private SyncedTableFailedStatus failedStatus; + + /** A text description of the current state of the synced table. */ + @JsonProperty("message") + private String message; + + /** + * Detailed status of a synced table. Shown if the synced table is in the + * PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state. + */ + @JsonProperty("provisioning_status") + private SyncedTableProvisioningStatus provisioningStatus; + + /** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE + * or the SYNCED_NO_PENDING_UPDATE state. + */ + @JsonProperty("triggered_update_status") + private SyncedTableTriggeredUpdateStatus triggeredUpdateStatus; + + public SyncedTableStatus setContinuousUpdateStatus( + SyncedTableContinuousUpdateStatus continuousUpdateStatus) { + this.continuousUpdateStatus = continuousUpdateStatus; + return this; + } + + public SyncedTableContinuousUpdateStatus getContinuousUpdateStatus() { + return continuousUpdateStatus; + } + + public SyncedTableStatus setDetailedState(SyncedTableState detailedState) { + this.detailedState = detailedState; + return this; + } + + public SyncedTableState getDetailedState() { + return detailedState; + } + + public SyncedTableStatus setFailedStatus(SyncedTableFailedStatus failedStatus) { + this.failedStatus = failedStatus; + return this; + } + + public SyncedTableFailedStatus getFailedStatus() { + return failedStatus; + } + + public SyncedTableStatus setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public SyncedTableStatus setProvisioningStatus(SyncedTableProvisioningStatus provisioningStatus) { + this.provisioningStatus = provisioningStatus; + return this; + } + + public SyncedTableProvisioningStatus getProvisioningStatus() { + return provisioningStatus; + } + + public SyncedTableStatus setTriggeredUpdateStatus( + 
SyncedTableTriggeredUpdateStatus triggeredUpdateStatus) { + this.triggeredUpdateStatus = triggeredUpdateStatus; + return this; + } + + public SyncedTableTriggeredUpdateStatus getTriggeredUpdateStatus() { + return triggeredUpdateStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncedTableStatus that = (SyncedTableStatus) o; + return Objects.equals(continuousUpdateStatus, that.continuousUpdateStatus) + && Objects.equals(detailedState, that.detailedState) + && Objects.equals(failedStatus, that.failedStatus) + && Objects.equals(message, that.message) + && Objects.equals(provisioningStatus, that.provisioningStatus) + && Objects.equals(triggeredUpdateStatus, that.triggeredUpdateStatus); + } + + @Override + public int hashCode() { + return Objects.hash( + continuousUpdateStatus, + detailedState, + failedStatus, + message, + provisioningStatus, + triggeredUpdateStatus); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableStatus.class) + .add("continuousUpdateStatus", continuousUpdateStatus) + .add("detailedState", detailedState) + .add("failedStatus", failedStatus) + .add("message", message) + .add("provisioningStatus", provisioningStatus) + .add("triggeredUpdateStatus", triggeredUpdateStatus) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java new file mode 100755 index 000000000..756e597ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.database; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE or + * the SYNCED_NO_PENDING_UPDATE state. + */ +@Generated +public class SyncedTableTriggeredUpdateStatus { + /** + * The last source table Delta version that was synced to the synced table. Note that this Delta + * version may not be completely synced to the synced table yet. + */ + @JsonProperty("last_processed_commit_version") + private Long lastProcessedCommitVersion; + + /** + * The timestamp of the last time any data was synchronized from the source table to the synced + * table. + */ + @JsonProperty("timestamp") + private String timestamp; + + /** Progress of the active data synchronization pipeline. */ + @JsonProperty("triggered_update_progress") + private SyncedTablePipelineProgress triggeredUpdateProgress; + + public SyncedTableTriggeredUpdateStatus setLastProcessedCommitVersion( + Long lastProcessedCommitVersion) { + this.lastProcessedCommitVersion = lastProcessedCommitVersion; + return this; + } + + public Long getLastProcessedCommitVersion() { + return lastProcessedCommitVersion; + } + + public SyncedTableTriggeredUpdateStatus setTimestamp(String timestamp) { + this.timestamp = timestamp; + return this; + } + + public String getTimestamp() { + return timestamp; + } + + public SyncedTableTriggeredUpdateStatus setTriggeredUpdateProgress( + SyncedTablePipelineProgress triggeredUpdateProgress) { + this.triggeredUpdateProgress = triggeredUpdateProgress; + return this; + } + + public SyncedTablePipelineProgress getTriggeredUpdateProgress() { + return triggeredUpdateProgress; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
SyncedTableTriggeredUpdateStatus that = (SyncedTableTriggeredUpdateStatus) o; + return Objects.equals(lastProcessedCommitVersion, that.lastProcessedCommitVersion) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(triggeredUpdateProgress, that.triggeredUpdateProgress); + } + + @Override + public int hashCode() { + return Objects.hash(lastProcessedCommitVersion, timestamp, triggeredUpdateProgress); + } + + @Override + public String toString() { + return new ToStringer(SyncedTableTriggeredUpdateStatus.class) + .add("lastProcessedCommitVersion", lastProcessedCommitVersion) + .add("timestamp", timestamp) + .add("triggeredUpdateProgress", triggeredUpdateProgress) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java similarity index 98% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java index d40d63ba7..fe0ed445c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.catalog; +package com.databricks.sdk.service.database; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.QueryParam; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java index 25f565e50..7bfecc4ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesAPI.java @@ -26,6 +26,8 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java index b5103d010..791175943 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java @@ -21,6 +21,8 @@ * `enable_experimental_files_api_client = True` in your configuration profile or use the * environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. * + *

Use of Files API may incur Databricks data transfer charges. + * *

[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html * *

This is the high-level interface, that contains generated methods. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java index 634e2397a..7016a0673 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java @@ -75,7 +75,7 @@ public Group get(GetAccountGroupRequest request) { public Iterable list(ListAccountGroupsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java index ff6280873..415577a5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsAPI.java @@ -73,7 +73,7 @@ public ServicePrincipal get(GetAccountServicePrincipalRequest request) { public Iterable list(ListAccountServicePrincipalsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java index b378db43c..77249ac5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersAPI.java @@ -79,7 +79,7 @@ public User get(GetAccountUserRequest request) { public 
Iterable list(ListAccountUsersRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java index 01a61454b..a6b7414d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsAPI.java @@ -75,7 +75,7 @@ public Group get(GetGroupRequest request) { public Iterable list(ListGroupsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java index 28d5c5511..f28627207 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsAPI.java @@ -73,7 +73,7 @@ public ServicePrincipal get(GetServicePrincipalRequest request) { public Iterable list(ListServicePrincipalsRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java index d079aba02..d5797926d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersAPI.java @@ -98,7 +98,7 @@ public PasswordPermissions 
getPermissions() { public Iterable list(ListUsersRequest request) { request.setStartIndex(1L); if (request.getCount() == null) { - request.setCount(100L); + request.setCount(10000L); } return new Paginator<>( request, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java index fa9f0ab77..f772fc643 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java @@ -50,6 +50,10 @@ public class BaseJob { @JsonProperty("settings") private JobSettings settings; + /** State of the trigger associated with the job. */ + @JsonProperty("trigger_state") + private TriggerStateProto triggerState; + public BaseJob setCreatedTime(Long createdTime) { this.createdTime = createdTime; return this; @@ -104,6 +108,15 @@ public JobSettings getSettings() { return settings; } + public BaseJob setTriggerState(TriggerStateProto triggerState) { + this.triggerState = triggerState; + return this; + } + + public TriggerStateProto getTriggerState() { + return triggerState; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -114,13 +127,20 @@ public boolean equals(Object o) { && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) && Objects.equals(hasMore, that.hasMore) && Objects.equals(jobId, that.jobId) - && Objects.equals(settings, that.settings); + && Objects.equals(settings, that.settings) + && Objects.equals(triggerState, that.triggerState); } @Override public int hashCode() { return Objects.hash( - createdTime, creatorUserName, effectiveBudgetPolicyId, hasMore, jobId, settings); + createdTime, + creatorUserName, + effectiveBudgetPolicyId, + hasMore, + jobId, + settings, + triggerState); } @Override @@ -132,6 +152,7 @@ public String toString() { .add("hasMore", hasMore) .add("jobId", jobId) .add("settings", 
settings) + .add("triggerState", triggerState) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java new file mode 100755 index 000000000..76683e3f5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Format of response retrieved from dbt Cloud, for inclusion in output */ +@Generated +public class DbtCloudJobRunStep { + /** Orders the steps in the job */ + @JsonProperty("index") + private Long index; + + /** Output of the step */ + @JsonProperty("logs") + private String logs; + + /** Name of the step in the job */ + @JsonProperty("name") + private String name; + + /** State of the step */ + @JsonProperty("status") + private DbtCloudRunStatus status; + + public DbtCloudJobRunStep setIndex(Long index) { + this.index = index; + return this; + } + + public Long getIndex() { + return index; + } + + public DbtCloudJobRunStep setLogs(String logs) { + this.logs = logs; + return this; + } + + public String getLogs() { + return logs; + } + + public DbtCloudJobRunStep setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public DbtCloudJobRunStep setStatus(DbtCloudRunStatus status) { + this.status = status; + return this; + } + + public DbtCloudRunStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudJobRunStep that = (DbtCloudJobRunStep) o; + 
return Objects.equals(index, that.index) + && Objects.equals(logs, that.logs) + && Objects.equals(name, that.name) + && Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(index, logs, name, status); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudJobRunStep.class) + .add("index", index) + .add("logs", logs) + .add("name", name) + .add("status", status) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java new file mode 100755 index 000000000..9ae787e53 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; + +/** Response enumeration from calling the dbt Cloud API, for inclusion in output */ +@Generated +public enum DbtCloudRunStatus { + CANCELLED, + ERROR, + QUEUED, + RUNNING, + STARTING, + SUCCESS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java new file mode 100755 index 000000000..7d54134b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class DbtCloudTask { + /** The resource name of the UC connection that authenticates the dbt Cloud for this task */ + @JsonProperty("connection_resource_name") + private String connectionResourceName; + + /** Id of the dbt Cloud job to be triggered */ + @JsonProperty("dbt_cloud_job_id") + private Long dbtCloudJobId; + + public DbtCloudTask setConnectionResourceName(String connectionResourceName) { + this.connectionResourceName = connectionResourceName; + return this; + } + + public String getConnectionResourceName() { + return connectionResourceName; + } + + public DbtCloudTask setDbtCloudJobId(Long dbtCloudJobId) { + this.dbtCloudJobId = dbtCloudJobId; + return this; + } + + public Long getDbtCloudJobId() { + return dbtCloudJobId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudTask that = (DbtCloudTask) o; + return Objects.equals(connectionResourceName, that.connectionResourceName) + && Objects.equals(dbtCloudJobId, that.dbtCloudJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectionResourceName, dbtCloudJobId); + } + + @Override + public String toString() { + return new ToStringer(DbtCloudTask.class) + .add("connectionResourceName", connectionResourceName) + .add("dbtCloudJobId", dbtCloudJobId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java new file mode 100755 index 000000000..cfbc136be --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java @@ -0,0 +1,76 @@ +// Code 
generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class DbtCloudTaskOutput { + /** Id of the job run in dbt Cloud */ + @JsonProperty("dbt_cloud_job_run_id") + private Long dbtCloudJobRunId; + + /** Steps of the job run as received from dbt Cloud */ + @JsonProperty("dbt_cloud_job_run_output") + private Collection dbtCloudJobRunOutput; + + /** Url where full run details can be viewed */ + @JsonProperty("dbt_cloud_job_run_url") + private String dbtCloudJobRunUrl; + + public DbtCloudTaskOutput setDbtCloudJobRunId(Long dbtCloudJobRunId) { + this.dbtCloudJobRunId = dbtCloudJobRunId; + return this; + } + + public Long getDbtCloudJobRunId() { + return dbtCloudJobRunId; + } + + public DbtCloudTaskOutput setDbtCloudJobRunOutput( + Collection dbtCloudJobRunOutput) { + this.dbtCloudJobRunOutput = dbtCloudJobRunOutput; + return this; + } + + public Collection getDbtCloudJobRunOutput() { + return dbtCloudJobRunOutput; + } + + public DbtCloudTaskOutput setDbtCloudJobRunUrl(String dbtCloudJobRunUrl) { + this.dbtCloudJobRunUrl = dbtCloudJobRunUrl; + return this; + } + + public String getDbtCloudJobRunUrl() { + return dbtCloudJobRunUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DbtCloudTaskOutput that = (DbtCloudTaskOutput) o; + return Objects.equals(dbtCloudJobRunId, that.dbtCloudJobRunId) + && Objects.equals(dbtCloudJobRunOutput, that.dbtCloudJobRunOutput) + && Objects.equals(dbtCloudJobRunUrl, that.dbtCloudJobRunUrl); + } + + @Override + public int hashCode() { + return Objects.hash(dbtCloudJobRunId, dbtCloudJobRunOutput, dbtCloudJobRunUrl); + } + + @Override + public String toString() { 
+ return new ToStringer(DbtCloudTaskOutput.class) + .add("dbtCloudJobRunId", dbtCloudJobRunId) + .add("dbtCloudJobRunOutput", dbtCloudJobRunOutput) + .add("dbtCloudJobRunUrl", dbtCloudJobRunUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerState.java new file mode 100755 index 000000000..cb51a944d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerState.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FileArrivalTriggerState { + /** Indicates whether the trigger leverages file events to detect file arrivals. 
*/ + @JsonProperty("using_file_events") + private Boolean usingFileEvents; + + public FileArrivalTriggerState setUsingFileEvents(Boolean usingFileEvents) { + this.usingFileEvents = usingFileEvents; + return this; + } + + public Boolean getUsingFileEvents() { + return usingFileEvents; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileArrivalTriggerState that = (FileArrivalTriggerState) o; + return Objects.equals(usingFileEvents, that.usingFileEvents); + } + + @Override + public int hashCode() { + return Objects.hash(usingFileEvents); + } + + @Override + public String toString() { + return new ToStringer(FileArrivalTriggerState.class) + .add("usingFileEvents", usingFileEvents) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index 062121875..94e5f7d60 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -67,6 +67,10 @@ public class Job { @JsonProperty("settings") private JobSettings settings; + /** State of the trigger associated with the job. 
*/ + @JsonProperty("trigger_state") + private TriggerStateProto triggerState; + public Job setCreatedTime(Long createdTime) { this.createdTime = createdTime; return this; @@ -139,6 +143,15 @@ public JobSettings getSettings() { return settings; } + public Job setTriggerState(TriggerStateProto triggerState) { + this.triggerState = triggerState; + return this; + } + + public TriggerStateProto getTriggerState() { + return triggerState; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -151,7 +164,8 @@ public boolean equals(Object o) { && Objects.equals(jobId, that.jobId) && Objects.equals(nextPageToken, that.nextPageToken) && Objects.equals(runAsUserName, that.runAsUserName) - && Objects.equals(settings, that.settings); + && Objects.equals(settings, that.settings) + && Objects.equals(triggerState, that.triggerState); } @Override @@ -164,7 +178,8 @@ public int hashCode() { jobId, nextPageToken, runAsUserName, - settings); + settings, + triggerState); } @Override @@ -178,6 +193,7 @@ public String toString() { .add("nextPageToken", nextPageToken) .add("runAsUserName", runAsUserName) .add("settings", settings) + .add("triggerState", triggerState) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java index b635850c7..2f8c6294b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java @@ -18,6 +18,10 @@ public class RunOutput { @JsonProperty("dashboard_output") private DashboardTaskOutput dashboardOutput; + /** */ + @JsonProperty("dbt_cloud_output") + private DbtCloudTaskOutput dbtCloudOutput; + /** The output of a dbt task, if available. 
*/ @JsonProperty("dbt_output") private DbtOutput dbtOutput; @@ -96,6 +100,15 @@ public DashboardTaskOutput getDashboardOutput() { return dashboardOutput; } + public RunOutput setDbtCloudOutput(DbtCloudTaskOutput dbtCloudOutput) { + this.dbtCloudOutput = dbtCloudOutput; + return this; + } + + public DbtCloudTaskOutput getDbtCloudOutput() { + return dbtCloudOutput; + } + public RunOutput setDbtOutput(DbtOutput dbtOutput) { this.dbtOutput = dbtOutput; return this; @@ -193,6 +206,7 @@ public boolean equals(Object o) { RunOutput that = (RunOutput) o; return Objects.equals(cleanRoomsNotebookOutput, that.cleanRoomsNotebookOutput) && Objects.equals(dashboardOutput, that.dashboardOutput) + && Objects.equals(dbtCloudOutput, that.dbtCloudOutput) && Objects.equals(dbtOutput, that.dbtOutput) && Objects.equals(error, that.error) && Objects.equals(errorTrace, that.errorTrace) @@ -210,6 +224,7 @@ public int hashCode() { return Objects.hash( cleanRoomsNotebookOutput, dashboardOutput, + dbtCloudOutput, dbtOutput, error, errorTrace, @@ -227,6 +242,7 @@ public String toString() { return new ToStringer(RunOutput.class) .add("cleanRoomsNotebookOutput", cleanRoomsNotebookOutput) .add("dashboardOutput", dashboardOutput) + .add("dbtCloudOutput", dbtCloudOutput) .add("dbtOutput", dbtOutput) .add("error", error) .add("errorTrace", errorTrace) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index ce8826caf..40661ad7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -57,6 +57,10 @@ public class RunTask { @JsonProperty("dashboard_task") private DashboardTask dashboardTask; + /** Task type for dbt cloud */ + @JsonProperty("dbt_cloud_task") + private DbtCloudTask dbtCloudTask; + /** * The task runs one or more dbt commands when the `dbt_task` 
field is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. @@ -366,6 +370,15 @@ public DashboardTask getDashboardTask() { return dashboardTask; } + public RunTask setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + public RunTask setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -728,6 +741,7 @@ public boolean equals(Object o) { && Objects.equals(clusterInstance, that.clusterInstance) && Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -778,6 +792,7 @@ public int hashCode() { clusterInstance, conditionTask, dashboardTask, + dbtCloudTask, dbtTask, dependsOn, description, @@ -828,6 +843,7 @@ public String toString() { .add("clusterInstance", clusterInstance) .add("conditionTask", conditionTask) .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 1627e870c..a87ac5818 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -30,6 +30,10 @@ public class SubmitTask { @JsonProperty("dashboard_task") private DashboardTask dashboardTask; + /** Task type for dbt cloud */ + @JsonProperty("dbt_cloud_task") + private DbtCloudTask dbtCloudTask; + /** * The task runs one or more dbt commands when the `dbt_task` field is 
present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. @@ -219,6 +223,15 @@ public DashboardTask getDashboardTask() { return dashboardTask; } + public SubmitTask setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + public SubmitTask setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -452,6 +465,7 @@ public boolean equals(Object o) { return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) && Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -485,6 +499,7 @@ public int hashCode() { cleanRoomsNotebookTask, conditionTask, dashboardTask, + dbtCloudTask, dbtTask, dependsOn, description, @@ -518,6 +533,7 @@ public String toString() { .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) .add("conditionTask", conditionTask) .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 6729c4691..de5569841 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -30,6 +30,10 @@ public class Task { @JsonProperty("dashboard_task") private DashboardTask dashboardTask; + /** Task type for dbt cloud */ + @JsonProperty("dbt_cloud_task") + private DbtCloudTask dbtCloudTask; + /** * The task runs one or more dbt commands when the `dbt_task` field 
is present. The dbt task * requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse. @@ -258,6 +262,15 @@ public DashboardTask getDashboardTask() { return dashboardTask; } + public Task setDbtCloudTask(DbtCloudTask dbtCloudTask) { + this.dbtCloudTask = dbtCloudTask; + return this; + } + + public DbtCloudTask getDbtCloudTask() { + return dbtCloudTask; + } + public Task setDbtTask(DbtTask dbtTask) { this.dbtTask = dbtTask; return this; @@ -536,6 +549,7 @@ public boolean equals(Object o) { return Objects.equals(cleanRoomsNotebookTask, that.cleanRoomsNotebookTask) && Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dashboardTask, that.dashboardTask) + && Objects.equals(dbtCloudTask, that.dbtCloudTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) @@ -574,6 +588,7 @@ public int hashCode() { cleanRoomsNotebookTask, conditionTask, dashboardTask, + dbtCloudTask, dbtTask, dependsOn, description, @@ -612,6 +627,7 @@ public String toString() { .add("cleanRoomsNotebookTask", cleanRoomsNotebookTask) .add("conditionTask", conditionTask) .add("dashboardTask", dashboardTask) + .add("dbtCloudTask", dbtCloudTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java new file mode 100755 index 000000000..d95eac4f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TriggerStateProto { + /** */ + @JsonProperty("file_arrival") + private FileArrivalTriggerState fileArrival; + + public TriggerStateProto setFileArrival(FileArrivalTriggerState fileArrival) { + this.fileArrival = fileArrival; + return this; + } + + public FileArrivalTriggerState getFileArrival() { + return fileArrival; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TriggerStateProto that = (TriggerStateProto) o; + return Objects.equals(fileArrival, that.fileArrival); + } + + @Override + public int hashCode() { + return Objects.hash(fileArrival); + } + + @Override + public String toString() { + return new ToStringer(TriggerStateProto.class).add("fileArrival", fileArrival).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java deleted file mode 100755 index 7f57da157..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class ArtifactCredentialInfo { - /** - * A collection of HTTP headers that should be specified when uploading to or downloading from the - * specified `signed_uri`. 
- */ - @JsonProperty("headers") - private Collection headers; - - /** - * The path, relative to the Run's artifact root location, of the artifact that can be accessed - * with the credential. - */ - @JsonProperty("path") - private String path; - - /** The ID of the MLflow Run containing the artifact that can be accessed with the credential. */ - @JsonProperty("run_id") - private String runId; - - /** The signed URI credential that provides access to the artifact. */ - @JsonProperty("signed_uri") - private String signedUri; - - /** - * The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access - * Signature URI). - */ - @JsonProperty("type") - private ArtifactCredentialType typeValue; - - public ArtifactCredentialInfo setHeaders(Collection headers) { - this.headers = headers; - return this; - } - - public Collection getHeaders() { - return headers; - } - - public ArtifactCredentialInfo setPath(String path) { - this.path = path; - return this; - } - - public String getPath() { - return path; - } - - public ArtifactCredentialInfo setRunId(String runId) { - this.runId = runId; - return this; - } - - public String getRunId() { - return runId; - } - - public ArtifactCredentialInfo setSignedUri(String signedUri) { - this.signedUri = signedUri; - return this; - } - - public String getSignedUri() { - return signedUri; - } - - public ArtifactCredentialInfo setType(ArtifactCredentialType typeValue) { - this.typeValue = typeValue; - return this; - } - - public ArtifactCredentialType getType() { - return typeValue; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArtifactCredentialInfo that = (ArtifactCredentialInfo) o; - return Objects.equals(headers, that.headers) - && Objects.equals(path, that.path) - && Objects.equals(runId, that.runId) - && Objects.equals(signedUri, that.signedUri) - && Objects.equals(typeValue, that.typeValue); - } - - 
@Override - public int hashCode() { - return Objects.hash(headers, path, runId, signedUri, typeValue); - } - - @Override - public String toString() { - return new ToStringer(ArtifactCredentialInfo.class) - .add("headers", headers) - .add("path", path) - .add("runId", runId) - .add("signedUri", signedUri) - .add("typeValue", typeValue) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java deleted file mode 100755 index 053a8991c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java +++ /dev/null @@ -1,58 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class ArtifactCredentialInfoHttpHeader { - /** The HTTP header name. */ - @JsonProperty("name") - private String name; - - /** The HTTP header value. 
*/ - @JsonProperty("value") - private String value; - - public ArtifactCredentialInfoHttpHeader setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - - public ArtifactCredentialInfoHttpHeader setValue(String value) { - this.value = value; - return this; - } - - public String getValue() { - return value; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ArtifactCredentialInfoHttpHeader that = (ArtifactCredentialInfoHttpHeader) o; - return Objects.equals(name, that.name) && Objects.equals(value, that.value); - } - - @Override - public int hashCode() { - return Objects.hash(name, value); - } - - @Override - public String toString() { - return new ToStringer(ArtifactCredentialInfoHttpHeader.class) - .add("name", name) - .add("value", value) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java deleted file mode 100755 index ec4cf4370..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; - -/** The type of a given artifact access credential */ -@Generated -public enum ArtifactCredentialType { - AWS_PRESIGNED_URL, - AZURE_ADLS_GEN2_SAS_URI, - AZURE_SAS_URI, - GCP_SIGNED_URL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java index 3d4f594b2..2f7c37615 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateRegistryWebhook.java @@ -56,7 +56,10 @@ public class CreateRegistryWebhook { @JsonProperty("job_spec") private JobSpec jobSpec; - /** Name of the model whose events would trigger this webhook. */ + /** + * If model name is not specified, a registry-wide webhook is created that listens for the + * specified events across all versions of all registered models. 
+ */ @JsonProperty("model_name") private String modelName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java index 0b4986340..7c4288cea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentRequest.java @@ -11,7 +11,7 @@ /** Delete a comment */ @Generated public class DeleteCommentRequest { - /** */ + /** Unique identifier of an activity */ @JsonIgnore @QueryParam("id") private String id; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index bcc631e68..ecc0c1931 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -173,30 +173,6 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { return impl.getByName(request); } - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - String requestId) { - return getCredentialsForTraceDataDownload( - new GetCredentialsForTraceDataDownloadRequest().setRequestId(requestId)); - } - - /** Get credentials to download trace data. */ - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest request) { - return impl.getCredentialsForTraceDataDownload(request); - } - - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - String requestId) { - return getCredentialsForTraceDataUpload( - new GetCredentialsForTraceDataUploadRequest().setRequestId(requestId)); - } - - /** Get credentials to upload trace data. 
*/ - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest request) { - return impl.getCredentialsForTraceDataUpload(request); - } - public GetExperimentResponse getExperiment(String experimentId) { return getExperiment(new GetExperimentRequest().setExperimentId(experimentId)); } @@ -329,21 +305,6 @@ public Iterable listExperiments(ListExperimentsRequest request) { }); } - public ListLoggedModelArtifactsResponse listLoggedModelArtifacts(String modelId) { - return listLoggedModelArtifacts(new ListLoggedModelArtifactsRequest().setModelId(modelId)); - } - - /** - * List artifacts for a logged model. - * - *

List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix - * which if specified, the response contains only artifacts with the specified prefix. - */ - public ListLoggedModelArtifactsResponse listLoggedModelArtifacts( - ListLoggedModelArtifactsRequest request) { - return impl.listLoggedModelArtifacts(request); - } - /** * Log a batch of metrics/params/tags for a run. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index c228b7e72..1b53bb69f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -169,38 +169,6 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) { } } - @Override - public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest request) { - String path = - String.format( - "/api/2.0/mlflow/traces/%s/credentials-for-data-download", request.getRequestId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetCredentialsForTraceDataDownloadResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - @Override - public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest request) { - String path = - String.format( - "/api/2.0/mlflow/traces/%s/credentials-for-data-upload", request.getRequestId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, GetCredentialsForTraceDataUploadResponse.class); - } catch 
(IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - @Override public GetExperimentResponse getExperiment(GetExperimentRequest request) { String path = "/api/2.0/mlflow/experiments/get"; @@ -308,22 +276,6 @@ public ListExperimentsResponse listExperiments(ListExperimentsRequest request) { } } - @Override - public ListLoggedModelArtifactsResponse listLoggedModelArtifacts( - ListLoggedModelArtifactsRequest request) { - String path = - String.format( - "/api/2.0/mlflow/logged-models/%s/artifacts/directories", request.getModelId()); - try { - Request req = new Request("GET", path); - ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); - return apiClient.execute(req, ListLoggedModelArtifactsResponse.class); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - @Override public void logBatch(LogBatch request) { String path = "/api/2.0/mlflow/runs/log-batch"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index abafed87e..7613522e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -96,14 +96,6 @@ FinalizeLoggedModelResponse finalizeLoggedModel( */ GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest); - /** Get credentials to download trace data. */ - GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload( - GetCredentialsForTraceDataDownloadRequest getCredentialsForTraceDataDownloadRequest); - - /** Get credentials to upload trace data. */ - GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload( - GetCredentialsForTraceDataUploadRequest getCredentialsForTraceDataUploadRequest); - /** * Get an experiment. 
* @@ -166,15 +158,6 @@ ExperimentPermissions getPermissions( */ ListExperimentsResponse listExperiments(ListExperimentsRequest listExperimentsRequest); - /** - * List artifacts for a logged model. - * - *

List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix - * which if specified, the response contains only artifacts with the specified prefix. - */ - ListLoggedModelArtifactsResponse listLoggedModelArtifacts( - ListLoggedModelArtifactsRequest listLoggedModelArtifactsRequest); - /** * Log a batch of metrics/params/tags for a run. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java index e57f1bbae..3bb12c2f8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java @@ -15,7 +15,7 @@ public class FinalizeLoggedModelRequest { /** * Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that - * something went wrong when logging the model weights / agent code). + * something went wrong when logging the model weights / agent code. */ @JsonProperty("status") private LoggedModelStatus status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java deleted file mode 100755 index 42aac217e..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get credentials to download trace data */ -@Generated -public class GetCredentialsForTraceDataDownloadRequest { - /** The ID of the trace to fetch artifact download credentials for. */ - @JsonIgnore private String requestId; - - public GetCredentialsForTraceDataDownloadRequest setRequestId(String requestId) { - this.requestId = requestId; - return this; - } - - public String getRequestId() { - return requestId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataDownloadRequest that = (GetCredentialsForTraceDataDownloadRequest) o; - return Objects.equals(requestId, that.requestId); - } - - @Override - public int hashCode() { - return Objects.hash(requestId); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataDownloadRequest.class) - .add("requestId", requestId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java deleted file mode 100755 index 839e04921..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java +++ /dev/null @@ -1,46 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class GetCredentialsForTraceDataDownloadResponse { - /** The artifact download credentials for the specified trace data. */ - @JsonProperty("credential_info") - private ArtifactCredentialInfo credentialInfo; - - public GetCredentialsForTraceDataDownloadResponse setCredentialInfo( - ArtifactCredentialInfo credentialInfo) { - this.credentialInfo = credentialInfo; - return this; - } - - public ArtifactCredentialInfo getCredentialInfo() { - return credentialInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataDownloadResponse that = - (GetCredentialsForTraceDataDownloadResponse) o; - return Objects.equals(credentialInfo, that.credentialInfo); - } - - @Override - public int hashCode() { - return Objects.hash(credentialInfo); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataDownloadResponse.class) - .add("credentialInfo", credentialInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java deleted file mode 100755 index e7c6d452c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** Get credentials to upload trace data */ -@Generated -public class GetCredentialsForTraceDataUploadRequest { - /** The ID of the trace to fetch artifact upload credentials for. */ - @JsonIgnore private String requestId; - - public GetCredentialsForTraceDataUploadRequest setRequestId(String requestId) { - this.requestId = requestId; - return this; - } - - public String getRequestId() { - return requestId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataUploadRequest that = (GetCredentialsForTraceDataUploadRequest) o; - return Objects.equals(requestId, that.requestId); - } - - @Override - public int hashCode() { - return Objects.hash(requestId); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataUploadRequest.class) - .add("requestId", requestId) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java deleted file mode 100755 index 9dcaed06c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java +++ /dev/null @@ -1,45 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class GetCredentialsForTraceDataUploadResponse { - /** The artifact upload credentials for the specified trace data. */ - @JsonProperty("credential_info") - private ArtifactCredentialInfo credentialInfo; - - public GetCredentialsForTraceDataUploadResponse setCredentialInfo( - ArtifactCredentialInfo credentialInfo) { - this.credentialInfo = credentialInfo; - return this; - } - - public ArtifactCredentialInfo getCredentialInfo() { - return credentialInfo; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - GetCredentialsForTraceDataUploadResponse that = (GetCredentialsForTraceDataUploadResponse) o; - return Objects.equals(credentialInfo, that.credentialInfo); - } - - @Override - public int hashCode() { - return Objects.hash(credentialInfo); - } - - @Override - public String toString() { - return new ToStringer(GetCredentialsForTraceDataUploadResponse.class) - .add("credentialInfo", credentialInfo) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java deleted file mode 100755 index e94842c95..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java +++ /dev/null @@ -1,83 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.QueryParam; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import java.util.Objects; - -/** List artifacts for a logged model */ -@Generated -public class ListLoggedModelArtifactsRequest { - /** Filter artifacts matching this path (a relative path from the root artifact directory). */ - @JsonIgnore - @QueryParam("artifact_directory_path") - private String artifactDirectoryPath; - - /** The ID of the logged model for which to list the artifacts. */ - @JsonIgnore private String modelId; - - /** - * Token indicating the page of artifact results to fetch. `page_token` is not supported when - * listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. - * Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, - * which supports pagination. See [List directory contents | Files - * API](/api/workspace/files/listdirectorycontents). 
- */ - @JsonIgnore - @QueryParam("page_token") - private String pageToken; - - public ListLoggedModelArtifactsRequest setArtifactDirectoryPath(String artifactDirectoryPath) { - this.artifactDirectoryPath = artifactDirectoryPath; - return this; - } - - public String getArtifactDirectoryPath() { - return artifactDirectoryPath; - } - - public ListLoggedModelArtifactsRequest setModelId(String modelId) { - this.modelId = modelId; - return this; - } - - public String getModelId() { - return modelId; - } - - public ListLoggedModelArtifactsRequest setPageToken(String pageToken) { - this.pageToken = pageToken; - return this; - } - - public String getPageToken() { - return pageToken; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ListLoggedModelArtifactsRequest that = (ListLoggedModelArtifactsRequest) o; - return Objects.equals(artifactDirectoryPath, that.artifactDirectoryPath) - && Objects.equals(modelId, that.modelId) - && Objects.equals(pageToken, that.pageToken); - } - - @Override - public int hashCode() { - return Objects.hash(artifactDirectoryPath, modelId, pageToken); - } - - @Override - public String toString() { - return new ToStringer(ListLoggedModelArtifactsRequest.class) - .add("artifactDirectoryPath", artifactDirectoryPath) - .add("modelId", modelId) - .add("pageToken", pageToken) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java deleted file mode 100755 index 35e2dbe82..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java +++ /dev/null @@ -1,75 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.ml; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Collection; -import java.util.Objects; - -@Generated -public class ListLoggedModelArtifactsResponse { - /** File location and metadata for artifacts. */ - @JsonProperty("files") - private Collection files; - - /** Token that can be used to retrieve the next page of artifact results */ - @JsonProperty("next_page_token") - private String nextPageToken; - - /** Root artifact directory for the logged model. */ - @JsonProperty("root_uri") - private String rootUri; - - public ListLoggedModelArtifactsResponse setFiles(Collection files) { - this.files = files; - return this; - } - - public Collection getFiles() { - return files; - } - - public ListLoggedModelArtifactsResponse setNextPageToken(String nextPageToken) { - this.nextPageToken = nextPageToken; - return this; - } - - public String getNextPageToken() { - return nextPageToken; - } - - public ListLoggedModelArtifactsResponse setRootUri(String rootUri) { - this.rootUri = rootUri; - return this; - } - - public String getRootUri() { - return rootUri; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ListLoggedModelArtifactsResponse that = (ListLoggedModelArtifactsResponse) o; - return Objects.equals(files, that.files) - && Objects.equals(nextPageToken, that.nextPageToken) - && Objects.equals(rootUri, that.rootUri); - } - - @Override - public int hashCode() { - return Objects.hash(files, nextPageToken, rootUri); - } - - @Override - public String toString() { - return new ToStringer(ListLoggedModelArtifactsResponse.class) - .add("files", files) - .add("nextPageToken", nextPageToken) - .add("rootUri", rootUri) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java index 604f034f7..243593e6a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java @@ -15,12 +15,7 @@ public class RunInputs { @JsonProperty("dataset_inputs") private Collection datasetInputs; - /** - * **NOTE**: Experimental: This API field may change or be removed in a future release without - * warning. - * - *

Model inputs to the Run. - */ + /** Model inputs to the Run. */ @JsonProperty("model_inputs") private Collection modelInputs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index aede0bea6..55ad9af9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -106,7 +106,8 @@ public void delete(String pipelineId) { /** * Delete a pipeline. * - *

Deletes a pipeline. + *

Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the + * pipeline and its tables. You cannot undo this action. */ public void delete(DeletePipelineRequest request) { impl.delete(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java index 332eabdcf..59f5b9f3e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java @@ -35,7 +35,8 @@ public interface PipelinesService { /** * Delete a pipeline. * - *

Deletes a pipeline. + *

Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the + * pipeline and its tables. You cannot undo this action. */ void delete(DeletePipelineRequest deletePipelineRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java new file mode 100755 index 000000000..5b14da636 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AnomalyDetectionConfig { + /** Run id of the last run of the workflow */ + @JsonProperty("last_run_id") + private String lastRunId; + + /** The status of the last run of the workflow. 
*/ + @JsonProperty("latest_run_status") + private AnomalyDetectionRunStatus latestRunStatus; + + public AnomalyDetectionConfig setLastRunId(String lastRunId) { + this.lastRunId = lastRunId; + return this; + } + + public String getLastRunId() { + return lastRunId; + } + + public AnomalyDetectionConfig setLatestRunStatus(AnomalyDetectionRunStatus latestRunStatus) { + this.latestRunStatus = latestRunStatus; + return this; + } + + public AnomalyDetectionRunStatus getLatestRunStatus() { + return latestRunStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnomalyDetectionConfig that = (AnomalyDetectionConfig) o; + return Objects.equals(lastRunId, that.lastRunId) + && Objects.equals(latestRunStatus, that.latestRunStatus); + } + + @Override + public int hashCode() { + return Objects.hash(lastRunId, latestRunStatus); + } + + @Override + public String toString() { + return new ToStringer(AnomalyDetectionConfig.class) + .add("lastRunId", lastRunId) + .add("latestRunStatus", latestRunStatus) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java new file mode 100755 index 000000000..8de4b6bb6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java @@ -0,0 +1,18 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; + +/** Status of Anomaly Detection Job Run */ +@Generated +public enum AnomalyDetectionRunStatus { + ANOMALY_DETECTION_RUN_STATUS_CANCELED, + ANOMALY_DETECTION_RUN_STATUS_FAILED, + ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED, + ANOMALY_DETECTION_RUN_STATUS_PENDING, + ANOMALY_DETECTION_RUN_STATUS_RUNNING, + ANOMALY_DETECTION_RUN_STATUS_SUCCESS, + ANOMALY_DETECTION_RUN_STATUS_UNKNOWN, + ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java new file mode 100755 index 000000000..f7ea1c964 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Create a quality monitor */ +@Generated +public class CreateQualityMonitorRequest { + /** */ + @JsonProperty("quality_monitor") + private QualityMonitor qualityMonitor; + + public CreateQualityMonitorRequest setQualityMonitor(QualityMonitor qualityMonitor) { + this.qualityMonitor = qualityMonitor; + return this; + } + + public QualityMonitor getQualityMonitor() { + return qualityMonitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateQualityMonitorRequest that = (CreateQualityMonitorRequest) o; + return Objects.equals(qualityMonitor, that.qualityMonitor); + } + + @Override + public int hashCode() { + return Objects.hash(qualityMonitor); + } + + @Override + public String toString() { + return new ToStringer(CreateQualityMonitorRequest.class) + .add("qualityMonitor", qualityMonitor) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java new file mode 100755 index 000000000..761677c05 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Delete a quality monitor */ +@Generated +public class DeleteQualityMonitorRequest { + /** The uuid of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. */ + @JsonIgnore private String objectType; + + public DeleteQualityMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public DeleteQualityMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteQualityMonitorRequest that = (DeleteQualityMonitorRequest) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(DeleteQualityMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java similarity index 67% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java index 8714d62a6..8d3d5dd8c 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java @@ -1,17 +1,13 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.dashboards; +package com.databricks.sdk.service.qualitymonitorv2; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import java.util.Objects; -/** - * Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm - * right now. - */ @Generated -public class Empty { +public class DeleteQualityMonitorResponse { @Override public boolean equals(Object o) { @@ -27,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(Empty.class).toString(); + return new ToStringer(DeleteQualityMonitorResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java new file mode 100755 index 000000000..7575721ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Read a quality monitor */ +@Generated +public class GetQualityMonitorRequest { + /** The uuid of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. 
*/ + @JsonIgnore private String objectType; + + public GetQualityMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public GetQualityMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetQualityMonitorRequest that = (GetQualityMonitorRequest) o; + return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(GetQualityMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java new file mode 100755 index 000000000..db6234b9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** List quality monitors */ +@Generated +public class ListQualityMonitorRequest { + /** */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListQualityMonitorRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListQualityMonitorRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQualityMonitorRequest that = (ListQualityMonitorRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListQualityMonitorRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java new file mode 100755 index 000000000..8b332ed92 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListQualityMonitorResponse { + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("quality_monitors") + private Collection qualityMonitors; + + public ListQualityMonitorResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListQualityMonitorResponse setQualityMonitors(Collection qualityMonitors) { + this.qualityMonitors = qualityMonitors; + return this; + } + + public Collection getQualityMonitors() { + return qualityMonitors; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListQualityMonitorResponse that = (ListQualityMonitorResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(qualityMonitors, that.qualityMonitors); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, qualityMonitors); + } + + @Override + public String toString() { + return new ToStringer(ListQualityMonitorResponse.class) + .add("nextPageToken", nextPageToken) + .add("qualityMonitors", qualityMonitors) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java new file mode 100755 index 000000000..6171c98b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class QualityMonitor { + /** */ + @JsonProperty("anomaly_detection_config") + private AnomalyDetectionConfig anomalyDetectionConfig; + + /** The uuid of the request object. For example, schema id. */ + @JsonProperty("object_id") + private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. */ + @JsonProperty("object_type") + private String objectType; + + public QualityMonitor setAnomalyDetectionConfig(AnomalyDetectionConfig anomalyDetectionConfig) { + this.anomalyDetectionConfig = anomalyDetectionConfig; + return this; + } + + public AnomalyDetectionConfig getAnomalyDetectionConfig() { + return anomalyDetectionConfig; + } + + public QualityMonitor setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public QualityMonitor setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QualityMonitor that = (QualityMonitor) o; + return Objects.equals(anomalyDetectionConfig, that.anomalyDetectionConfig) + && Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType); + } + + @Override + public int hashCode() { + return Objects.hash(anomalyDetectionConfig, objectId, objectType); + } + + @Override + public String toString() { + return new ToStringer(QualityMonitor.class) + .add("anomalyDetectionConfig", anomalyDetectionConfig) + .add("objectId", objectId) + .add("objectType", objectType) + .toString(); + } +} diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java new file mode 100755 index 000000000..b0a02ea48 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java @@ -0,0 +1,109 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Manage data quality of UC objects (currently support `schema`) */ +@Generated +public class QualityMonitorV2API { + private static final Logger LOG = LoggerFactory.getLogger(QualityMonitorV2API.class); + + private final QualityMonitorV2Service impl; + + /** Regular-use constructor */ + public QualityMonitorV2API(ApiClient apiClient) { + impl = new QualityMonitorV2Impl(apiClient); + } + + /** Constructor for mocks */ + public QualityMonitorV2API(QualityMonitorV2Service mock) { + impl = mock; + } + + public QualityMonitor createQualityMonitor(QualityMonitor qualityMonitor) { + return createQualityMonitor( + new CreateQualityMonitorRequest().setQualityMonitor(qualityMonitor)); + } + + /** + * Create a quality monitor. + * + *

Create a quality monitor on UC object + */ + public QualityMonitor createQualityMonitor(CreateQualityMonitorRequest request) { + return impl.createQualityMonitor(request); + } + + public void deleteQualityMonitor(String objectType, String objectId) { + deleteQualityMonitor( + new DeleteQualityMonitorRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * Delete a quality monitor. + * + *

Delete a quality monitor on UC object + */ + public void deleteQualityMonitor(DeleteQualityMonitorRequest request) { + impl.deleteQualityMonitor(request); + } + + public QualityMonitor getQualityMonitor(String objectType, String objectId) { + return getQualityMonitor( + new GetQualityMonitorRequest().setObjectType(objectType).setObjectId(objectId)); + } + + /** + * Read a quality monitor. + * + *

Read a quality monitor on UC object + */ + public QualityMonitor getQualityMonitor(GetQualityMonitorRequest request) { + return impl.getQualityMonitor(request); + } + + /** + * List quality monitors. + * + *

(Unimplemented) List quality monitors + */ + public Iterable listQualityMonitor(ListQualityMonitorRequest request) { + return new Paginator<>( + request, + impl::listQualityMonitor, + ListQualityMonitorResponse::getQualityMonitors, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + public QualityMonitor updateQualityMonitor( + String objectType, String objectId, QualityMonitor qualityMonitor) { + return updateQualityMonitor( + new UpdateQualityMonitorRequest() + .setObjectType(objectType) + .setObjectId(objectId) + .setQualityMonitor(qualityMonitor)); + } + + /** + * Update a quality monitor. + * + *

(Unimplemented) Update a quality monitor on UC object + */ + public QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest request) { + return impl.updateQualityMonitor(request); + } + + public QualityMonitorV2Service impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java new file mode 100755 index 000000000..0880dbd86 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; + +/** Package-local implementation of QualityMonitorV2 */ +@Generated +class QualityMonitorV2Impl implements QualityMonitorV2Service { + private final ApiClient apiClient; + + public QualityMonitorV2Impl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public QualityMonitor createQualityMonitor(CreateQualityMonitorRequest request) { + String path = "/api/2.0/quality-monitors"; + try { + Request req = new Request("POST", path, apiClient.serialize(request.getQualityMonitor())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, QualityMonitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteQualityMonitor(DeleteQualityMonitorRequest request) { + String path = + String.format( + "/api/2.0/quality-monitors/%s/%s", 
request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteQualityMonitorResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public QualityMonitor getQualityMonitor(GetQualityMonitorRequest request) { + String path = + String.format( + "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, QualityMonitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListQualityMonitorResponse listQualityMonitor(ListQualityMonitorRequest request) { + String path = "/api/2.0/quality-monitors"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListQualityMonitorResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest request) { + String path = + String.format( + "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId()); + try { + Request req = new Request("PUT", path, apiClient.serialize(request.getQualityMonitor())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, QualityMonitor.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java new file mode 100755 index 000000000..762b01606 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; + +/** + * Manage data quality of UC objects (currently support `schema`) + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface QualityMonitorV2Service { + /** + * Create a quality monitor. + * + *

Create a quality monitor on UC object + */ + QualityMonitor createQualityMonitor(CreateQualityMonitorRequest createQualityMonitorRequest); + + /** + * Delete a quality monitor. + * + *

Delete a quality monitor on UC object + */ + void deleteQualityMonitor(DeleteQualityMonitorRequest deleteQualityMonitorRequest); + + /** + * Read a quality monitor. + * + *

Read a quality monitor on UC object + */ + QualityMonitor getQualityMonitor(GetQualityMonitorRequest getQualityMonitorRequest); + + /** + * List quality monitors. + * + *

(Unimplemented) List quality monitors + */ + ListQualityMonitorResponse listQualityMonitor( + ListQualityMonitorRequest listQualityMonitorRequest); + + /** + * Update a quality monitor. + * + *

(Unimplemented) Update a quality monitor on UC object + */ + QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest updateQualityMonitorRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java new file mode 100755 index 000000000..0c7f4fb84 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.qualitymonitorv2; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Update a quality monitor */ +@Generated +public class UpdateQualityMonitorRequest { + /** The uuid of the request object. For example, schema id. */ + @JsonIgnore private String objectId; + + /** The type of the monitored object. Can be one of the following: schema. 
*/ + @JsonIgnore private String objectType; + + /** */ + @JsonProperty("quality_monitor") + private QualityMonitor qualityMonitor; + + public UpdateQualityMonitorRequest setObjectId(String objectId) { + this.objectId = objectId; + return this; + } + + public String getObjectId() { + return objectId; + } + + public UpdateQualityMonitorRequest setObjectType(String objectType) { + this.objectType = objectType; + return this; + } + + public String getObjectType() { + return objectType; + } + + public UpdateQualityMonitorRequest setQualityMonitor(QualityMonitor qualityMonitor) { + this.qualityMonitor = qualityMonitor; + return this; + } + + public QualityMonitor getQualityMonitor() { + return qualityMonitor; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateQualityMonitorRequest that = (UpdateQualityMonitorRequest) o; + return Objects.equals(objectId, that.objectId) + && Objects.equals(objectType, that.objectType) + && Objects.equals(qualityMonitor, that.qualityMonitor); + } + + @Override + public int hashCode() { + return Objects.hash(objectId, objectType, qualityMonitor); + } + + @Override + public String toString() { + return new ToStringer(UpdateQualityMonitorRequest.class) + .add("objectId", objectId) + .add("objectType", objectType) + .add("qualityMonitor", qualityMonitor) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java index ea50df387..51630a687 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java @@ -15,7 +15,7 @@ @Generated public class CreatePrivateEndpointRule { /** - * Only used by private endpoints to 
customer-managed resources. + * Only used by private endpoints to customer-managed private endpoint services. * *

Domain names of target private link service. When updating this field, the full list of * target domain_names must be specified. @@ -24,8 +24,14 @@ public class CreatePrivateEndpointRule { private Collection domainNames; /** - * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer | - * mysqlServer + * The full target AWS endpoint service name that connects to the destination resources of the + * private endpoint. + */ + @JsonProperty("endpoint_service") + private String endpointService; + + /** + * Not used by customer-managed private endpoint services. * *

The sub-resource type (group ID) of the target resource. Note that to connect to workspace * root storage (root DBFS), you need two endpoints, one for blob and one for dfs. @@ -37,6 +43,17 @@ public class CreatePrivateEndpointRule { @JsonProperty("resource_id") private String resourceId; + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + @JsonProperty("resource_names") + private Collection resourceNames; + public CreatePrivateEndpointRule setDomainNames(Collection domainNames) { this.domainNames = domainNames; return this; @@ -46,6 +63,15 @@ public Collection getDomainNames() { return domainNames; } + public CreatePrivateEndpointRule setEndpointService(String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + public CreatePrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -64,27 +90,40 @@ public String getResourceId() { return resourceId; } + public CreatePrivateEndpointRule setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreatePrivateEndpointRule that = (CreatePrivateEndpointRule) o; return Objects.equals(domainNames, that.domainNames) + && Objects.equals(endpointService, that.endpointService) && Objects.equals(groupId, that.groupId) - && Objects.equals(resourceId, that.resourceId); + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(resourceNames, that.resourceNames); } @Override public int hashCode() { - return Objects.hash(domainNames, groupId, resourceId); + return Objects.hash(domainNames, endpointService, groupId, resourceId, resourceNames); } @Override public String toString() { return new ToStringer(CreatePrivateEndpointRule.class) .add("domainNames", domainNames) + 
.add("endpointService", endpointService) .add("groupId", groupId) .add("resourceId", resourceId) + .add("resourceNames", resourceNames) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java new file mode 100755 index 000000000..65bb8dc20 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java @@ -0,0 +1,294 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that for private endpoints towards a VPC + * endpoint service behind a customer-managed NLB, you must approve the endpoint in AWS console + * after initialization. + */ +@Generated +public class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule { + /** Databricks account ID. You can find your account ID from the Accounts Console. */ + @JsonProperty("account_id") + private String accountId; + + /** + * The current status of this private endpoint. The private endpoint rules are effective only if + * the connection state is ESTABLISHED. Remember that you must approve new endpoints on your + * resources in the AWS console before they take effect. The possible values are: - PENDING: The + * endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved + * and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected + * by the private link resource owner. 
- DISCONNECTED: Connection was removed by the private link + * resource owner, the private endpoint becomes informative and should be deleted for clean-up. - + * EXPIRED: If the endpoint is created but not approved in 14 days, it is EXPIRED. + */ + @JsonProperty("connection_state") + private CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + connectionState; + + /** Time in epoch milliseconds when this object was created. */ + @JsonProperty("creation_time") + private Long creationTime; + + /** Whether this private endpoint is deactivated. */ + @JsonProperty("deactivated") + private Boolean deactivated; + + /** Time in epoch milliseconds when this object was deactivated. */ + @JsonProperty("deactivated_at") + private Long deactivatedAt; + + /** + * Only used by private endpoints towards a VPC endpoint service for customer-managed VPC endpoint + * service. + * + *

The target AWS resource FQDNs accessible via the VPC endpoint service. When updating this + * field, we perform full update on this field. Please ensure a full list of desired domain_names + * is provided. + */ + @JsonProperty("domain_names") + private Collection domainNames; + + /** + * Only used by private endpoints towards an AWS S3 service. + * + *

Update this field to activate/deactivate this private endpoint to allow egress access from + * serverless compute resources. + */ + @JsonProperty("enabled") + private Boolean enabled; + + /** + * The full target AWS endpoint service name that connects to the destination resources of the + * private endpoint. + */ + @JsonProperty("endpoint_service") + private String endpointService; + + /** + * The ID of a network connectivity configuration, which is the parent resource of this private + * endpoint rule object. + */ + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + @JsonProperty("resource_names") + private Collection resourceNames; + + /** The ID of a private endpoint rule. */ + @JsonProperty("rule_id") + private String ruleId; + + /** Time in epoch milliseconds when this object was updated. */ + @JsonProperty("updated_time") + private Long updatedTime; + + /** + * The AWS VPC endpoint ID. You can use this ID to identify VPC endpoint created by Databricks. + */ + @JsonProperty("vpc_endpoint_id") + private String vpcEndpointId; + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setAccountId( + String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setConnectionState( + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + connectionState) { + this.connectionState = connectionState; + return this; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + getConnectionState() { + return connectionState; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setCreationTime( + Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setDeactivated( + Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return deactivated; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setDeactivatedAt( + Long deactivatedAt) { + 
this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setDomainNames( + Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + return domainNames; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setEndpointService( + String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule + setNetworkConnectivityConfigId(String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setResourceNames( + Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setUpdatedTime( + Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + public CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule setVpcEndpointId( + String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String 
getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule that = + (CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceNames, that.resourceNames) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime) + && Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + enabled, + endpointService, + networkConnectivityConfigId, + resourceNames, + ruleId, + updatedTime, + vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.class) + .add("accountId", accountId) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + .add("enabled", enabled) + .add("endpointService", endpointService) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceNames", resourceNames) + .add("ruleId", ruleId) + .add("updatedTime", updatedTime) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java new file mode 100755 index 000000000..54c96f842 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState { + DISCONNECTED, + ESTABLISHED, + EXPIRED, + PENDING, + REJECTED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java similarity index 70% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java index 03ccf6398..74f1b554a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java @@ -10,10 +10,10 @@ /** The private endpoint rule list was successfully retrieved. 
*/ @Generated -public class ListNccAzurePrivateEndpointRulesResponse { +public class ListPrivateEndpointRulesResponse { /** */ @JsonProperty("items") - private Collection items; + private Collection items; /** * A token that can be used to get the next page of results. If null, there are no more results to @@ -22,17 +22,16 @@ public class ListNccAzurePrivateEndpointRulesResponse { @JsonProperty("next_page_token") private String nextPageToken; - public ListNccAzurePrivateEndpointRulesResponse setItems( - Collection items) { + public ListPrivateEndpointRulesResponse setItems(Collection items) { this.items = items; return this; } - public Collection getItems() { + public Collection getItems() { return items; } - public ListNccAzurePrivateEndpointRulesResponse setNextPageToken(String nextPageToken) { + public ListPrivateEndpointRulesResponse setNextPageToken(String nextPageToken) { this.nextPageToken = nextPageToken; return this; } @@ -45,7 +44,7 @@ public String getNextPageToken() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListNccAzurePrivateEndpointRulesResponse that = (ListNccAzurePrivateEndpointRulesResponse) o; + ListPrivateEndpointRulesResponse that = (ListPrivateEndpointRulesResponse) o; return Objects.equals(items, that.items) && Objects.equals(nextPageToken, that.nextPageToken); } @@ -56,7 +55,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ListNccAzurePrivateEndpointRulesResponse.class) + return new ToStringer(ListPrivateEndpointRulesResponse.class) .add("items", items) .add("nextPageToken", nextPageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java index 6228e6f9b..c233b7892 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java @@ -41,7 +41,7 @@ public class NccAzurePrivateEndpointRule { private Long deactivatedAt; /** - * Only used by private endpoints to customer-managed resources. + * Not used by customer-managed private endpoint services. * *

Domain names of target private link service. When updating this field, the full list of * target domain_names must be specified. @@ -54,8 +54,7 @@ public class NccAzurePrivateEndpointRule { private String endpointName; /** - * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer | - * mysqlServer + * Only used by private endpoints to Azure first-party services. * *

The sub-resource type (group ID) of the target resource. Note that to connect to workspace * root storage (root DBFS), you need two endpoints, one for blob and one for dfs. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java index 4cb399bdf..2fd0903d6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java @@ -11,10 +11,27 @@ /** Target rule controls the egress rules that are dedicated to specific resources. */ @Generated public class NccEgressTargetRules { + /** AWS private endpoint rule controls the AWS private endpoint based egress rules. */ + @JsonProperty("aws_private_endpoint_rules") + private Collection + awsPrivateEndpointRules; + /** */ @JsonProperty("azure_private_endpoint_rules") private Collection azurePrivateEndpointRules; + public NccEgressTargetRules setAwsPrivateEndpointRules( + Collection + awsPrivateEndpointRules) { + this.awsPrivateEndpointRules = awsPrivateEndpointRules; + return this; + } + + public Collection + getAwsPrivateEndpointRules() { + return awsPrivateEndpointRules; + } + public NccEgressTargetRules setAzurePrivateEndpointRules( Collection azurePrivateEndpointRules) { this.azurePrivateEndpointRules = azurePrivateEndpointRules; @@ -30,17 +47,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NccEgressTargetRules that = (NccEgressTargetRules) o; - return Objects.equals(azurePrivateEndpointRules, that.azurePrivateEndpointRules); + return Objects.equals(awsPrivateEndpointRules, that.awsPrivateEndpointRules) + && Objects.equals(azurePrivateEndpointRules, that.azurePrivateEndpointRules); } @Override public int hashCode() { - return 
Objects.hash(azurePrivateEndpointRules); + return Objects.hash(awsPrivateEndpointRules, azurePrivateEndpointRules); } @Override public String toString() { return new ToStringer(NccEgressTargetRules.class) + .add("awsPrivateEndpointRules", awsPrivateEndpointRules) .add("azurePrivateEndpointRules", azurePrivateEndpointRules) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java new file mode 100755 index 000000000..cf51cf09f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java @@ -0,0 +1,331 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + * portal after initialization. + */ +@Generated +public class NccPrivateEndpointRule { + /** Databricks account ID. You can find your account ID from the Accounts Console. */ + @JsonProperty("account_id") + private String accountId; + + /** + * The current status of this private endpoint. The private endpoint rules are effective only if + * the connection state is ESTABLISHED. Remember that you must approve new endpoints on your + * resources in the Cloud console before they take effect. The possible values are: - PENDING: The + * endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved + * and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected + * by the private link resource owner. 
- DISCONNECTED: Connection was removed by the private link + * resource owner, the private endpoint becomes informative and should be deleted for clean-up. - + * EXPIRED: If the endpoint was created but not approved in 14 days, it will be EXPIRED. + */ + @JsonProperty("connection_state") + private NccPrivateEndpointRulePrivateLinkConnectionState connectionState; + + /** Time in epoch milliseconds when this object was created. */ + @JsonProperty("creation_time") + private Long creationTime; + + /** Whether this private endpoint is deactivated. */ + @JsonProperty("deactivated") + private Boolean deactivated; + + /** Time in epoch milliseconds when this object was deactivated. */ + @JsonProperty("deactivated_at") + private Long deactivatedAt; + + /** + * Only used by private endpoints to customer-managed private endpoint services. + * + *

Domain names of target private link service. When updating this field, the full list of + * target domain_names must be specified. + */ + @JsonProperty("domain_names") + private Collection domainNames; + + /** + * Only used by private endpoints towards an AWS S3 service. + * + *

Update this field to activate/deactivate this private endpoint to allow egress access from + * serverless compute resources. + */ + @JsonProperty("enabled") + private Boolean enabled; + + /** The name of the Azure private endpoint resource. */ + @JsonProperty("endpoint_name") + private String endpointName; + + /** + * The full target AWS endpoint service name that connects to the destination resources of the + * private endpoint. + */ + @JsonProperty("endpoint_service") + private String endpointService; + + /** + * Not used by customer-managed private endpoint services. + * + *

The sub-resource type (group ID) of the target resource. Note that to connect to workspace + * root storage (root DBFS), you need two endpoints, one for blob and one for dfs. + */ + @JsonProperty("group_id") + private String groupId; + + /** + * The ID of a network connectivity configuration, which is the parent resource of this private + * endpoint rule object. + */ + @JsonProperty("network_connectivity_config_id") + private String networkConnectivityConfigId; + + /** The Azure resource ID of the target resource. */ + @JsonProperty("resource_id") + private String resourceId; + + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + @JsonProperty("resource_names") + private Collection resourceNames; + + /** The ID of a private endpoint rule. */ + @JsonProperty("rule_id") + private String ruleId; + + /** Time in epoch milliseconds when this object was updated. */ + @JsonProperty("updated_time") + private Long updatedTime; + + /** + * The AWS VPC endpoint ID. You can use this ID to identify the VPC endpoint created by + * Databricks. + */ + @JsonProperty("vpc_endpoint_id") + private String vpcEndpointId; + + public NccPrivateEndpointRule setAccountId(String accountId) { + this.accountId = accountId; + return this; + } + + public String getAccountId() { + return accountId; + } + + public NccPrivateEndpointRule setConnectionState( + NccPrivateEndpointRulePrivateLinkConnectionState connectionState) { + this.connectionState = connectionState; + return this; + } + + public NccPrivateEndpointRulePrivateLinkConnectionState getConnectionState() { + return connectionState; + } + + public NccPrivateEndpointRule setCreationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + public Long getCreationTime() { + return creationTime; + } + + public NccPrivateEndpointRule setDeactivated(Boolean deactivated) { + this.deactivated = deactivated; + return this; + } + + public Boolean getDeactivated() { + return deactivated; + } + + public NccPrivateEndpointRule setDeactivatedAt(Long deactivatedAt) { + this.deactivatedAt = deactivatedAt; + return this; + } + + public Long getDeactivatedAt() { + return deactivatedAt; + } + + public NccPrivateEndpointRule setDomainNames(Collection domainNames) { + this.domainNames = domainNames; + return this; + } + + public Collection getDomainNames() { + 
return domainNames; + } + + public NccPrivateEndpointRule setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public NccPrivateEndpointRule setEndpointName(String endpointName) { + this.endpointName = endpointName; + return this; + } + + public String getEndpointName() { + return endpointName; + } + + public NccPrivateEndpointRule setEndpointService(String endpointService) { + this.endpointService = endpointService; + return this; + } + + public String getEndpointService() { + return endpointService; + } + + public NccPrivateEndpointRule setGroupId(String groupId) { + this.groupId = groupId; + return this; + } + + public String getGroupId() { + return groupId; + } + + public NccPrivateEndpointRule setNetworkConnectivityConfigId(String networkConnectivityConfigId) { + this.networkConnectivityConfigId = networkConnectivityConfigId; + return this; + } + + public String getNetworkConnectivityConfigId() { + return networkConnectivityConfigId; + } + + public NccPrivateEndpointRule setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + public NccPrivateEndpointRule setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + + public NccPrivateEndpointRule setRuleId(String ruleId) { + this.ruleId = ruleId; + return this; + } + + public String getRuleId() { + return ruleId; + } + + public NccPrivateEndpointRule setUpdatedTime(Long updatedTime) { + this.updatedTime = updatedTime; + return this; + } + + public Long getUpdatedTime() { + return updatedTime; + } + + public NccPrivateEndpointRule setVpcEndpointId(String vpcEndpointId) { + this.vpcEndpointId = vpcEndpointId; + return this; + } + + public String getVpcEndpointId() { + return vpcEndpointId; + } + + @Override + public boolean 
equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccPrivateEndpointRule that = (NccPrivateEndpointRule) o; + return Objects.equals(accountId, that.accountId) + && Objects.equals(connectionState, that.connectionState) + && Objects.equals(creationTime, that.creationTime) + && Objects.equals(deactivated, that.deactivated) + && Objects.equals(deactivatedAt, that.deactivatedAt) + && Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(endpointName, that.endpointName) + && Objects.equals(endpointService, that.endpointService) + && Objects.equals(groupId, that.groupId) + && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) + && Objects.equals(resourceId, that.resourceId) + && Objects.equals(resourceNames, that.resourceNames) + && Objects.equals(ruleId, that.ruleId) + && Objects.equals(updatedTime, that.updatedTime) + && Objects.equals(vpcEndpointId, that.vpcEndpointId); + } + + @Override + public int hashCode() { + return Objects.hash( + accountId, + connectionState, + creationTime, + deactivated, + deactivatedAt, + domainNames, + enabled, + endpointName, + endpointService, + groupId, + networkConnectivityConfigId, + resourceId, + resourceNames, + ruleId, + updatedTime, + vpcEndpointId); + } + + @Override + public String toString() { + return new ToStringer(NccPrivateEndpointRule.class) + .add("accountId", accountId) + .add("connectionState", connectionState) + .add("creationTime", creationTime) + .add("deactivated", deactivated) + .add("deactivatedAt", deactivatedAt) + .add("domainNames", domainNames) + .add("enabled", enabled) + .add("endpointName", endpointName) + .add("endpointService", endpointService) + .add("groupId", groupId) + .add("networkConnectivityConfigId", networkConnectivityConfigId) + .add("resourceId", resourceId) + .add("resourceNames", resourceNames) + .add("ruleId", ruleId) + .add("updatedTime", 
updatedTime) + .add("vpcEndpointId", vpcEndpointId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java new file mode 100755 index 000000000..0b0bcdebd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum NccPrivateEndpointRulePrivateLinkConnectionState { + DISCONNECTED, + ESTABLISHED, + EXPIRED, + PENDING, + REJECTED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java index 72ae3444f..275519332 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java @@ -62,7 +62,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( return impl.createNetworkConnectivityConfiguration(request); } - public NccAzurePrivateEndpointRule createPrivateEndpointRule( + public NccPrivateEndpointRule createPrivateEndpointRule( String networkConnectivityConfigId, CreatePrivateEndpointRule privateEndpointRule) { return createPrivateEndpointRule( new CreatePrivateEndpointRuleRequest() @@ -84,7 +84,7 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule( *

[serverless private link]: * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link */ - public NccAzurePrivateEndpointRule createPrivateEndpointRule( + public NccPrivateEndpointRule createPrivateEndpointRule( CreatePrivateEndpointRuleRequest request) { return impl.createPrivateEndpointRule(request); } @@ -105,7 +105,7 @@ public void deleteNetworkConnectivityConfiguration( impl.deleteNetworkConnectivityConfiguration(request); } - public NccAzurePrivateEndpointRule deletePrivateEndpointRule( + public NccPrivateEndpointRule deletePrivateEndpointRule( String networkConnectivityConfigId, String privateEndpointRuleId) { return deletePrivateEndpointRule( new DeletePrivateEndpointRuleRequest() @@ -122,7 +122,7 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( * `deactivated` field is set to `true` and the private endpoint is not available to your * serverless compute resources. */ - public NccAzurePrivateEndpointRule deletePrivateEndpointRule( + public NccPrivateEndpointRule deletePrivateEndpointRule( DeletePrivateEndpointRuleRequest request) { return impl.deletePrivateEndpointRule(request); } @@ -144,7 +144,7 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( return impl.getNetworkConnectivityConfiguration(request); } - public NccAzurePrivateEndpointRule getPrivateEndpointRule( + public NccPrivateEndpointRule getPrivateEndpointRule( String networkConnectivityConfigId, String privateEndpointRuleId) { return getPrivateEndpointRule( new GetPrivateEndpointRuleRequest() @@ -157,7 +157,7 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule( * *

Gets the private endpoint rule. */ - public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { + public NccPrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { return impl.getPrivateEndpointRule(request); } @@ -181,7 +181,7 @@ public Iterable listNetworkConnectivityConfigu }); } - public Iterable listPrivateEndpointRules( + public Iterable listPrivateEndpointRules( String networkConnectivityConfigId) { return listPrivateEndpointRules( new ListPrivateEndpointRulesRequest() @@ -193,12 +193,12 @@ public Iterable listPrivateEndpointRules( * *

Gets an array of private endpoint rules. */ - public Iterable listPrivateEndpointRules( + public Iterable listPrivateEndpointRules( ListPrivateEndpointRulesRequest request) { return new Paginator<>( request, impl::listPrivateEndpointRules, - ListNccAzurePrivateEndpointRulesResponse::getItems, + ListPrivateEndpointRulesResponse::getItems, response -> { String token = response.getNextPageToken(); if (token == null || token.isEmpty()) { @@ -208,13 +208,13 @@ public Iterable listPrivateEndpointRules( }); } - public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( + public NccPrivateEndpointRule updatePrivateEndpointRule( String networkConnectivityConfigId, String privateEndpointRuleId, UpdatePrivateEndpointRule privateEndpointRule, String updateMask) { - return updateNccAzurePrivateEndpointRulePublic( - new UpdateNccAzurePrivateEndpointRulePublicRequest() + return updatePrivateEndpointRule( + new UpdateNccPrivateEndpointRuleRequest() .setNetworkConnectivityConfigId(networkConnectivityConfigId) .setPrivateEndpointRuleId(privateEndpointRuleId) .setPrivateEndpointRule(privateEndpointRule) @@ -227,9 +227,9 @@ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( *

Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed * resources is allowed to be updated. */ - public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( - UpdateNccAzurePrivateEndpointRulePublicRequest request) { - return impl.updateNccAzurePrivateEndpointRulePublic(request); + public NccPrivateEndpointRule updatePrivateEndpointRule( + UpdateNccPrivateEndpointRuleRequest request) { + return impl.updatePrivateEndpointRule(request); } public NetworkConnectivityService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java index 6c03595d4..316184e01 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java @@ -10,7 +10,9 @@ /** Properties of the new network connectivity configuration. */ @Generated public class NetworkConnectivityConfiguration { - /** The Databricks account ID that hosts the credential. */ + /** + * Your Databricks account ID. You can find your account ID in your Databricks accounts console. 
+ */ @JsonProperty("account_id") private String accountId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java index 16b4dd419..4bd996e8a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java @@ -35,7 +35,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( } @Override - public NccAzurePrivateEndpointRule createPrivateEndpointRule( + public NccPrivateEndpointRule createPrivateEndpointRule( CreatePrivateEndpointRuleRequest request) { String path = String.format( @@ -47,7 +47,7 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule( ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -71,7 +71,7 @@ public void deleteNetworkConnectivityConfiguration( } @Override - public NccAzurePrivateEndpointRule deletePrivateEndpointRule( + public NccPrivateEndpointRule deletePrivateEndpointRule( DeletePrivateEndpointRuleRequest request) { String path = String.format( @@ -83,7 +83,7 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( Request req = new Request("DELETE", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -107,7 +107,7 @@ public 
NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( } @Override - public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { + public NccPrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) { String path = String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s", @@ -118,7 +118,7 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRule Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } @@ -141,7 +141,7 @@ public ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConf } @Override - public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( + public ListPrivateEndpointRulesResponse listPrivateEndpointRules( ListPrivateEndpointRulesRequest request) { String path = String.format( @@ -151,15 +151,15 @@ public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( Request req = new Request("GET", path); ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); - return apiClient.execute(req, ListNccAzurePrivateEndpointRulesResponse.class); + return apiClient.execute(req, ListPrivateEndpointRulesResponse.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } @Override - public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( - UpdateNccAzurePrivateEndpointRulePublicRequest request) { + public NccPrivateEndpointRule updatePrivateEndpointRule( + UpdateNccPrivateEndpointRuleRequest request) { String path = String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s", @@ 
-172,7 +172,7 @@ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( ApiClient.setQuery(req, request); req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); - return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + return apiClient.execute(req, NccPrivateEndpointRule.class); } catch (IOException e) { throw new DatabricksException("IO error: " + e.getMessage(), e); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java index 55abae74d..eeaa80e88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java @@ -53,7 +53,7 @@ NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( *

[serverless private link]: * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link */ - NccAzurePrivateEndpointRule createPrivateEndpointRule( + NccPrivateEndpointRule createPrivateEndpointRule( CreatePrivateEndpointRuleRequest createPrivateEndpointRuleRequest); /** @@ -73,7 +73,7 @@ void deleteNetworkConnectivityConfiguration( * `deactivated` field is set to `true` and the private endpoint is not available to your * serverless compute resources. */ - NccAzurePrivateEndpointRule deletePrivateEndpointRule( + NccPrivateEndpointRule deletePrivateEndpointRule( DeletePrivateEndpointRuleRequest deletePrivateEndpointRuleRequest); /** @@ -89,7 +89,7 @@ NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( * *

Gets the private endpoint rule. */ - NccAzurePrivateEndpointRule getPrivateEndpointRule( + NccPrivateEndpointRule getPrivateEndpointRule( GetPrivateEndpointRuleRequest getPrivateEndpointRuleRequest); /** @@ -105,7 +105,7 @@ ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurati * *

Gets an array of private endpoint rules. */ - ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( + ListPrivateEndpointRulesResponse listPrivateEndpointRules( ListPrivateEndpointRulesRequest listPrivateEndpointRulesRequest); /** @@ -114,7 +114,6 @@ ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules( *

Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed * resources is allowed to be updated. */ - NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic( - UpdateNccAzurePrivateEndpointRulePublicRequest - updateNccAzurePrivateEndpointRulePublicRequest); + NccPrivateEndpointRule updatePrivateEndpointRule( + UpdateNccPrivateEndpointRuleRequest updateNccPrivateEndpointRuleRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java similarity index 82% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java index 666de476e..7d38074a6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java @@ -11,8 +11,11 @@ /** Update a private endpoint rule */ @Generated -public class UpdateNccAzurePrivateEndpointRulePublicRequest { - /** Your Network Connectivity Configuration ID. */ +public class UpdateNccPrivateEndpointRuleRequest { + /** + * The ID of a network connectivity configuration, which is the parent resource of this private + * endpoint rule object. 
+ */ @JsonIgnore private String networkConnectivityConfigId; /** @@ -36,7 +39,7 @@ public class UpdateNccAzurePrivateEndpointRulePublicRequest { @QueryParam("update_mask") private String updateMask; - public UpdateNccAzurePrivateEndpointRulePublicRequest setNetworkConnectivityConfigId( + public UpdateNccPrivateEndpointRuleRequest setNetworkConnectivityConfigId( String networkConnectivityConfigId) { this.networkConnectivityConfigId = networkConnectivityConfigId; return this; @@ -46,7 +49,7 @@ public String getNetworkConnectivityConfigId() { return networkConnectivityConfigId; } - public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRule( + public UpdateNccPrivateEndpointRuleRequest setPrivateEndpointRule( UpdatePrivateEndpointRule privateEndpointRule) { this.privateEndpointRule = privateEndpointRule; return this; @@ -56,7 +59,7 @@ public UpdatePrivateEndpointRule getPrivateEndpointRule() { return privateEndpointRule; } - public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRuleId( + public UpdateNccPrivateEndpointRuleRequest setPrivateEndpointRuleId( String privateEndpointRuleId) { this.privateEndpointRuleId = privateEndpointRuleId; return this; @@ -66,7 +69,7 @@ public String getPrivateEndpointRuleId() { return privateEndpointRuleId; } - public UpdateNccAzurePrivateEndpointRulePublicRequest setUpdateMask(String updateMask) { + public UpdateNccPrivateEndpointRuleRequest setUpdateMask(String updateMask) { this.updateMask = updateMask; return this; } @@ -79,8 +82,7 @@ public String getUpdateMask() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - UpdateNccAzurePrivateEndpointRulePublicRequest that = - (UpdateNccAzurePrivateEndpointRulePublicRequest) o; + UpdateNccPrivateEndpointRuleRequest that = (UpdateNccPrivateEndpointRuleRequest) o; return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && 
Objects.equals(privateEndpointRule, that.privateEndpointRule) && Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId) @@ -95,7 +97,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(UpdateNccAzurePrivateEndpointRulePublicRequest.class) + return new ToStringer(UpdateNccPrivateEndpointRuleRequest.class) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("privateEndpointRule", privateEndpointRule) .add("privateEndpointRuleId", privateEndpointRuleId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java index f7df95078..94975cd2e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java @@ -15,7 +15,7 @@ @Generated public class UpdatePrivateEndpointRule { /** - * Only used by private endpoints to customer-managed resources. + * Only used by private endpoints to customer-managed private endpoint services. * *

Domain names of target private link service. When updating this field, the full list of * target domain_names must be specified. @@ -23,6 +23,26 @@ public class UpdatePrivateEndpointRule { @JsonProperty("domain_names") private Collection domainNames; + /** + * Only used by private endpoints towards an AWS S3 service. + * + *

Update this field to activate/deactivate this private endpoint to allow egress access from + * serverless compute resources. + */ + @JsonProperty("enabled") + private Boolean enabled; + + /** + * Only used by private endpoints towards AWS S3 service. + * + *

The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket + * names must be in the same region as the NCC/endpoint service. When updating this field, we + * perform full update on this field. Please ensure a full list of desired resource_names is + * provided. + */ + @JsonProperty("resource_names") + private Collection resourceNames; + public UpdatePrivateEndpointRule setDomainNames(Collection domainNames) { this.domainNames = domainNames; return this; @@ -32,23 +52,45 @@ public Collection getDomainNames() { return domainNames; } + public UpdatePrivateEndpointRule setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + public UpdatePrivateEndpointRule setResourceNames(Collection resourceNames) { + this.resourceNames = resourceNames; + return this; + } + + public Collection getResourceNames() { + return resourceNames; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpdatePrivateEndpointRule that = (UpdatePrivateEndpointRule) o; - return Objects.equals(domainNames, that.domainNames); + return Objects.equals(domainNames, that.domainNames) + && Objects.equals(enabled, that.enabled) + && Objects.equals(resourceNames, that.resourceNames); } @Override public int hashCode() { - return Objects.hash(domainNames); + return Objects.hash(domainNames, enabled, resourceNames); } @Override public String toString() { return new ToStringer(UpdatePrivateEndpointRule.class) .add("domainNames", domainNames) + .add("enabled", enabled) + .add("resourceNames", resourceNames) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java index 26288362d..0f87dddb2 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateSharePermissions.java @@ -11,7 +11,7 @@ @Generated public class UpdateSharePermissions { - /** Array of permission changes. */ + /** Array of permissions change objects. */ @JsonProperty("changes") private Collection changes; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java index 208b22c9f..d49c21f88 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.java @@ -99,6 +99,13 @@ public class QueryMetrics { @JsonProperty("spill_to_disk_bytes") private Long spillToDiskBytes; + /** + * sum of task times completed in a range of wall clock time, approximated to a configurable + * number of points aggregated over all stages and jobs in the query (based on task_total_time_ms) + */ + @JsonProperty("task_time_over_time_range") + private TaskTimeOverRange taskTimeOverTimeRange; + /** Sum of execution time for all of the query’s tasks, in milliseconds. 
*/ @JsonProperty("task_total_time_ms") private Long taskTotalTimeMs; @@ -282,6 +289,15 @@ public Long getSpillToDiskBytes() { return spillToDiskBytes; } + public QueryMetrics setTaskTimeOverTimeRange(TaskTimeOverRange taskTimeOverTimeRange) { + this.taskTimeOverTimeRange = taskTimeOverTimeRange; + return this; + } + + public TaskTimeOverRange getTaskTimeOverTimeRange() { + return taskTimeOverTimeRange; + } + public QueryMetrics setTaskTotalTimeMs(Long taskTotalTimeMs) { this.taskTotalTimeMs = taskTotalTimeMs; return this; @@ -333,6 +349,7 @@ public boolean equals(Object o) { && Objects.equals(rowsProducedCount, that.rowsProducedCount) && Objects.equals(rowsReadCount, that.rowsReadCount) && Objects.equals(spillToDiskBytes, that.spillToDiskBytes) + && Objects.equals(taskTimeOverTimeRange, that.taskTimeOverTimeRange) && Objects.equals(taskTotalTimeMs, that.taskTotalTimeMs) && Objects.equals(totalTimeMs, that.totalTimeMs) && Objects.equals(writeRemoteBytes, that.writeRemoteBytes); @@ -360,6 +377,7 @@ public int hashCode() { rowsProducedCount, rowsReadCount, spillToDiskBytes, + taskTimeOverTimeRange, taskTotalTimeMs, totalTimeMs, writeRemoteBytes); @@ -387,6 +405,7 @@ public String toString() { .add("rowsProducedCount", rowsProducedCount) .add("rowsReadCount", rowsReadCount) .add("spillToDiskBytes", spillToDiskBytes) + .add("taskTimeOverTimeRange", taskTimeOverTimeRange) .add("taskTotalTimeMs", taskTotalTimeMs) .add("totalTimeMs", totalTimeMs) .add("writeRemoteBytes", writeRemoteBytes) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java new file mode 100755 index 000000000..c8b21f225 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class TaskTimeOverRange { + /** */ + @JsonProperty("entries") + private Collection entries; + + /** + * interval length for all entries (difference in start time and end time of an entry range) the + * same for all entries start time of first interval is query_start_time_ms + */ + @JsonProperty("interval") + private Long interval; + + public TaskTimeOverRange setEntries(Collection entries) { + this.entries = entries; + return this; + } + + public Collection getEntries() { + return entries; + } + + public TaskTimeOverRange setInterval(Long interval) { + this.interval = interval; + return this; + } + + public Long getInterval() { + return interval; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskTimeOverRange that = (TaskTimeOverRange) o; + return Objects.equals(entries, that.entries) && Objects.equals(interval, that.interval); + } + + @Override + public int hashCode() { + return Objects.hash(entries, interval); + } + + @Override + public String toString() { + return new ToStringer(TaskTimeOverRange.class) + .add("entries", entries) + .add("interval", interval) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java new file mode 100755 index 000000000..3488e1b2c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.sql; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TaskTimeOverRangeEntry { + /** + * total task completion time in this time range, aggregated over all stages and jobs in the query + */ + @JsonProperty("task_completed_time_ms") + private Long taskCompletedTimeMs; + + public TaskTimeOverRangeEntry setTaskCompletedTimeMs(Long taskCompletedTimeMs) { + this.taskCompletedTimeMs = taskCompletedTimeMs; + return this; + } + + public Long getTaskCompletedTimeMs() { + return taskCompletedTimeMs; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TaskTimeOverRangeEntry that = (TaskTimeOverRangeEntry) o; + return Objects.equals(taskCompletedTimeMs, that.taskCompletedTimeMs); + } + + @Override + public int hashCode() { + return Objects.hash(taskCompletedTimeMs); + } + + @Override + public String toString() { + return new ToStringer(TaskTimeOverRangeEntry.class) + .add("taskCompletedTimeMs", taskCompletedTimeMs) + .toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/EncryptionKeysIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/EncryptionKeysIT.java index 8b7324c8e..f36b13a8c 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/EncryptionKeysIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/EncryptionKeysIT.java @@ -1,11 +1,7 @@ package com.databricks.sdk.integration; -import com.databricks.sdk.AccountClient; -import com.databricks.sdk.integration.framework.CollectionUtils; import com.databricks.sdk.integration.framework.EnvContext; import com.databricks.sdk.integration.framework.EnvTest; -import com.databricks.sdk.service.provisioning.CustomerManagedKey; -import 
org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import org.junit.jupiter.api.extension.ExtendWith; @@ -13,12 +9,15 @@ @DisabledIfEnvironmentVariable(named = "ARM_CLIENT_ID", matches = ".*") @ExtendWith(EnvTest.class) public class EncryptionKeysIT { - @Test - void lists(AccountClient a) { - Iterable list = a.encryptionKeys().list(); + // TODO: Enable this test when the test account is updated to support this. + // Either by upgrading the test account tier to Enterprise or by adding this + // feature to the test account. + // @Test + // void lists(AccountClient a) { + // Iterable list = a.encryptionKeys().list(); - java.util.List all = CollectionUtils.asList(list); + // java.util.List all = CollectionUtils.asList(list); - CollectionUtils.assertUnique(all); - } + // CollectionUtils.assertUnique(all); + // } }