diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a74101922..ac1c24d10 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -b142b72bea6f30d8efb36dfa8c58e0d63ae5329b \ No newline at end of file +a8f547d3728fba835fbdda301e846829c5cbbef5 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 706329a62..44b3782e1 100755 --- a/.gitattributes +++ b/.gitattributes @@ -23,6 +23,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Tempora databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlmsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppAccessControlResponse.java linguist-generated=true @@ -114,6 +126,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBillingU databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetBudgetPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryConfigurationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/GetLogDeliveryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LimitConfig.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ListBudgetConfigurationsRequest.java linguist-generated=true @@ -202,8 +215,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousU databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionParameterStyle.java linguist-generated=true @@ -219,7 +230,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateVolumeRequestContent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialInfo.java linguist-generated=true @@ -230,12 +240,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Credentials databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DataSourceFormat.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccount.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountRequest.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabricksGcpServiceAccountResponse.java linguist-generated=true @@ -248,10 +252,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCredentialResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true @@ -262,12 +262,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegis databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteStorageCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableConstraintRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteVolumeRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntimePropertiesKvPairs.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true @@ -288,7 +287,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLoc databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FailedStatus.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FileEventQueue.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ForeignKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java linguist-generated=true @@ -320,17 +318,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseCatalogRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiveRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetOnlineTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetQuotaResponse.java linguist-generated=true @@ -338,7 +334,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetStorageCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSyncedDatabaseTableRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetWorkspaceBindingsResponse.java linguist-generated=true @@ -356,12 +351,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnect databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCredentialsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListDatabaseInstancesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsResponse.java linguist-generated=true @@ -385,7 +379,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumes databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MatchType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfoDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresService.java linguist-generated=true @@ -412,7 +405,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NewPipelineSpec.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java linguist-generated=true @@ -423,7 +415,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java linguist-generated=true @@ -461,9 +452,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCred databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedDatabaseTable.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSchedulingPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SyncedTableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemaInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java linguist-generated=true @@ -495,15 +483,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCredentialRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateExternalLocation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true @@ -824,9 +811,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadTyp databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java linguist-generated=true @@ -837,9 +821,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSc databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true @@ -857,6 +838,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGet 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java linguist-generated=true @@ -865,8 +848,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpa databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true @@ -889,16 +870,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageE databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true @@ -906,7 +879,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrib databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true @@ -914,6 +886,47 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Unpublis databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateScheduleRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableContinuousUpdateStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableFailedStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTablePipelineProgress.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableProvisioningStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSchedulingPolicy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true @@ -1106,6 +1119,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardPageSnapshot.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DashboardTaskOutput.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudJobRunStep.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudRunStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTask.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtCloudTaskOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteJob.java linguist-generated=true @@ -1118,6 +1135,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/EnforcePolicyC databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java linguist-generated=true @@ -1255,6 +1273,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDet databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationTypeType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerStateProto.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateJob.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/UpdateResponse.java 
linguist-generated=true @@ -1435,9 +1454,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfo.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialInfoHttpHeader.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ArtifactCredentialType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateComment.java linguist-generated=true @@ -1508,10 +1524,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExper databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetByNameRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentByNameResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetExperimentPermissionLevelsResponse.java linguist-generated=true @@ -1545,8 +1557,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListExperimentsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListRegistryWebhooks.java 
linguist-generated=true @@ -1882,6 +1892,19 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Worksp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitor.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2API.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java linguist-generated=true @@ -2050,6 +2073,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablem databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptions.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DashboardEmailSubscriptionsImpl.java linguist-generated=true @@ -2195,7 +2220,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkPoliciesRequest.java linguist-generated=true @@ -2204,6 +2228,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotifi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNotificationDestinationsResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublicTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true @@ -2228,6 +2253,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureSe databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true @@ -2304,7 +2331,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAc 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredAccountRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredEnforceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateLlmProxyPartnerPoweredWorkspaceRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNetworkPolicyRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true @@ -2634,6 +2661,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopRequest.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StopWarehouseResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Success.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SuccessMessage.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRange.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TaskTimeOverRangeEntry.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReason.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonType.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index ac1055a9c..72638ff3f 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,67 @@ ### Internal Changes ### API Changes +* Added `com.databricks.sdk.service.aibuilder`, `com.databricks.sdk.service.database` and `com.databricks.sdk.service.qualitymonitorv2` packages. +* Added `workspaceClient.customLlms()` service. +* Added `workspaceClient.database()` service. +* Added `workspaceClient.qualityMonitorV2()` service. +* Added `updatePrivateEndpointRule()` method for `accountClient.networkConnectivity()` service. +* Added `listSpaces()` method for `workspaceClient.genie()` service. +* Added `pageToken` field for `com.databricks.sdk.service.billing.ListLogDeliveryRequest`. +* Added `nextPageToken` field for `com.databricks.sdk.service.billing.WrappedLogDeliveryConfigurations`. +* Added `nextPageToken` field for `com.databricks.sdk.service.catalog.EffectivePermissionsList`. +* Added `maxResults` and `pageToken` fields for `com.databricks.sdk.service.catalog.GetEffectiveRequest`. +* Added `maxResults` and `pageToken` fields for `com.databricks.sdk.service.catalog.GetGrantRequest`. +* Added `nextPageToken` field for `com.databricks.sdk.service.catalog.ListMetastoresResponse`. +* Added `cleanRoomName` field for `com.databricks.sdk.service.cleanrooms.CleanRoomAsset`. 
+* [Breaking] Added `name` field for `com.databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`. +* [Breaking] Added `name` field for `com.databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`. +* Added `triggerState` field for `com.databricks.sdk.service.jobs.BaseJob`. +* Added `triggerState` field for `com.databricks.sdk.service.jobs.Job`. +* Added `dbtCloudOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. +* Added `dbtCloudTask` field for `com.databricks.sdk.service.jobs.RunTask`. +* Added `dbtCloudTask` field for `com.databricks.sdk.service.jobs.SubmitTask`. +* Added `dbtCloudTask` field for `com.databricks.sdk.service.jobs.Task`. +* Added `endpointService` and `resourceNames` fields for `com.databricks.sdk.service.settings.CreatePrivateEndpointRule`. +* Added `awsPrivateEndpointRules` field for `com.databricks.sdk.service.settings.NccEgressTargetRules`. +* Added `taskTimeOverTimeRange` field for `com.databricks.sdk.service.sql.QueryMetrics`. +* Added `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum`. +* Added `CLUSTER_MIGRATED` enum value for `com.databricks.sdk.service.compute.EventType`. +* Added `DRIVER_UNHEALTHY` enum value for `com.databricks.sdk.service.compute.TerminationReasonCode`. +* [Breaking] Changed `create()` method for `accountClient.logDelivery()` service with new required argument order. +* [Breaking] Changed `get()` method for `accountClient.logDelivery()` service to return `com.databricks.sdk.service.billing.GetLogDeliveryConfigurationResponse` class. +* [Breaking] Changed `createPrivateEndpointRule()`, `deletePrivateEndpointRule()` and `getPrivateEndpointRule()` methods for `accountClient.networkConnectivity()` service to return `com.databricks.sdk.service.settings.NccPrivateEndpointRule` class. +* [Breaking] Changed `listPrivateEndpointRules()` method for `accountClient.networkConnectivity()` service to return `com.databricks.sdk.service.settings.ListPrivateEndpointRulesResponse` class. +* [Breaking] Changed `delete()` and `get()` methods for `workspaceClient.cleanRoomAssets()` service with new required argument order. +* [Breaking] Changed `delete()` and `get()` methods for `workspaceClient.cleanRoomAssets()` service. The method path has changed. +* [Breaking] Changed `get()` method for `workspaceClient.grants()` service to return `com.databricks.sdk.service.catalog.GetPermissionsResponse` class. +* [Breaking] Changed `update()` method for `workspaceClient.grants()` service to return `com.databricks.sdk.service.catalog.UpdatePermissionsResponse` class. +* [Breaking] Changed `list()` method for `workspaceClient.metastores()` service to require request of `com.databricks.sdk.service.catalog.ListMetastoresRequest` class. +* Changed `accountId`, `credentialsId`, `logType`, `outputFormat` and `storageConfigurationId` fields for `com.databricks.sdk.service.billing.LogDeliveryConfiguration` to be required. +* Changed `message` and `status` fields for `com.databricks.sdk.service.billing.LogDeliveryStatus` to be required. +* [Breaking] Changed `logDeliveryConfiguration` field for `com.databricks.sdk.service.billing.WrappedCreateLogDeliveryConfiguration` to be required. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.GetEffectiveRequest` to type `String` class. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.GetGrantRequest` to type `String` class.
+* [Breaking] Changed `deltaSharingScope` field for `com.databricks.sdk.service.catalog.GetMetastoreSummaryResponse` to type `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum` class. +* [Breaking] Changed `deltaSharingScope` field for `com.databricks.sdk.service.catalog.MetastoreInfo` to type `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum` class. +* [Breaking] Changed `deltaSharingScope` field for `com.databricks.sdk.service.catalog.UpdateMetastore` to type `com.databricks.sdk.service.catalog.DeltaSharingScopeEnum` class. +* [Breaking] Changed `securableType` field for `com.databricks.sdk.service.catalog.UpdatePermissions` to type `String` class. +* Changed `resourceId` field for `com.databricks.sdk.service.settings.CreatePrivateEndpointRule` to no longer be required. +* [Breaking] Changed pagination for `accountClient.networkConnectivity().listPrivateEndpointRules()` method. +* [Breaking] Removed `workspaceClient.databaseInstances()` service. +* [Breaking] Removed `workspaceClient.queryExecution()` service. +* [Breaking] Removed `updateNccAzurePrivateEndpointRulePublic()` method for `accountClient.networkConnectivity()` service. +* [Breaking] Removed `getCredentialsForTraceDataDownload()`, `getCredentialsForTraceDataUpload()` and `listLoggedModelArtifacts()` methods for `workspaceClient.experiments()` service. +* [Breaking] Removed `getPublishedDashboardEmbedded()` method for `workspaceClient.lakeviewEmbedded()` service. +* [Breaking] Removed `assetFullName` field for `com.databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`. +* [Breaking] Removed `assetFullName` field for `com.databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.ClusterAttributes`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.ClusterDetails`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.ClusterSpec`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.CreateCluster`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.EditCluster`. +* [Breaking] Removed `remoteShuffleDiskIops`, `remoteShuffleDiskThroughput` and `totalInitialRemoteShuffleDiskSize` fields for `com.databricks.sdk.service.compute.UpdateClusterResource`. +* [Breaking] Removed `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.GetMetastoreSummaryResponseDeltaSharingScope`. +* [Breaking] Removed `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.MetastoreInfoDeltaSharingScope`. +* [Breaking] Removed `CATALOG`, `CLEAN_ROOM`, `CONNECTION`, `CREDENTIAL`, `EXTERNAL_LOCATION`, `EXTERNAL_METADATA`, `FUNCTION`, `METASTORE`, `PIPELINE`, `PROVIDER`, `RECIPIENT`, `SCHEMA`, `SHARE`, `STAGING_TABLE`, `STORAGE_CREDENTIAL`, `TABLE`, `UNKNOWN_SECURABLE_TYPE` and `VOLUME` enum values for `com.databricks.sdk.service.catalog.SecurableType`. 
+* [Breaking] Removed `INTERNAL` and `INTERNAL_AND_EXTERNAL` enum values for `com.databricks.sdk.service.catalog.UpdateMetastoreDeltaSharingScope`.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index be96caf24..8e635b302 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -305,60 +305,8 @@ public AccountIpAccessListsAPI ipAccessLists() {
}

/**
- * These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
- *
- * Log delivery works with all account types. However, if your account is on the E2 version of
- * the platform or on a select custom plan that allows multiple workspaces per account, you can
- * optionally configure different storage destinations for each workspace. Log delivery status is
- * also provided to know the latest status of log delivery attempts. The high-level flow of
- * billable usage delivery:
- *
- * 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
- * Using Databricks APIs, call the Account API to create a [storage configuration
- * object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS,
- * create the appropriate AWS IAM role. For full details, including the required IAM role policies
- * and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the
- * Account API to create a [credential configuration object](:method:Credentials/Create) that uses
- * the IAM role"s ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the
- * Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the
- * credential and storage configuration objects from previous steps. You can specify if the logs
- * should include all events of that log type in your account (_Account level_ delivery) or only
- * events for a specific set of workspaces (_workspace level_ delivery). Account level log
- * delivery applies to all current and future workspaces plus account level logs, while workspace
- * level log delivery solely delivers logs related to the specified workspaces. You can create
- * multiple types of delivery configurations per account.
- *
- * For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * ` For audit log delivery: * For more information about about audit log delivery, see [Audit
- * log delivery], which includes information about the used JSON schema. * The delivery location
- * is
- * ` [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable
- * usage log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * These APIs manage Log delivery configurations for this account. Log delivery configs enable you
+ * to configure the delivery of the specified type of logs to your storage account.
*/
public LogDeliveryAPI logDelivery() {
return logDeliveryAPI;
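
As recorded in the changelog above, `accountClient.logDelivery().get()` now returns `GetLogDeliveryConfigurationResponse` instead of `WrappedLogDeliveryConfiguration`. A minimal sketch of the new call shape, assuming account credentials are resolved from the environment and using a placeholder configuration ID:

// Sketch only: "1234-5678" is a placeholder log delivery configuration ID.
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.billing.GetLogDeliveryConfigurationResponse;

public class GetLogDeliveryExample {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();
    // The get(String) convenience overload now returns the new response class.
    GetLogDeliveryConfigurationResponse config = account.logDelivery().get("1234-5678");
    System.out.println(config);
  }
}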
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index d4c066a69..bf50805dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -8,6 +8,8 @@
import com.databricks.sdk.mixin.ClustersExt;
import com.databricks.sdk.mixin.DbfsExt;
import com.databricks.sdk.mixin.SecretsExt;
+import com.databricks.sdk.service.aibuilder.CustomLlmsAPI;
+import com.databricks.sdk.service.aibuilder.CustomLlmsService;
import com.databricks.sdk.service.apps.AppsAPI;
import com.databricks.sdk.service.apps.AppsService;
import com.databricks.sdk.service.catalog.ArtifactAllowlistsAPI;
@@ -18,8 +20,6 @@
import com.databricks.sdk.service.catalog.ConnectionsService;
import com.databricks.sdk.service.catalog.CredentialsAPI;
import com.databricks.sdk.service.catalog.CredentialsService;
-import com.databricks.sdk.service.catalog.DatabaseInstancesAPI;
-import com.databricks.sdk.service.catalog.DatabaseInstancesService;
import com.databricks.sdk.service.catalog.ExternalLocationsAPI;
import com.databricks.sdk.service.catalog.ExternalLocationsService;
import com.databricks.sdk.service.catalog.FunctionsAPI;
@@ -83,8 +83,8 @@
import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI;
import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService;
import com.databricks.sdk.service.dashboards.LakeviewService;
-import com.databricks.sdk.service.dashboards.QueryExecutionAPI;
-import com.databricks.sdk.service.dashboards.QueryExecutionService;
+import com.databricks.sdk.service.database.DatabaseAPI;
+import com.databricks.sdk.service.database.DatabaseService;
import com.databricks.sdk.service.files.DbfsService;
import com.databricks.sdk.service.files.FilesAPI;
import com.databricks.sdk.service.files.FilesService;
@@ -140,6 +140,8 @@
import com.databricks.sdk.service.ml.ModelRegistryService;
import com.databricks.sdk.service.pipelines.PipelinesAPI;
import com.databricks.sdk.service.pipelines.PipelinesService;
+import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2API;
+import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2Service;
import com.databricks.sdk.service.serving.ServingEndpointsAPI;
import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneAPI;
import com.databricks.sdk.service.serving.ServingEndpointsDataPlaneService;
@@ -240,10 +242,11 @@ public class WorkspaceClient {
private CredentialsAPI credentialsAPI;
private CredentialsManagerAPI credentialsManagerAPI;
private CurrentUserAPI currentUserAPI;
+ private CustomLlmsAPI customLlmsAPI;
private DashboardWidgetsAPI dashboardWidgetsAPI;
private DashboardsAPI dashboardsAPI;
private DataSourcesAPI dataSourcesAPI;
- private DatabaseInstancesAPI databaseInstancesAPI;
+ private DatabaseAPI databaseAPI;
private DbfsExt dbfsAPI;
private DbsqlPermissionsAPI dbsqlPermissionsAPI;
private ExperimentsAPI experimentsAPI;
@@ -281,10 +284,10 @@ public class WorkspaceClient {
private ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboardsAPI;
private ProviderProvidersAPI providerProvidersAPI;
private ProvidersAPI providersAPI;
+ private QualityMonitorV2API qualityMonitorV2API;
private QualityMonitorsAPI qualityMonitorsAPI;
private QueriesAPI queriesAPI;
private QueriesLegacyAPI queriesLegacyAPI;
- private QueryExecutionAPI queryExecutionAPI;
private QueryHistoryAPI queryHistoryAPI;
private QueryVisualizationsAPI queryVisualizationsAPI;
private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI;
@@ -350,10 +353,11 @@ public WorkspaceClient(DatabricksConfig config) {
credentialsAPI = new CredentialsAPI(apiClient);
credentialsManagerAPI = new CredentialsManagerAPI(apiClient);
currentUserAPI = new CurrentUserAPI(apiClient);
+ customLlmsAPI = new CustomLlmsAPI(apiClient);
dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient);
dashboardsAPI = new DashboardsAPI(apiClient);
dataSourcesAPI = new DataSourcesAPI(apiClient);
- databaseInstancesAPI = new DatabaseInstancesAPI(apiClient);
+ databaseAPI = new DatabaseAPI(apiClient);
dbfsAPI = new DbfsExt(apiClient);
dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient);
experimentsAPI = new ExperimentsAPI(apiClient);
@@ -391,10 +395,10 @@ public WorkspaceClient(DatabricksConfig config) {
providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient);
providerProvidersAPI = new ProviderProvidersAPI(apiClient);
providersAPI = new ProvidersAPI(apiClient);
+ qualityMonitorV2API = new QualityMonitorV2API(apiClient);
qualityMonitorsAPI = new QualityMonitorsAPI(apiClient);
queriesAPI = new QueriesAPI(apiClient);
queriesLegacyAPI = new QueriesLegacyAPI(apiClient);
- queryExecutionAPI = new QueryExecutionAPI(apiClient);
queryHistoryAPI = new QueryHistoryAPI(apiClient);
queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient);
queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient);
@@ -676,6 +680,11 @@ public CurrentUserAPI currentUser() {
return currentUserAPI;
}
+ /** The Custom LLMs service manages state and powers the UI for the Custom LLM product. */
+ public CustomLlmsAPI customLlms() {
+ return customLlmsAPI;
+ }
+
/**
* This is an evolving API that facilitates the addition and removal of widgets from existing
* dashboards within the Databricks Workspace. Data structures may change over time.
@@ -714,8 +723,8 @@ public DataSourcesAPI dataSources() {
}
/** Database Instances provide access to a database via REST API or direct SQL. */
- public DatabaseInstancesAPI databaseInstances() {
- return databaseInstancesAPI;
+ public DatabaseAPI database() {
+ return databaseAPI;
}
/**
@@ -796,6 +805,8 @@ public ExternalLocationsAPI externalLocations() {
* `enable_experimental_files_api_client = True` in your configuration profile or use the
* environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
*
+ * Use of Files API may incur Databricks data transfer charges.
+ *
* [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
*/
public FilesAPI files() {
@@ -1211,6 +1222,11 @@ public ProvidersAPI providers() {
return providersAPI;
}
+ /** Manage data quality of UC objects (currently support `schema`) */
+ public QualityMonitorV2API qualityMonitorV2() {
+ return qualityMonitorV2API;
+ }
+
/**
* A monitor computes and monitors data or model quality metrics for a table over time. It
* generates metrics tables and a dashboard that you can use to monitor table health and set
@@ -1249,11 +1265,6 @@ public QueriesLegacyAPI queriesLegacy() {
return queriesLegacyAPI;
}
- /** Query execution APIs for AI / BI Dashboards */
- public QueryExecutionAPI queryExecution() {
- return queryExecutionAPI;
- }
-
/**
* A service responsible for storing and retrieving the list of queries run against SQL endpoints
* and serverless compute.
@@ -2035,6 +2046,17 @@ public WorkspaceClient withCurrentUserAPI(CurrentUserAPI currentUser) {
return this;
}
+ /** Replace the default CustomLlmsService with a custom implementation. */
+ public WorkspaceClient withCustomLlmsImpl(CustomLlmsService customLlms) {
+ return this.withCustomLlmsAPI(new CustomLlmsAPI(customLlms));
+ }
+
+ /** Replace the default CustomLlmsAPI with a custom implementation. */
+ public WorkspaceClient withCustomLlmsAPI(CustomLlmsAPI customLlms) {
+ this.customLlmsAPI = customLlms;
+ return this;
+ }
+
/** Replace the default DashboardWidgetsService with a custom implementation. */
public WorkspaceClient withDashboardWidgetsImpl(DashboardWidgetsService dashboardWidgets) {
return this.withDashboardWidgetsAPI(new DashboardWidgetsAPI(dashboardWidgets));
@@ -2068,14 +2090,14 @@ public WorkspaceClient withDataSourcesAPI(DataSourcesAPI dataSources) {
return this;
}
- /** Replace the default DatabaseInstancesService with a custom implementation. */
- public WorkspaceClient withDatabaseInstancesImpl(DatabaseInstancesService databaseInstances) {
- return this.withDatabaseInstancesAPI(new DatabaseInstancesAPI(databaseInstances));
+ /** Replace the default DatabaseService with a custom implementation. */
+ public WorkspaceClient withDatabaseImpl(DatabaseService database) {
+ return this.withDatabaseAPI(new DatabaseAPI(database));
}
- /** Replace the default DatabaseInstancesAPI with a custom implementation. */
- public WorkspaceClient withDatabaseInstancesAPI(DatabaseInstancesAPI databaseInstances) {
- this.databaseInstancesAPI = databaseInstances;
+ /** Replace the default DatabaseAPI with a custom implementation. */
+ public WorkspaceClient withDatabaseAPI(DatabaseAPI database) {
+ this.databaseAPI = database;
return this;
}
@@ -2507,6 +2529,17 @@ public WorkspaceClient withProvidersAPI(ProvidersAPI providers) {
return this;
}
+ /** Replace the default QualityMonitorV2Service with a custom implementation. */
+ public WorkspaceClient withQualityMonitorV2Impl(QualityMonitorV2Service qualityMonitorV2) {
+ return this.withQualityMonitorV2API(new QualityMonitorV2API(qualityMonitorV2));
+ }
+
+ /** Replace the default QualityMonitorV2API with a custom implementation. */
+ public WorkspaceClient withQualityMonitorV2API(QualityMonitorV2API qualityMonitorV2) {
+ this.qualityMonitorV2API = qualityMonitorV2;
+ return this;
+ }
+
/** Replace the default QualityMonitorsService with a custom implementation. */
public WorkspaceClient withQualityMonitorsImpl(QualityMonitorsService qualityMonitors) {
return this.withQualityMonitorsAPI(new QualityMonitorsAPI(qualityMonitors));
@@ -2540,17 +2573,6 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) {
return this;
}
- /** Replace the default QueryExecutionService with a custom implementation. */
- public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) {
- return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution));
- }
-
- /** Replace the default QueryExecutionAPI with a custom implementation. */
- public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) {
- this.queryExecutionAPI = queryExecution;
- return this;
- }
-
/** Replace the default QueryHistoryService with a custom implementation. */
public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) {
return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory));
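
The accessor changes above can be summarized in one sketch: `databaseInstances()` is replaced by `database()`, `queryExecution()` is removed, and `customLlms()` and `qualityMonitorV2()` are new. This assumes workspace credentials are resolved from the default configuration chain.

// Sketch only: shows the renamed and newly added WorkspaceClient accessors.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.aibuilder.CustomLlmsAPI;
import com.databricks.sdk.service.database.DatabaseAPI;
import com.databricks.sdk.service.qualitymonitorv2.QualityMonitorV2API;

public class WorkspaceAccessorsExample {
  public static void main(String[] args) {
    WorkspaceClient workspace = new WorkspaceClient();
    DatabaseAPI database = workspace.database();                          // replaces databaseInstances()
    CustomLlmsAPI customLlms = workspace.customLlms();                    // new in this release
    QualityMonitorV2API qualityMonitorV2 = workspace.qualityMonitorV2();  // new in this release
    System.out.printf("%s %s %s%n", database, customLlms, qualityMonitorV2);
  }
}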
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java
new file mode 100755
index 000000000..905d58253
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelCustomLlmOptimizationRunRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class CancelCustomLlmOptimizationRunRequest {
+ /** */
+ @JsonIgnore private String id;
+
+ public CancelCustomLlmOptimizationRunRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelCustomLlmOptimizationRunRequest that = (CancelCustomLlmOptimizationRunRequest) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelCustomLlmOptimizationRunRequest.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java
similarity index 76%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java
index 94f12df20..62f4aac5b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecuteQueryResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CancelResponse.java
@@ -1,13 +1,13 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.dashboards;
+package com.databricks.sdk.service.aibuilder;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
@Generated
-public class ExecuteQueryResponse {
+public class CancelResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(ExecuteQueryResponse.class).toString();
+ return new ToStringer(CancelResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java
new file mode 100755
index 000000000..06fb1ec76
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/CustomLlm.java
@@ -0,0 +1,190 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CustomLlm {
+ /** */
+ @JsonProperty("agent_artifact_path")
+ private String agentArtifactPath;
+
+ /** Creation timestamp of the custom LLM */
+ @JsonProperty("creation_time")
+ private String creationTime;
+
+ /** Creator of the custom LLM */
+ @JsonProperty("creator")
+ private String creator;
+
+ /** Datasets used for training and evaluating the model, not for inference */
+ @JsonProperty("datasets")
+ private Collection<Dataset> datasets;
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface CustomLlmsService {
+ /** Cancel a Custom LLM Optimization Run. */
+ void cancel(CancelCustomLlmOptimizationRunRequest cancelCustomLlmOptimizationRunRequest);
+
+ /** Start a Custom LLM Optimization Run. */
+ CustomLlm create(StartCustomLlmOptimizationRunRequest startCustomLlmOptimizationRunRequest);
+
+ /** Get a Custom LLM. */
+ CustomLlm get(GetCustomLlmRequest getCustomLlmRequest);
+
+ /** Update a Custom LLM. */
+ CustomLlm update(UpdateCustomLlmRequest updateCustomLlmRequest);
+}
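
A minimal sketch of calling the methods declared above through `workspaceClient.customLlms()`, assuming the generated `CustomLlmsAPI` wrapper mirrors this interface; `"llm-123"` is a placeholder custom LLM ID.

// Sketch only: "llm-123" is a placeholder custom LLM ID.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.aibuilder.CancelCustomLlmOptimizationRunRequest;
import com.databricks.sdk.service.aibuilder.CustomLlm;
import com.databricks.sdk.service.aibuilder.GetCustomLlmRequest;

public class CustomLlmsExample {
  public static void main(String[] args) {
    WorkspaceClient workspace = new WorkspaceClient();
    // Read the current state of a custom LLM.
    CustomLlm llm = workspace.customLlms().get(new GetCustomLlmRequest().setId("llm-123"));
    System.out.println(llm);
    // Cancel an in-flight optimization run for the same custom LLM.
    workspace.customLlms().cancel(new CancelCustomLlmOptimizationRunRequest().setId("llm-123"));
  }
}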
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java
new file mode 100755
index 000000000..c482c4eee
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Dataset.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Dataset {
+ /** */
+ @JsonProperty("table")
+ private Table table;
+
+ public Dataset setTable(Table table) {
+ this.table = table;
+ return this;
+ }
+
+ public Table getTable() {
+ return table;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Dataset that = (Dataset) o;
+ return Objects.equals(table, that.table);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(table);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Dataset.class).add("table", table).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java
new file mode 100755
index 000000000..981a2903a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/GetCustomLlmRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get a Custom LLM */
+@Generated
+public class GetCustomLlmRequest {
+ /** The id of the custom llm */
+ @JsonIgnore private String id;
+
+ public GetCustomLlmRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetCustomLlmRequest that = (GetCustomLlmRequest) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetCustomLlmRequest.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java
new file mode 100755
index 000000000..b9713f495
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/StartCustomLlmOptimizationRunRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class StartCustomLlmOptimizationRunRequest {
+ /** The Id of the tile. */
+ @JsonIgnore private String id;
+
+ public StartCustomLlmOptimizationRunRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ StartCustomLlmOptimizationRunRequest that = (StartCustomLlmOptimizationRunRequest) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(StartCustomLlmOptimizationRunRequest.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java
new file mode 100755
index 000000000..fbc8d5ec5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/State.java
@@ -0,0 +1,16 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+
+/** States of Custom LLM optimization lifecycle. */
+@Generated
+public enum State {
+ CANCELLED,
+ COMPLETED,
+ CREATED,
+ FAILED,
+ PENDING,
+ RUNNING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java
new file mode 100755
index 000000000..a5140c9f6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/Table.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Table {
+ /** Name of the request column */
+ @JsonProperty("request_col")
+ private String requestCol;
+
+ /** Optional: Name of the response column if the data is labeled */
+ @JsonProperty("response_col")
+ private String responseCol;
+
+ /** Full UC table path in catalog.schema.table_name format */
+ @JsonProperty("table_path")
+ private String tablePath;
+
+ public Table setRequestCol(String requestCol) {
+ this.requestCol = requestCol;
+ return this;
+ }
+
+ public String getRequestCol() {
+ return requestCol;
+ }
+
+ public Table setResponseCol(String responseCol) {
+ this.responseCol = responseCol;
+ return this;
+ }
+
+ public String getResponseCol() {
+ return responseCol;
+ }
+
+ public Table setTablePath(String tablePath) {
+ this.tablePath = tablePath;
+ return this;
+ }
+
+ public String getTablePath() {
+ return tablePath;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Table that = (Table) o;
+ return Objects.equals(requestCol, that.requestCol)
+ && Objects.equals(responseCol, that.responseCol)
+ && Objects.equals(tablePath, that.tablePath);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(requestCol, responseCol, tablePath);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Table.class)
+ .add("requestCol", requestCol)
+ .add("responseCol", responseCol)
+ .add("tablePath", tablePath)
+ .toString();
+ }
+}
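
Taken together with `Dataset` above, `Table` describes where training data lives and which columns hold the request and, optionally, the labeled response. A small sketch with placeholder names:

// Sketch only: the catalog, schema, table and column names are placeholders.
import com.databricks.sdk.service.aibuilder.Dataset;
import com.databricks.sdk.service.aibuilder.Table;

public class DatasetExample {
  public static void main(String[] args) {
    Table table = new Table()
        .setTablePath("main.training.chat_logs")  // full UC path: catalog.schema.table_name
        .setRequestCol("request")                  // column holding the model input
        .setResponseCol("response");               // optional: labeled output column
    Dataset dataset = new Dataset().setTable(table);
    System.out.println(dataset);
  }
}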
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java
new file mode 100755
index 000000000..3cff645de
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/aibuilder/UpdateCustomLlmRequest.java
@@ -0,0 +1,87 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.aibuilder;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateCustomLlmRequest {
+ /** The CustomLlm containing the fields which should be updated. */
+ @JsonProperty("custom_llm")
+ private CustomLlm customLlm;
+
+ /** The id of the custom llm */
+ @JsonIgnore private String id;
+
+ /**
+ * The list of the CustomLlm fields to update. These should correspond to the values (or lack
+ * thereof) present in `custom_llm`.
+ *
+ * The field mask must be a single string, with multiple fields separated by commas (no
+ * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate
+ * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is
+ * not allowed, as only the entire collection field can be specified. Field names must exactly
+ * match the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("update_mask")
+ private String updateMask;
+
+ public UpdateCustomLlmRequest setCustomLlm(CustomLlm customLlm) {
+ this.customLlm = customLlm;
+ return this;
+ }
+
+ public CustomLlm getCustomLlm() {
+ return customLlm;
+ }
+
+ public UpdateCustomLlmRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public UpdateCustomLlmRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateCustomLlmRequest that = (UpdateCustomLlmRequest) o;
+ return Objects.equals(customLlm, that.customLlm)
+ && Objects.equals(id, that.id)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(customLlm, id, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateCustomLlmRequest.class)
+ .add("customLlm", customLlm)
+ .add("id", id)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
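
The `update_mask` semantics documented above (a comma-separated list of field paths with no spaces, names matching the resource fields) can be illustrated with a partial update. This is a sketch only: it assumes `CustomLlm` exposes the usual generated setter for its `agent_artifact_path` field, that the mask uses the wire field name, and that the `CustomLlmsAPI` wrapper exposes `update()` as declared by `CustomLlmsService`; the ID and artifact path are placeholders.

// Sketch only: "llm-123" and the artifact path are placeholders; the mask value
// "agent_artifact_path" assumes field masks use the wire (snake_case) field names.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.aibuilder.CustomLlm;
import com.databricks.sdk.service.aibuilder.UpdateCustomLlmRequest;

public class UpdateCustomLlmExample {
  public static void main(String[] args) {
    WorkspaceClient workspace = new WorkspaceClient();
    CustomLlm changes = new CustomLlm()
        .setAgentArtifactPath("/Volumes/main/llm/artifacts/agent-v2"); // only masked fields are applied
    CustomLlm updated = workspace.customLlms().update(
        new UpdateCustomLlmRequest()
            .setId("llm-123")
            .setCustomLlm(changes)
            .setUpdateMask("agent_artifact_path"));
    System.out.println(updated);
  }
}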
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java
index 895258dbe..619f90f27 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateLogDeliveryConfigurationParams.java
@@ -8,6 +8,7 @@
import java.util.Collection;
import java.util.Objects;
+/** * Log Delivery Configuration */
@Generated
public class CreateLogDeliveryConfigurationParams {
/** The optional human-readable name of the log delivery configuration. Defaults to empty. */
@@ -34,21 +35,17 @@ public class CreateLogDeliveryConfigurationParams {
private String deliveryPathPrefix;
/**
- * This field applies only if `log_type` is `BILLABLE_USAGE`. This is the optional start month and
- * year for delivery, specified in `YYYY-MM` format. Defaults to current year and month.
- * `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`).
+ * This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and
+ * year for delivery, specified in YYYY-MM format. Defaults to current year and month.
+ * BILLABLE_USAGE logs are not available for usage before March 2019 (2019-03).
*/
@JsonProperty("delivery_start_time")
private String deliveryStartTime;
/**
- * Log delivery type. Supported values are:
- *
- * * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the
- * [View billable usage].
- *
- * * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit
- * logging]
+ * Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log
+ * delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure [audit
+ * log delivery]. For the JSON schema, see [Configure audit logging]
*
* [Configure audit logging]:
* https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View
@@ -62,12 +59,11 @@ public class CreateLogDeliveryConfigurationParams {
private LogType logType;
/**
- * The file type of log delivery.
- *
- * * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated
- * values) format is supported. For the schema, see the [View billable usage] * If `log_type` is
- * `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is
- * supported. For the schema, see the [Configuring audit logs].
+ * The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`.
+ * Only the CSV (comma-separated values) format is supported. For the schema, see the [View
+ * billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON
+ * (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit
+ * logs].
*
* [Configuring audit logs]:
* https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View
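
The constraints in the Javadoc above pair naturally with the new `create(CreateLogDeliveryConfigurationParams)` convenience overload added to `LogDeliveryAPI` later in this diff: `BILLABLE_USAGE` requires `CSV` output, and the optional start month uses `YYYY-MM`. A sketch with placeholder IDs, assuming the params class keeps its existing `credentialsId` and `storageConfigurationId` setters:

// Sketch only: the credential and storage configuration IDs are placeholders.
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.billing.CreateLogDeliveryConfigurationParams;
import com.databricks.sdk.service.billing.LogType;
import com.databricks.sdk.service.billing.OutputFormat;
import com.databricks.sdk.service.billing.WrappedLogDeliveryConfiguration;

public class CreateLogDeliveryExample {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();
    CreateLogDeliveryConfigurationParams params = new CreateLogDeliveryConfigurationParams()
        .setConfigName("billable-usage-logs")
        .setLogType(LogType.BILLABLE_USAGE)       // BILLABLE_USAGE must use CSV output
        .setOutputFormat(OutputFormat.CSV)
        .setDeliveryStartTime("2024-01")          // YYYY-MM; not available before 2019-03
        .setCredentialsId("credential-config-id")
        .setStorageConfigurationId("storage-config-id");
    WrappedLogDeliveryConfiguration created = account.logDelivery().create(params);
    System.out.println(created);
  }
}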
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java
index d1aee2690..f4fae5ddc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/DeliveryStatus.java
@@ -5,23 +5,19 @@
import com.databricks.sdk.support.Generated;
/**
- * The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery
- * attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has
- * succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of
- * misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The
+ * * The status string for log delivery. Possible values are: `CREATED`: There were no log delivery
+ * attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has
+ * succeeded completely. `USER_FAILURE`: The latest attempt of log delivery failed because of
+ * misconfiguration of customer provided permissions on role or storage. `SYSTEM_FAILURE`: The
* latest attempt of log delivery failed because of an Databricks internal error. Contact support if
- * it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been
+ * it doesn't go away soon. `NOT_FOUND`: The log delivery status as the configuration has been
* disabled since the release of this feature or there are no workspaces in the account.
*/
@Generated
public enum DeliveryStatus {
- CREATED, // There were no log delivery attempts since the config was created.
- NOT_FOUND, // The log delivery status as the configuration has been disabled since the
- // release of this feature or there are no workspaces in the account.
- SUCCEEDED, // The latest attempt of log delivery has succeeded completely.
- SYSTEM_FAILURE, // The latest attempt of log delivery failed because of an
- * Log delivery works with all account types. However, if your account is on the E2 version of
- * the platform or on a select custom plan that allows multiple workspaces per account, you can
- * optionally configure different storage destinations for each workspace. Log delivery status is
- * also provided to know the latest status of log delivery attempts. The high-level flow of billable
- * usage delivery:
- *
- * 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
- * Using Databricks APIs, call the Account API to create a [storage configuration
- * object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS,
- * create the appropriate AWS IAM role. For full details, including the required IAM role policies
- * and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the
- * Account API to create a [credential configuration object](:method:Credentials/Create) that uses
- * the IAM role"s ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the
- * Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the
- * credential and storage configuration objects from previous steps. You can specify if the logs
- * should include all events of that log type in your account (_Account level_ delivery) or only
- * events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery
- * applies to all current and future workspaces plus account level logs, while workspace level log
- * delivery solely delivers logs related to the specified workspaces. You can create multiple types
- * of delivery configurations per account.
- *
- * For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * ` For audit log delivery: * For more information about about audit log delivery, see [Audit log
- * delivery], which includes information about the used JSON schema. * The delivery location is
- * ` [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
- * log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * These APIs manage Log delivery configurations for this account. Log delivery configs enable you
+ * to configure the delivery of the specified type of logs to your storage account.
*/
@Generated
public class LogDeliveryAPI {
@@ -78,6 +27,13 @@ public LogDeliveryAPI(LogDeliveryService mock) {
impl = mock;
}
+ public WrappedLogDeliveryConfiguration create(
+ CreateLogDeliveryConfigurationParams logDeliveryConfiguration) {
+ return create(
+ new WrappedCreateLogDeliveryConfiguration()
+ .setLogDeliveryConfiguration(logDeliveryConfiguration));
+ }
+
/**
* Create a new log delivery configuration.
*
@@ -109,7 +65,7 @@ public WrappedLogDeliveryConfiguration create(WrappedCreateLogDeliveryConfigurat
return impl.create(request);
}
- public WrappedLogDeliveryConfiguration get(String logDeliveryConfigurationId) {
+ public GetLogDeliveryConfigurationResponse get(String logDeliveryConfigurationId) {
return get(
new GetLogDeliveryRequest().setLogDeliveryConfigurationId(logDeliveryConfigurationId));
}
@@ -119,7 +75,7 @@ public WrappedLogDeliveryConfiguration get(String logDeliveryConfigurationId) {
*
* Gets a Databricks log delivery configuration object for an account, both specified by ID.
*/
- public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) {
+ public GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest request) {
return impl.get(request);
}
@@ -133,7 +89,13 @@ public Iterable<LogDeliveryConfiguration> list(
+ * `ENABLED`: All dependencies have executed and succeeded `DISABLED`: At least one dependency
+ * has succeeded
*/
@Generated
public enum LogDeliveryConfigStatus {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java
index 6a6f6521b..1a078c1c9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryConfiguration.java
@@ -8,13 +8,14 @@
import java.util.Collection;
import java.util.Objects;
+/** * Log Delivery Configuration */
@Generated
public class LogDeliveryConfiguration {
- /** The Databricks account ID that hosts the log delivery configuration. */
+ /** Databricks account ID. */
@JsonProperty("account_id")
private String accountId;
- /** Databricks log delivery configuration ID. */
+ /** The unique UUID of log delivery configuration */
@JsonProperty("config_id")
private String configId;
@@ -46,25 +47,21 @@ public class LogDeliveryConfiguration {
private String deliveryPathPrefix;
/**
- * This field applies only if `log_type` is `BILLABLE_USAGE`. This is the optional start month and
- * year for delivery, specified in `YYYY-MM` format. Defaults to current year and month.
- * `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`).
+ * This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and
+ * year for delivery, specified in YYYY-MM format. Defaults to current year and month.
+ * BILLABLE_USAGE logs are not available for usage before March 2019 (2019-03).
*/
@JsonProperty("delivery_start_time")
private String deliveryStartTime;
- /** Databricks log delivery status. */
+ /** The LogDeliveryStatus of this log delivery configuration */
@JsonProperty("log_delivery_status")
private LogDeliveryStatus logDeliveryStatus;
/**
- * Log delivery type. Supported values are:
- *
- * * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the
- * [View billable usage].
- *
- * * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit
- * logging]
+ * Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log
+ * delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure [audit
+ * log delivery]. For the JSON schema, see [Configure audit logging]
*
* [Configure audit logging]:
* https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View
@@ -78,12 +75,11 @@ public class LogDeliveryConfiguration {
private LogType logType;
/**
- * The file type of log delivery.
- *
- * * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated
- * values) format is supported. For the schema, see the [View billable usage] * If `log_type` is
- * `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is
- * supported. For the schema, see the [Configuring audit logs].
+ * The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`.
+ * Only the CSV (comma-separated values) format is supported. For the schema, see the [View
+ * billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON
+ * (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit
+ * logs].
*
* [Configuring audit logs]:
* https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java
index dd4e64dcf..187955234 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java
@@ -32,7 +32,7 @@ public WrappedLogDeliveryConfiguration create(WrappedCreateLogDeliveryConfigurat
}
@Override
- public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) {
+ public GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/log-delivery/%s",
@@ -41,7 +41,7 @@ public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, WrappedLogDeliveryConfiguration.class);
+ return apiClient.execute(req, GetLogDeliveryConfigurationResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
index 422788de9..8e66ac799 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryService.java
@@ -4,59 +4,8 @@
import com.databricks.sdk.support.Generated;
/**
- * These APIs manage log delivery configurations for this account. The two supported log types for
- * this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This
- * feature works with all account ID types.
- *
- * Log delivery works with all account types. However, if your account is on the E2 version of
- * the platform or on a select custom plan that allows multiple workspaces per account, you can
- * optionally configure different storage destinations for each workspace. Log delivery status is
- * also provided to know the latest status of log delivery attempts. The high-level flow of billable
- * usage delivery:
- *
- * 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy.
- * Using Databricks APIs, call the Account API to create a [storage configuration
- * object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS,
- * create the appropriate AWS IAM role. For full details, including the required IAM role policies
- * and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the
- * Account API to create a [credential configuration object](:method:Credentials/Create) that uses
- * the IAM role"s ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the
- * Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the
- * credential and storage configuration objects from previous steps. You can specify if the logs
- * should include all events of that log type in your account (_Account level_ delivery) or only
- * events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery
- * applies to all current and future workspaces plus account level logs, while workspace level log
- * delivery solely delivers logs related to the specified workspaces. You can create multiple types
- * of delivery configurations per account.
- *
- * For billable usage delivery: * For more information about billable usage logs, see [Billable
- * usage log delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
- * ` For audit log delivery: * For more information about about audit log delivery, see [Audit log
- * delivery], which includes information about the used JSON schema. * The delivery location is
- * ` [Audit log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Billable usage
- * log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- * [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- * [create a new AWS S3 bucket]:
- * https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+ * These APIs manage Log delivery configurations for this account. Log delivery configs enable you
+ * to configure the delivery of the specified type of logs to your storage account.
*
* This is the high-level interface, that contains generated methods.
*
@@ -99,7 +48,7 @@ WrappedLogDeliveryConfiguration create(
*
* Gets a Databricks log delivery configuration object for an account, both specified by ID.
*/
- WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest getLogDeliveryRequest);
+ GetLogDeliveryConfigurationResponse get(GetLogDeliveryRequest getLogDeliveryRequest);
/**
* Get all log delivery configurations.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java
index 5c37c00aa..e5d5a7ede 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryStatus.java
@@ -7,7 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Databricks log delivery status. */
@Generated
public class LogDeliveryStatus {
/** The UTC time for the latest log delivery attempt. */
@@ -26,14 +25,13 @@ public class LogDeliveryStatus {
private String message;
/**
- * The status string for log delivery. Possible values are: * `CREATED`: There were no log
- * delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log
- * delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed
- * because of misconfiguration of customer provided permissions on role or storage. *
- * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal
- * error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as
- * the configuration has been disabled since the release of this feature or there are no
- * workspaces in the account.
+ * Enum that describes the status. Possible values are: * `CREATED`: There were no log delivery
+ * attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has
+ * succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of
+ * misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The
+ * latest attempt of log delivery failed because of an Databricks internal error. Contact support
+ * if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has
+ * been disabled since the release of this feature or there are no workspaces in the account.
*/
@JsonProperty("status")
private DeliveryStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java
index 0e657964a..2df06fe30 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogType.java
@@ -4,22 +4,7 @@
import com.databricks.sdk.support.Generated;
-/**
- * Log delivery type. Supported values are:
- *
- * * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the
- * [View billable usage].
- *
- * * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit
- * logging]
- *
- * [Configure audit logging]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable
- * usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log
- * delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
- * [billable usage log delivery]:
- * https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
- */
+/** * Log Delivery Type */
@Generated
public enum LogType {
AUDIT_LOGS,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java
index 192017e22..4298a6b0f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/OutputFormat.java
@@ -4,18 +4,7 @@
import com.databricks.sdk.support.Generated;
-/**
- * The file type of log delivery.
- *
- * * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated
- * values) format is supported. For the schema, see the [View billable usage] * If `log_type` is
- * `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is
- * supported. For the schema, see the [Configuring audit logs].
- *
- * [Configuring audit logs]:
- * https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable
- * usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
- */
+/** * Log Delivery Output Format */
@Generated
public enum OutputFormat {
CSV,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java
index 383fcd194..3fc98a262 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java
@@ -8,9 +8,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** * Update Log Delivery Configuration */
@Generated
public class UpdateLogDeliveryConfigurationStatusRequest {
- /** Databricks log delivery configuration ID */
+ /** The log delivery configuration id of customer */
@JsonIgnore private String logDeliveryConfigurationId;
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java
index cb830b923..1cf2ed48e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java
@@ -7,9 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** * Properties of the new log delivery configuration. */
@Generated
public class WrappedCreateLogDeliveryConfiguration {
- /** */
+ /** * Log Delivery Configuration */
@JsonProperty("log_delivery_configuration")
private CreateLogDeliveryConfigurationParams logDeliveryConfiguration;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java
index 15d0080e9..f35961b31 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java
@@ -9,7 +9,7 @@
@Generated
public class WrappedLogDeliveryConfiguration {
- /** */
+ /** The created log delivery configuration */
@JsonProperty("log_delivery_configuration")
private LogDeliveryConfiguration logDeliveryConfiguration;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java
index ddb9ba7d7..6d553b893 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java
@@ -10,10 +10,17 @@
@Generated
public class WrappedLogDeliveryConfigurations {
- /** */
+ /** Log delivery configurations were returned successfully. */
@JsonProperty("log_delivery_configurations")
+ private Collection<LogDeliveryConfiguration> logDeliveryConfigurations;
+ * If not set, all the effective permissions are returned. If set to - lesser than 0: invalid
+ * parameter error - 0: page length is set to a server configured value - lesser than 150 but
+ * greater than 0: invalid parameter error (this is to ensure that server is able to return at
+ * least one complete EffectivePrivilegeAssignment in a single page response) - greater than (or
+ * equal to) 150: page length is the minimum of this value and a server configured value
+ */
+ @JsonIgnore
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque token for the next page of results (pagination). */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
/**
* If provided, only the effective permissions for the specified principal (user or group) are
* returned.
@@ -23,7 +44,7 @@ public class GetEffectiveRequest {
private String principal;
/** Type of securable. */
- @JsonIgnore private SecurableType securableType;
+ @JsonIgnore private String securableType;
public GetEffectiveRequest setFullName(String fullName) {
this.fullName = fullName;
@@ -34,6 +55,24 @@ public String getFullName() {
return fullName;
}
+ public GetEffectiveRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public GetEffectiveRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
public GetEffectiveRequest setPrincipal(String principal) {
this.principal = principal;
return this;
@@ -43,12 +82,12 @@ public String getPrincipal() {
return principal;
}
- public GetEffectiveRequest setSecurableType(SecurableType securableType) {
+ public GetEffectiveRequest setSecurableType(String securableType) {
this.securableType = securableType;
return this;
}
- public SecurableType getSecurableType() {
+ public String getSecurableType() {
return securableType;
}
@@ -58,19 +97,23 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GetEffectiveRequest that = (GetEffectiveRequest) o;
return Objects.equals(fullName, that.fullName)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(pageToken, that.pageToken)
&& Objects.equals(principal, that.principal)
&& Objects.equals(securableType, that.securableType);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, principal, securableType);
+ return Objects.hash(fullName, maxResults, pageToken, principal, securableType);
}
@Override
public String toString() {
return new ToStringer(GetEffectiveRequest.class)
.add("fullName", fullName)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
.add("principal", principal)
.add("securableType", securableType)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
index 1fdab979d..8dfca3ffc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
@@ -14,13 +14,33 @@ public class GetGrantRequest {
/** Full name of securable. */
@JsonIgnore private String fullName;
+ /**
+ * Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment
+ * present in a single page response is guaranteed to contain all the privileges granted on the
+ * requested Securable for the respective principal.
+ *
+ * If not set, all the permissions are returned. If set to - lesser than 0: invalid parameter
+ * error - 0: page length is set to a server configured value - lesser than 150 but greater than
+ * 0: invalid parameter error (this is to ensure that server is able to return at least one
+ * complete PrivilegeAssignment in a single page response) - greater than (or equal to) 150: page
+ * length is the minimum of this value and a server configured value
+ */
+ @JsonIgnore
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque pagination token to go to next page based on previous query. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
/** If provided, only the permissions for the specified principal (user or group) are returned. */
@JsonIgnore
@QueryParam("principal")
private String principal;
/** Type of securable. */
- @JsonIgnore private SecurableType securableType;
+ @JsonIgnore private String securableType;
public GetGrantRequest setFullName(String fullName) {
this.fullName = fullName;
@@ -31,6 +51,24 @@ public String getFullName() {
return fullName;
}
+ public GetGrantRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public GetGrantRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
public GetGrantRequest setPrincipal(String principal) {
this.principal = principal;
return this;
@@ -40,12 +78,12 @@ public String getPrincipal() {
return principal;
}
- public GetGrantRequest setSecurableType(SecurableType securableType) {
+ public GetGrantRequest setSecurableType(String securableType) {
this.securableType = securableType;
return this;
}
- public SecurableType getSecurableType() {
+ public String getSecurableType() {
return securableType;
}
@@ -55,19 +93,23 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GetGrantRequest that = (GetGrantRequest) o;
return Objects.equals(fullName, that.fullName)
+ && Objects.equals(maxResults, that.maxResults)
+ && Objects.equals(pageToken, that.pageToken)
&& Objects.equals(principal, that.principal)
&& Objects.equals(securableType, that.securableType);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, principal, securableType);
+ return Objects.hash(fullName, maxResults, pageToken, principal, securableType);
}
@Override
public String toString() {
return new ToStringer(GetGrantRequest.class)
.add("fullName", fullName)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
.add("principal", principal)
.add("securableType", securableType)
.toString();
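Usage note: the new max_results/page_token fields pair with the next_page_token field on the GetPermissionsResponse type introduced later in this diff. A minimal pagination sketch, assuming a configured WorkspaceClient, the usual generated getters on GetPermissionsResponse, and the existing PrivilegeAssignment type from the catalog package; the table and names are hypothetical:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.GetGrantRequest;
import com.databricks.sdk.service.catalog.GetPermissionsResponse;
import com.databricks.sdk.service.catalog.PrivilegeAssignment;

public class ListGrantsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // picks up auth from the environment
    String pageToken = null;
    do {
      GetPermissionsResponse page =
          w.grants()
              .get(
                  new GetGrantRequest()
                      .setSecurableType("table") // now a plain string
                      .setFullName("main.default.my_table") // hypothetical table name
                      .setMaxResults(150L) // values between 1 and 149 are rejected
                      .setPageToken(pageToken));
      if (page.getPrivilegeAssignments() != null) {
        for (PrivilegeAssignment assignment : page.getPrivilegeAssignments()) {
          System.out.println(assignment.getPrincipal() + " -> " + assignment.getPrivileges());
        }
      }
      pageToken = page.getNextPageToken();
    } while (pageToken != null);
  }
}

Passing 0 for max_results instead asks the server to choose the page length, which matches the behaviour documented in the field comment above.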
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java
index 34e138f12..f30d70fe7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java
@@ -38,7 +38,7 @@ public class GetMetastoreSummaryResponse {
/** The scope of Delta Sharing enabled for the metastore. */
@JsonProperty("delta_sharing_scope")
- private GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope;
+ private DeltaSharingScopeEnum deltaSharingScope;
/** Whether to allow non-DBR clients to directly access entities under the metastore. */
@JsonProperty("external_access_enabled")
@@ -148,13 +148,12 @@ public Long getDeltaSharingRecipientTokenLifetimeInSeconds() {
return deltaSharingRecipientTokenLifetimeInSeconds;
}
- public GetMetastoreSummaryResponse setDeltaSharingScope(
- GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope) {
+ public GetMetastoreSummaryResponse setDeltaSharingScope(DeltaSharingScopeEnum deltaSharingScope) {
this.deltaSharingScope = deltaSharingScope;
return this;
}
- public GetMetastoreSummaryResponseDeltaSharingScope getDeltaSharingScope() {
+ public DeltaSharingScopeEnum getDeltaSharingScope() {
return deltaSharingScope;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java
deleted file mode 100755
index 336e0cc06..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.catalog;
-
-import com.databricks.sdk.support.Generated;
-
-/** The scope of Delta Sharing enabled for the metastore. */
-@Generated
-public enum GetMetastoreSummaryResponseDeltaSharingScope {
- INTERNAL,
- INTERNAL_AND_EXTERNAL,
-}
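Migration note: code that matched on the removed GetMetastoreSummaryResponseDeltaSharingScope enum now needs the shared DeltaSharingScopeEnum instead. A small sketch, assuming DeltaSharingScopeEnum sits in the same catalog package and keeps the INTERNAL / INTERNAL_AND_EXTERNAL constants of the removed enum, and that the summary is fetched through the existing metastores().summary() call:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.DeltaSharingScopeEnum;
import com.databricks.sdk.service.catalog.GetMetastoreSummaryResponse;

public class MetastoreSummaryExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    GetMetastoreSummaryResponse summary = w.metastores().summary();
    // The scope is now typed as the shared DeltaSharingScopeEnum.
    if (summary.getDeltaSharingScope() == DeltaSharingScopeEnum.INTERNAL_AND_EXTERNAL) {
      System.out.println("Delta Sharing to external recipients is enabled for this metastore.");
    }
  }
}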
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java
new file mode 100755
index 000000000..f0375ba39
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetPermissionsResponse.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetPermissionsResponse {
+ /**
+ * Opaque token to retrieve the next page of results. Absent if there are no more pages.
+ * __page_token__ should be set to this value for the next request (for the next page of results).
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** The privileges assigned to each principal */
+ @JsonProperty("privilege_assignments")
+ private Collection<PrivilegeAssignment> privilegeAssignments;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.java
- * Gets the permissions for a securable.
+ * Gets the permissions for a securable. Does not include inherited permissions.
*/
- public PermissionsList get(GetGrantRequest request) {
+ public GetPermissionsResponse get(GetGrantRequest request) {
return impl.get(request);
}
- public EffectivePermissionsList getEffective(SecurableType securableType, String fullName) {
+ public EffectivePermissionsList getEffective(String securableType, String fullName) {
return getEffective(
new GetEffectiveRequest().setSecurableType(securableType).setFullName(fullName));
}
@@ -54,13 +54,14 @@ public EffectivePermissionsList getEffective(SecurableType securableType, String
/**
* Get effective permissions.
*
- * Gets the effective permissions for a securable.
+ * Gets the effective permissions for a securable. Includes inherited permissions from any
+ * parent securables.
*/
public EffectivePermissionsList getEffective(GetEffectiveRequest request) {
return impl.getEffective(request);
}
- public PermissionsList update(SecurableType securableType, String fullName) {
+ public UpdatePermissionsResponse update(String securableType, String fullName) {
return update(new UpdatePermissions().setSecurableType(securableType).setFullName(fullName));
}
@@ -69,7 +70,7 @@ public PermissionsList update(SecurableType securableType, String fullName) {
*
* Updates the permissions for a securable.
*/
- public PermissionsList update(UpdatePermissions request) {
+ public UpdatePermissionsResponse update(UpdatePermissions request) {
return impl.update(request);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java
index 1a8219ede..cf01d91a0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java
@@ -17,7 +17,7 @@ public GrantsImpl(ApiClient apiClient) {
}
@Override
- public PermissionsList get(GetGrantRequest request) {
+ public GetPermissionsResponse get(GetGrantRequest request) {
String path =
String.format(
"/api/2.1/unity-catalog/permissions/%s/%s",
@@ -26,7 +26,7 @@ public PermissionsList get(GetGrantRequest request) {
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, PermissionsList.class);
+ return apiClient.execute(req, GetPermissionsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -49,7 +49,7 @@ public EffectivePermissionsList getEffective(GetEffectiveRequest request) {
}
@Override
- public PermissionsList update(UpdatePermissions request) {
+ public UpdatePermissionsResponse update(UpdatePermissions request) {
String path =
String.format(
"/api/2.1/unity-catalog/permissions/%s/%s",
@@ -59,7 +59,7 @@ public PermissionsList update(UpdatePermissions request) {
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, PermissionsList.class);
+ return apiClient.execute(req, UpdatePermissionsResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java
index 9176b21ce..275b60772 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java
@@ -23,14 +23,15 @@ public interface GrantsService {
/**
* Get permissions.
*
- * Gets the permissions for a securable.
+ * Gets the permissions for a securable. Does not include inherited permissions.
*/
- PermissionsList get(GetGrantRequest getGrantRequest);
+ GetPermissionsResponse get(GetGrantRequest getGrantRequest);
/**
* Get effective permissions.
*
- * Gets the effective permissions for a securable.
+ * Gets the effective permissions for a securable. Includes inherited permissions from any
+ * parent securables.
*/
EffectivePermissionsList getEffective(GetEffectiveRequest getEffectiveRequest);
@@ -39,5 +40,5 @@ public interface GrantsService {
*
* Updates the permissions for a securable.
*/
- PermissionsList update(UpdatePermissions updatePermissions);
+ UpdatePermissionsResponse update(UpdatePermissions updatePermissions);
}
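Usage note: together with the request-type change above, update() now returns UpdatePermissionsResponse instead of PermissionsList, and the securable type is passed as a plain string. A hedged sketch; UpdatePermissions.setChanges and the PermissionsChange/Privilege types are existing catalog classes that this diff does not touch, and the table and group names are hypothetical:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.PermissionsChange;
import com.databricks.sdk.service.catalog.Privilege;
import com.databricks.sdk.service.catalog.UpdatePermissions;
import com.databricks.sdk.service.catalog.UpdatePermissionsResponse;
import java.util.Collections;

public class GrantSelectExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    UpdatePermissionsResponse response =
        w.grants()
            .update(
                new UpdatePermissions()
                    // The securable type is now a plain string rather than a SecurableType value.
                    .setSecurableType("table")
                    .setFullName("main.default.my_table") // hypothetical table name
                    .setChanges(
                        Collections.singletonList(
                            new PermissionsChange()
                                .setPrincipal("data-engineers") // hypothetical group name
                                .setAdd(Collections.singletonList(Privilege.SELECT)))));
    System.out.println(response);
  }
}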
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java
new file mode 100755
index 000000000..cc722491d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresRequest.java
@@ -0,0 +1,70 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** List metastores */
+@Generated
+public class ListMetastoresRequest {
+ /**
+ * Maximum number of metastores to return. - when set to a value greater than 0, the page length
+ * is the minimum of this value and a server configured value; - when set to 0, the page length is
+ * set to a server configured value (recommended); - when set to a value less than 0, an invalid
+ * parameter error is returned; - If not set, all the metastores are returned (not recommended). -
+ * Note: The number of returned metastores might be less than the specified max_results size, even
+ * zero. The only definitive indication that no further metastores can be fetched is when the
+ * next_page_token is unset from the response.
+ */
+ @JsonIgnore
+ @QueryParam("max_results")
+ private Long maxResults;
+
+ /** Opaque pagination token to go to next page based on previous query. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListMetastoresRequest setMaxResults(Long maxResults) {
+ this.maxResults = maxResults;
+ return this;
+ }
+
+ public Long getMaxResults() {
+ return maxResults;
+ }
+
+ public ListMetastoresRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListMetastoresRequest that = (ListMetastoresRequest) o;
+ return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(maxResults, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListMetastoresRequest.class)
+ .add("maxResults", maxResults)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
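Usage note: a sketch of how the new request might be used, assuming the corresponding MetastoresAPI.list change (not part of this excerpt) accepts a ListMetastoresRequest and keeps returning an Iterable that pages through results:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.ListMetastoresRequest;
import com.databricks.sdk.service.catalog.MetastoreInfo;

public class ListMetastoresExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // max_results = 0 lets the server pick the page length, which the doc comment recommends.
    ListMetastoresRequest request = new ListMetastoresRequest().setMaxResults(0L);
    // Assumes MetastoresAPI.list(ListMetastoresRequest) pages through results transparently,
    // as other list methods in this SDK do; that hunk is not part of this excerpt.
    for (MetastoreInfo metastore : w.metastores().list(request)) {
      System.out.println(metastore.getName());
    }
  }
}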
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java
index 257aa2443..e906c400f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListMetastoresResponse.java
@@ -14,6 +14,13 @@ public class ListMetastoresResponse {
@JsonProperty("metastores")
private Collection<MetastoreInfo> metastores;
- * - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
- * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17)
*/
@JsonProperty("key")
private String key;
- /**
- * The value of the tag.
- *
- * - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
- * (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24)
- */
+ /** The value of the tag. */
@JsonProperty("value")
private String value;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
index bbf12f00d..81c1b7e85 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
@@ -218,14 +218,6 @@ public class EditCluster {
@JsonProperty("policy_id")
private String policyId;
- /** If set, what the configurable IOPS for the remote shuffle disk is. */
- @JsonProperty("remote_shuffle_disk_iops")
- private Long remoteShuffleDiskIops;
-
- /** If set, what the configurable throughput (in Mb/s) for the remote shuffle disk is. */
- @JsonProperty("remote_shuffle_disk_throughput")
- private Long remoteShuffleDiskThroughput;
-
/**
* Determines the cluster's runtime engine, either standard or Photon.
*
@@ -281,10 +273,6 @@ public class EditCluster {
@JsonProperty("ssh_public_keys")
private Collection<String> sshPublicKeys;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+ * Get list of Genie Spaces.
+ */
+ public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) {
+ return impl.listSpaces(request);
+ }
+
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+ * Get list of Genie Spaces.
+ */
+ GenieListSpacesResponse listSpaces(GenieListSpacesRequest genieListSpacesRequest);
+
/**
* Start conversation.
*
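Usage note: a minimal sketch of the new listSpaces call, assuming the Genie API is reached through the existing genie() accessor on WorkspaceClient and that an empty GenieListSpacesRequest is acceptable for a first page:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieListSpacesRequest;
import com.databricks.sdk.service.dashboards.GenieListSpacesResponse;

public class GenieListSpacesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // An empty request asks for the first page of spaces visible to the caller.
    GenieListSpacesResponse response = w.genie().listSpaces(new GenieListSpacesRequest());
    // The generated ToStringer makes the response printable as-is.
    System.out.println(response);
  }
}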
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
deleted file mode 100755
index b88922e54..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import java.util.Objects;
-
-/** Read a published dashboard in an embedded ui. */
-@Generated
-public class GetPublishedDashboardEmbeddedRequest {
- /** UUID identifying the published dashboard. */
- @JsonIgnore private String dashboardId;
-
- public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) {
- this.dashboardId = dashboardId;
- return this;
- }
-
- public String getDashboardId() {
- return dashboardId;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o;
- return Objects.equals(dashboardId, that.dashboardId);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(dashboardId);
- }
-
- @Override
- public String toString() {
- return new ToStringer(GetPublishedDashboardEmbeddedRequest.class)
- .add("dashboardId", dashboardId)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
index ec34c7536..eb5fda3a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
@@ -23,20 +23,6 @@ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) {
impl = mock;
}
- public void getPublishedDashboardEmbedded(String dashboardId) {
- getPublishedDashboardEmbedded(
- new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId));
- }
-
- /**
- * Read a published dashboard in an embedded ui.
- *
- * Get the current published dashboard within an embedded context.
- */
- public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
- impl.getPublishedDashboardEmbedded(request);
- }
-
public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) {
return getPublishedDashboardTokenInfo(
new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
index 38c982eb1..55a489702 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
@@ -16,21 +16,6 @@ public LakeviewEmbeddedImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
- @Override
- public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
- String path =
- String.format(
- "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId());
- try {
- Request req = new Request("GET", path);
- ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
- apiClient.execute(req, GetPublishedDashboardEmbeddedResponse.class);
- } catch (IOException e) {
- throw new DatabricksException("IO error: " + e.getMessage(), e);
- }
- }
-
@Override
public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(
GetPublishedDashboardTokenInfoRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
index a7fbb8cdb..cad465780 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
@@ -12,14 +12,6 @@
*/
@Generated
public interface LakeviewEmbeddedService {
- /**
- * Read a published dashboard in an embedded ui.
- *
- * Get the current published dashboard within an embedded context.
- */
- void getPublishedDashboardEmbedded(
- GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest);
-
/**
* Read an information of a published dashboard to mint an OAuth token.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
deleted file mode 100755
index f041070b2..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
+++ /dev/null
@@ -1,45 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class PendingStatus {
- /**
- * The token to poll for result asynchronously Example:
- * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
- */
- @JsonProperty("data_token")
- private String dataToken;
-
- public PendingStatus setDataToken(String dataToken) {
- this.dataToken = dataToken;
- return this;
- }
-
- public String getDataToken() {
- return dataToken;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- PendingStatus that = (PendingStatus) o;
- return Objects.equals(dataToken, that.dataToken);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(dataToken);
- }
-
- @Override
- public String toString() {
- return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
deleted file mode 100755
index 958dd8311..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
+++ /dev/null
@@ -1,80 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.QueryParam;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import java.util.Collection;
-import java.util.Objects;
-
-/** Poll the results for the a query for a published, embedded dashboard */
-@Generated
-public class PollPublishedQueryStatusRequest {
- /** */
- @JsonIgnore
- @QueryParam("dashboard_name")
- private String dashboardName;
-
- /** */
- @JsonIgnore
- @QueryParam("dashboard_revision_id")
- private String dashboardRevisionId;
-
- /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
- @JsonIgnore
- @QueryParam("tokens")
- private Collection<String> tokens;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
deleted file mode 100755
- * This is the high-level interface, that contains generated methods.
- *
- * Evolving: this interface is under development. Method signatures may change.
- */
-@Generated
-public interface QueryExecutionService {
- /** Cancel the results for the a query for a published, embedded dashboard. */
- CancelQueryExecutionResponse cancelPublishedQueryExecution(
- CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest);
-
- /** Execute a query for a published dashboard. */
- void executePublishedDashboardQuery(
- ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest);
-
- /** Poll the results for the a query for a published, embedded dashboard. */
- PollQueryStatusResponse pollPublishedQueryStatus(
- PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest);
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
deleted file mode 100755
index 334f3d007..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
+++ /dev/null
@@ -1,114 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class QueryResponseStatus {
- /**
- * Represents an empty message, similar to google.protobuf.Empty, which is not available in the
- * firm right now.
- */
- @JsonProperty("canceled")
- private Empty canceled;
-
- /**
- * Represents an empty message, similar to google.protobuf.Empty, which is not available in the
- * firm right now.
- */
- @JsonProperty("closed")
- private Empty closed;
-
- /** */
- @JsonProperty("pending")
- private PendingStatus pending;
-
- /**
- * The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be
- * identical to data_token in SuccessStatus and PendingStatus. This field is created for audit
- * logging purpose to record the statement_id of all QueryResponseStatus.
- */
- @JsonProperty("statement_id")
- private String statementId;
-
- /** */
- @JsonProperty("success")
- private SuccessStatus success;
-
- public QueryResponseStatus setCanceled(Empty canceled) {
- this.canceled = canceled;
- return this;
- }
-
- public Empty getCanceled() {
- return canceled;
- }
-
- public QueryResponseStatus setClosed(Empty closed) {
- this.closed = closed;
- return this;
- }
-
- public Empty getClosed() {
- return closed;
- }
-
- public QueryResponseStatus setPending(PendingStatus pending) {
- this.pending = pending;
- return this;
- }
-
- public PendingStatus getPending() {
- return pending;
- }
-
- public QueryResponseStatus setStatementId(String statementId) {
- this.statementId = statementId;
- return this;
- }
-
- public String getStatementId() {
- return statementId;
- }
-
- public QueryResponseStatus setSuccess(SuccessStatus success) {
- this.success = success;
- return this;
- }
-
- public SuccessStatus getSuccess() {
- return success;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- QueryResponseStatus that = (QueryResponseStatus) o;
- return Objects.equals(canceled, that.canceled)
- && Objects.equals(closed, that.closed)
- && Objects.equals(pending, that.pending)
- && Objects.equals(statementId, that.statementId)
- && Objects.equals(success, that.success);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(canceled, closed, pending, statementId, success);
- }
-
- @Override
- public String toString() {
- return new ToStringer(QueryResponseStatus.class)
- .add("canceled", canceled)
- .add("closed", closed)
- .add("pending", pending)
- .add("statementId", statementId)
- .add("success", success)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
deleted file mode 100755
index c54d199d3..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
+++ /dev/null
@@ -1,61 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.dashboards;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class SuccessStatus {
- /**
- * The token to poll for result asynchronously Example:
- * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
- */
- @JsonProperty("data_token")
- private String dataToken;
-
- /** Whether the query result is truncated (either by byte limit or row limit) */
- @JsonProperty("truncated")
- private Boolean truncated;
-
- public SuccessStatus setDataToken(String dataToken) {
- this.dataToken = dataToken;
- return this;
- }
-
- public String getDataToken() {
- return dataToken;
- }
-
- public SuccessStatus setTruncated(Boolean truncated) {
- this.truncated = truncated;
- return this;
- }
-
- public Boolean getTruncated() {
- return truncated;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- SuccessStatus that = (SuccessStatus) o;
- return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(dataToken, truncated);
- }
-
- @Override
- public String toString() {
- return new ToStringer(SuccessStatus.class)
- .add("dataToken", dataToken)
- .add("truncated", truncated)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java
similarity index 96%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java
index d58ef38df..a05d27a4a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseCatalogRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
similarity index 96%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
index 2aa9d2a71..d7da58737 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseInstanceRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
new file mode 100755
index 000000000..b3eef0fb3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateDatabaseTableRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Create a Database Table */
+@Generated
+public class CreateDatabaseTableRequest {
+ /** Next field marker: 13 */
+ @JsonProperty("table")
+ private DatabaseTable table;
+
+ public CreateDatabaseTableRequest setTable(DatabaseTable table) {
+ this.table = table;
+ return this;
+ }
+
+ public DatabaseTable getTable() {
+ return table;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDatabaseTableRequest that = (CreateDatabaseTableRequest) o;
+ return Objects.equals(table, that.table);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(table);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDatabaseTableRequest.class).add("table", table).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
similarity index 94%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
index 2c1ea4700..02070ad39 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSyncedDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/CreateSyncedDatabaseTableRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
@@ -10,7 +10,7 @@
/** Create a Synced Database Table */
@Generated
public class CreateSyncedDatabaseTableRequest {
- /** Next field marker: 10 */
+ /** Next field marker: 12 */
@JsonProperty("synced_table")
private SyncedDatabaseTable syncedTable;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
similarity index 77%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
index c91d638d3..1d452de83 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
@@ -1,5 +1,5 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
@@ -9,18 +9,18 @@
/** Database Instances provide access to a database via REST API or direct SQL. */
@Generated
-public class DatabaseInstancesAPI {
- private static final Logger LOG = LoggerFactory.getLogger(DatabaseInstancesAPI.class);
+public class DatabaseAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DatabaseAPI.class);
- private final DatabaseInstancesService impl;
+ private final DatabaseService impl;
/** Regular-use constructor */
- public DatabaseInstancesAPI(ApiClient apiClient) {
- impl = new DatabaseInstancesImpl(apiClient);
+ public DatabaseAPI(ApiClient apiClient) {
+ impl = new DatabaseImpl(apiClient);
}
/** Constructor for mocks */
- public DatabaseInstancesAPI(DatabaseInstancesService mock) {
+ public DatabaseAPI(DatabaseService mock) {
impl = mock;
}
@@ -43,6 +43,15 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req
return impl.createDatabaseInstance(request);
}
+ public DatabaseTable createDatabaseTable(DatabaseTable table) {
+ return createDatabaseTable(new CreateDatabaseTableRequest().setTable(table));
+ }
+
+ /** Create a Database Table. */
+ public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) {
+ return impl.createDatabaseTable(request);
+ }
+
public SyncedDatabaseTable createSyncedDatabaseTable(SyncedDatabaseTable syncedTable) {
return createSyncedDatabaseTable(
new CreateSyncedDatabaseTableRequest().setSyncedTable(syncedTable));
@@ -71,6 +80,15 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) {
impl.deleteDatabaseInstance(request);
}
+ public void deleteDatabaseTable(String name) {
+ deleteDatabaseTable(new DeleteDatabaseTableRequest().setName(name));
+ }
+
+ /** Delete a Database Table. */
+ public void deleteDatabaseTable(DeleteDatabaseTableRequest request) {
+ impl.deleteDatabaseTable(request);
+ }
+
public void deleteSyncedDatabaseTable(String name) {
deleteSyncedDatabaseTable(new DeleteSyncedDatabaseTableRequest().setName(name));
}
@@ -85,6 +103,11 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque
return impl.findDatabaseInstanceByUid(request);
}
+ /** Generates a credential that can be used to access database instances. */
+ public DatabaseCredential generateDatabaseCredential(GenerateDatabaseCredentialRequest request) {
+ return impl.generateDatabaseCredential(request);
+ }
+
public DatabaseCatalog getDatabaseCatalog(String name) {
return getDatabaseCatalog(new GetDatabaseCatalogRequest().setName(name));
}
@@ -103,6 +126,15 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request)
return impl.getDatabaseInstance(request);
}
+ public DatabaseTable getDatabaseTable(String name) {
+ return getDatabaseTable(new GetDatabaseTableRequest().setName(name));
+ }
+
+ /** Get a Database Table. */
+ public DatabaseTable getDatabaseTable(GetDatabaseTableRequest request) {
+ return impl.getDatabaseTable(request);
+ }
+
public SyncedDatabaseTable getSyncedDatabaseTable(String name) {
return getSyncedDatabaseTable(new GetSyncedDatabaseTableRequest().setName(name));
}
@@ -141,7 +173,7 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req
return impl.updateDatabaseInstance(request);
}
- public DatabaseInstancesService impl() {
+ public DatabaseService impl() {
return impl;
}
}
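Usage note: a sketch of creating a Database Table through the renamed API, assuming the WorkspaceClient accessor follows the new service name (written here as database(); the accessor change itself is not part of this excerpt). The table and instance names are hypothetical:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.database.DatabaseTable;

public class CreateDatabaseTableExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    DatabaseTable table =
        new DatabaseTable()
            // Full three-part (catalog, schema, table) name of the table.
            .setName("main.default.orders")
            // Required when the table lives in a standard catalog (hypothetical instance name).
            .setDatabaseInstanceName("my-instance");
    // The convenience overload wraps the table in a CreateDatabaseTableRequest.
    DatabaseTable created = w.database().createDatabaseTable(table);
    System.out.println(created.getTableServingUrl());
  }
}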
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java
similarity index 98%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java
index b4d1c2d57..06049eb18 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseCatalog.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCatalog.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java
new file mode 100755
index 000000000..088ca1d41
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseCredential.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DatabaseCredential {
+ /** */
+ @JsonProperty("token")
+ private String token;
+
+ public DatabaseCredential setToken(String token) {
+ this.token = token;
+ return this;
+ }
+
+ public String getToken() {
+ return token;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseCredential that = (DatabaseCredential) o;
+ return Objects.equals(token, that.token);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(token);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseCredential.class).add("token", token).toString();
+ }
+}
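Usage note: a hedged sketch of generateDatabaseCredential; the fields of GenerateDatabaseCredentialRequest (for example, which instances the credential should cover) are defined outside this excerpt, so the request is left empty here, and the database() accessor name is the same assumption as above:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.database.DatabaseCredential;
import com.databricks.sdk.service.database.GenerateDatabaseCredentialRequest;

public class GenerateDatabaseCredentialExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Populate the request as required by your instance setup; its fields are not shown in
    // this excerpt, so it is left empty here.
    GenerateDatabaseCredentialRequest request = new GenerateDatabaseCredentialRequest();
    DatabaseCredential credential = w.database().generateDatabaseCredential(request);
    // The returned token is typically used as the password for a Postgres connection.
    System.out.println(credential.getToken() == null ? "no token returned" : "credential issued");
  }
}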
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
similarity index 74%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
index bea3e0708..b32623c1c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
@@ -1,5 +1,5 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
@@ -7,12 +7,12 @@
import com.databricks.sdk.support.Generated;
import java.io.IOException;
-/** Package-local implementation of DatabaseInstances */
+/** Package-local implementation of Database */
@Generated
-class DatabaseInstancesImpl implements DatabaseInstancesService {
+class DatabaseImpl implements DatabaseService {
private final ApiClient apiClient;
- public DatabaseInstancesImpl(ApiClient apiClient) {
+ public DatabaseImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
@@ -44,6 +44,20 @@ public DatabaseInstance createDatabaseInstance(CreateDatabaseInstanceRequest req
}
}
+ @Override
+ public DatabaseTable createDatabaseTable(CreateDatabaseTableRequest request) {
+ String path = "/api/2.0/database/tables";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getTable()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseTable.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public SyncedDatabaseTable createSyncedDatabaseTable(CreateSyncedDatabaseTableRequest request) {
String path = "/api/2.0/database/synced_tables";
@@ -84,6 +98,19 @@ public void deleteDatabaseInstance(DeleteDatabaseInstanceRequest request) {
}
}
+ @Override
+ public void deleteDatabaseTable(DeleteDatabaseTableRequest request) {
+ String path = String.format("/api/2.0/database/tables/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, DeleteDatabaseTableResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request) {
String path = String.format("/api/2.0/database/synced_tables/%s", request.getName());
@@ -110,6 +137,20 @@ public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidReque
}
}
+ @Override
+ public DatabaseCredential generateDatabaseCredential(GenerateDatabaseCredentialRequest request) {
+ String path = "/api/2.0/database/credentials";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseCredential.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest request) {
String path = String.format("/api/2.0/database/catalogs/%s", request.getName());
@@ -136,6 +177,19 @@ public DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest request)
}
}
+ @Override
+ public DatabaseTable getDatabaseTable(GetDatabaseTableRequest request) {
+ String path = String.format("/api/2.0/database/tables/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, DatabaseTable.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest request) {
String path = String.format("/api/2.0/database/synced_tables/%s", request.getName());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
similarity index 76%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
index ce72b3cba..045e7f04e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstance.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
@@ -12,15 +12,7 @@
*/
@Generated
public class DatabaseInstance {
- /** Password for admin user to create. If not provided, no user will be created. */
- @JsonProperty("admin_password")
- private String adminPassword;
-
- /** Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'. */
- @JsonProperty("admin_rolename")
- private String adminRolename;
-
- /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4". */
+ /** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8". */
@JsonProperty("capacity")
private String capacity;
@@ -56,24 +48,6 @@ public class DatabaseInstance {
@JsonProperty("uid")
private String uid;
- public DatabaseInstance setAdminPassword(String adminPassword) {
- this.adminPassword = adminPassword;
- return this;
- }
-
- public String getAdminPassword() {
- return adminPassword;
- }
-
- public DatabaseInstance setAdminRolename(String adminRolename) {
- this.adminRolename = adminRolename;
- return this;
- }
-
- public String getAdminRolename() {
- return adminRolename;
- }
-
public DatabaseInstance setCapacity(String capacity) {
this.capacity = capacity;
return this;
@@ -160,9 +134,7 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DatabaseInstance that = (DatabaseInstance) o;
- return Objects.equals(adminPassword, that.adminPassword)
- && Objects.equals(adminRolename, that.adminRolename)
- && Objects.equals(capacity, that.capacity)
+ return Objects.equals(capacity, that.capacity)
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(creator, that.creator)
&& Objects.equals(name, that.name)
@@ -176,24 +148,12 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- adminPassword,
- adminRolename,
- capacity,
- creationTime,
- creator,
- name,
- pgVersion,
- readWriteDns,
- state,
- stopped,
- uid);
+ capacity, creationTime, creator, name, pgVersion, readWriteDns, state, stopped, uid);
}
@Override
public String toString() {
return new ToStringer(DatabaseInstance.class)
- .add("adminPassword", adminPassword)
- .add("adminRolename", adminRolename)
.add("capacity", capacity)
.add("creationTime", creationTime)
.add("creator", creator)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java
similarity index 84%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java
index 909921d03..536812f91 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstanceState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstanceState.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
similarity index 76%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
index 9bf012769..09dcbff3f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DatabaseInstancesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
@@ -1,5 +1,5 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
@@ -11,7 +11,7 @@
* Evolving: this interface is under development. Method signatures may change.
*/
@Generated
-public interface DatabaseInstancesService {
+public interface DatabaseService {
/** Create a Database Catalog. */
DatabaseCatalog createDatabaseCatalog(CreateDatabaseCatalogRequest createDatabaseCatalogRequest);
@@ -19,6 +19,9 @@ public interface DatabaseInstancesService {
DatabaseInstance createDatabaseInstance(
CreateDatabaseInstanceRequest createDatabaseInstanceRequest);
+ /** Create a Database Table. */
+ DatabaseTable createDatabaseTable(CreateDatabaseTableRequest createDatabaseTableRequest);
+
/** Create a Synced Database Table. */
SyncedDatabaseTable createSyncedDatabaseTable(
CreateSyncedDatabaseTableRequest createSyncedDatabaseTableRequest);
@@ -29,6 +32,9 @@ SyncedDatabaseTable createSyncedDatabaseTable(
/** Delete a Database Instance. */
void deleteDatabaseInstance(DeleteDatabaseInstanceRequest deleteDatabaseInstanceRequest);
+ /** Delete a Database Table. */
+ void deleteDatabaseTable(DeleteDatabaseTableRequest deleteDatabaseTableRequest);
+
/** Delete a Synced Database Table. */
void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest);
@@ -36,12 +42,19 @@ SyncedDatabaseTable createSyncedDatabaseTable(
DatabaseInstance findDatabaseInstanceByUid(
FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest);
+ /** Generates a credential that can be used to access database instances. */
+ DatabaseCredential generateDatabaseCredential(
+ GenerateDatabaseCredentialRequest generateDatabaseCredentialRequest);
+
/** Get a Database Catalog. */
DatabaseCatalog getDatabaseCatalog(GetDatabaseCatalogRequest getDatabaseCatalogRequest);
/** Get a Database Instance. */
DatabaseInstance getDatabaseInstance(GetDatabaseInstanceRequest getDatabaseInstanceRequest);
+ /** Get a Database Table. */
+ DatabaseTable getDatabaseTable(GetDatabaseTableRequest getDatabaseTableRequest);
+
/** Get a Synced Database Table. */
SyncedDatabaseTable getSyncedDatabaseTable(
GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
new file mode 100755
index 000000000..5018d7b59
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
@@ -0,0 +1,108 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Next field marker: 13 */
+@Generated
+public class DatabaseTable {
+ /**
+ * Name of the target database instance. This is required when creating database tables in
+ * standard catalogs. This is optional when creating database tables in registered catalogs. If
+ * this field is specified when creating database tables in registered catalogs, the database
+ * instance name MUST match that of the registered catalog (or the request will be rejected).
+ */
+ @JsonProperty("database_instance_name")
+ private String databaseInstanceName;
+
+ /**
+ * Target Postgres database object (logical database) name for this table. This field is optional
+ * in all scenarios.
+ *
+ * When creating a table in a registered Postgres catalog, the target Postgres database name is
+ * inferred to be that of the registered catalog. If this field is specified in this scenario, the
+ * Postgres database name MUST match that of the registered catalog (or the request will be
+ * rejected).
+ *
+ * When creating a table in a standard catalog, the target database name is inferred to be that
+ * of the standard catalog. In this scenario, specifying this field will allow targeting an
+ * arbitrary postgres database. Note that this has implications for the
+ * `create_database_objects_is_missing` field in `spec`.
+ */
+ @JsonProperty("logical_database_name")
+ private String logicalDatabaseName;
+
+ /** Full three-part (catalog, schema, table) name of the table. */
+ @JsonProperty("name")
+ private String name;
+
+ /** Data serving REST API URL for this table */
+ @JsonProperty("table_serving_url")
+ private String tableServingUrl;
+
+ public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
+ this.databaseInstanceName = databaseInstanceName;
+ return this;
+ }
+
+ public String getDatabaseInstanceName() {
+ return databaseInstanceName;
+ }
+
+ public DatabaseTable setLogicalDatabaseName(String logicalDatabaseName) {
+ this.logicalDatabaseName = logicalDatabaseName;
+ return this;
+ }
+
+ public String getLogicalDatabaseName() {
+ return logicalDatabaseName;
+ }
+
+ public DatabaseTable setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DatabaseTable setTableServingUrl(String tableServingUrl) {
+ this.tableServingUrl = tableServingUrl;
+ return this;
+ }
+
+ public String getTableServingUrl() {
+ return tableServingUrl;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatabaseTable that = (DatabaseTable) o;
+ return Objects.equals(databaseInstanceName, that.databaseInstanceName)
+ && Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
+ && Objects.equals(name, that.name)
+ && Objects.equals(tableServingUrl, that.tableServingUrl);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DatabaseTable.class)
+ .add("databaseInstanceName", databaseInstanceName)
+ .add("logicalDatabaseName", logicalDatabaseName)
+ .add("name", name)
+ .add("tableServingUrl", tableServingUrl)
+ .toString();
+ }
+}
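The `DatabaseTable` javadoc above distinguishes standard from registered catalogs. As a short illustrative sketch (names are placeholders, not taken from this change), a table in a standard catalog carries an explicit instance name and may target a specific Postgres database, while a table in a registered catalog can leave both fields unset:

```java
import com.databricks.sdk.service.database.DatabaseTable;

public class DatabaseTableSketch {
  public static void main(String[] args) {
    // Standard catalog: database_instance_name is required, and an explicit
    // logical (Postgres) database may be targeted.
    DatabaseTable standardCatalogTable =
        new DatabaseTable()
            .setName("standard_catalog.schema.table")
            .setDatabaseInstanceName("my-instance")
            .setLogicalDatabaseName("my_postgres_db");

    // Registered catalog: instance and logical database are inferred from the
    // registered catalog, so both fields can be omitted.
    DatabaseTable registeredCatalogTable =
        new DatabaseTable().setName("registered_catalog.schema.table");

    System.out.println(standardCatalogTable);
    System.out.println(registeredCatalogTable);
  }
}
```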
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java
similarity index 95%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java
index 3a455fea8..3ac34bf9e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java
similarity index 93%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java
index 17de1764a..718037444 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseCatalogResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseCatalogResponse.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java
similarity index 98%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java
index e043e1347..81ed118c6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java
similarity index 93%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java
index 4d96f2e05..09a2ec03e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteDatabaseInstanceResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseInstanceResponse.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java
new file mode 100755
index 000000000..a372f064a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a Database Table */
+@Generated
+public class DeleteDatabaseTableRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public DeleteDatabaseTableRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDatabaseTableRequest that = (DeleteDatabaseTableRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDatabaseTableRequest.class).add("name", name).toString();
+ }
+}
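A minimal usage sketch for the request type above: only `DeleteDatabaseTableRequest` and its response appear in this excerpt, so the `w.database().deleteDatabaseTable(...)` call path and the three-part name are assumptions for illustration.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.database.DeleteDatabaseTableRequest;

public class DeleteDatabaseTableSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Delete a Database Table by its full three-part name (placeholder).
    w.database()
        .deleteDatabaseTable(new DeleteDatabaseTableRequest().setName("main.default.orders"));
  }
}
```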
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java
similarity index 72%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java
index 5aefc388e..6e851748a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableResponse.java
@@ -1,13 +1,13 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.dashboards;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
@Generated
-public class GetPublishedDashboardEmbeddedResponse {
+public class DeleteDatabaseTableResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(GetPublishedDashboardEmbeddedResponse.class).toString();
+ return new ToStringer(DeleteDatabaseTableResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java
similarity index 95%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java
index 506ab393b..41d1a388c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java
similarity index 93%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java
index 147f31d48..6649e3cfc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteSyncedDatabaseTableResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableResponse.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java
similarity index 96%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java
index 894cb8153..180eb8971 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FindDatabaseInstanceByUidRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.catalog;
+package com.databricks.sdk.service.database;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java
new file mode 100755
index 000000000..ba727372f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Generates a credential that can be used to access database instances */
+@Generated
+public class GenerateDatabaseCredentialRequest {
+ /** Instances to which the token will be scoped. */
+ @JsonProperty("instance_names")
+ private Collection<String> instanceNames;
+ * Use of Files API may incur Databricks data transfer charges.
+ *
* [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
*/
@Generated
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
index b5103d010..791175943 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesService.java
@@ -21,6 +21,8 @@
* `enable_experimental_files_api_client = True` in your configuration profile or use the
* environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
*
+ * Use of Files API may incur Databricks data transfer charges.
+ *
* [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
*
* This is the high-level interface, that contains generated methods.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
index 634e2397a..7016a0673 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
@@ -75,7 +75,7 @@ public Group get(GetAccountGroupRequest request) {
 public Iterable<Group> list(ListAccountGroupsRequest request) {
- * List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix
- * which if specified, the response contains only artifacts with the specified prefix.
- */
- public ListLoggedModelArtifactsResponse listLoggedModelArtifacts(
- ListLoggedModelArtifactsRequest request) {
- return impl.listLoggedModelArtifacts(request);
- }
-
/**
* Log a batch of metrics/params/tags for a run.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
index c228b7e72..1b53bb69f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
@@ -169,38 +169,6 @@ public GetExperimentByNameResponse getByName(GetByNameRequest request) {
}
}
- @Override
- public GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload(
- GetCredentialsForTraceDataDownloadRequest request) {
- String path =
- String.format(
- "/api/2.0/mlflow/traces/%s/credentials-for-data-download", request.getRequestId());
- try {
- Request req = new Request("GET", path);
- ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
- return apiClient.execute(req, GetCredentialsForTraceDataDownloadResponse.class);
- } catch (IOException e) {
- throw new DatabricksException("IO error: " + e.getMessage(), e);
- }
- }
-
- @Override
- public GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload(
- GetCredentialsForTraceDataUploadRequest request) {
- String path =
- String.format(
- "/api/2.0/mlflow/traces/%s/credentials-for-data-upload", request.getRequestId());
- try {
- Request req = new Request("GET", path);
- ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
- return apiClient.execute(req, GetCredentialsForTraceDataUploadResponse.class);
- } catch (IOException e) {
- throw new DatabricksException("IO error: " + e.getMessage(), e);
- }
- }
-
@Override
public GetExperimentResponse getExperiment(GetExperimentRequest request) {
String path = "/api/2.0/mlflow/experiments/get";
@@ -308,22 +276,6 @@ public ListExperimentsResponse listExperiments(ListExperimentsRequest request) {
}
}
- @Override
- public ListLoggedModelArtifactsResponse listLoggedModelArtifacts(
- ListLoggedModelArtifactsRequest request) {
- String path =
- String.format(
- "/api/2.0/mlflow/logged-models/%s/artifacts/directories", request.getModelId());
- try {
- Request req = new Request("GET", path);
- ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
- return apiClient.execute(req, ListLoggedModelArtifactsResponse.class);
- } catch (IOException e) {
- throw new DatabricksException("IO error: " + e.getMessage(), e);
- }
- }
-
@Override
public void logBatch(LogBatch request) {
String path = "/api/2.0/mlflow/runs/log-batch";
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
index abafed87e..7613522e1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
@@ -96,14 +96,6 @@ FinalizeLoggedModelResponse finalizeLoggedModel(
*/
GetExperimentByNameResponse getByName(GetByNameRequest getByNameRequest);
- /** Get credentials to download trace data. */
- GetCredentialsForTraceDataDownloadResponse getCredentialsForTraceDataDownload(
- GetCredentialsForTraceDataDownloadRequest getCredentialsForTraceDataDownloadRequest);
-
- /** Get credentials to upload trace data. */
- GetCredentialsForTraceDataUploadResponse getCredentialsForTraceDataUpload(
- GetCredentialsForTraceDataUploadRequest getCredentialsForTraceDataUploadRequest);
-
/**
* Get an experiment.
*
@@ -166,15 +158,6 @@ ExperimentPermissions getPermissions(
*/
ListExperimentsResponse listExperiments(ListExperimentsRequest listExperimentsRequest);
- /**
- * List artifacts for a logged model.
- *
- * List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix
- * which if specified, the response contains only artifacts with the specified prefix.
- */
- ListLoggedModelArtifactsResponse listLoggedModelArtifacts(
- ListLoggedModelArtifactsRequest listLoggedModelArtifactsRequest);
-
/**
* Log a batch of metrics/params/tags for a run.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java
index e57f1bbae..3bb12c2f8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java
@@ -15,7 +15,7 @@ public class FinalizeLoggedModelRequest {
/**
* Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that
- * something went wrong when logging the model weights / agent code).
+ * something went wrong when logging the model weights / agent code.
*/
@JsonProperty("status")
private LoggedModelStatus status;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java
deleted file mode 100755
index 42aac217e..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadRequest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import java.util.Objects;
-
-/** Get credentials to download trace data */
-@Generated
-public class GetCredentialsForTraceDataDownloadRequest {
- /** The ID of the trace to fetch artifact download credentials for. */
- @JsonIgnore private String requestId;
-
- public GetCredentialsForTraceDataDownloadRequest setRequestId(String requestId) {
- this.requestId = requestId;
- return this;
- }
-
- public String getRequestId() {
- return requestId;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- GetCredentialsForTraceDataDownloadRequest that = (GetCredentialsForTraceDataDownloadRequest) o;
- return Objects.equals(requestId, that.requestId);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(requestId);
- }
-
- @Override
- public String toString() {
- return new ToStringer(GetCredentialsForTraceDataDownloadRequest.class)
- .add("requestId", requestId)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java
deleted file mode 100755
index 839e04921..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataDownloadResponse.java
+++ /dev/null
@@ -1,46 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class GetCredentialsForTraceDataDownloadResponse {
- /** The artifact download credentials for the specified trace data. */
- @JsonProperty("credential_info")
- private ArtifactCredentialInfo credentialInfo;
-
- public GetCredentialsForTraceDataDownloadResponse setCredentialInfo(
- ArtifactCredentialInfo credentialInfo) {
- this.credentialInfo = credentialInfo;
- return this;
- }
-
- public ArtifactCredentialInfo getCredentialInfo() {
- return credentialInfo;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- GetCredentialsForTraceDataDownloadResponse that =
- (GetCredentialsForTraceDataDownloadResponse) o;
- return Objects.equals(credentialInfo, that.credentialInfo);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(credentialInfo);
- }
-
- @Override
- public String toString() {
- return new ToStringer(GetCredentialsForTraceDataDownloadResponse.class)
- .add("credentialInfo", credentialInfo)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java
deleted file mode 100755
index e7c6d452c..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadRequest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import java.util.Objects;
-
-/** Get credentials to upload trace data */
-@Generated
-public class GetCredentialsForTraceDataUploadRequest {
- /** The ID of the trace to fetch artifact upload credentials for. */
- @JsonIgnore private String requestId;
-
- public GetCredentialsForTraceDataUploadRequest setRequestId(String requestId) {
- this.requestId = requestId;
- return this;
- }
-
- public String getRequestId() {
- return requestId;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- GetCredentialsForTraceDataUploadRequest that = (GetCredentialsForTraceDataUploadRequest) o;
- return Objects.equals(requestId, that.requestId);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(requestId);
- }
-
- @Override
- public String toString() {
- return new ToStringer(GetCredentialsForTraceDataUploadRequest.class)
- .add("requestId", requestId)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java
deleted file mode 100755
index 9dcaed06c..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetCredentialsForTraceDataUploadResponse.java
+++ /dev/null
@@ -1,45 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class GetCredentialsForTraceDataUploadResponse {
- /** The artifact upload credentials for the specified trace data. */
- @JsonProperty("credential_info")
- private ArtifactCredentialInfo credentialInfo;
-
- public GetCredentialsForTraceDataUploadResponse setCredentialInfo(
- ArtifactCredentialInfo credentialInfo) {
- this.credentialInfo = credentialInfo;
- return this;
- }
-
- public ArtifactCredentialInfo getCredentialInfo() {
- return credentialInfo;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- GetCredentialsForTraceDataUploadResponse that = (GetCredentialsForTraceDataUploadResponse) o;
- return Objects.equals(credentialInfo, that.credentialInfo);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(credentialInfo);
- }
-
- @Override
- public String toString() {
- return new ToStringer(GetCredentialsForTraceDataUploadResponse.class)
- .add("credentialInfo", credentialInfo)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java
deleted file mode 100755
index e94842c95..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsRequest.java
+++ /dev/null
@@ -1,83 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.QueryParam;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import java.util.Objects;
-
-/** List artifacts for a logged model */
-@Generated
-public class ListLoggedModelArtifactsRequest {
- /** Filter artifacts matching this path (a relative path from the root artifact directory). */
- @JsonIgnore
- @QueryParam("artifact_directory_path")
- private String artifactDirectoryPath;
-
- /** The ID of the logged model for which to list the artifacts. */
- @JsonIgnore private String modelId;
-
- /**
- * Token indicating the page of artifact results to fetch. `page_token` is not supported when
- * listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes.
- * Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes,
- * which supports pagination. See [List directory contents | Files
- * API](/api/workspace/files/listdirectorycontents).
- */
- @JsonIgnore
- @QueryParam("page_token")
- private String pageToken;
-
- public ListLoggedModelArtifactsRequest setArtifactDirectoryPath(String artifactDirectoryPath) {
- this.artifactDirectoryPath = artifactDirectoryPath;
- return this;
- }
-
- public String getArtifactDirectoryPath() {
- return artifactDirectoryPath;
- }
-
- public ListLoggedModelArtifactsRequest setModelId(String modelId) {
- this.modelId = modelId;
- return this;
- }
-
- public String getModelId() {
- return modelId;
- }
-
- public ListLoggedModelArtifactsRequest setPageToken(String pageToken) {
- this.pageToken = pageToken;
- return this;
- }
-
- public String getPageToken() {
- return pageToken;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- ListLoggedModelArtifactsRequest that = (ListLoggedModelArtifactsRequest) o;
- return Objects.equals(artifactDirectoryPath, that.artifactDirectoryPath)
- && Objects.equals(modelId, that.modelId)
- && Objects.equals(pageToken, that.pageToken);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(artifactDirectoryPath, modelId, pageToken);
- }
-
- @Override
- public String toString() {
- return new ToStringer(ListLoggedModelArtifactsRequest.class)
- .add("artifactDirectoryPath", artifactDirectoryPath)
- .add("modelId", modelId)
- .add("pageToken", pageToken)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java
deleted file mode 100755
index 35e2dbe82..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListLoggedModelArtifactsResponse.java
+++ /dev/null
@@ -1,75 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.ml;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Collection;
-import java.util.Objects;
-
-@Generated
-public class ListLoggedModelArtifactsResponse {
- /** File location and metadata for artifacts. */
- @JsonProperty("files")
- private Collection<FileInfo> files;
- * Model inputs to the Run.
- */
+ /** Model inputs to the Run. */
@JsonProperty("model_inputs")
 private Collection<ModelInput> modelInputs;
- * Deletes a pipeline.
+ * Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the
+ * pipeline and its tables. You cannot undo this action.
*/
public void delete(DeletePipelineRequest request) {
impl.delete(request);
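Given that deletion is now documented as permanent, here is a minimal sketch of the call path. The `w.pipelines()` accessor and the placeholder pipeline ID are assumptions for illustration; only the `DeletePipelineRequest` form shown in this hunk is relied on.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.pipelines.DeletePipelineRequest;

public class DeletePipelineSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Permanently stops and removes the pipeline and its tables; cannot be undone.
    w.pipelines().delete(new DeletePipelineRequest().setPipelineId("1234-5678-abcd"));
  }
}
```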
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
index 332eabdcf..59f5b9f3e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
@@ -35,7 +35,8 @@ public interface PipelinesService {
/**
* Delete a pipeline.
*
- * Deletes a pipeline.
+ * Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the
+ * pipeline and its tables. You cannot undo this action.
*/
void delete(DeletePipelineRequest deletePipelineRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
new file mode 100755
index 000000000..5b14da636
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AnomalyDetectionConfig {
+ /** Run id of the last run of the workflow */
+ @JsonProperty("last_run_id")
+ private String lastRunId;
+
+ /** The status of the last run of the workflow. */
+ @JsonProperty("latest_run_status")
+ private AnomalyDetectionRunStatus latestRunStatus;
+
+ public AnomalyDetectionConfig setLastRunId(String lastRunId) {
+ this.lastRunId = lastRunId;
+ return this;
+ }
+
+ public String getLastRunId() {
+ return lastRunId;
+ }
+
+ public AnomalyDetectionConfig setLatestRunStatus(AnomalyDetectionRunStatus latestRunStatus) {
+ this.latestRunStatus = latestRunStatus;
+ return this;
+ }
+
+ public AnomalyDetectionRunStatus getLatestRunStatus() {
+ return latestRunStatus;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AnomalyDetectionConfig that = (AnomalyDetectionConfig) o;
+ return Objects.equals(lastRunId, that.lastRunId)
+ && Objects.equals(latestRunStatus, that.latestRunStatus);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(lastRunId, latestRunStatus);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AnomalyDetectionConfig.class)
+ .add("lastRunId", lastRunId)
+ .add("latestRunStatus", latestRunStatus)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java
new file mode 100755
index 000000000..8de4b6bb6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java
@@ -0,0 +1,18 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+
+/** Status of Anomaly Detection Job Run */
+@Generated
+public enum AnomalyDetectionRunStatus {
+ ANOMALY_DETECTION_RUN_STATUS_CANCELED,
+ ANOMALY_DETECTION_RUN_STATUS_FAILED,
+ ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED,
+ ANOMALY_DETECTION_RUN_STATUS_PENDING,
+ ANOMALY_DETECTION_RUN_STATUS_RUNNING,
+ ANOMALY_DETECTION_RUN_STATUS_SUCCESS,
+ ANOMALY_DETECTION_RUN_STATUS_UNKNOWN,
+ ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java
new file mode 100755
index 000000000..f7ea1c964
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Create a quality monitor */
+@Generated
+public class CreateQualityMonitorRequest {
+ /** */
+ @JsonProperty("quality_monitor")
+ private QualityMonitor qualityMonitor;
+
+ public CreateQualityMonitorRequest setQualityMonitor(QualityMonitor qualityMonitor) {
+ this.qualityMonitor = qualityMonitor;
+ return this;
+ }
+
+ public QualityMonitor getQualityMonitor() {
+ return qualityMonitor;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateQualityMonitorRequest that = (CreateQualityMonitorRequest) o;
+ return Objects.equals(qualityMonitor, that.qualityMonitor);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(qualityMonitor);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateQualityMonitorRequest.class)
+ .add("qualityMonitor", qualityMonitor)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java
new file mode 100755
index 000000000..761677c05
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java
@@ -0,0 +1,57 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete a quality monitor */
+@Generated
+public class DeleteQualityMonitorRequest {
+ /** The uuid of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: schema. */
+ @JsonIgnore private String objectType;
+
+ public DeleteQualityMonitorRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public DeleteQualityMonitorRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteQualityMonitorRequest that = (DeleteQualityMonitorRequest) o;
+ return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteQualityMonitorRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java
similarity index 67%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java
index 8714d62a6..8d3d5dd8c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorResponse.java
@@ -1,17 +1,13 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.dashboards;
+package com.databricks.sdk.service.qualitymonitorv2;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
-/**
- * Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm
- * right now.
- */
@Generated
-public class Empty {
+public class DeleteQualityMonitorResponse {
@Override
public boolean equals(Object o) {
@@ -27,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(Empty.class).toString();
+ return new ToStringer(DeleteQualityMonitorResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java
new file mode 100755
index 000000000..7575721ec
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/GetQualityMonitorRequest.java
@@ -0,0 +1,57 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Read a quality monitor */
+@Generated
+public class GetQualityMonitorRequest {
+ /** The uuid of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: schema. */
+ @JsonIgnore private String objectType;
+
+ public GetQualityMonitorRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public GetQualityMonitorRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetQualityMonitorRequest that = (GetQualityMonitorRequest) o;
+ return Objects.equals(objectId, that.objectId) && Objects.equals(objectType, that.objectType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetQualityMonitorRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java
new file mode 100755
index 000000000..db6234b9e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorRequest.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** List quality monitors */
+@Generated
+public class ListQualityMonitorRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListQualityMonitorRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListQualityMonitorRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListQualityMonitorRequest that = (ListQualityMonitorRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListQualityMonitorRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
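A hedged sketch of manual pagination over the list endpoint using the `page_size`/`page_token` fields above and the `next_page_token` returned by `ListQualityMonitorResponse`. The `w.qualityMonitorV2()` accessor, the `impl()` escape hatch, and the `getQualityMonitors`/`getNextPageToken` getters follow the generated bean pattern but are not all visible in this excerpt; note also that the API docs below mark listing as "(Unimplemented)".

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.qualitymonitorv2.ListQualityMonitorRequest;
import com.databricks.sdk.service.qualitymonitorv2.ListQualityMonitorResponse;
import com.databricks.sdk.service.qualitymonitorv2.QualityMonitor;

public class ListQualityMonitorsSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String pageToken = null;
    do {
      // Fetch one page of quality monitors at the service level.
      ListQualityMonitorResponse page =
          w.qualityMonitorV2()
              .impl()
              .listQualityMonitor(
                  new ListQualityMonitorRequest().setPageSize(100L).setPageToken(pageToken));
      if (page.getQualityMonitors() != null) {
        for (QualityMonitor monitor : page.getQualityMonitors()) {
          System.out.println(monitor);
        }
      }
      pageToken = page.getNextPageToken();
    } while (pageToken != null && !pageToken.isEmpty());
  }
}
```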
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java
new file mode 100755
index 000000000..8b332ed92
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/ListQualityMonitorResponse.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListQualityMonitorResponse {
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("quality_monitors")
+ private Collection<QualityMonitor> qualityMonitors;
+ * Create a quality monitor on UC object
+ */
+ public QualityMonitor createQualityMonitor(CreateQualityMonitorRequest request) {
+ return impl.createQualityMonitor(request);
+ }
+
+ public void deleteQualityMonitor(String objectType, String objectId) {
+ deleteQualityMonitor(
+ new DeleteQualityMonitorRequest().setObjectType(objectType).setObjectId(objectId));
+ }
+
+ /**
+ * Delete a quality monitor.
+ *
+ * Delete a quality monitor on UC object
+ */
+ public void deleteQualityMonitor(DeleteQualityMonitorRequest request) {
+ impl.deleteQualityMonitor(request);
+ }
+
+ public QualityMonitor getQualityMonitor(String objectType, String objectId) {
+ return getQualityMonitor(
+ new GetQualityMonitorRequest().setObjectType(objectType).setObjectId(objectId));
+ }
+
+ /**
+ * Read a quality monitor.
+ *
+ * Read a quality monitor on UC object
+ */
+ public QualityMonitor getQualityMonitor(GetQualityMonitorRequest request) {
+ return impl.getQualityMonitor(request);
+ }
+
+ /**
+ * List quality monitors.
+ *
+ * (Unimplemented) List quality monitors
+ */
+ public Iterable<QualityMonitor> listQualityMonitor(ListQualityMonitorRequest request) {
+ * (Unimplemented) Update a quality monitor on UC object
+ */
+ public QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest request) {
+ return impl.updateQualityMonitor(request);
+ }
+
+ public QualityMonitorV2Service impl() {
+ return impl;
+ }
+}
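A minimal sketch of the new API above for a UC schema: create, read back, and delete a quality monitor. The `w.qualityMonitorV2()` accessor and the `setObjectType`/`setObjectId` setters on `QualityMonitor` are assumptions (the `QualityMonitor` class itself is not shown in this excerpt); the UUID is a placeholder. The two-argument convenience overloads are taken from the code above.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.qualitymonitorv2.CreateQualityMonitorRequest;
import com.databricks.sdk.service.qualitymonitorv2.QualityMonitor;

public class QualityMonitorV2Sketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String schemaId = "00000000-0000-0000-0000-000000000000"; // placeholder schema UUID

    // Create a quality monitor on a UC schema.
    QualityMonitor created =
        w.qualityMonitorV2()
            .createQualityMonitor(
                new CreateQualityMonitorRequest()
                    .setQualityMonitor(
                        new QualityMonitor().setObjectType("schema").setObjectId(schemaId)));
    System.out.println(created);

    // Read it back and then delete it via the convenience overloads.
    QualityMonitor fetched = w.qualityMonitorV2().getQualityMonitor("schema", schemaId);
    System.out.println(fetched);
    w.qualityMonitorV2().deleteQualityMonitor("schema", schemaId);
  }
}
```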
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java
new file mode 100755
index 000000000..0880dbd86
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Impl.java
@@ -0,0 +1,91 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of QualityMonitorV2 */
+@Generated
+class QualityMonitorV2Impl implements QualityMonitorV2Service {
+ private final ApiClient apiClient;
+
+ public QualityMonitorV2Impl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public QualityMonitor createQualityMonitor(CreateQualityMonitorRequest request) {
+ String path = "/api/2.0/quality-monitors";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getQualityMonitor()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, QualityMonitor.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteQualityMonitor(DeleteQualityMonitorRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, DeleteQualityMonitorResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public QualityMonitor getQualityMonitor(GetQualityMonitorRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, QualityMonitor.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListQualityMonitorResponse listQualityMonitor(ListQualityMonitorRequest request) {
+ String path = "/api/2.0/quality-monitors";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListQualityMonitorResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/quality-monitors/%s/%s", request.getObjectType(), request.getObjectId());
+ try {
+ Request req = new Request("PUT", path, apiClient.serialize(request.getQualityMonitor()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, QualityMonitor.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java
new file mode 100755
index 000000000..762b01606
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/QualityMonitorV2Service.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Manage data quality of UC objects (currently supports `schema`)
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface QualityMonitorV2Service {
+ /**
+ * Create a quality monitor.
+ *
+ * Create a quality monitor on UC object
+ */
+ QualityMonitor createQualityMonitor(CreateQualityMonitorRequest createQualityMonitorRequest);
+
+ /**
+ * Delete a quality monitor.
+ *
+ * Delete a quality monitor on UC object
+ */
+ void deleteQualityMonitor(DeleteQualityMonitorRequest deleteQualityMonitorRequest);
+
+ /**
+ * Read a quality monitor.
+ *
+ * Read a quality monitor on UC object
+ */
+ QualityMonitor getQualityMonitor(GetQualityMonitorRequest getQualityMonitorRequest);
+
+ /**
+ * List quality monitors.
+ *
+ * (Unimplemented) List quality monitors
+ */
+ ListQualityMonitorResponse listQualityMonitor(
+ ListQualityMonitorRequest listQualityMonitorRequest);
+
+ /**
+ * Update a quality monitor.
+ *
+ * (Unimplemented) Update a quality monitor on UC object
+ */
+ QualityMonitor updateQualityMonitor(UpdateQualityMonitorRequest updateQualityMonitorRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java
new file mode 100755
index 000000000..0c7f4fb84
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/UpdateQualityMonitorRequest.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Update a quality monitor */
+@Generated
+public class UpdateQualityMonitorRequest {
+ /** The uuid of the request object. For example, schema id. */
+ @JsonIgnore private String objectId;
+
+ /** The type of the monitored object. Can be one of the following: schema. */
+ @JsonIgnore private String objectType;
+
+ /** */
+ @JsonProperty("quality_monitor")
+ private QualityMonitor qualityMonitor;
+
+ public UpdateQualityMonitorRequest setObjectId(String objectId) {
+ this.objectId = objectId;
+ return this;
+ }
+
+ public String getObjectId() {
+ return objectId;
+ }
+
+ public UpdateQualityMonitorRequest setObjectType(String objectType) {
+ this.objectType = objectType;
+ return this;
+ }
+
+ public String getObjectType() {
+ return objectType;
+ }
+
+ public UpdateQualityMonitorRequest setQualityMonitor(QualityMonitor qualityMonitor) {
+ this.qualityMonitor = qualityMonitor;
+ return this;
+ }
+
+ public QualityMonitor getQualityMonitor() {
+ return qualityMonitor;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateQualityMonitorRequest that = (UpdateQualityMonitorRequest) o;
+ return Objects.equals(objectId, that.objectId)
+ && Objects.equals(objectType, that.objectType)
+ && Objects.equals(qualityMonitor, that.qualityMonitor);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectId, objectType, qualityMonitor);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateQualityMonitorRequest.class)
+ .add("objectId", objectId)
+ .add("objectType", objectType)
+ .add("qualityMonitor", qualityMonitor)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java
index ea50df387..51630a687 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java
@@ -15,7 +15,7 @@
@Generated
public class CreatePrivateEndpointRule {
/**
- * Only used by private endpoints to customer-managed resources.
+ * Only used by private endpoints to customer-managed private endpoint services.
*
* Domain names of target private link service. When updating this field, the full list of
* target domain_names must be specified.
@@ -24,8 +24,14 @@ public class CreatePrivateEndpointRule {
 private Collection<String> domainNames;
 * The sub-resource type (group ID) of the target resource. Note that to connect to workspace
* root storage (root DBFS), you need two endpoints, one for blob and one for dfs.
@@ -37,6 +43,17 @@ public class CreatePrivateEndpointRule {
@JsonProperty("resource_id")
private String resourceId;
+ /**
+ * Only used by private endpoints towards AWS S3 service.
+ *
+ * The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket
+ * names must be in the same region as the NCC/endpoint service. When updating this field, we
+ * perform full update on this field. Please ensure a full list of desired resource_names is
+ * provided.
+ */
+ @JsonProperty("resource_names")
+ private Collection<String> resourceNames;
+ * The target AWS resource FQDNs accessible via the VPC endpoint service. When updating this
+ * field, we perform full update on this field. Please ensure a full list of desired domain_names
+ * is provided.
+ */
+ @JsonProperty("domain_names")
+ private Collection<String> domainNames;
+ * Update this field to activate/deactivate this private endpoint to allow egress access from
+ * serverless compute resources.
+ */
+ @JsonProperty("enabled")
+ private Boolean enabled;
+
+ /**
+ * The full target AWS endpoint service name that connects to the destination resources of the
+ * private endpoint.
+ */
+ @JsonProperty("endpoint_service")
+ private String endpointService;
+
+ /**
+ * The ID of a network connectivity configuration, which is the parent resource of this private
+ * endpoint rule object.
+ */
+ @JsonProperty("network_connectivity_config_id")
+ private String networkConnectivityConfigId;
+
+ /**
+ * Only used by private endpoints towards AWS S3 service.
+ *
+ * The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket
+ * names must be in the same region as the NCC/endpoint service. When updating this field, we
+ * perform full update on this field. Please ensure a full list of desired resource_names is
+ * provided.
+ */
+ @JsonProperty("resource_names")
+ private Collection<String> resourceNames;
 * Domain names of target private link service. When updating this field, the full list of
* target domain_names must be specified.
@@ -54,8 +54,7 @@ public class NccAzurePrivateEndpointRule {
private String endpointName;
/**
- * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer |
- * mysqlServer
+ * Only used by private endpoints to Azure first-party services.
*
* The sub-resource type (group ID) of the target resource. Note that to connect to workspace
* root storage (root DBFS), you need two endpoints, one for blob and one for dfs.
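The hunks above add AWS-oriented fields (notably `resource_names`) to `CreatePrivateEndpointRule` alongside the existing Azure fields. The following minimal sketch is illustrative only: it assumes the generated fluent setters (`setResourceNames`, `setNetworkConnectivityConfigId`, `setPrivateEndpointRule`) and that `CreatePrivateEndpointRuleRequest` wraps the rule body the same way the update request does later in this patch; the NCC ID and bucket names are placeholders.

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.CreatePrivateEndpointRule;
import com.databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest;
import com.databricks.sdk.service.settings.NccPrivateEndpointRule;
import java.util.Arrays;

public class CreateS3EndpointRuleSketch {
  public static void main(String[] args) {
    // Account-level client; credentials and the account ID come from the environment/config.
    AccountClient account = new AccountClient();

    // resource_names (new in this patch) lists the S3 buckets reached through the VPC endpoint.
    // Depending on the target, the other fields shown above (group_id, resource_id, domain_names)
    // may apply instead.
    CreatePrivateEndpointRule rule =
        new CreatePrivateEndpointRule()
            .setResourceNames(Arrays.asList("my-curated-bucket", "my-raw-bucket")); // placeholder buckets

    NccPrivateEndpointRule created =
        account
            .networkConnectivity()
            .createPrivateEndpointRule(
                new CreatePrivateEndpointRuleRequest()
                    .setNetworkConnectivityConfigId("ncc-1234") // placeholder NCC ID
                    .setPrivateEndpointRule(rule));

    System.out.println(created);
  }
}
```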
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java
index 4cb399bdf..2fd0903d6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java
@@ -11,10 +11,27 @@
/** Target rule controls the egress rules that are dedicated to specific resources. */
@Generated
public class NccEgressTargetRules {
+ /** AWS private endpoint rule controls the AWS private endpoint based egress rules. */
+ @JsonProperty("aws_private_endpoint_rules")
+ private Collection
+ * <p>Domain names of target private link service. When updating this field, the full list of
+ * target domain_names must be specified.
+ */
+ @JsonProperty("domain_names")
+ private Collection<String> domainNames;
+ * <p>Update this field to activate/deactivate this private endpoint to allow egress access from
+ * serverless compute resources.
+ */
+ @JsonProperty("enabled")
+ private Boolean enabled;
+
+ /** The name of the Azure private endpoint resource. */
+ @JsonProperty("endpoint_name")
+ private String endpointName;
+
+ /**
+ * The full target AWS endpoint service name that connects to the destination resources of the
+ * private endpoint.
+ */
+ @JsonProperty("endpoint_service")
+ private String endpointService;
+
+ /**
+ * Not used by customer-managed private endpoint services.
+ *
+ * The sub-resource type (group ID) of the target resource. Note that to connect to workspace
+ * root storage (root DBFS), you need two endpoints, one for blob and one for dfs.
+ */
+ @JsonProperty("group_id")
+ private String groupId;
+
+ /**
+ * The ID of a network connectivity configuration, which is the parent resource of this private
+ * endpoint rule object.
+ */
+ @JsonProperty("network_connectivity_config_id")
+ private String networkConnectivityConfigId;
+
+ /** The Azure resource ID of the target resource. */
+ @JsonProperty("resource_id")
+ private String resourceId;
+
+ /**
+ * Only used by private endpoints towards AWS S3 service.
+ *
+ * The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket
+ * names must be in the same region as the NCC/endpoint service. When updating this field, we
+ * perform full update on this field. Please ensure a full list of desired resource_names is
+ * provided.
+ */
+ @JsonProperty("resource_names")
+ private Collection<String> resourceNames;
* <p>[serverless private link]:
* https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
*/
- public NccAzurePrivateEndpointRule createPrivateEndpointRule(
+ public NccPrivateEndpointRule createPrivateEndpointRule(
CreatePrivateEndpointRuleRequest request) {
return impl.createPrivateEndpointRule(request);
}
@@ -105,7 +105,7 @@ public void deleteNetworkConnectivityConfiguration(
impl.deleteNetworkConnectivityConfiguration(request);
}
- public NccAzurePrivateEndpointRule deletePrivateEndpointRule(
+ public NccPrivateEndpointRule deletePrivateEndpointRule(
String networkConnectivityConfigId, String privateEndpointRuleId) {
return deletePrivateEndpointRule(
new DeletePrivateEndpointRuleRequest()
@@ -122,7 +122,7 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule(
* `deactivated` field is set to `true` and the private endpoint is not available to your
* serverless compute resources.
*/
- public NccAzurePrivateEndpointRule deletePrivateEndpointRule(
+ public NccPrivateEndpointRule deletePrivateEndpointRule(
DeletePrivateEndpointRuleRequest request) {
return impl.deletePrivateEndpointRule(request);
}
@@ -144,7 +144,7 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration(
return impl.getNetworkConnectivityConfiguration(request);
}
- public NccAzurePrivateEndpointRule getPrivateEndpointRule(
+ public NccPrivateEndpointRule getPrivateEndpointRule(
String networkConnectivityConfigId, String privateEndpointRuleId) {
return getPrivateEndpointRule(
new GetPrivateEndpointRuleRequest()
@@ -157,7 +157,7 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(
*
* Gets the private endpoint rule.
*/
- public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) {
+ public NccPrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) {
return impl.getPrivateEndpointRule(request);
}
@@ -181,7 +181,7 @@ public Iterable
* <p>Gets an array of private endpoint rules.
*/
- public Iterable<NccAzurePrivateEndpointRule> listPrivateEndpointRules(
* <p>Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed
* resources is allowed to be updated.
*/
- public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
- UpdateNccAzurePrivateEndpointRulePublicRequest request) {
- return impl.updateNccAzurePrivateEndpointRulePublic(request);
+ public NccPrivateEndpointRule updatePrivateEndpointRule(
+ UpdateNccPrivateEndpointRuleRequest request) {
+ return impl.updatePrivateEndpointRule(request);
}
public NetworkConnectivityService impl() {
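Because every rule-level method of `NetworkConnectivityAPI` now returns the provider-neutral `NccPrivateEndpointRule`, callers can handle Azure and AWS rules uniformly. A sketch under assumptions: the generated getters (`getGroupId`, `getResourceId`, `getEndpointService`, `getResourceNames`) follow the usual naming, `ListPrivateEndpointRulesRequest` exposes `setNetworkConnectivityConfigId`, and the NCC ID is a placeholder.

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.ListPrivateEndpointRulesRequest;
import com.databricks.sdk.service.settings.NccPrivateEndpointRule;

public class ListEndpointRulesSketch {
  public static void main(String[] args) {
    AccountClient account = new AccountClient();

    // The paginated wrapper now yields NccPrivateEndpointRule instead of NccAzurePrivateEndpointRule.
    Iterable<NccPrivateEndpointRule> rules =
        account
            .networkConnectivity()
            .listPrivateEndpointRules(
                new ListPrivateEndpointRulesRequest().setNetworkConnectivityConfigId("ncc-1234"));

    for (NccPrivateEndpointRule rule : rules) {
      if (rule.getGroupId() != null || rule.getResourceId() != null) {
        // Azure-style rule: identified by the target resource and its sub-resource type.
        System.out.println("Azure rule -> " + rule.getResourceId() + " (" + rule.getGroupId() + ")");
      } else {
        // AWS-style rule: identified by the endpoint service plus bucket/domain names.
        System.out.println("AWS rule -> " + rule.getEndpointService() + " " + rule.getResourceNames());
      }
    }
  }
}
```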
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java
index 6c03595d4..316184e01 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java
@@ -10,7 +10,9 @@
/** Properties of the new network connectivity configuration. */
@Generated
public class NetworkConnectivityConfiguration {
- /** The Databricks account ID that hosts the credential. */
+ /**
+ * Your Databricks account ID. You can find your account ID in your Databricks accounts console.
+ */
@JsonProperty("account_id")
private String accountId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
index 16b4dd419..4bd996e8a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
@@ -35,7 +35,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration(
}
@Override
- public NccAzurePrivateEndpointRule createPrivateEndpointRule(
+ public NccPrivateEndpointRule createPrivateEndpointRule(
CreatePrivateEndpointRuleRequest request) {
String path =
String.format(
@@ -47,7 +47,7 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule(
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, NccAzurePrivateEndpointRule.class);
+ return apiClient.execute(req, NccPrivateEndpointRule.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -71,7 +71,7 @@ public void deleteNetworkConnectivityConfiguration(
}
@Override
- public NccAzurePrivateEndpointRule deletePrivateEndpointRule(
+ public NccPrivateEndpointRule deletePrivateEndpointRule(
DeletePrivateEndpointRuleRequest request) {
String path =
String.format(
@@ -83,7 +83,7 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule(
Request req = new Request("DELETE", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, NccAzurePrivateEndpointRule.class);
+ return apiClient.execute(req, NccPrivateEndpointRule.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -107,7 +107,7 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration(
}
@Override
- public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) {
+ public NccPrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRuleRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s",
@@ -118,7 +118,7 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRule
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, NccAzurePrivateEndpointRule.class);
+ return apiClient.execute(req, NccPrivateEndpointRule.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
@@ -141,7 +141,7 @@ public ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConf
}
@Override
- public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
+ public ListPrivateEndpointRulesResponse listPrivateEndpointRules(
ListPrivateEndpointRulesRequest request) {
String path =
String.format(
@@ -151,15 +151,15 @@ public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
Request req = new Request("GET", path);
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
- return apiClient.execute(req, ListNccAzurePrivateEndpointRulesResponse.class);
+ return apiClient.execute(req, ListPrivateEndpointRulesResponse.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
@Override
- public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
- UpdateNccAzurePrivateEndpointRulePublicRequest request) {
+ public NccPrivateEndpointRule updatePrivateEndpointRule(
+ UpdateNccPrivateEndpointRuleRequest request) {
String path =
String.format(
"/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s",
@@ -172,7 +172,7 @@ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, NccAzurePrivateEndpointRule.class);
+ return apiClient.execute(req, NccPrivateEndpointRule.class);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
index 55abae74d..eeaa80e88 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
@@ -53,7 +53,7 @@ NetworkConnectivityConfiguration createNetworkConnectivityConfiguration(
* [serverless private link]:
* https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
*/
- NccAzurePrivateEndpointRule createPrivateEndpointRule(
+ NccPrivateEndpointRule createPrivateEndpointRule(
CreatePrivateEndpointRuleRequest createPrivateEndpointRuleRequest);
/**
@@ -73,7 +73,7 @@ void deleteNetworkConnectivityConfiguration(
* `deactivated` field is set to `true` and the private endpoint is not available to your
* serverless compute resources.
*/
- NccAzurePrivateEndpointRule deletePrivateEndpointRule(
+ NccPrivateEndpointRule deletePrivateEndpointRule(
DeletePrivateEndpointRuleRequest deletePrivateEndpointRuleRequest);
/**
@@ -89,7 +89,7 @@ NetworkConnectivityConfiguration getNetworkConnectivityConfiguration(
*
* Gets the private endpoint rule.
*/
- NccAzurePrivateEndpointRule getPrivateEndpointRule(
+ NccPrivateEndpointRule getPrivateEndpointRule(
GetPrivateEndpointRuleRequest getPrivateEndpointRuleRequest);
/**
@@ -105,7 +105,7 @@ ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurati
*
* Gets an array of private endpoint rules.
*/
- ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
+ ListPrivateEndpointRulesResponse listPrivateEndpointRules(
ListPrivateEndpointRulesRequest listPrivateEndpointRulesRequest);
/**
@@ -114,7 +114,6 @@ ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
* Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed
* resources is allowed to be updated.
*/
- NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
- UpdateNccAzurePrivateEndpointRulePublicRequest
- updateNccAzurePrivateEndpointRulePublicRequest);
+ NccPrivateEndpointRule updatePrivateEndpointRule(
+ UpdateNccPrivateEndpointRuleRequest updateNccPrivateEndpointRuleRequest);
}
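The interface changes are source-incompatible: any test double or custom implementation of `NetworkConnectivityService` must switch from `NccAzurePrivateEndpointRule` to `NccPrivateEndpointRule` and pick up the `updatePrivateEndpointRule` rename. A minimal Mockito-based sketch, assuming Mockito is on the test classpath and that `NccPrivateEndpointRule` has the generated `setEnabled`/`getEnabled` accessors for the `enabled` field added above.

```java
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.databricks.sdk.service.settings.GetPrivateEndpointRuleRequest;
import com.databricks.sdk.service.settings.NccPrivateEndpointRule;
import com.databricks.sdk.service.settings.NetworkConnectivityService;

public class NetworkConnectivityServiceStubSketch {
  public static void main(String[] args) {
    // Stubs must now return the provider-neutral rule type.
    NetworkConnectivityService service = mock(NetworkConnectivityService.class);
    when(service.getPrivateEndpointRule(any(GetPrivateEndpointRuleRequest.class)))
        .thenReturn(new NccPrivateEndpointRule().setEnabled(true));

    NccPrivateEndpointRule rule = service.getPrivateEndpointRule(new GetPrivateEndpointRuleRequest());
    if (!Boolean.TRUE.equals(rule.getEnabled())) {
      throw new AssertionError("expected the stubbed rule to be enabled");
    }
  }
}
```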
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java
similarity index 82%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java
index 666de476e..7d38074a6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccPrivateEndpointRuleRequest.java
@@ -11,8 +11,11 @@
/** Update a private endpoint rule */
@Generated
-public class UpdateNccAzurePrivateEndpointRulePublicRequest {
- /** Your Network Connectivity Configuration ID. */
+public class UpdateNccPrivateEndpointRuleRequest {
+ /**
+ * The ID of a network connectivity configuration, which is the parent resource of this private
+ * endpoint rule object.
+ */
@JsonIgnore private String networkConnectivityConfigId;
/**
@@ -36,7 +39,7 @@ public class UpdateNccAzurePrivateEndpointRulePublicRequest {
@QueryParam("update_mask")
private String updateMask;
- public UpdateNccAzurePrivateEndpointRulePublicRequest setNetworkConnectivityConfigId(
+ public UpdateNccPrivateEndpointRuleRequest setNetworkConnectivityConfigId(
String networkConnectivityConfigId) {
this.networkConnectivityConfigId = networkConnectivityConfigId;
return this;
@@ -46,7 +49,7 @@ public String getNetworkConnectivityConfigId() {
return networkConnectivityConfigId;
}
- public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRule(
+ public UpdateNccPrivateEndpointRuleRequest setPrivateEndpointRule(
UpdatePrivateEndpointRule privateEndpointRule) {
this.privateEndpointRule = privateEndpointRule;
return this;
@@ -56,7 +59,7 @@ public UpdatePrivateEndpointRule getPrivateEndpointRule() {
return privateEndpointRule;
}
- public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRuleId(
+ public UpdateNccPrivateEndpointRuleRequest setPrivateEndpointRuleId(
String privateEndpointRuleId) {
this.privateEndpointRuleId = privateEndpointRuleId;
return this;
@@ -66,7 +69,7 @@ public String getPrivateEndpointRuleId() {
return privateEndpointRuleId;
}
- public UpdateNccAzurePrivateEndpointRulePublicRequest setUpdateMask(String updateMask) {
+ public UpdateNccPrivateEndpointRuleRequest setUpdateMask(String updateMask) {
this.updateMask = updateMask;
return this;
}
@@ -79,8 +82,7 @@ public String getUpdateMask() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- UpdateNccAzurePrivateEndpointRulePublicRequest that =
- (UpdateNccAzurePrivateEndpointRulePublicRequest) o;
+ UpdateNccPrivateEndpointRuleRequest that = (UpdateNccPrivateEndpointRuleRequest) o;
return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
&& Objects.equals(privateEndpointRule, that.privateEndpointRule)
&& Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId)
@@ -95,7 +97,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(UpdateNccAzurePrivateEndpointRulePublicRequest.class)
+ return new ToStringer(UpdateNccPrivateEndpointRuleRequest.class)
.add("networkConnectivityConfigId", networkConnectivityConfigId)
.add("privateEndpointRule", privateEndpointRule)
.add("privateEndpointRuleId", privateEndpointRuleId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java
index f7df95078..94975cd2e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java
@@ -15,7 +15,7 @@
@Generated
public class UpdatePrivateEndpointRule {
/**
- * Only used by private endpoints to customer-managed resources.
+ * Only used by private endpoints to customer-managed private endpoint services.
*
* Domain names of target private link service. When updating this field, the full list of
* target domain_names must be specified.
@@ -23,6 +23,26 @@ public class UpdatePrivateEndpointRule {
@JsonProperty("domain_names")
private Collection<String> domainNames;
+ * <p>Update this field to activate/deactivate this private endpoint to allow egress access from
+ * serverless compute resources.
+ */
+ @JsonProperty("enabled")
+ private Boolean enabled;
+
+ /**
+ * Only used by private endpoints towards AWS S3 service.
+ *
+ * The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket
+ * names must be in the same region as the NCC/endpoint service. When updating this field, we
+ * perform full update on this field. Please ensure a full list of desired resource_names is
+ * provided.
+ */
+ @JsonProperty("resource_names")
+ private Collection<String> resourceNames;