From 74e7eadc6cde28a3d4dd4c9bd4d82e99ddee45f7 Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Mon, 26 Aug 2024 13:57:11 +0200 Subject: [PATCH 01/10] initial code, endpoint, routes, signatures --- .../model/dto/AdditionalDataItemDTO.scala | 2 +- .../model/dto/AdditionalDataPatchDTO.scala | 2 +- .../dto/AdditionalDataPatchItemDTO.scala | 30 +++++++++++++++++++ .../controller/PartitioningController.scala | 13 ++++---- .../PartitioningControllerImpl.scala | 6 ++++ .../absa/atum/server/api/http/Endpoints.scala | 11 +++++++ .../co/absa/atum/server/api/http/Routes.scala | 13 +++++++- .../repository/PartitioningRepository.scala | 14 ++++----- .../PartitioningRepositoryImpl.scala | 20 ++++--------- .../api/service/PartitioningService.scala | 15 ++++------ .../api/service/PartitioningServiceImpl.scala | 7 +++++ 11 files changed, 91 insertions(+), 42 deletions(-) create mode 100644 model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchItemDTO.scala diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataItemDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataItemDTO.scala index fa2993c53..2413d0c3a 100644 --- a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataItemDTO.scala +++ b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataItemDTO.scala @@ -20,7 +20,7 @@ import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} case class AdditionalDataItemDTO( - value: String, + value: Option[String], author: String ) diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala index 0a113ff48..ec9112009 100644 --- a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala +++ b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala @@ -20,7 +20,7 @@ import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} case class AdditionalDataPatchDTO( - data: Map[String, AdditionalDataItemDTO] + data: Map[String, AdditionalDataPatchItemDTO] ) object AdditionalDataPatchDTO { diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchItemDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchItemDTO.scala new file mode 100644 index 000000000..b8743945b --- /dev/null +++ b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchItemDTO.scala @@ -0,0 +1,30 @@ +/* + * Copyright 2021 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package za.co.absa.atum.model.dto + +import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} +import io.circe.{Decoder, Encoder} + +case class AdditionalDataPatchItemDTO( + value: String, + author: String +) + +object AdditionalDataPatchItemDTO { + implicit val encoderAdditionalDataPatchItem: Encoder[AdditionalDataPatchItemDTO] = deriveEncoder + implicit val decoderAdditionalDataPatchItem: Decoder[AdditionalDataPatchItemDTO] = deriveDecoder +} diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala index 2dfc53016..9b8cc2afa 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala @@ -16,13 +16,7 @@ package za.co.absa.atum.server.api.controller -import za.co.absa.atum.model.dto.{ - AdditionalDataSubmitDTO, - AtumContextDTO, - CheckpointDTO, - CheckpointQueryDTO, - PartitioningSubmitDTO -} +import za.co.absa.atum.model.dto._ import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} import zio.IO @@ -42,6 +36,11 @@ trait PartitioningController { additionalData: AdditionalDataSubmitDTO ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO]] + def patchPartitioningAdditionalDataV2( + partitioningId: Long, + additionalDataPatchDTO: AdditionalDataPatchDTO + ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataDTO]] + def getPartitioningCheckpointsV2( checkpointQueryDTO: CheckpointQueryDTO ): IO[ErrorResponse, MultiSuccessResponse[CheckpointDTO]] diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala index 6fe9efff9..0cb7de4f8 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala @@ -77,6 +77,12 @@ class PartitioningControllerImpl(partitioningService: PartitioningService) ) } + override def patchPartitioningAdditionalDataV2( + partitioningId: Long, + additionalDataPatchDTO: AdditionalDataPatchDTO + ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataDTO]] = { + ??? 
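    // Illustrative sketch, not part of this commit: the stub is expected to delegate to the service
    // layer and wrap the result in a SingleSuccessResponse, mirroring the other controller methods:
    //   mapToSingleSuccessResponse(
    //     serviceCall[AdditionalDataDTO, AdditionalDataDTO](
    //       partitioningService.patchAdditionalData(partitioningId, additionalDataPatchDTO)
    //     )
    //   )
    // A later commit in this series fills the body in exactly this way.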
+ } } object PartitioningControllerImpl { diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala index 0b79fddf6..1ebba2ecb 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala @@ -78,6 +78,17 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[SingleSuccessResponse[AdditionalDataSubmitDTO]]) } + protected val patchPartitioningAdditionalDataEndpointV2 + : PublicEndpoint[(Long, AdditionalDataPatchDTO), ErrorResponse, SingleSuccessResponse[ + AdditionalDataDTO + ], Any] = { + apiV2.patch + .in(V2Paths.Partitionings / path[Long]("partitioningId") / V2Paths.AdditionalData) + .in(jsonBody[AdditionalDataPatchDTO]) + .out(statusCode(StatusCode.Ok)) + .out(jsonBody[SingleSuccessResponse[AdditionalDataDTO]]) + } + protected val getPartitioningCheckpointEndpointV2 : PublicEndpoint[(Long, UUID), ErrorResponse, SingleSuccessResponse[CheckpointV2DTO], Any] = { apiV2.get diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala index 3b20484a2..f8190c4f2 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala @@ -25,7 +25,7 @@ import sttp.tapir.server.http4s.ztapir.ZHttp4sServerInterpreter import sttp.tapir.server.interceptor.metrics.MetricsRequestInterceptor import sttp.tapir.swagger.bundle.SwaggerInterpreter import sttp.tapir.ztapir._ -import za.co.absa.atum.model.dto.{CheckpointDTO, CheckpointV2DTO} +import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataPatchDTO, CheckpointDTO, CheckpointV2DTO} import za.co.absa.atum.server.Constants.{SwaggerApiName, SwaggerApiVersion} import za.co.absa.atum.server.api.controller.{CheckpointController, FlowController, PartitioningController} import za.co.absa.atum.server.api.http.ApiPaths.V2Paths @@ -62,6 +62,16 @@ trait Routes extends Endpoints with ServerOptions { createOrUpdateAdditionalDataEndpointV2, PartitioningController.createOrUpdateAdditionalDataV2 ), + createServerEndpoint[ + (Long, AdditionalDataPatchDTO), + ErrorResponse, + SingleSuccessResponse[AdditionalDataDTO] + ]( + patchPartitioningAdditionalDataEndpointV2, + { case (partitioningId: Long, additionalDataPatchDTO: AdditionalDataPatchDTO) => + PartitioningController.patchPartitioningAdditionalDataV2(partitioningId, additionalDataPatchDTO) + } + ), createServerEndpoint[ (Long, UUID), ErrorResponse, @@ -89,6 +99,7 @@ trait Routes extends Endpoints with ServerOptions { createPartitioningEndpointV1, createPartitioningEndpointV2, createOrUpdateAdditionalDataEndpointV2, + patchPartitioningAdditionalDataEndpointV2, getPartitioningCheckpointsEndpointV2, getPartitioningCheckpointEndpointV2, getFlowCheckpointsEndpointV2 diff --git a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala index 52e7c6c02..deb486daf 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala @@ -16,14 +16,7 @@ package za.co.absa.atum.server.api.repository -import za.co.absa.atum.model.dto.{ - InitialAdditionalDataDTO, - AdditionalDataSubmitDTO, - CheckpointQueryDTO, - 
MeasureDTO, - PartitioningDTO, - PartitioningSubmitDTO -} +import za.co.absa.atum.model.dto._ import za.co.absa.atum.server.api.exception.DatabaseError import za.co.absa.atum.server.model.CheckpointFromDB import zio.IO @@ -39,5 +32,10 @@ trait PartitioningRepository { def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[DatabaseError, Unit] + def patchAdditionalData( + partitioningId: Long, + additionalData: AdditionalDataPatchDTO + ): IO[DatabaseError, AdditionalDataDTO] + def getPartitioningCheckpoints(checkpointQueryDTO: CheckpointQueryDTO): IO[DatabaseError, Seq[CheckpointFromDB]] } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala index 68ff69276..79e2e0092 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala @@ -16,22 +16,9 @@ package za.co.absa.atum.server.api.repository -import za.co.absa.atum.model.dto.{ - InitialAdditionalDataDTO, - AdditionalDataSubmitDTO, - CheckpointQueryDTO, - MeasureDTO, - PartitioningDTO, - PartitioningSubmitDTO -} +import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataPatchDTO, AdditionalDataSubmitDTO, CheckpointQueryDTO, InitialAdditionalDataDTO, MeasureDTO, PartitioningDTO, PartitioningSubmitDTO} import za.co.absa.atum.server.model.MeasureFromDB -import za.co.absa.atum.server.api.database.runs.functions.{ - CreateOrUpdateAdditionalData, - CreatePartitioningIfNotExists, - GetPartitioningAdditionalData, - GetPartitioningCheckpoints, - GetPartitioningMeasures -} +import za.co.absa.atum.server.api.database.runs.functions.{CreateOrUpdateAdditionalData, CreatePartitioningIfNotExists, GetPartitioningAdditionalData, GetPartitioningCheckpoints, GetPartitioningMeasures} import za.co.absa.atum.server.api.exception.DatabaseError import za.co.absa.atum.server.model.CheckpointFromDB import zio._ @@ -83,6 +70,9 @@ class PartitioningRepositoryImpl( ) } + override def patchAdditionalData(partitioningId: Long, additionalData: AdditionalDataPatchDTO): IO[DatabaseError, AdditionalDataDTO] = { + ??? 
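    // Illustrative sketch, not part of this commit: the intended implementation calls the
    // create_or_update_additional_data DB function and folds the returned rows into an
    // AdditionalDataDTO, roughly:
    //   dbMultipleResultCallWithAggregatedStatus(
    //     createOrUpdateAdditionalDataFn(CreateOrUpdateAdditionalDataArgs(partitioningId, additionalData)),
    //     "createOrUpdateAdditionalData"
    //   ).map(AdditionalDataItemFromDB.additionalDataFromDBItems)
    // A later commit in this series merges this stub into createOrUpdateAdditionalData and implements it this way.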
+ } } object PartitioningRepositoryImpl { diff --git a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala index 2dce04b51..0de58fca2 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala @@ -16,15 +16,7 @@ package za.co.absa.atum.server.api.service -import za.co.absa.atum.model.dto.{ - InitialAdditionalDataDTO, - AdditionalDataSubmitDTO, - CheckpointDTO, - CheckpointQueryDTO, - MeasureDTO, - PartitioningDTO, - PartitioningSubmitDTO -} +import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataPatchDTO, AdditionalDataSubmitDTO, CheckpointDTO, CheckpointQueryDTO, InitialAdditionalDataDTO, MeasureDTO, PartitioningDTO, PartitioningSubmitDTO} import za.co.absa.atum.server.api.exception.ServiceError import zio.IO import zio.macros.accessible @@ -39,5 +31,10 @@ trait PartitioningService { def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] + def patchAdditionalData( + partitioningId: Long, + additionalData: AdditionalDataPatchDTO + ): IO[ServiceError, AdditionalDataDTO] + def getPartitioningCheckpoints(checkpointQueryDTO: CheckpointQueryDTO): IO[ServiceError, Seq[CheckpointDTO]] } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala index eebb7a2ba..59d5f30ee 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala @@ -73,6 +73,13 @@ class PartitioningServiceImpl(partitioningRepository: PartitioningRepository) } yield checkpointDTOs } + + override def patchAdditionalData( + partitioningId: Long, + additionalData: AdditionalDataPatchDTO + ): IO[ServiceError, AdditionalDataDTO] = { + ??? 
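    // Illustrative sketch, not part of this commit: the service layer is a thin pass-through that
    // maps repository DatabaseError failures to ServiceError via repositoryCall, e.g.
    //   repositoryCall(
    //     partitioningRepository.createOrUpdateAdditionalData(partitioningId, additionalData),
    //     "createOrUpdateAdditionalData"
    //   )
    // which is how a later commit in this series implements it.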
+ } } object PartitioningServiceImpl { From f19acff1f96c2e0f8f63e612085eebfc99d08f31 Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Thu, 29 Aug 2024 17:14:51 +0200 Subject: [PATCH 02/10] patch endpoint functionality and tests --- ...5.16__create_or_update_additional_data.sql | 66 +++++++------- ...UpdateAdditionalDataIntegrationTests.scala | 40 ++++++--- .../model/dto/AdditionalDataPatchDTO.scala | 7 +- .../controller/PartitioningController.scala | 6 +- .../PartitioningControllerImpl.scala | 18 ++-- .../CreateOrUpdateAdditionalData.scala | 29 ++++-- .../absa/atum/server/api/http/Endpoints.scala | 17 ++-- .../co/absa/atum/server/api/http/Routes.scala | 10 +-- .../repository/PartitioningRepository.scala | 4 +- .../PartitioningRepositoryImpl.scala | 24 +++-- .../api/service/PartitioningService.scala | 2 +- .../api/service/PartitioningServiceImpl.scala | 17 ++-- .../model/AdditionalDataItemFromDB.scala | 21 ++++- .../za/co/absa/atum/server/api/TestData.scala | 8 ++ .../PartitioningControllerUnitTests.scala | 21 +++-- ...UpdateAdditionalDataIntegrationTests.scala | 18 ++-- .../WriteCheckpointV2IntegrationTests.scala | 4 +- ...PatchAdditionalDataEndpointUnitTests.scala | 89 +++++++++++++++++++ .../PartitioningRepositoryUnitTests.scala | 25 +++--- .../PartitioningServiceUnitTests.scala | 29 +++--- 20 files changed, 299 insertions(+), 156 deletions(-) create mode 100644 server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala diff --git a/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql b/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql index 8f48ae1d5..15469a06e 100644 --- a/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql +++ b/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql @@ -14,15 +14,17 @@ */ CREATE OR REPLACE FUNCTION runs.create_or_update_additional_data( - IN i_partitioning JSONB, - IN i_additional_data HSTORE, - IN i_by_user TEXT, - OUT status INTEGER, - OUT status_text TEXT, - OUT id_additional_data BIGINT -) RETURNS record AS + IN i_partitioning_id BIGINT, + IN i_additional_data HSTORE, + IN i_by_user TEXT, + OUT status INTEGER, + OUT status_text TEXT, + OUT o_ad_name TEXT, + OUT o_ad_value TEXT, + OUT o_ad_author TEXT +) RETURNS SETOF record AS $$ -------------------------------------------------------------------------------- + ------------------------------------------------------------------------------- -- -- Function: runs.create_or_update_additional_data(3) -- Adds the additional data for the input partitioning. If additional data of a given name already @@ -30,64 +32,62 @@ $$ -- additional data history table. 
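--
-- Example call (illustrative only; the partitioning id 42 and the key/value pairs are made-up values):
--   SELECT status, status_text, o_ad_name, o_ad_value, o_ad_author
--   FROM runs.create_or_update_additional_data(
--     42,
--     'PrimaryOwner => TechnicalManagerA, IsDatasetInDatalake => true'::hstore,
--     'MikeRusty'
--   );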
-- -- Parameters: --- i_partitioning - partitioning to add the additional data for +-- i_partitioning_id - id of partitioning to add the additional data for -- i_additional_data - sets of key/value pairs representing name and values of the additional data -- i_by_user - user behind the change (an author of AD records if there will be something to upsert) -- -- Returns: -- status - Status code -- status_text - Status text --- id_additional_data - id of the data added +-- ad_name - Name of the additional data +-- ad_value - Value of the additional data +-- ad_author - Author of the additional data -- -- Status codes: --- 11 - Additional data have been added --- 12 - Additional data have been upserted +-- 11 - Additional data have been updated, added or both -- 14 - No changes in additional data (this is when they already existed) -- 41 - Partitioning not found -- ------------------------------------------------------------------------------- DECLARE - _fk_partitioning BIGINT; - _records_updated BOOLEAN; + _records_updated BOOLEAN; BEGIN - _fk_partitioning := runs._get_id_partitioning(i_partitioning, true); - - IF _fk_partitioning IS NULL THEN + PERFORM 1 FROM runs.partitionings WHERE id_partitioning = i_partitioning_id; + IF NOT FOUND THEN status := 41; status_text := 'Partitioning not found'; + RETURN NEXT; RETURN; END IF; -- 1. (backup) get records that already exist but values differ, -- then insert them into AD history table and -- then update the actual AD table with new values - _records_updated := runs._update_existing_additional_data(_fk_partitioning, i_additional_data, i_by_user); + _records_updated := runs._update_existing_additional_data(i_partitioning_id, i_additional_data, i_by_user); -- 2. (insert) get records that do not not exist yet and insert it into ad table -- (their original rows were previously saved in step 1) INSERT INTO runs.additional_data (fk_partitioning, ad_name, ad_value, created_by) - SELECT _fk_partitioning, ad_input.key, ad_input.value, i_by_user + SELECT i_partitioning_id, ad_input.key, ad_input.value, i_by_user FROM each(i_additional_data) AS ad_input ON CONFLICT (fk_partitioning, ad_name) DO NOTHING; - IF _records_updated THEN - status := 12; - status_text := 'Additional data have been upserted'; + -- 3. 
return the updated additional data (all, not only updated/added records) + IF not _records_updated AND not found THEN + RETURN QUERY + SELECT 14, 'No changes in additional data', GPAD.ad_name, GPAD.ad_value, GPAD.ad_author + FROM runs.get_partitioning_additional_data(i_partitioning_id) AS GPAD; + RETURN; ELSE - IF found THEN - status := 11; - status_text := 'Additional data have been added'; - ELSE - status := 14; - status_text := 'No changes in additional data'; - END IF; + RETURN QUERY + SELECT 11, 'Additional data have been updated, added or both', GPAD.ad_name, GPAD.ad_value, GPAD.ad_author + FROM runs.get_partitioning_additional_data(i_partitioning_id) AS GPAD; + RETURN; END IF; - - RETURN; END; $$ LANGUAGE plpgsql VOLATILE SECURITY DEFINER; -ALTER FUNCTION runs.create_or_update_additional_data(JSONB, HSTORE, TEXT) OWNER TO atum_owner; -GRANT EXECUTE ON FUNCTION runs.create_or_update_additional_data(JSONB, HSTORE, TEXT) TO atum_user; +ALTER FUNCTION runs.create_or_update_additional_data(BIGINT, HSTORE, TEXT) OWNER TO atum_owner; +GRANT EXECUTE ON FUNCTION runs.create_or_update_additional_data(BIGINT, HSTORE, TEXT) TO atum_user; diff --git a/database/src/test/scala/za/co/absa/atum/database/runs/CreateOrUpdateAdditionalDataIntegrationTests.scala b/database/src/test/scala/za/co/absa/atum/database/runs/CreateOrUpdateAdditionalDataIntegrationTests.scala index 679450fb2..32d8d679c 100644 --- a/database/src/test/scala/za/co/absa/atum/database/runs/CreateOrUpdateAdditionalDataIntegrationTests.scala +++ b/database/src/test/scala/za/co/absa/atum/database/runs/CreateOrUpdateAdditionalDataIntegrationTests.scala @@ -20,7 +20,7 @@ import za.co.absa.balta.DBTestSuite import za.co.absa.balta.classes.JsonBString import za.co.absa.balta.classes.setter.CustomDBType -class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ +class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite { private val fncCreateOrUpdateAdditionalData = "runs.create_or_update_additional_data" @@ -70,15 +70,35 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ ) function(fncCreateOrUpdateAdditionalData) - .setParam("i_partitioning", partitioning) + .setParam("i_partitioning_id", fkPartitioning) .setParam("i_additional_data", inputADToUpsert) .setParam("i_by_user", "MikeRusty") .execute { queryResult => assert(queryResult.hasNext) val row = queryResult.next() - assert(row.getInt("status").contains(12)) - assert(row.getString("status_text").contains("Additional data have been upserted")) + assert(row.getInt("status").contains(11)) + assert(row.getString("status_text").contains("Additional data have been updated, added or both")) + assert(row.getString("o_ad_name").contains("PrimaryOwner")) + assert(row.getString("o_ad_value").contains("TechnicalManagerA")) + assert(row.getString("o_ad_author").contains("SuperTool")) + + assert(queryResult.hasNext) + val row2 = queryResult.next() + + assert(row2.getInt("status").contains(11)) + assert(row2.getString("status_text").contains("Additional data have been updated, added or both")) + assert(row2.getString("o_ad_name").contains("SecondaryOwner")) + assert(row2.getString("o_ad_value").contains("AnalystNew")) + assert(row2.getString("o_ad_author").contains("MikeRusty")) + + assert(queryResult.hasNext) + val row3 = queryResult.next() + assert(row3.getInt("status").contains(11)) + assert(row3.getString("status_text").contains("Additional data have been updated, added or both")) + 
assert(row3.getString("o_ad_name").contains("IsDatasetInDatalake")) + assert(row3.getString("o_ad_value").contains("true")) + assert(row3.getString("o_ad_author").contains("MikeRusty")) assert(!queryResult.hasNext) } @@ -134,7 +154,7 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ ) function(fncCreateOrUpdateAdditionalData) - .setParam("i_partitioning", partitioning) + .setParam("i_partitioning_id", fkPartitioning) .setParam("i_additional_data", inputADToUpsert) .setParam("i_by_user", "MikeRusty") .execute { queryResult => @@ -142,9 +162,7 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ val row = queryResult.next() assert(row.getInt("status").contains(11)) - assert(row.getString("status_text").contains("Additional data have been added")) - - assert(!queryResult.hasNext) + assert(row.getString("status_text").contains("Additional data have been updated, added or both")) } assert(table("runs.additional_data").count() == 5) @@ -199,7 +217,7 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ ) function(fncCreateOrUpdateAdditionalData) - .setParam("i_partitioning", partitioning) + .setParam("i_partitioning_id", fkPartitioning) .setParam("i_additional_data", inputADToUpsert) .setParam("i_by_user", "MikeRusty") .execute { queryResult => @@ -208,8 +226,6 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ assert(row.getInt("status").contains(14)) assert(row.getString("status_text").contains("No changes in additional data")) - - assert(!queryResult.hasNext) } assert(table("runs.additional_data").count(add("fk_partitioning", fkPartitioning)) == 2) @@ -228,7 +244,7 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{ ) function(fncCreateOrUpdateAdditionalData) - .setParam("i_partitioning", partitioning) + .setParam("i_partitioning_id", 0L) .setParam("i_additional_data", inputADToInsert) .setParam("i_by_user", "MikeRusty") .execute { queryResult => diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala index ec9112009..cc91ce386 100644 --- a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala +++ b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala @@ -19,8 +19,13 @@ package za.co.absa.atum.model.dto import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder} import io.circe.{Decoder, Encoder} +//case class AdditionalDataPatchDTO( +// data: Map[String, AdditionalDataPatchItemDTO] +//) + case class AdditionalDataPatchDTO( - data: Map[String, AdditionalDataPatchItemDTO] + byUser: String, + data: Map[String, String] ) object AdditionalDataPatchDTO { diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala index facfcd1cb..2f87c9edd 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala @@ -36,9 +36,9 @@ trait PartitioningController { partitioningId: Long ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataDTO]] - def createOrUpdateAdditionalDataV2( - additionalData: AdditionalDataSubmitDTO - ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO]] +// def createOrUpdateAdditionalDataV2( +// additionalData: AdditionalDataSubmitDTO +// 
): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO]] def patchPartitioningAdditionalDataV2( partitioningId: Long, diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala index e1b8f36ed..702df5bf2 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala @@ -55,17 +55,6 @@ class PartitioningControllerImpl(partitioningService: PartitioningService) mapToSingleSuccessResponse(createPartitioningIfNotExistsV1(partitioningSubmitDTO)) } - override def createOrUpdateAdditionalDataV2( - additionalData: AdditionalDataSubmitDTO - ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO]] = { - mapToSingleSuccessResponse( - serviceCall[Unit, AdditionalDataSubmitDTO]( - partitioningService.createOrUpdateAdditionalData(additionalData), - _ => additionalData - ) - ) - } - override def getPartitioningCheckpointsV2( checkpointQueryDTO: CheckpointQueryDTO ): IO[ErrorResponse, MultiSuccessResponse[CheckpointDTO]] = { @@ -85,11 +74,16 @@ class PartitioningControllerImpl(partitioningService: PartitioningService) ) ) } + override def patchPartitioningAdditionalDataV2( partitioningId: Long, additionalDataPatchDTO: AdditionalDataPatchDTO ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataDTO]] = { - ??? + mapToSingleSuccessResponse( + serviceCall[AdditionalDataDTO, AdditionalDataDTO]( + partitioningService.patchAdditionalData(partitioningId, additionalDataPatchDTO) + ) + ) } } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala index ca8281a55..bafddf3c5 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala @@ -17,31 +17,42 @@ package za.co.absa.atum.server.api.database.runs.functions import doobie.implicits.toSqlInterpolator -import za.co.absa.atum.model.dto.AdditionalDataSubmitDTO import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs -import za.co.absa.atum.server.model.PartitioningForDB +import za.co.absa.atum.server.model.AdditionalDataItemFromDB import za.co.absa.db.fadb.DBSchema -import za.co.absa.db.fadb.doobie.DoobieFunction.DoobieSingleResultFunctionWithStatus +import za.co.absa.db.fadb.doobie.DoobieFunction.DoobieMultipleResultFunctionWithAggStatus import za.co.absa.db.fadb.doobie.DoobieEngine import za.co.absa.db.fadb.status.handling.implementations.StandardStatusHandling import zio._ import io.circe.syntax._ - import doobie.postgres.implicits._ -import za.co.absa.db.fadb.doobie.postgres.circe.implicits.jsonbPut +import za.co.absa.atum.model.dto.AdditionalDataPatchDTO +import za.co.absa.atum.server.api.database.runs.functions.CreateOrUpdateAdditionalData.CreateOrUpdateAdditionalDataArgs +import za.co.absa.db.fadb.status.aggregation.implementations.ByFirstRowStatusAggregator class CreateOrUpdateAdditionalData(implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) - extends DoobieSingleResultFunctionWithStatus[AdditionalDataSubmitDTO, Unit, Task](values => + extends 
DoobieMultipleResultFunctionWithAggStatus[CreateOrUpdateAdditionalDataArgs, Option[ + AdditionalDataItemFromDB + ], Task](args => Seq( - fr"${PartitioningForDB.fromSeqPartitionDTO(values.partitioning).asJson}", - fr"${values.additionalData.map { case (k, v) => (k, v.orNull) }}", - fr"${values.author}" + fr"${args.partitioningId}", + fr"${args.additionalData.data}", + fr"${args.additionalData.byUser}" ) ) with StandardStatusHandling + with ByFirstRowStatusAggregator { + + override def fieldsToSelect: Seq[String] = super.fieldsToSelect ++ Seq("o_ad_name", "o_ad_value", "o_ad_author") +} object CreateOrUpdateAdditionalData { + case class CreateOrUpdateAdditionalDataArgs( + partitioningId: Long, + additionalData: AdditionalDataPatchDTO + ) + val layer: URLayer[PostgresDatabaseProvider, CreateOrUpdateAdditionalData] = ZLayer { for { dbProvider <- ZIO.service[PostgresDatabaseProvider] diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala index 25c223014..de9ff2f46 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala @@ -78,14 +78,14 @@ trait Endpoints extends BaseEndpoints { .errorOutVariantPrepend(notFoundErrorOneOfVariant) } - protected val createOrUpdateAdditionalDataEndpointV2 - : PublicEndpoint[AdditionalDataSubmitDTO, ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO], Any] = { - apiV2.post - .in(CreateOrUpdateAdditionalData) - .in(jsonBody[AdditionalDataSubmitDTO]) - .out(statusCode(StatusCode.Ok)) - .out(jsonBody[SingleSuccessResponse[AdditionalDataSubmitDTO]]) - } +// protected val createOrUpdateAdditionalDataEndpointV2 +// : PublicEndpoint[AdditionalDataSubmitDTO, ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO], Any] = { +// apiV2.post +// .in(CreateOrUpdateAdditionalData) +// .in(jsonBody[AdditionalDataSubmitDTO]) +// .out(statusCode(StatusCode.Ok)) +// .out(jsonBody[SingleSuccessResponse[AdditionalDataSubmitDTO]]) +// } protected val patchPartitioningAdditionalDataEndpointV2 : PublicEndpoint[(Long, AdditionalDataPatchDTO), ErrorResponse, SingleSuccessResponse[ @@ -96,6 +96,7 @@ trait Endpoints extends BaseEndpoints { .in(jsonBody[AdditionalDataPatchDTO]) .out(statusCode(StatusCode.Ok)) .out(jsonBody[SingleSuccessResponse[AdditionalDataDTO]]) + .errorOutVariantPrepend(notFoundErrorOneOfVariant) } protected val getPartitioningCheckpointEndpointV2 diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala index 85b25b385..c292aadd8 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala @@ -62,10 +62,10 @@ trait Routes extends Endpoints with ServerOptions { getPartitioningAdditionalDataEndpointV2, PartitioningController.getPartitioningAdditionalDataV2 ), - createServerEndpoint( - createOrUpdateAdditionalDataEndpointV2, - PartitioningController.createOrUpdateAdditionalDataV2 - ), +// createServerEndpoint( +// createOrUpdateAdditionalDataEndpointV2, +// PartitioningController.createOrUpdateAdditionalDataV2 +// ), createServerEndpoint[ (Long, AdditionalDataPatchDTO), ErrorResponse, @@ -102,7 +102,7 @@ trait Routes extends Endpoints with ServerOptions { postCheckpointEndpointV2, createPartitioningEndpointV1, createPartitioningEndpointV2, - createOrUpdateAdditionalDataEndpointV2, 
+// createOrUpdateAdditionalDataEndpointV2, patchPartitioningAdditionalDataEndpointV2, getPartitioningCheckpointsEndpointV2, getPartitioningCheckpointEndpointV2, diff --git a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala index 3e461e1f5..7e8031b68 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepository.scala @@ -34,9 +34,7 @@ trait PartitioningRepository { partitioningId: Long ): IO[DatabaseError, AdditionalDataDTO] - def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[DatabaseError, Unit] - - def patchAdditionalData( + def createOrUpdateAdditionalData( partitioningId: Long, additionalData: AdditionalDataPatchDTO ): IO[DatabaseError, AdditionalDataDTO] diff --git a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala index fc6eff57d..243a16a37 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala @@ -17,12 +17,10 @@ package za.co.absa.atum.server.api.repository import za.co.absa.atum.model.dto._ -import za.co.absa.atum.server.model.{AdditionalDataFromDB, AdditionalDataItemFromDB, CheckpointFromDB, MeasureFromDB} +import za.co.absa.atum.server.api.database.runs.functions.CreateOrUpdateAdditionalData.CreateOrUpdateAdditionalDataArgs import za.co.absa.atum.server.api.database.runs.functions._ -import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataPatchDTO, AdditionalDataSubmitDTO, CheckpointQueryDTO, InitialAdditionalDataDTO, MeasureDTO, PartitioningDTO, PartitioningSubmitDTO} -import za.co.absa.atum.server.model.MeasureFromDB -import za.co.absa.atum.server.api.database.runs.functions.{CreateOrUpdateAdditionalData, CreatePartitioningIfNotExists, GetPartitioningAdditionalData, GetPartitioningCheckpoints, GetPartitioningMeasures} import za.co.absa.atum.server.api.exception.DatabaseError +import za.co.absa.atum.server.model.{AdditionalDataFromDB, AdditionalDataItemFromDB, CheckpointFromDB, MeasureFromDB} import zio._ import zio.interop.catz.asyncInstance @@ -43,8 +41,14 @@ class PartitioningRepositoryImpl( ) } - override def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[DatabaseError, Unit] = { - dbSingleResultCallWithStatus(createOrUpdateAdditionalDataFn(additionalData), "createOrUpdateAdditionalData") + override def createOrUpdateAdditionalData( + partitioningId: Long, + additionalData: AdditionalDataPatchDTO + ): IO[DatabaseError, AdditionalDataDTO] = { + dbMultipleResultCallWithAggregatedStatus( + createOrUpdateAdditionalDataFn(CreateOrUpdateAdditionalDataArgs(partitioningId, additionalData)), + "createOrUpdateAdditionalData" + ).map(AdditionalDataItemFromDB.additionalDataFromDBItems) } override def getPartitioningMeasures(partitioning: PartitioningDTO): IO[DatabaseError, Seq[MeasureDTO]] = { @@ -72,17 +76,11 @@ class PartitioningRepositoryImpl( ) } - override def patchAdditionalData(partitioningId: Long, additionalData: AdditionalDataPatchDTO): IO[DatabaseError, AdditionalDataDTO] = { - ??? 
- } override def getPartitioningAdditionalDataV2(partitioningId: Long): IO[DatabaseError, AdditionalDataDTO] = { dbMultipleResultCallWithAggregatedStatus( getPartitioningAdditionalDataV2Fn(partitioningId), "getPartitioningAdditionalData" - ).map(_.collect { case Some(AdditionalDataItemFromDB(adName, adValue, author)) => - adName -> Some(AdditionalDataItemDTO(adValue, author)) - }.toMap) - .map(AdditionalDataDTO(_)) + ).map(AdditionalDataItemFromDB.additionalDataFromDBItems) } } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala index 3049af5d1..26bbd3377 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala @@ -31,7 +31,7 @@ trait PartitioningService { def getPartitioningAdditionalDataV2(partitioningId: Long): IO[ServiceError, AdditionalDataDTO] - def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] +// def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] def patchAdditionalData( partitioningId: Long, diff --git a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala index 584b3fb44..6052a7054 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala @@ -34,12 +34,12 @@ class PartitioningServiceImpl(partitioningRepository: PartitioningRepository) ) } - override def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] = { - repositoryCall( - partitioningRepository.createOrUpdateAdditionalData(additionalData), - "createOrUpdateAdditionalData" - ) - } +// override def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] = { +// repositoryCall( +// partitioningRepository.createOrUpdateAdditionalData(additionalData), +// "createOrUpdateAdditionalData" +// ) +// } override def getPartitioningMeasures(partitioning: PartitioningDTO): IO[ServiceError, Seq[MeasureDTO]] = { repositoryCall( @@ -85,7 +85,10 @@ class PartitioningServiceImpl(partitioningRepository: PartitioningRepository) partitioningId: Long, additionalData: AdditionalDataPatchDTO ): IO[ServiceError, AdditionalDataDTO] = { - ??? 
+ repositoryCall( + partitioningRepository.createOrUpdateAdditionalData(partitioningId, additionalData), + "createOrUpdateAdditionalData" + ) } } diff --git a/server/src/main/scala/za/co/absa/atum/server/model/AdditionalDataItemFromDB.scala b/server/src/main/scala/za/co/absa/atum/server/model/AdditionalDataItemFromDB.scala index 2f86ea78c..3413dddcc 100644 --- a/server/src/main/scala/za/co/absa/atum/server/model/AdditionalDataItemFromDB.scala +++ b/server/src/main/scala/za/co/absa/atum/server/model/AdditionalDataItemFromDB.scala @@ -16,8 +16,25 @@ package za.co.absa.atum.server.model -case class AdditionalDataItemFromDB ( +import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataItemDTO} + +case class AdditionalDataItemFromDB( adName: String, adValue: Option[String], - author: String, + author: String ) + +object AdditionalDataItemFromDB { + def additionalDataFromDBItems(dbItems: Seq[Option[AdditionalDataItemFromDB]]): AdditionalDataDTO = { + AdditionalDataDTO( + dbItems.flatten + .map(item => + item.adValue match { + case Some(value) => item.adName -> Some(AdditionalDataItemDTO(Option(value), item.author)) + case None => item.adName -> None + } + ) + .toMap + ) + } +} diff --git a/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala b/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala index 42abeaba7..d3d34a2fd 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala @@ -95,6 +95,14 @@ trait TestData { protected val additionalDataDTO3: AdditionalDataDTO = AdditionalDataDTO(Map.empty) + protected val additionalDataPatchDTO1: AdditionalDataPatchDTO = AdditionalDataPatchDTO( + byUser = "author", + data = Map( + "key1" -> "value1", + "key3" -> "value3" + ) + ) + val mainValue: TypedValue = TypedValue( value = "123", valueType = ResultValueType.LongValue diff --git a/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala index deadc77d2..0cde994c2 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala @@ -41,9 +41,11 @@ object PartitioningControllerUnitTests extends ZIOSpecDefault with TestData { when(partitioningServiceMock.getPartitioningAdditionalData(partitioningDTO1)) .thenReturn(ZIO.succeed(Map.empty)) - when(partitioningServiceMock.createOrUpdateAdditionalData(additionalDataSubmitDTO1)) - .thenReturn(ZIO.unit) - when(partitioningServiceMock.createOrUpdateAdditionalData(additionalDataSubmitDTO2)) + when(partitioningServiceMock.patchAdditionalData(1L, additionalDataPatchDTO1)) + .thenReturn(ZIO.succeed(additionalDataDTO1)) + when(partitioningServiceMock.patchAdditionalData(0L, additionalDataPatchDTO1)) + .thenReturn(ZIO.fail(NotFoundServiceError("Partitioning not found"))) + when(partitioningServiceMock.patchAdditionalData(2L, additionalDataPatchDTO1)) .thenReturn(ZIO.fail(GeneralServiceError("boom!"))) when(partitioningServiceMock.getPartitioningCheckpoints(checkpointQueryDTO1)) @@ -76,16 +78,21 @@ object PartitioningControllerUnitTests extends ZIOSpecDefault with TestData { ) } ), - suite("CreateOrUpdateAdditionalDataSuite")( + suite("PatchAdditionalDataSuite")( test("Returns expected AdditionalDataSubmitDTO") { for { - result <- 
PartitioningController.createOrUpdateAdditionalDataV2(additionalDataSubmitDTO1) - expected = SingleSuccessResponse(additionalDataSubmitDTO1, uuid1) + result <- PartitioningController.patchPartitioningAdditionalDataV2(1L, additionalDataPatchDTO1) + expected = SingleSuccessResponse(additionalDataDTO1, uuid1) actual = result.copy(requestId = uuid1) } yield assert(actual)(equalTo(expected)) }, + test("Returns expected NotFoundErrorResponse") { + assertZIO(PartitioningController.patchPartitioningAdditionalDataV2(0L, additionalDataPatchDTO1).exit)( + failsWithA[NotFoundErrorResponse] + ) + }, test("Returns expected InternalServerErrorResponse") { - assertZIO(PartitioningController.createOrUpdateAdditionalDataV2(additionalDataSubmitDTO2).exit)( + assertZIO(PartitioningController.patchPartitioningAdditionalDataV2(2L, additionalDataPatchDTO1).exit)( failsWithA[InternalServerErrorResponse] ) } diff --git a/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalDataIntegrationTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalDataIntegrationTests.scala index 94bb6564c..dc948ed54 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalDataIntegrationTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalDataIntegrationTests.scala @@ -16,33 +16,25 @@ package za.co.absa.atum.server.api.database.runs.functions -import za.co.absa.atum.model.dto.{AdditionalDataSubmitDTO, PartitionDTO} import za.co.absa.atum.server.ConfigProviderTest -import za.co.absa.atum.server.api.TestTransactorProvider import za.co.absa.atum.server.api.database.PostgresDatabaseProvider +import za.co.absa.atum.server.api.database.runs.functions.CreateOrUpdateAdditionalData.CreateOrUpdateAdditionalDataArgs +import za.co.absa.atum.server.api.{TestData, TestTransactorProvider} import za.co.absa.db.fadb.exceptions.DataNotFoundException import za.co.absa.db.fadb.status.FunctionStatus import zio._ import zio.interop.catz.asyncInstance import zio.test._ -object CreateOrUpdateAdditionalDataIntegrationTests extends ConfigProviderTest { +object CreateOrUpdateAdditionalDataIntegrationTests extends ConfigProviderTest with TestData { override def spec: Spec[TestEnvironment with Scope, Any] = { suite("CreateOrUpdateAdditionalDataIntegrationSuite")( - test("Returns expected Right with Unit") { - val additionalDataSubmitDTO = AdditionalDataSubmitDTO( - partitioning = Seq(PartitionDTO("key1", "val1"), PartitionDTO("key2", "val2")), - additionalData = Map[String, Option[String]]( - "ownership" -> Some("total"), - "role" -> Some("primary") - ), - author = "testAuthor" - ) + test("Returns expected DataNotFoundException") { for { createOrUpdateAdditionalData <- ZIO.service[CreateOrUpdateAdditionalData] - result <- createOrUpdateAdditionalData(additionalDataSubmitDTO) + result <- createOrUpdateAdditionalData(CreateOrUpdateAdditionalDataArgs(1L, additionalDataPatchDTO1)) } yield assertTrue(result == Left(DataNotFoundException(FunctionStatus(41, "Partitioning not found")))) } ).provide( diff --git a/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointV2IntegrationTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointV2IntegrationTests.scala index 87a2ec89a..eeadaeaef 100644 --- 
a/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointV2IntegrationTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointV2IntegrationTests.scala @@ -17,14 +17,14 @@ package za.co.absa.atum.server.api.database.runs.functions import za.co.absa.atum.model.ResultValueType -import za.co.absa.atum.model.dto._ import za.co.absa.atum.model.dto.MeasureResultDTO.TypedValue +import za.co.absa.atum.model.dto._ import za.co.absa.atum.server.ConfigProviderTest import za.co.absa.atum.server.api.TestTransactorProvider import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.functions.WriteCheckpointV2.WriteCheckpointArgs import za.co.absa.db.fadb.exceptions.DataConflictException -import za.co.absa.db.fadb.status.{FunctionStatus, Row} +import za.co.absa.db.fadb.status.FunctionStatus import zio._ import zio.interop.catz.asyncInstance import zio.test._ diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala new file mode 100644 index 000000000..e29764323 --- /dev/null +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala @@ -0,0 +1,89 @@ +package za.co.absa.atum.server.api.http + +import io.circe +import sttp.client3.circe._ +import org.mockito.Mockito.{mock, when} +import sttp.client3.circe.asJson +import sttp.client3.testing.SttpBackendStub +import sttp.client3.{Identity, RequestT, ResponseException, UriContext, basicRequest} +import sttp.model.StatusCode +import sttp.tapir.server.stub.TapirStubInterpreter +import sttp.tapir.ztapir.{RIOMonadError, RichZEndpoint} +import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataPatchDTO} +import za.co.absa.atum.server.api.TestData +import za.co.absa.atum.server.api.controller.PartitioningController +import za.co.absa.atum.server.model.SuccessResponse.SingleSuccessResponse +import za.co.absa.atum.server.model.{InternalServerErrorResponse, NotFoundErrorResponse} +import zio.test.Assertion.equalTo +import zio.test.{Spec, TestEnvironment, ZIOSpecDefault, assertZIO} +import zio.{Scope, ZIO, ZLayer} + +object PatchAdditionalDataEndpointUnitTests extends ZIOSpecDefault with Endpoints with TestData { + + private val partitioningControllerMock: PartitioningController = mock(classOf[PartitioningController]) + + when(partitioningControllerMock.patchPartitioningAdditionalDataV2(1L, additionalDataPatchDTO1)) + .thenReturn(ZIO.succeed(SingleSuccessResponse(additionalDataDTO1, uuid1))) + when(partitioningControllerMock.patchPartitioningAdditionalDataV2(0L, additionalDataPatchDTO1)) + .thenReturn(ZIO.fail(NotFoundErrorResponse("error"))) + when(partitioningControllerMock.patchPartitioningAdditionalDataV2(2L, additionalDataPatchDTO1)) + .thenReturn(ZIO.fail(InternalServerErrorResponse("error"))) + + private val partitioningControllerMockLayer = ZLayer.succeed(partitioningControllerMock) + + private val patchAdditionalDataEndpointLogic = patchPartitioningAdditionalDataEndpointV2 + .zServerLogic({ case (partitioningId: Long, additionalDataPatchDTO: AdditionalDataPatchDTO) => + PartitioningController.patchPartitioningAdditionalDataV2(partitioningId, additionalDataPatchDTO) + }) + + override def spec: Spec[TestEnvironment with Scope, Any] = { + val backendStub = TapirStubInterpreter(SttpBackendStub.apply(new 
RIOMonadError[PartitioningController])) + .whenServerEndpoint(patchAdditionalDataEndpointLogic) + .thenRunLogic() + .backend() + + suite("PatchAdditionalDataEndpointUnitTests")( + test("Returns expected AdditionalDataDTO") { + val response = patchRequestForId(1L) + .body(additionalDataPatchDTO1) + .send(backendStub) + + val body = response.map(_.body) + val statusCode = response.map(_.code) + + assertZIO(body <&> statusCode)( + equalTo(Right(SingleSuccessResponse(additionalDataDTO1, uuid1)), StatusCode.Ok) + ) + }, + test("Returns NotFoundErrorResponse") { + val response = patchRequestForId(0L) + .body(additionalDataPatchDTO1) + .send(backendStub) + + val statusCode = response.map(_.code) + + assertZIO(statusCode)(equalTo(StatusCode.NotFound)) + }, + test("Returns InternalServerErrorResponse") { + val response = patchRequestForId(2L) + .body(additionalDataPatchDTO1) + .send(backendStub) + + val statusCode = response.map(_.code) + + assertZIO(statusCode)(equalTo(StatusCode.InternalServerError)) + } + ) + + }.provide(partitioningControllerMockLayer) + + private def patchRequestForId(id: Long): RequestT[Identity, Either[ + ResponseException[String, circe.Error], + SingleSuccessResponse[AdditionalDataDTO] + ], Any] = { + basicRequest + .patch(uri"https://test.com/api/v2/partitionings/$id/additional-data") + .response(asJson[SingleSuccessResponse[AdditionalDataDTO]]) + } + +} diff --git a/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala index 0dc4a150a..cf729f547 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala @@ -19,6 +19,7 @@ package za.co.absa.atum.server.api.repository import org.mockito.Mockito.{mock, when} import za.co.absa.atum.model.dto.{AdditionalDataDTO, AdditionalDataItemDTO} import za.co.absa.atum.server.api.TestData +import za.co.absa.atum.server.api.database.runs.functions.CreateOrUpdateAdditionalData.CreateOrUpdateAdditionalDataArgs import za.co.absa.atum.server.api.database.runs.functions._ import za.co.absa.atum.server.api.exception.DatabaseError import za.co.absa.atum.server.api.exception.DatabaseError._ @@ -47,11 +48,11 @@ object PartitioningRepositoryUnitTests extends ZIOSpecDefault with TestData { // Create Additional Data Mocks private val createOrUpdateAdditionalDataMock = mock(classOf[CreateOrUpdateAdditionalData]) - when(createOrUpdateAdditionalDataMock.apply(additionalDataSubmitDTO1)) - .thenReturn(ZIO.right(Row(FunctionStatus(0, "success"), ()))) - when(createOrUpdateAdditionalDataMock.apply(additionalDataSubmitDTO2)) - .thenReturn(ZIO.left(ErrorInDataException(FunctionStatus(50, "error in AD data")))) - when(createOrUpdateAdditionalDataMock.apply(additionalDataSubmitDTO3)) + when(createOrUpdateAdditionalDataMock.apply(CreateOrUpdateAdditionalDataArgs(1L, additionalDataPatchDTO1))) + .thenReturn(ZIO.right(Seq(Row(FunctionStatus(11, "Additional data have been updated, added or both"), Option.empty[AdditionalDataItemFromDB])))) + when(createOrUpdateAdditionalDataMock.apply(CreateOrUpdateAdditionalDataArgs(0L, additionalDataPatchDTO1))) + .thenReturn(ZIO.left(DataNotFoundException(FunctionStatus(41, "Partitioning not found")))) + when(createOrUpdateAdditionalDataMock.apply(CreateOrUpdateAdditionalDataArgs(2L, additionalDataPatchDTO1))) .thenReturn(ZIO.fail(new 
Exception("boom!"))) private val createOrUpdateAdditionalDataMockLayer = ZLayer.succeed(createOrUpdateAdditionalDataMock) @@ -130,23 +131,23 @@ object PartitioningRepositoryUnitTests extends ZIOSpecDefault with TestData { suite("CreateOrUpdateAdditionalDataSuite")( test("Returns expected Right with Unit") { for { - result <- PartitioningRepository.createOrUpdateAdditionalData(additionalDataSubmitDTO1) - } yield assertTrue(result == ()) + result <- PartitioningRepository.createOrUpdateAdditionalData(1L, additionalDataPatchDTO1) + } yield assertTrue(result.isInstanceOf[AdditionalDataDTO]) }, test("Returns expected Left with StatusException") { for { - result <- PartitioningRepository.createOrUpdateAdditionalData(additionalDataSubmitDTO2).exit + result <- PartitioningRepository.createOrUpdateAdditionalData(0L, additionalDataPatchDTO1).exit } yield assertTrue( result == Exit.fail( - GeneralDatabaseError( - "Exception caused by operation: 'createOrUpdateAdditionalData': (50) error in AD data" + NotFoundDatabaseError( + "Partitioning not found" ) ) ) }, test("Returns expected DatabaseError") { - assertZIO(PartitioningRepository.createOrUpdateAdditionalData(additionalDataSubmitDTO3).exit)( - failsWithA[DatabaseError] + assertZIO(PartitioningRepository.createOrUpdateAdditionalData(2L, additionalDataPatchDTO1).exit)( + failsWithA[GeneralDatabaseError] ) } ), diff --git a/server/src/test/scala/za/co/absa/atum/server/api/service/PartitioningServiceUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/service/PartitioningServiceUnitTests.scala index f87fa1aa3..138788ab1 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/service/PartitioningServiceUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/service/PartitioningServiceUnitTests.scala @@ -36,10 +36,11 @@ object PartitioningServiceUnitTests extends ZIOSpecDefault with TestData { when(partitioningRepositoryMock.createPartitioningIfNotExists(partitioningSubmitDTO3)) .thenReturn(ZIO.fail(GeneralDatabaseError("boom!"))) - when(partitioningRepositoryMock.createOrUpdateAdditionalData(additionalDataSubmitDTO1)).thenReturn(ZIO.unit) - when(partitioningRepositoryMock.createOrUpdateAdditionalData(additionalDataSubmitDTO2)) - .thenReturn(ZIO.fail(GeneralDatabaseError("error in AD data"))) - when(partitioningRepositoryMock.createOrUpdateAdditionalData(additionalDataSubmitDTO3)) + when(partitioningRepositoryMock.createOrUpdateAdditionalData(1L, additionalDataPatchDTO1)) + .thenReturn(ZIO.succeed(additionalDataDTO1)) + when(partitioningRepositoryMock.createOrUpdateAdditionalData(0L, additionalDataPatchDTO1)) + .thenReturn(ZIO.fail(NotFoundDatabaseError("Partitioning not found"))) + when(partitioningRepositoryMock.createOrUpdateAdditionalData(2L, additionalDataPatchDTO1)) .thenReturn(ZIO.fail(GeneralDatabaseError("boom!"))) when(partitioningRepositoryMock.getPartitioningMeasures(partitioningDTO1)) @@ -86,22 +87,24 @@ object PartitioningServiceUnitTests extends ZIOSpecDefault with TestData { ) } ), - suite("CreateOrUpdateAdditionalDataSuite")( + suite("PatchAdditionalDataSuite")( test("Returns expected Right with Unit") { for { - result <- PartitioningService.createOrUpdateAdditionalData(additionalDataSubmitDTO1) - } yield assertTrue(result == ()) + result <- PartitioningService.patchAdditionalData(1L, additionalDataPatchDTO1) + } yield assertTrue(result == additionalDataDTO1) }, - test("Returns expected Left with StatusException") { + test("Returns expected NotFoundServiceError") { for { - result <- 
PartitioningService.createOrUpdateAdditionalData(additionalDataSubmitDTO2).exit + result <- PartitioningService.patchAdditionalData(0L, additionalDataPatchDTO1).exit } yield assertTrue( - result == Exit.fail(GeneralServiceError("Failed to perform 'createOrUpdateAdditionalData': error in AD data")) + result == Exit.fail( + NotFoundServiceError("Failed to perform 'createOrUpdateAdditionalData': Partitioning not found") + ) ) }, - test("Returns expected ServiceError") { - assertZIO(PartitioningService.createOrUpdateAdditionalData(additionalDataSubmitDTO3).exit)( - failsWithA[ServiceError] + test("Returns expected GeneralServiceError") { + assertZIO(PartitioningService.patchAdditionalData(2L, additionalDataPatchDTO1).exit)( + failsWithA[GeneralServiceError] ) } ), From 2525b50a1db0bfb9ac2bd2eb658acb0aedeada76 Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Thu, 29 Aug 2024 17:20:45 +0200 Subject: [PATCH 03/10] fix --- .../server/api/repository/PartitioningRepositoryUnitTests.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala index cf729f547..c971376a8 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryUnitTests.scala @@ -140,7 +140,7 @@ object PartitioningRepositoryUnitTests extends ZIOSpecDefault with TestData { } yield assertTrue( result == Exit.fail( NotFoundDatabaseError( - "Partitioning not found" + "Exception caused by operation: 'createOrUpdateAdditionalData': (41) Partitioning not found" ) ) ) From 915d997361217fb07f45d191893d2f4087b80e47 Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Thu, 29 Aug 2024 17:21:42 +0200 Subject: [PATCH 04/10] fix --- ...ioningAdditionalDataV2EndpointUnitTests.scala | 16 ++++++++++++++++ ...rtitioningCheckpointV2EndpointUnitTests.scala | 16 ++++++++++++++++ .../PatchAdditionalDataEndpointUnitTests.scala | 16 ++++++++++++++++ 3 files changed, 48 insertions(+) diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningAdditionalDataV2EndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningAdditionalDataV2EndpointUnitTests.scala index 3c500f787..f945b5c0b 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningAdditionalDataV2EndpointUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningAdditionalDataV2EndpointUnitTests.scala @@ -1,3 +1,19 @@ +/* + * Copyright 2021 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package za.co.absa.atum.server.api.http import org.mockito.Mockito.{mock, when} diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningCheckpointV2EndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningCheckpointV2EndpointUnitTests.scala index a1c46ccf8..2317ab129 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningCheckpointV2EndpointUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/GetPartitioningCheckpointV2EndpointUnitTests.scala @@ -1,3 +1,19 @@ +/* + * Copyright 2021 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package za.co.absa.atum.server.api.http import org.mockito.Mockito.{mock, when} diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala index e29764323..b6842e8a6 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/PatchAdditionalDataEndpointUnitTests.scala @@ -1,3 +1,19 @@ +/* + * Copyright 2021 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package za.co.absa.atum.server.api.http import io.circe From 3636cd9b19273f8c5ca4abff279970094aaf7b3b Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Thu, 29 Aug 2024 17:26:56 +0200 Subject: [PATCH 05/10] minors --- .../za/co/absa/atum/server/api/http/Endpoints.scala | 9 --------- .../scala/za/co/absa/atum/server/api/http/Routes.scala | 5 ----- .../atum/server/api/service/PartitioningService.scala | 2 -- .../server/api/service/PartitioningServiceImpl.scala | 7 ------- 4 files changed, 23 deletions(-) diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala index de9ff2f46..34163cadc 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala @@ -78,15 +78,6 @@ trait Endpoints extends BaseEndpoints { .errorOutVariantPrepend(notFoundErrorOneOfVariant) } -// protected val createOrUpdateAdditionalDataEndpointV2 -// : PublicEndpoint[AdditionalDataSubmitDTO, ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO], Any] = { -// apiV2.post -// .in(CreateOrUpdateAdditionalData) -// .in(jsonBody[AdditionalDataSubmitDTO]) -// .out(statusCode(StatusCode.Ok)) -// .out(jsonBody[SingleSuccessResponse[AdditionalDataSubmitDTO]]) -// } - protected val patchPartitioningAdditionalDataEndpointV2 : PublicEndpoint[(Long, AdditionalDataPatchDTO), ErrorResponse, SingleSuccessResponse[ AdditionalDataDTO diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala index c292aadd8..37a84ca7f 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Routes.scala @@ -62,10 +62,6 @@ trait Routes extends Endpoints with ServerOptions { getPartitioningAdditionalDataEndpointV2, PartitioningController.getPartitioningAdditionalDataV2 ), -// createServerEndpoint( -// createOrUpdateAdditionalDataEndpointV2, -// PartitioningController.createOrUpdateAdditionalDataV2 -// ), createServerEndpoint[ (Long, AdditionalDataPatchDTO), ErrorResponse, @@ -102,7 +98,6 @@ trait Routes extends Endpoints with ServerOptions { postCheckpointEndpointV2, createPartitioningEndpointV1, createPartitioningEndpointV2, -// createOrUpdateAdditionalDataEndpointV2, patchPartitioningAdditionalDataEndpointV2, getPartitioningCheckpointsEndpointV2, getPartitioningCheckpointEndpointV2, diff --git a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala index 26bbd3377..ebd87e6a6 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningService.scala @@ -31,8 +31,6 @@ trait PartitioningService { def getPartitioningAdditionalDataV2(partitioningId: Long): IO[ServiceError, AdditionalDataDTO] -// def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] - def patchAdditionalData( partitioningId: Long, additionalData: AdditionalDataPatchDTO diff --git a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala index 6052a7054..e649297d9 100644 --- 
a/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/service/PartitioningServiceImpl.scala @@ -34,13 +34,6 @@ class PartitioningServiceImpl(partitioningRepository: PartitioningRepository) ) } -// override def createOrUpdateAdditionalData(additionalData: AdditionalDataSubmitDTO): IO[ServiceError, Unit] = { -// repositoryCall( -// partitioningRepository.createOrUpdateAdditionalData(additionalData), -// "createOrUpdateAdditionalData" -// ) -// } - override def getPartitioningMeasures(partitioning: PartitioningDTO): IO[ServiceError, Seq[MeasureDTO]] = { repositoryCall( partitioningRepository.getPartitioningMeasures(partitioning), From bb82ce034f112ebaf3cd25b0333040debb16a40b Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Fri, 30 Aug 2024 09:46:02 +0200 Subject: [PATCH 06/10] minors --- .../co/absa/atum/server/api/controller/BaseController.scala | 3 +-- .../atum/server/api/controller/PartitioningController.scala | 4 ---- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/BaseController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/BaseController.scala index 5b8f3bd2b..5e88951ae 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/BaseController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/BaseController.scala @@ -19,8 +19,7 @@ package za.co.absa.atum.server.api.controller import za.co.absa.atum.server.api.exception.ServiceError import za.co.absa.atum.server.api.exception.ServiceError._ import za.co.absa.atum.server.api.http.ApiPaths -import za.co.absa.atum.server.model.{ConflictErrorResponse, ErrorResponse, InternalServerErrorResponse, NotFoundErrorResponse} -import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} +import za.co.absa.atum.server.model.SuccessResponse._ import za.co.absa.atum.server.model._ import zio._ diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala index 2d1997f08..2d27efa3b 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala @@ -36,10 +36,6 @@ trait PartitioningController { partitioningId: Long ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataDTO]] -// def createOrUpdateAdditionalDataV2( -// additionalData: AdditionalDataSubmitDTO -// ): IO[ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO]] - def patchPartitioningAdditionalDataV2( partitioningId: Long, additionalDataPatchDTO: AdditionalDataPatchDTO From d7b127de4bb9dd2f37a603df94abbc86aee5dfc9 Mon Sep 17 00:00:00 2001 From: Pavel Salamon Date: Fri, 30 Aug 2024 09:47:28 +0200 Subject: [PATCH 07/10] minors --- .../za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala | 4 ---- 1 file changed, 4 deletions(-) diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala index cc91ce386..f6b424cda 100644 --- a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala +++ b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataPatchDTO.scala @@ -19,10 +19,6 @@ package za.co.absa.atum.model.dto import 
io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
 import io.circe.{Decoder, Encoder}
 
-//case class AdditionalDataPatchDTO(
-//  data: Map[String, AdditionalDataPatchItemDTO]
-//)
-
 case class AdditionalDataPatchDTO(
   byUser: String,
   data: Map[String, String]

From 0994909134f3142c5f64b01103890bd08099fc4c Mon Sep 17 00:00:00 2001
From: Pavel Salamon
Date: Fri, 30 Aug 2024 09:49:19 +0200
Subject: [PATCH 08/10] minors

---
 .../atum/server/api/repository/PartitioningRepositoryImpl.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala
index 64c6e1ffb..63788ea1a 100644
--- a/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala
+++ b/server/src/main/scala/za/co/absa/atum/server/api/repository/PartitioningRepositoryImpl.scala
@@ -20,7 +20,7 @@ import za.co.absa.atum.model.dto._
 import za.co.absa.atum.server.api.database.runs.functions.CreateOrUpdateAdditionalData.CreateOrUpdateAdditionalDataArgs
 import za.co.absa.atum.server.api.database.runs.functions._
 import za.co.absa.atum.server.api.exception.DatabaseError
-import za.co.absa.atum.server.model.{AdditionalDataFromDB, AdditionalDataItemFromDB, CheckpointFromDB, MeasureFromDB}
+import za.co.absa.atum.server.model.{AdditionalDataFromDB, AdditionalDataItemFromDB, CheckpointFromDB, MeasureFromDB, PartitioningFromDB}
 import zio._
 import zio.interop.catz.asyncInstance
 import za.co.absa.atum.server.api.exception.DatabaseError.GeneralDatabaseError

From 087bd1417ec252ad1e255a1c825b86ab23cdd081 Mon Sep 17 00:00:00 2001
From: salamonpavel
Date: Tue, 3 Sep 2024 10:05:07 +0200
Subject: [PATCH 09/10] Update database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql

Co-authored-by: David Benedeki <14905969+benedeki@users.noreply.github.com>
---
 .../postgres/runs/V1.5.16__create_or_update_additional_data.sql | 1 +
 1 file changed, 1 insertion(+)

diff --git a/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql b/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql
index 15469a06e..ef6b67887 100644
--- a/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql
+++ b/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql
@@ -30,6 +30,7 @@ $$
 -- Adds the additional data for the input partitioning. If additional data of a given name already
 -- exists for such partitioning, the value is updated and the old value is moved to the
 -- additional data history table.
+-- The function returns all actual additional data of the partitioning.
 --
 -- Parameters:
 --      i_partitioning_id - id of partitioning to add the additional data for

From 3bfea09af917ed9876a467e3cb7c1d4f6f2a7032 Mon Sep 17 00:00:00 2001
From: salamonpavel
Date: Tue, 3 Sep 2024 10:18:31 +0200
Subject: [PATCH 10/10] Update database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql

Co-authored-by: David Benedeki <14905969+benedeki@users.noreply.github.com>
---
 .../postgres/runs/V1.5.16__create_or_update_additional_data.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql b/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql
index ef6b67887..97e018e71 100644
--- a/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql
+++ b/database/src/main/postgres/runs/V1.5.16__create_or_update_additional_data.sql
@@ -24,7 +24,7 @@ CREATE OR REPLACE FUNCTION runs.create_or_update_additional_data(
     OUT o_ad_author TEXT
 ) RETURNS SETOF record AS
 $$
-     -------------------------------------------------------------------------------
+-------------------------------------------------------------------------------
 --
 -- Function: runs.create_or_update_additional_data(3)
 -- Adds the additional data for the input partitioning. If additional data of a given name already
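
For reference, a minimal client-side sketch of the payload shape this series ends up with. It relies only on what the diffs above show: AdditionalDataPatchDTO now carries byUser plus a Map[String, String], and the v2 endpoint is declared as PublicEndpoint[(Long, AdditionalDataPatchDTO), ErrorResponse, SingleSuccessResponse[AdditionalDataDTO], Any]. It assumes the circe codecs derived in the DTO's companion object are still in place; the object name, example values, URL and port in the comments are illustrative, since the concrete route path is not visible in these patches.

// Illustrative sketch only; not part of the patch series above.
import io.circe.syntax._
import za.co.absa.atum.model.dto.AdditionalDataPatchDTO

object PatchAdditionalDataPayloadExample extends App {
  // Payload for patching a partitioning's additional data:
  // plain string values plus the author performing the change.
  val patch = AdditionalDataPatchDTO(
    byUser = "jane.doe",                        // hypothetical author
    data = Map(
      "ownership" -> "team-data-quality",       // hypothetical key/value pairs
      "comment"   -> "backfilled after reprocessing"
    )
  )

  // Uses the Encoder derived in AdditionalDataPatchDTO's companion object.
  // Prints: {"byUser":"jane.doe","data":{"ownership":"team-data-quality","comment":"backfilled after reprocessing"}}
  println(patch.asJson.noSpaces)

  // A client would send this body to the new v2 route, e.g. (path and port assumed):
  //   curl -X PATCH http://localhost:8080/api/v2/partitionings/123/additional-data \
  //        -H "Content-Type: application/json" -d "$(cat payload.json)"
}

On success the endpoint is declared to answer with SingleSuccessResponse[AdditionalDataDTO], i.e. the partitioning's current additional data as stored after the upsert, not an echo of the patch request.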