diff --git a/agent/src/main/scala/za/co/absa/atum/agent/dispatcher/HttpDispatcher.scala b/agent/src/main/scala/za/co/absa/atum/agent/dispatcher/HttpDispatcher.scala
index cc7ea6623..31821a05e 100644
--- a/agent/src/main/scala/za/co/absa/atum/agent/dispatcher/HttpDispatcher.scala
+++ b/agent/src/main/scala/za/co/absa/atum/agent/dispatcher/HttpDispatcher.scala
@@ -22,7 +22,7 @@ import sttp.client3._
 import sttp.model.Uri
 import za.co.absa.atum.agent.exception.AtumAgentException.HttpException
 import za.co.absa.atum.model.dto.{AdditionalDataSubmitDTO, AtumContextDTO, CheckpointDTO, PartitioningSubmitDTO}
-import za.co.absa.atum.model.utils.SerializationUtils
+import za.co.absa.atum.model.utils.JsonSyntaxExtensions._
 
 class HttpDispatcher(config: Config) extends Dispatcher(config: Config) with Logging {
   import HttpDispatcher._
@@ -47,19 +47,17 @@ class HttpDispatcher(config: Config) extends Dispatcher(config: Config) with Log
   override protected[agent] def createPartitioning(partitioning: PartitioningSubmitDTO): AtumContextDTO = {
     val request = commonAtumRequest
       .post(createPartitioningEndpoint)
-      .body(SerializationUtils.asJson(partitioning))
+      .body(partitioning.asJsonString)
 
     val response = backend.send(request)
 
-    SerializationUtils.fromJson[AtumContextDTO](
-      handleResponseBody(response)
-    )
+    handleResponseBody(response).as[AtumContextDTO]
   }
 
   override protected[agent] def saveCheckpoint(checkpoint: CheckpointDTO): Unit = {
     val request = commonAtumRequest
       .post(createCheckpointEndpoint)
-      .body(SerializationUtils.asJson(checkpoint))
+      .body(checkpoint.asJsonString)
 
     val response = backend.send(request)
 
@@ -69,7 +67,7 @@ class HttpDispatcher(config: Config) extends Dispatcher(config: Config) with Log
   override protected[agent] def saveAdditionalData(additionalDataSubmitDTO: AdditionalDataSubmitDTO): Unit = {
     val request = commonAtumRequest
       .post(createAdditionalDataEndpoint)
-      .body(SerializationUtils.asJson(additionalDataSubmitDTO))
+      .body(additionalDataSubmitDTO.asJsonString)
 
     val response = backend.send(request)
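The dispatcher now round-trips DTOs through the circe-backed syntax extensions (introduced further down in this patch) instead of json4s. A minimal sketch of the new call pattern — the values are illustrative, the API is the one this diff adds:

```scala
import za.co.absa.atum.model.dto.{AtumContextDTO, PartitionDTO, PartitioningSubmitDTO}
import za.co.absa.atum.model.utils.JsonSyntaxExtensions._

// Encoding: any T with an implicit io.circe.Encoder gains asJsonString
// (compact output via asJson.noSpaces).
val submit = PartitioningSubmitDTO(Seq(PartitionDTO("key", "val")), None, "author")
val payload: String = submit.asJsonString

// Decoding: any String gains as[T] for a T with an implicit io.circe.Decoder;
// a failed decode is rethrown as a RuntimeException.
val ctx: AtumContextDTO =
  """{"partitioning":[],"measures":[],"additionalData":{}}""".as[AtumContextDTO]
```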
diff --git a/agent/src/main/scala/za/co/absa/atum/agent/model/Measure.scala b/agent/src/main/scala/za/co/absa/atum/agent/model/Measure.scala
index dd4d599fb..1a5e634e2 100644
--- a/agent/src/main/scala/za/co/absa/atum/agent/model/Measure.scala
+++ b/agent/src/main/scala/za/co/absa/atum/agent/model/Measure.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.types.{DataType, DecimalType, LongType, StringType}
 import org.apache.spark.sql.{Column, DataFrame}
 import za.co.absa.atum.agent.core.MeasurementProcessor
 import za.co.absa.atum.agent.core.MeasurementProcessor.MeasurementFunction
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
+import za.co.absa.atum.model.ResultValueType
 
 /**
  * Type of different measures to be applied to the columns.
@@ -57,7 +57,7 @@ object AtumMeasure {
     }
 
     override def measuredColumns: Seq[String] = Seq.empty
-    override val resultValueType: ResultValueType = ResultValueType.Long
+    override val resultValueType: ResultValueType = ResultValueType.LongValue
   }
   object RecordCount {
     private[agent] val measureName: String = "count"
@@ -76,7 +76,7 @@ object AtumMeasure {
     }
 
     override def measuredColumns: Seq[String] = measuredCols
-    override val resultValueType: ResultValueType = ResultValueType.Long
+    override val resultValueType: ResultValueType = ResultValueType.LongValue
   }
   object DistinctRecordCount {
     private[agent] val measureName: String = "distinctCount"
@@ -93,7 +93,7 @@ object AtumMeasure {
     }
 
     override def measuredColumns: Seq[String] = Seq(measuredCol)
-    override val resultValueType: ResultValueType = ResultValueType.BigDecimal
+    override val resultValueType: ResultValueType = ResultValueType.BigDecimalValue
   }
   object SumOfValuesOfColumn {
     private[agent] val measureName: String = "aggregatedTotal"
@@ -110,7 +110,7 @@ object AtumMeasure {
     }
 
     override def measuredColumns: Seq[String] = Seq(measuredCol)
-    override val resultValueType: ResultValueType = ResultValueType.BigDecimal
+    override val resultValueType: ResultValueType = ResultValueType.BigDecimalValue
   }
   object AbsSumOfValuesOfColumn {
     private[agent] val measureName: String = "absAggregatedTotal"
@@ -125,7 +125,7 @@ object AtumMeasure {
     }
 
     override def measuredColumns: Seq[String] = Seq(measuredCol)
-    override val resultValueType: ResultValueType = ResultValueType.String
+    override val resultValueType: ResultValueType = ResultValueType.StringValue
   }
   object SumOfHashesOfColumn {
     private[agent] val measureName: String = "hashCrc32"
diff --git a/agent/src/main/scala/za/co/absa/atum/agent/model/MeasureResult.scala b/agent/src/main/scala/za/co/absa/atum/agent/model/MeasureResult.scala
index f95214401..463dc632f 100644
--- a/agent/src/main/scala/za/co/absa/atum/agent/model/MeasureResult.scala
+++ b/agent/src/main/scala/za/co/absa/atum/agent/model/MeasureResult.scala
@@ -17,7 +17,7 @@
 package za.co.absa.atum.agent.model
 
 import za.co.absa.atum.agent.exception.AtumAgentException.MeasurementException
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
+import za.co.absa.atum.model.ResultValueType
 
 /**
  * This trait defines a contract for a measure result.
@@ -80,13 +80,13 @@ object MeasureResult {
     resultValue match {
       case l: Long =>
-        MeasureResultProvided[Long](l, ResultValueType.Long)
+        MeasureResultProvided[Long](l, ResultValueType.LongValue)
       case d: Double =>
-        MeasureResultProvided[Double](d, ResultValueType.Double)
+        MeasureResultProvided[Double](d, ResultValueType.DoubleValue)
       case bd: BigDecimal =>
-        MeasureResultProvided[BigDecimal](bd, ResultValueType.BigDecimal)
+        MeasureResultProvided[BigDecimal](bd, ResultValueType.BigDecimalValue)
       case s: String =>
-        MeasureResultProvided[String](s, ResultValueType.String)
+        MeasureResultProvided[String](s, ResultValueType.StringValue)
 
       case unsupportedType =>
         val className = unsupportedType.getClass.getSimpleName
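A note on the rename running through this and the following files: only the Scala identifiers change (Long → LongValue, and so on); the JSON wire strings stay as they were (see ResultValueType.scala below). A quick illustration of the factory in the hunk above, assuming MeasureResult.apply is otherwise as shown:

```scala
import za.co.absa.atum.agent.model.MeasureResult
import za.co.absa.atum.model.ResultValueType

// The runtime type of the provided value still selects the type tag;
// only the case-object names differ from the old code.
assert(MeasureResult(1L).resultValueType == ResultValueType.LongValue)
assert(MeasureResult(3.14).resultValueType == ResultValueType.DoubleValue)
assert(MeasureResult(BigDecimal(1.2)).resultValueType == ResultValueType.BigDecimalValue)
assert(MeasureResult("abc").resultValueType == ResultValueType.StringValue)
```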
diff --git a/agent/src/main/scala/za/co/absa/atum/agent/model/MeasurementBuilder.scala b/agent/src/main/scala/za/co/absa/atum/agent/model/MeasurementBuilder.scala
index 009e6df14..6fa560b6c 100644
--- a/agent/src/main/scala/za/co/absa/atum/agent/model/MeasurementBuilder.scala
+++ b/agent/src/main/scala/za/co/absa/atum/agent/model/MeasurementBuilder.scala
@@ -17,6 +17,7 @@
 package za.co.absa.atum.agent.model
 
 import za.co.absa.atum.agent.exception.AtumAgentException.MeasurementException
+import za.co.absa.atum.model.dto.MeasureResultDTO.TypedValue
 import za.co.absa.atum.model.dto.{MeasureDTO, MeasureResultDTO, MeasurementDTO}
 
 /**
@@ -41,7 +42,7 @@ private [agent] object MeasurementBuilder {
     val measureDTO = MeasureDTO(measure.measureName, measure.measuredColumns)
 
     val measureResultDTO = MeasureResultDTO(
-      MeasureResultDTO.TypedValue(measureResult.resultValue.toString, measureResult.resultValueType)
+      TypedValue(measureResult.resultValue.toString, measureResult.resultValueType)
    )
     MeasurementDTO(measureDTO, measureResultDTO)
   }
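The only change here is import shuffling, but the builder is the seam between agent-side measures and the wire DTOs, so it is worth seeing end to end. A sketch using values from the unit tests below (runnable from inside the agent package, since MeasurementBuilder is private[agent]):

```scala
import za.co.absa.atum.agent.model.{MeasureResult, MeasurementBuilder}
import za.co.absa.atum.agent.model.AtumMeasure.DistinctRecordCount
import za.co.absa.atum.model.ResultValueType
import za.co.absa.atum.model.dto.{MeasureDTO, MeasureResultDTO, MeasurementDTO}
import za.co.absa.atum.model.dto.MeasureResultDTO.TypedValue

val dto: MeasurementDTO = MeasurementBuilder.buildMeasurementDTO(
  DistinctRecordCount(Seq("col")),
  MeasureResult("1", ResultValueType.LongValue)
)
// The result value is stringified and paired with its ResultValueType tag.
assert(dto == MeasurementDTO(
  MeasureDTO("distinctCount", Seq("col")),
  MeasureResultDTO(TypedValue("1", ResultValueType.LongValue))
))
```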
diff --git a/agent/src/test/scala/za/co/absa/atum/agent/AtumContextUnitTests.scala b/agent/src/test/scala/za/co/absa/atum/agent/AtumContextUnitTests.scala
index 67ec7b9a9..75585f485 100644
--- a/agent/src/test/scala/za/co/absa/atum/agent/AtumContextUnitTests.scala
+++ b/agent/src/test/scala/za/co/absa/atum/agent/AtumContextUnitTests.scala
@@ -25,8 +25,8 @@ import org.scalatest.matchers.should.Matchers
 import za.co.absa.atum.agent.AtumContext.AtumPartitions
 import za.co.absa.atum.agent.model.AtumMeasure.{RecordCount, SumOfValuesOfColumn}
 import za.co.absa.atum.agent.model.{Measure, MeasureResult, MeasurementBuilder, UnknownMeasure}
+import za.co.absa.atum.model.ResultValueType
 import za.co.absa.atum.model.dto.CheckpointDTO
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
 
 class AtumContextUnitTests extends AnyFlatSpec with Matchers {
 
@@ -100,7 +100,7 @@ class AtumContextUnitTests extends AnyFlatSpec with Matchers {
     assert(argument.getValue.author == authorTest)
     assert(argument.getValue.partitioning == AtumPartitions.toSeqPartitionDTO(atumPartitions))
     assert(argument.getValue.measurements.head.result.mainValue.value == "3")
-    assert(argument.getValue.measurements.head.result.mainValue.valueType == ResultValueType.Long)
+    assert(argument.getValue.measurements.head.result.mainValue.valueType == ResultValueType.LongValue)
   }
 
   "createCheckpointOnProvidedData" should "create a Checkpoint on provided data" in {
@@ -115,7 +115,7 @@ class AtumContextUnitTests extends AnyFlatSpec with Matchers {
     val measurements: Map[Measure, MeasureResult] = Map(
       RecordCount("col") -> MeasureResult(1L),
       SumOfValuesOfColumn("col") -> MeasureResult(BigDecimal(1)),
-      UnknownMeasure("customMeasureName", Seq("col"), ResultValueType.BigDecimal) -> MeasureResult(BigDecimal(1))
+      UnknownMeasure("customMeasureName", Seq("col"), ResultValueType.BigDecimalValue) -> MeasureResult(BigDecimal(1))
     )
 
     atumContext.createCheckpointOnProvidedData(
@@ -172,7 +172,7 @@ class AtumContextUnitTests extends AnyFlatSpec with Matchers {
     assert(argumentFirst.getValue.author == authorTest)
     assert(argumentFirst.getValue.partitioning == AtumPartitions.toSeqPartitionDTO(atumPartitions))
     assert(argumentFirst.getValue.measurements.head.result.mainValue.value == "4")
-    assert(argumentFirst.getValue.measurements.head.result.mainValue.valueType == ResultValueType.Long)
+    assert(argumentFirst.getValue.measurements.head.result.mainValue.valueType == ResultValueType.LongValue)
 
     atumContext.addMeasure(SumOfValuesOfColumn("columnForSum"))
     when(mockAgent.currentUser).thenReturn(authorTest + "Another") // maybe a process changed the author / current user
@@ -185,7 +185,7 @@ class AtumContextUnitTests extends AnyFlatSpec with Matchers {
     assert(argumentSecond.getValue.author == authorTest + "Another")
     assert(argumentSecond.getValue.partitioning == AtumPartitions.toSeqPartitionDTO(atumPartitions))
     assert(argumentSecond.getValue.measurements.tail.head.result.mainValue.value == "22.5")
-    assert(argumentSecond.getValue.measurements.tail.head.result.mainValue.valueType == ResultValueType.BigDecimal)
+    assert(argumentSecond.getValue.measurements.tail.head.result.mainValue.valueType == ResultValueType.BigDecimalValue)
   }
 
   "addAdditionalData" should "add key/value pair to map for additional data" in {
diff --git a/agent/src/test/scala/za/co/absa/atum/agent/model/AtumMeasureUnitTests.scala b/agent/src/test/scala/za/co/absa/atum/agent/model/AtumMeasureUnitTests.scala
index 17bd3be2a..7f2278f91 100644
--- a/agent/src/test/scala/za/co/absa/atum/agent/model/AtumMeasureUnitTests.scala
+++ b/agent/src/test/scala/za/co/absa/atum/agent/model/AtumMeasureUnitTests.scala
@@ -23,7 +23,7 @@ import org.scalatest.matchers.should.Matchers
 import za.co.absa.atum.agent.AtumAgent
 import za.co.absa.atum.agent.AtumContext.{AtumPartitions, DatasetWrapper}
 import za.co.absa.atum.agent.model.AtumMeasure._
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
+import za.co.absa.atum.model.ResultValueType
 import za.co.absa.spark.commons.test.SparkTestBase
 
 class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase { self =>
@@ -94,17 +94,17 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
 
     // Assertions
     assert(dfPersonCntResult.resultValue == "1000")
-    assert(dfPersonCntResult.resultValueType == ResultValueType.Long)
+    assert(dfPersonCntResult.resultValueType == ResultValueType.LongValue)
     assert(dfFullCntResult.resultValue == "1000")
-    assert(dfFullCntResult.resultValueType == ResultValueType.Long)
+    assert(dfFullCntResult.resultValueType == ResultValueType.LongValue)
     assert(dfFullSalaryAbsSumResult.resultValue == "2987144")
-    assert(dfFullSalaryAbsSumResult.resultValueType == ResultValueType.BigDecimal)
+    assert(dfFullSalaryAbsSumResult.resultValueType == ResultValueType.BigDecimalValue)
     assert(dfFullHashResult.resultValue == "2044144307532")
-    assert(dfFullHashResult.resultValueType == ResultValueType.String)
+    assert(dfFullHashResult.resultValueType == ResultValueType.StringValue)
     assert(dfExtraPersonSalarySumResult.resultValue == "2986144")
-    assert(dfExtraPersonSalarySumResult.resultValueType == ResultValueType.BigDecimal)
+    assert(dfExtraPersonSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
     assert(dfFullSalarySumResult.resultValue == "2987144")
-    assert(dfFullSalarySumResult.resultValueType == ResultValueType.BigDecimal)
+    assert(dfFullSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
   }
 
   "AbsSumOfValuesOfColumn" should "return expected value" in {
@@ -119,7 +119,7 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     val result = salaryAbsSum.function(df)
 
     assert(result.resultValue == "300.3")
-    assert(result.resultValueType == ResultValueType.BigDecimal)
+    assert(result.resultValueType == ResultValueType.BigDecimalValue)
   }
 
   "AbsSumOfValuesOfColumn" should "return expected value for null result" in {
@@ -134,7 +134,7 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     val result = salaryAbsSum.function(df)
 
     assert(result.resultValue == "0")
-    assert(result.resultValueType == ResultValueType.BigDecimal)
+    assert(result.resultValueType == ResultValueType.BigDecimalValue)
   }
 
   "RecordCount" should "return expected value" in {
@@ -149,7 +149,7 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     val result = distinctCount.function(df)
 
     assert(result.resultValue == "4")
-    assert(result.resultValueType == ResultValueType.Long)
+    assert(result.resultValueType == ResultValueType.LongValue)
   }
 
   "DistinctRecordCount" should "return expected value for multiple columns" in {
@@ -164,7 +164,7 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     val result = distinctCount.function(df)
 
     assert(result.resultValue == "3")
-    assert(result.resultValueType == ResultValueType.Long)
+    assert(result.resultValueType == ResultValueType.LongValue)
   }
 
   "DistinctRecordCount" should "fail requirements when no control columns given" in {
@@ -183,7 +183,7 @@ class AtumMeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase
     val result = distinctCount.function(df)
 
     assert(result.resultValue == "4")
-    assert(result.resultValueType == ResultValueType.BigDecimal)
+    assert(result.resultValueType == ResultValueType.BigDecimalValue)
   }
 }
diff --git a/agent/src/test/scala/za/co/absa/atum/agent/model/MeasureUnitTests.scala b/agent/src/test/scala/za/co/absa/atum/agent/model/MeasureUnitTests.scala
index aef1a7fe6..d96d0ac1e 100644
--- a/agent/src/test/scala/za/co/absa/atum/agent/model/MeasureUnitTests.scala
+++ b/agent/src/test/scala/za/co/absa/atum/agent/model/MeasureUnitTests.scala
@@ -21,9 +21,9 @@ import org.scalatest.matchers.should.Matchers
 import za.co.absa.atum.agent.AtumAgent
 import za.co.absa.atum.agent.AtumContext.AtumPartitions
 import za.co.absa.atum.agent.model.AtumMeasure.{AbsSumOfValuesOfColumn, RecordCount, SumOfHashesOfColumn, SumOfValuesOfColumn}
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
 import za.co.absa.spark.commons.test.SparkTestBase
 import za.co.absa.atum.agent.AtumContext._
+import za.co.absa.atum.model.ResultValueType
 
 class MeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase { self =>
 
@@ -92,17 +92,17 @@ class MeasureUnitTests extends AnyFlatSpec with Matchers with SparkTestBase { se
 
     // Assertions
     assert(dfPersonCntResult.resultValue == "1000")
-    assert(dfPersonCntResult.resultValueType == ResultValueType.Long)
+    assert(dfPersonCntResult.resultValueType == ResultValueType.LongValue)
     assert(dfFullCntResult.resultValue == "1000")
-    assert(dfFullCntResult.resultValueType == ResultValueType.Long)
+    assert(dfFullCntResult.resultValueType == ResultValueType.LongValue)
     assert(dfFullSalaryAbsSumResult.resultValue == "2987144")
-    assert(dfFullSalaryAbsSumResult.resultValueType == ResultValueType.BigDecimal)
+    assert(dfFullSalaryAbsSumResult.resultValueType == ResultValueType.BigDecimalValue)
     assert(dfFullHashResult.resultValue == "2044144307532")
-    assert(dfFullHashResult.resultValueType == ResultValueType.String)
+    assert(dfFullHashResult.resultValueType == ResultValueType.StringValue)
     assert(dfExtraPersonSalarySumResult.resultValue == "2986144")
-    assert(dfExtraPersonSalarySumResult.resultValueType == ResultValueType.BigDecimal)
+    assert(dfExtraPersonSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
     assert(dfFullSalarySumResult.resultValue == "2987144")
-    assert(dfFullSalarySumResult.resultValueType == ResultValueType.BigDecimal)
+    assert(dfFullSalarySumResult.resultValueType == ResultValueType.BigDecimalValue)
   }
 }
Seq("col")) val expectedMeasureResultDTO = MeasureResultDTO( - TypedValue("1", ResultValueType.BigDecimal) + TypedValue("1", ResultValueType.BigDecimalValue) ) assert(measurementDTO.measure == expectedMeasureDTO) @@ -88,25 +89,25 @@ class MeasurementBuilderUnitTests extends AnyFlatSpec { "buildAndValidateMeasurementsDTO" should "build Seq[MeasurementDTO] for multiple measures, all unique" in { val measurements: Map[Measure, MeasureResult] = Map( - DistinctRecordCount(Seq("col")) -> MeasureResult("1", ResultValueType.Long), + DistinctRecordCount(Seq("col")) -> MeasureResult("1", ResultValueType.LongValue), SumOfValuesOfColumn("col1") -> MeasureResult(BigDecimal(1.2)), SumOfValuesOfColumn("col2") -> MeasureResult(BigDecimal(1.3)), - UnknownMeasure("unknownMeasure", Seq("col"), ResultValueType.BigDecimal) -> MeasureResult(BigDecimal(1.1)) + UnknownMeasure("unknownMeasure", Seq("col"), ResultValueType.BigDecimalValue) -> MeasureResult(BigDecimal(1.1)) ) val measurementDTOs = MeasurementBuilder.buildAndValidateMeasurementsDTO(measurements) val expectedMeasurementDTO = Set( MeasurementDTO( - MeasureDTO("distinctCount", Seq("col")), MeasureResultDTO(TypedValue("1", ResultValueType.Long)) + MeasureDTO("distinctCount", Seq("col")), MeasureResultDTO(TypedValue("1", ResultValueType.LongValue)) ), MeasurementDTO( - MeasureDTO("aggregatedTotal", Seq("col1")), MeasureResultDTO(TypedValue("1.2", ResultValueType.BigDecimal)) + MeasureDTO("aggregatedTotal", Seq("col1")), MeasureResultDTO(TypedValue("1.2", ResultValueType.BigDecimalValue)) ), MeasurementDTO( - MeasureDTO("aggregatedTotal", Seq("col2")), MeasureResultDTO(TypedValue("1.3", ResultValueType.BigDecimal)) + MeasureDTO("aggregatedTotal", Seq("col2")), MeasureResultDTO(TypedValue("1.3", ResultValueType.BigDecimalValue)) ), MeasurementDTO( - MeasureDTO("unknownMeasure", Seq("col")), MeasureResultDTO(TypedValue("1.1", ResultValueType.BigDecimal)) + MeasureDTO("unknownMeasure", Seq("col")), MeasureResultDTO(TypedValue("1.1", ResultValueType.BigDecimalValue)) ) ) @@ -149,7 +150,7 @@ class MeasurementBuilderUnitTests extends AnyFlatSpec { val measure = SumOfValuesOfColumn("col") assertThrows[MeasurementException]( - MeasurementBuilder.buildAndValidateMeasurementsDTO(Map(measure -> MeasureResult("stringValue", ResultValueType.String))) + MeasurementBuilder.buildAndValidateMeasurementsDTO(Map(measure -> MeasureResult("string", ResultValueType.StringValue))) ) } } diff --git a/build.sbt b/build.sbt index 9d7686d0e..14f3a0bf1 100644 --- a/build.sbt +++ b/build.sbt @@ -104,4 +104,3 @@ lazy val database = (projectMatrix in file("database")) ): _* ) .addSingleScalaBuild(Setup.serverAndDbScalaVersion, Dependencies.databaseDependencies) - diff --git a/model/src/main/scala/za/co/absa/atum/model/ResultValueType.scala b/model/src/main/scala/za/co/absa/atum/model/ResultValueType.scala new file mode 100644 index 000000000..f4c2edf70 --- /dev/null +++ b/model/src/main/scala/za/co/absa/atum/model/ResultValueType.scala @@ -0,0 +1,43 @@ +/* + * Copyright 2021 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
diff --git a/model/src/main/scala/za/co/absa/atum/model/ResultValueType.scala b/model/src/main/scala/za/co/absa/atum/model/ResultValueType.scala
new file mode 100644
index 000000000..f4c2edf70
--- /dev/null
+++ b/model/src/main/scala/za/co/absa/atum/model/ResultValueType.scala
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2021 ABSA Group Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package za.co.absa.atum.model
+
+import io.circe.{Decoder, Encoder}
+
+sealed trait ResultValueType
+
+object ResultValueType {
+  case object StringValue extends ResultValueType
+  case object LongValue extends ResultValueType
+  case object BigDecimalValue extends ResultValueType
+  case object DoubleValue extends ResultValueType
+
+  implicit val encodeResultValueType: Encoder[ResultValueType] = Encoder.encodeString.contramap {
+    case ResultValueType.StringValue => "String"
+    case ResultValueType.LongValue => "Long"
+    case ResultValueType.BigDecimalValue => "BigDecimal"
+    case ResultValueType.DoubleValue => "Double"
+  }
+
+  implicit val decodeResultValueType: Decoder[ResultValueType] = Decoder.decodeString.emap {
+    case "String" => Right(ResultValueType.StringValue)
+    case "Long" => Right(ResultValueType.LongValue)
+    case "BigDecimal" => Right(ResultValueType.BigDecimalValue)
+    case "Double" => Right(ResultValueType.DoubleValue)
+    case other => Left(s"Cannot decode $other as ResultValueType")
+  }
+}
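The case objects are renamed relative to the old MeasureResultDTO.ResultValueType (String → StringValue, etc.), but the encoder/decoder pair above still maps them to the original wire strings, so existing JSON stays readable. A round-trip check:

```scala
import io.circe.parser.decode
import io.circe.syntax._
import za.co.absa.atum.model.ResultValueType

// The JSON representation is unchanged by the rename.
val rvt: ResultValueType = ResultValueType.LongValue
assert(rvt.asJson.noSpaces == "\"Long\"")
assert(decode[ResultValueType]("\"BigDecimal\"") == Right(ResultValueType.BigDecimalValue))
// An unknown tag is a decoding failure (Left), not a MatchError as in the
// old json4s custom serializer.
assert(decode[ResultValueType]("\"Float\"").isLeft)
```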
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataSubmitDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataSubmitDTO.scala
index 5beb3aaa1..529d39a5a 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataSubmitDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/AdditionalDataSubmitDTO.scala
@@ -16,8 +16,17 @@
 
 package za.co.absa.atum.model.dto
 
+
+import io.circe.generic.semiauto._
+import io.circe._
+
 case class AdditionalDataSubmitDTO (
   partitioning: PartitioningDTO,
   additionalData: AdditionalDataDTO,
   author: String
 )
+
+object AdditionalDataSubmitDTO {
+  implicit val decodeAdditionalDataSubmitDTO: Decoder[AdditionalDataSubmitDTO] = deriveDecoder[AdditionalDataSubmitDTO]
+  implicit val encodeAdditionalDataSubmitDTO: Encoder[AdditionalDataSubmitDTO] = deriveEncoder[AdditionalDataSubmitDTO]
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/AtumContextDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/AtumContextDTO.scala
index 360986636..486869d43 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/AtumContextDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/AtumContextDTO.scala
@@ -16,8 +16,17 @@
 
 package za.co.absa.atum.model.dto
 
+
+import io.circe.generic.semiauto._
+import io.circe._
+
 case class AtumContextDTO(
   partitioning: PartitioningDTO,
   measures: Set[MeasureDTO] = Set.empty,
   additionalData: AdditionalDataDTO = Map.empty
 )
+
+object AtumContextDTO {
+  implicit val decodeAtumContextDTO: Decoder[AtumContextDTO] = deriveDecoder
+  implicit val encodeAtumContextDTO: Encoder[AtumContextDTO] = deriveEncoder
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointDTO.scala
index b833e128f..5b5c0daa4 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointDTO.scala
@@ -16,6 +16,9 @@
 
 package za.co.absa.atum.model.dto
 
+import io.circe.generic.semiauto._
+import io.circe._
+
 import java.time.ZonedDateTime
 import java.util.UUID
 
@@ -29,3 +32,8 @@ case class CheckpointDTO(
   processEndTime: Option[ZonedDateTime],
   measurements: Set[MeasurementDTO]
 )
+
+object CheckpointDTO {
+  implicit val decodeCheckpointDTO: Decoder[CheckpointDTO] = deriveDecoder
+  implicit val encodeCheckpointDTO: Encoder[CheckpointDTO] = deriveEncoder
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointQueryDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointQueryDTO.scala
index d47ec5536..6c2d2f95a 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointQueryDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/CheckpointQueryDTO.scala
@@ -16,8 +16,16 @@
 
 package za.co.absa.atum.model.dto
 
+import io.circe.generic.semiauto._
+import io.circe._
+
 case class CheckpointQueryDTO(
   partitioning: PartitioningDTO,
   limit: Option[Int],
   checkpointName: Option[String]
 )
+
+object CheckpointQueryDTO {
+  implicit val decodeCheckpointQueryDTO: Decoder[CheckpointQueryDTO] = deriveDecoder
+  implicit val encodeCheckpointQueryDTO: Encoder[CheckpointQueryDTO] = deriveEncoder
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/MeasureDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/MeasureDTO.scala
index 89ce8d018..00220c37f 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/MeasureDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/MeasureDTO.scala
@@ -16,7 +16,15 @@
 
 package za.co.absa.atum.model.dto
 
+import io.circe._
+import io.circe.generic.semiauto._
+
 case class MeasureDTO(
   measureName: String,
   measuredColumns: Seq[String]
 )
+
+object MeasureDTO {
+  implicit val decodeMeasureDTO: Decoder[MeasureDTO] = deriveDecoder[MeasureDTO]
+  implicit val encoderMeasureDTO: Encoder[MeasureDTO] = deriveEncoder[MeasureDTO]
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/MeasureResultDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/MeasureResultDTO.scala
index 61b26cac6..29ad02001 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/MeasureResultDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/MeasureResultDTO.scala
@@ -16,7 +16,9 @@
 
 package za.co.absa.atum.model.dto
 
-import io.circe.{Decoder, Encoder}
+import io.circe._
+import io.circe.generic.semiauto._
+import za.co.absa.atum.model.ResultValueType
 
 case class MeasureResultDTO(
   mainValue: MeasureResultDTO.TypedValue,
@@ -29,38 +31,11 @@ object MeasureResultDTO {
     valueType: ResultValueType
   )
 
-  sealed trait ResultValueType
-
-  object ResultValueType {
-    case object String extends ResultValueType
-    case object Long extends ResultValueType
-    case object BigDecimal extends ResultValueType
-    case object Double extends ResultValueType
+  object TypedValue {
+    implicit val encodeTypedValue: Encoder[TypedValue] = deriveEncoder
+    implicit val decodeTypedValue: Decoder[TypedValue] = deriveDecoder
   }
-
-  implicit val encodeResultValueType: Encoder[MeasureResultDTO.ResultValueType] = Encoder.encodeString.contramap {
-    case MeasureResultDTO.ResultValueType.String => "String"
-    case MeasureResultDTO.ResultValueType.Long => "Long"
-    case MeasureResultDTO.ResultValueType.BigDecimal => "BigDecimal"
-    case MeasureResultDTO.ResultValueType.Double => "Double"
-  }
-
-  implicit val decodeResultValueType: Decoder[MeasureResultDTO.ResultValueType] = Decoder.decodeString.emap {
-    case "String" => Right(MeasureResultDTO.ResultValueType.String)
-    case "Long" => Right(MeasureResultDTO.ResultValueType.Long)
-    case "BigDecimal" => Right(MeasureResultDTO.ResultValueType.BigDecimal)
-    case "Double" => Right(MeasureResultDTO.ResultValueType.Double)
-    case other => Left(s"Cannot decode $other as ResultValueType")
-  }
-
-  implicit val encodeTypedValue: Encoder[MeasureResultDTO.TypedValue] =
-    Encoder.forProduct2("value", "valueType")(tv => (tv.value, tv.valueType))
-
-  implicit val decodeTypedValue: Decoder[MeasureResultDTO.TypedValue] =
-    Decoder.forProduct2("value", "valueType")(MeasureResultDTO.TypedValue.apply)
-
-  implicit val decodeMeasureResultDTO: Decoder[MeasureResultDTO] =
-    Decoder.forProduct2("mainValue", "supportValues")(MeasureResultDTO.apply)
-
+  implicit val encodeMeasureResultDTO: Encoder[MeasureResultDTO] = deriveEncoder
+  implicit val decodeMeasureResultDTO: Decoder[MeasureResultDTO] = deriveDecoder
 }
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/MeasurementDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/MeasurementDTO.scala
index d216c483c..d087c53f1 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/MeasurementDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/MeasurementDTO.scala
@@ -16,7 +16,15 @@
 
 package za.co.absa.atum.model.dto
 
+import io.circe.generic.semiauto._
+import io.circe._
+
 case class MeasurementDTO(
   measure: MeasureDTO,
   result: MeasureResultDTO
 )
+
+object MeasurementDTO {
+  implicit val decodeMeasurementDTO: Decoder[MeasurementDTO] = deriveDecoder[MeasurementDTO]
+  implicit val encodeMeasurementDTO: Encoder[MeasurementDTO] = deriveEncoder[MeasurementDTO]
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/PartitionDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/PartitionDTO.scala
index 4b19b20df..053c1e1c2 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/PartitionDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/PartitionDTO.scala
@@ -16,7 +16,15 @@
 
 package za.co.absa.atum.model.dto
 
+import io.circe.generic.semiauto._
+import io.circe._
+
 case class PartitionDTO(
   key: String,
   value: String
 )
+
+object PartitionDTO {
+  implicit val decodePartitionDTO: Decoder[PartitionDTO] = deriveDecoder
+  implicit val encodePartitionDTO: Encoder[PartitionDTO] = deriveEncoder
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/PartitioningSubmitDTO.scala b/model/src/main/scala/za/co/absa/atum/model/dto/PartitioningSubmitDTO.scala
index 6599273b5..ad4fce6e0 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/PartitioningSubmitDTO.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/PartitioningSubmitDTO.scala
@@ -16,8 +16,16 @@
 
 package za.co.absa.atum.model.dto
 
+import io.circe.generic.semiauto._
+import io.circe._
+
 case class PartitioningSubmitDTO (
   partitioning: PartitioningDTO,
   parentPartitioning: Option[PartitioningDTO],
   authorIfNew: String
 )
+
+object PartitioningSubmitDTO {
+  implicit val decodePartitioningSubmitDTO: Decoder[PartitioningSubmitDTO] = deriveDecoder
+  implicit val encodePartitioningSubmitDTO: Encoder[PartitioningSubmitDTO] = deriveEncoder
+}
diff --git a/model/src/main/scala/za/co/absa/atum/model/dto/package.scala b/model/src/main/scala/za/co/absa/atum/model/dto/package.scala
index 03a475e8e..bca1a6c77 100644
--- a/model/src/main/scala/za/co/absa/atum/model/dto/package.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/dto/package.scala
@@ -16,7 +16,16 @@
 
 package za.co.absa.atum.model
 
+
+import io.circe.generic.semiauto._
+import io.circe._
+
 package object dto {
   type PartitioningDTO = Seq[PartitionDTO]
   type AdditionalDataDTO = Map[String, Option[String]]
+
+  // Implicit encoders and decoders for AdditionalDataDTO
+  implicit val decodeAdditionalDataDTO: Decoder[AdditionalDataDTO] = Decoder.decodeMap[String, Option[String]]
+  implicit val encodeAdditionalDataDTO: Encoder[AdditionalDataDTO] = Encoder.encodeMap[String, Option[String]]
+
 }
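Replacing the hand-written forProduct codecs with semiauto derivation keeps the field layout, but two wire-format effects of the derived encoders are visible in the updated tests further down: None becomes an explicit null, both for Option fields and for None values inside an AdditionalDataDTO map. A sketch:

```scala
import za.co.absa.atum.model.dto._
import za.co.absa.atum.model.utils.JsonSyntaxExtensions._

// deriveEncoder writes None as null rather than omitting the field,
// hence the new expectation "parentPartitioning":null in the tests.
val submit = PartitioningSubmitDTO(Seq(PartitionDTO("key", "val")), None, "authorTest")
assert(submit.asJsonString ==
  """{"partitioning":[{"key":"key","value":"val"}],"parentPartitioning":null,"authorIfNew":"authorTest"}""")

// Encoder.encodeMap likewise keeps None-valued keys as nulls ("key3":null).
val ad: AdditionalDataDTO = Map("key1" -> Some("val1"), "key3" -> None)
assert(ad.asJsonString == """{"key1":"val1","key3":null}""")
```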
diff --git a/model/src/main/scala/za/co/absa/atum/model/utils/OptionImplicits.scala b/model/src/main/scala/za/co/absa/atum/model/utils/JsonSyntaxExtensions.scala
similarity index 55%
rename from model/src/main/scala/za/co/absa/atum/model/utils/OptionImplicits.scala
rename to model/src/main/scala/za/co/absa/atum/model/utils/JsonSyntaxExtensions.scala
index 8591ffcb1..4a95546a9 100644
--- a/model/src/main/scala/za/co/absa/atum/model/utils/OptionImplicits.scala
+++ b/model/src/main/scala/za/co/absa/atum/model/utils/JsonSyntaxExtensions.scala
@@ -16,16 +16,23 @@
 
 package za.co.absa.atum.model.utils
 
-object OptionImplicits {
-  implicit class OptionEnhancements[T](val option: Option[T]) extends AnyVal {
-    /**
-     * Gets the `option` value or throws the provided exception
-     *
-     * @param exception the exception to throw in case the `option` is None
-     * @return
-     */
-    def getOrThrow(exception: => Throwable): T = {
-      option.getOrElse(throw exception)
+import io.circe.parser.decode
+import io.circe.syntax._
+import io.circe.{Decoder, Encoder}
+
+object JsonSyntaxExtensions {
+
+  implicit class JsonSerializationSyntax[T: Encoder](obj: T) {
+    def asJsonString: String = obj.asJson.noSpaces
   }
-  }
+
+  implicit class JsonDeserializationSyntax(jsonStr: String) {
+    def as[T: Decoder]: T = {
+      decode[T](jsonStr) match {
+        case Right(value) => value
+        case Left(error) => throw new RuntimeException(s"Failed to decode JSON: $error")
+      }
+    }
+  }
+
 }
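The renamed file drops the unused getOrThrow helper and hosts the two syntax classes used throughout the patch. Note the error model: circe's decode returns Either[io.circe.Error, T], and as[T] converts a Left into a thrown RuntimeException so call sites keep an exception-based contract. A hypothetical non-throwing variant, for callers that would rather keep the Either (not part of this change):

```scala
import io.circe.{Decoder, Error}
import io.circe.parser.decode

// Illustrative only: expose the Either instead of throwing.
implicit class SafeJsonDeserializationSyntax(jsonStr: String) {
  def asSafe[T: Decoder]: Either[Error, T] = decode[T](jsonStr)
}
```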
diff --git a/model/src/main/scala/za/co/absa/atum/model/utils/SerializationUtils.scala b/model/src/main/scala/za/co/absa/atum/model/utils/SerializationUtils.scala
deleted file mode 100644
index fcd95568d..000000000
--- a/model/src/main/scala/za/co/absa/atum/model/utils/SerializationUtils.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2021 ABSA Group Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package za.co.absa.atum.model.utils
-
-import org.json4s.JsonAST.JString
-import org.json4s.jackson.Serialization
-import org.json4s.jackson.Serialization.{write, writePretty}
-import org.json4s.{CustomSerializer, Formats, JNull, NoTypeHints, ext}
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
-import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType._
-
-import java.time.format.DateTimeFormatter
-
-object SerializationUtils {
-
-  implicit private val formatsJson: Formats =
-    Serialization.formats(NoTypeHints).withBigDecimal +
-      ext.UUIDSerializer +
-      ZonedDateTimeSerializer +
-      ResultValueTypeSerializer
-
-  // TODO "yyyy-MM-dd'T'hh:mm:ss.SSS'Z'" OR TODO "yyyy-MM-dd HH:mm:ss.SSSSSSX"
-  val timestampFormat: DateTimeFormatter = DateTimeFormatter.ISO_ZONED_DATE_TIME
-
-  /**
-   * The method returns arbitrary object as a Json string.
-   *
-   * @return A string representing the object in Json format
-   */
-  def asJson[T <: AnyRef](obj: T): String = {
-    write[T](obj)
-  }
-
-  /**
-   * The method returns arbitrary object as a pretty Json string.
-   *
-   * @return A string representing the object in Json format
-   */
-  def asJsonPretty[T <: AnyRef](obj: T): String = {
-    writePretty[T](obj)
-  }
-
-  /**
-   * The method returns arbitrary object parsed from Json string.
-   *
-   * @return An object deserialized from the Json string
-   */
-  def fromJson[T <: AnyRef](jsonStr: String)(implicit m: Manifest[T]): T = {
-    Serialization.read[T](jsonStr)
-  }
-
-  private case object ResultValueTypeSerializer extends CustomSerializer[ResultValueType](format => (
-    {
-      case JString(resultValType) => resultValType match {
-        case "String" => String
-        case "Long" => Long
-        case "BigDecimal" => BigDecimal
-        case "Double" => Double
-      }
-      case JNull => null
-    },
-    {
-      case resultValType: ResultValueType => resultValType match {
-        case String => JString("String")
-        case Long => JString("Long")
-        case BigDecimal => JString("BigDecimal")
-        case Double => JString("Double")
-      }
-    }))
-
-}
diff --git a/model/src/main/scala/za/co/absa/atum/model/utils/ZonedDateTimeSerializer.scala b/model/src/main/scala/za/co/absa/atum/model/utils/ZonedDateTimeSerializer.scala
deleted file mode 100644
index 6fe76dd61..000000000
--- a/model/src/main/scala/za/co/absa/atum/model/utils/ZonedDateTimeSerializer.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2021 ABSA Group Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package za.co.absa.atum.model.utils
-
-import org.json4s.JsonAST.JString
-import org.json4s.{CustomSerializer, JNull}
-
-import java.time.ZonedDateTime
-
-case object ZonedDateTimeSerializer extends CustomSerializer[ZonedDateTime](_ => (
-  {
-    case JString(s) => ZonedDateTime.parse(s, SerializationUtils.timestampFormat)
-    case JNull => null
-  },
-  {
-    case d: ZonedDateTime => JString(SerializationUtils.timestampFormat.format(d))
-  }
-))
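This custom serializer is deletable because circe-core ships java.time codecs whose defaults match what json4s was configured with here: Encoder[ZonedDateTime] and Decoder[ZonedDateTime] use DateTimeFormatter.ISO_ZONED_DATE_TIME out of the box. A quick sanity check, assuming the default circe instances:

```scala
import java.time.{ZoneId, ZonedDateTime}
import io.circe.parser.decode
import io.circe.syntax._

val ts = ZonedDateTime.of(2023, 10, 24, 10, 20, 59, 5000000, ZoneId.of("CET"))
// Encoded as an ISO_ZONED_DATE_TIME string and decoded back to an equal value.
assert(decode[ZonedDateTime](ts.asJson.noSpaces) == Right(ts))
```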
should "deserialize empty AdditionalDataDTO from json string" in { - val additionalDataDTOJsonString = """{"partitioning":[],"additionalData":{},"author":"testAuthor"}""" + val additionalDataDTOJsonString = + """ + |{"partitioning":[],"additionalData":{},"author":"testAuthor"} + |""".stripMargin val expectedAdditionalDataDTO = AdditionalDataSubmitDTO(Seq.empty, Map.empty, "testAuthor") - val actualAdditionalDataDTO = SerializationUtils.fromJson[AdditionalDataSubmitDTO](additionalDataDTOJsonString) + val actualAdditionalDataDTO = additionalDataDTOJsonString.as[AdditionalDataSubmitDTO] assert(actualAdditionalDataDTO == expectedAdditionalDataDTO) } // AtumContextDTO - "asJson" should "serialize AtumContextDTO into json string" in { + "asJsonString" should "serialize AtumContextDTO into json string" in { val seqPartitionDTO = Seq(PartitionDTO("key", "val")) val seqMeasureDTO = Set(MeasureDTO("count", Seq("col"))) @@ -103,7 +108,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { |"measures":[{"measureName":"count","measuredColumns":["col"]}], |"additionalData":{} |}""".linearize - val actualAdditionalDataJson = SerializationUtils.asJson(atumContextDTO) + val actualAdditionalDataJson = atumContextDTO.asJsonString assert(actualAdditionalDataJson == expectedAdditionalDataJson) } @@ -115,25 +120,25 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { |"partitioning":[{"key":"key","value":"val"}], |"measures":[{"measureName":"count","measuredColumns":["col"]}], |"additionalData":{} - |}""".linearize + |}""".stripMargin val seqPartitionDTO = Seq(PartitionDTO("key", "val")) val seqMeasureDTO = Set(MeasureDTO("count", Seq("col"))) val expectedAtumContextDTO = AtumContextDTO(partitioning = seqPartitionDTO, measures = seqMeasureDTO) - val actualAtumContextDTO = SerializationUtils.fromJson[AtumContextDTO](atumContextDTOJson) + val actualAtumContextDTO = atumContextDTOJson.as[AtumContextDTO] assert(actualAtumContextDTO == expectedAtumContextDTO) } - "asJson" should "serialize AtumContextDTO without measures into json string" in { + "asJsonString" should "serialize AtumContextDTO without measures into json string" in { val seqPartitionDTO = Seq(PartitionDTO("key", "val")) val atumContextDTO = AtumContextDTO(partitioning = seqPartitionDTO) val expectedAdditionalDataJson = """{"partitioning":[{"key":"key","value":"val"}],"measures":[],"additionalData":{}}""" - val actualAdditionalDataJson = SerializationUtils.asJson(atumContextDTO) + val actualAdditionalDataJson = atumContextDTO.asJsonString assert(actualAdditionalDataJson == expectedAdditionalDataJson) } @@ -144,13 +149,13 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { val expectedSeqPartitionDTO = Seq(PartitionDTO("key", "val")) val expectedAtumContextDTO = AtumContextDTO(partitioning = expectedSeqPartitionDTO) - val actualAtumContextDTO = SerializationUtils.fromJson[AtumContextDTO](atumContextDTOJson) + val actualAtumContextDTO = atumContextDTOJson.as[AtumContextDTO] assert(actualAtumContextDTO == expectedAtumContextDTO) } // CheckpointDTO - "asJson" should "serialize CheckpointDTO into json string" in { + "asJsonString" should "serialize CheckpointDTO into json string" in { val uuid = UUID.randomUUID() val seqPartitionDTO = Seq(PartitionDTO("key", "val")) val timeWithZone = ZonedDateTime.of(2023, 10, 24, 10, 20, 59, 5000000, ZoneId.of("CET")) @@ -158,7 +163,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { val setMeasurementDTO = Set( MeasurementDTO( measure = MeasureDTO("count", Seq("col")), result = 
@@ -103,7 +108,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
        |"measures":[{"measureName":"count","measuredColumns":["col"]}],
        |"additionalData":{}
        |}""".linearize
-    val actualAdditionalDataJson = SerializationUtils.asJson(atumContextDTO)
+    val actualAdditionalDataJson = atumContextDTO.asJsonString
 
     assert(actualAdditionalDataJson == expectedAdditionalDataJson)
   }
@@ -115,25 +120,25 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
        |"partitioning":[{"key":"key","value":"val"}],
        |"measures":[{"measureName":"count","measuredColumns":["col"]}],
        |"additionalData":{}
-       |}""".linearize
+       |}""".stripMargin
 
     val seqPartitionDTO = Seq(PartitionDTO("key", "val"))
     val seqMeasureDTO = Set(MeasureDTO("count", Seq("col")))
 
     val expectedAtumContextDTO = AtumContextDTO(partitioning = seqPartitionDTO, measures = seqMeasureDTO)
 
-    val actualAtumContextDTO = SerializationUtils.fromJson[AtumContextDTO](atumContextDTOJson)
+    val actualAtumContextDTO = atumContextDTOJson.as[AtumContextDTO]
 
     assert(actualAtumContextDTO == expectedAtumContextDTO)
   }
 
-  "asJson" should "serialize AtumContextDTO without measures into json string" in {
+  "asJsonString" should "serialize AtumContextDTO without measures into json string" in {
     val seqPartitionDTO = Seq(PartitionDTO("key", "val"))
 
     val atumContextDTO = AtumContextDTO(partitioning = seqPartitionDTO)
 
     val expectedAdditionalDataJson = """{"partitioning":[{"key":"key","value":"val"}],"measures":[],"additionalData":{}}"""
-    val actualAdditionalDataJson = SerializationUtils.asJson(atumContextDTO)
+    val actualAdditionalDataJson = atumContextDTO.asJsonString
 
     assert(actualAdditionalDataJson == expectedAdditionalDataJson)
   }
@@ -144,13 +149,13 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
     val expectedSeqPartitionDTO = Seq(PartitionDTO("key", "val"))
     val expectedAtumContextDTO = AtumContextDTO(partitioning = expectedSeqPartitionDTO)
 
-    val actualAtumContextDTO = SerializationUtils.fromJson[AtumContextDTO](atumContextDTOJson)
+    val actualAtumContextDTO = atumContextDTOJson.as[AtumContextDTO]
 
     assert(actualAtumContextDTO == expectedAtumContextDTO)
   }
 
   // CheckpointDTO
-  "asJson" should "serialize CheckpointDTO into json string" in {
+  "asJsonString" should "serialize CheckpointDTO into json string" in {
     val uuid = UUID.randomUUID()
     val seqPartitionDTO = Seq(PartitionDTO("key", "val"))
     val timeWithZone = ZonedDateTime.of(2023, 10, 24, 10, 20, 59, 5000000, ZoneId.of("CET"))
@@ -158,7 +163,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
     val setMeasurementDTO = Set(
       MeasurementDTO(
         measure = MeasureDTO("count", Seq("col")), result = MeasureResultDTO(
-          mainValue = TypedValue("1", ResultValueType.Long)
+          mainValue = TypedValue("1", ResultValueType.LongValue)
         )
       )
     )
@@ -188,7 +193,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
        |"result":{"mainValue":{"value":"1","valueType":"Long"},"supportValues":{}}}]
        |}
        |""".linearize
-    val actualCheckpointDTOJson = SerializationUtils.asJson(checkpointDTO)
+    val actualCheckpointDTOJson = checkpointDTO.asJsonString
 
     assert(actualCheckpointDTOJson == expectedCheckpointDTOJson)
   }
@@ -211,13 +216,13 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
        |"measurements":[{"measure":{"measureName":"count","measuredColumns":["col"]},
        |"result":{"mainValue":{"value":"1","valueType":"Long"},"supportValues":{}}}]
        |}
-       |""".linearize
+       |""".stripMargin
 
     val setMeasurementDTO = Set(
       MeasurementDTO(
         measure = MeasureDTO("count", Seq("col")), result = MeasureResultDTO(
-          mainValue = TypedValue("1", ResultValueType.Long)
+          mainValue = TypedValue("1", ResultValueType.LongValue)
         )
       )
     )
@@ -233,17 +238,17 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
       measurements = setMeasurementDTO
     )
 
-    val actualCheckpointDTO = SerializationUtils.fromJson[CheckpointDTO](checkpointDTOJson)
+    val actualCheckpointDTO = checkpointDTOJson.as[CheckpointDTO]
 
     assert(actualCheckpointDTO == expectedCheckpointDTO)
   }
 
   // MeasureDTO
-  "asJson" should "serialize MeasureDTO into json string" in {
+  "asJsonString" should "serialize MeasureDTO into json string" in {
     val measureDTO = MeasureDTO("count", Seq("col"))
 
     val expectedMeasureDTOJson = """{"measureName":"count","measuredColumns":["col"]}"""
-    val actualMeasureDTOJson = SerializationUtils.asJson(measureDTO)
+    val actualMeasureDTOJson = measureDTO.asJsonString
 
     assert(actualMeasureDTOJson == expectedMeasureDTOJson)
   }
@@ -252,15 +257,15 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
     val measureDTOJson = """{"measureName":"count","measuredColumns":["col"]}"""
 
     val expectedMeasureDTO = MeasureDTO("count", Seq("col"))
-    val actualMeasureDTO = SerializationUtils.fromJson[MeasureDTO](measureDTOJson)
+    val actualMeasureDTO = measureDTOJson.as[MeasureDTO]
 
     assert(actualMeasureDTO == expectedMeasureDTO)
   }
 
   // MeasurementDTO
-  "asJson" should "serialize MeasurementDTO into json string" in {
+  "asJsonString" should "serialize MeasurementDTO into json string" in {
     val measureDTO = MeasureDTO("count", Seq("col"))
-    val measureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.Long))
+    val measureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.LongValue))
 
     val measurementDTO = MeasurementDTO(measureDTO, measureResultDTO)
 
@@ -272,7 +277,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
        |"supportValues":{}}
        |}
        |""".linearize
-    val actualMeasurementDTOJson = SerializationUtils.asJson(measurementDTO)
+    val actualMeasurementDTOJson = measurementDTO.asJsonString
 
     assert(actualMeasurementDTOJson == expectedMeasurementDTOJson)
   }
@@ -287,20 +292,20 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
        |""".stripMargin
 
     val measureDTO = MeasureDTO("count", Seq("col"))
-    val measureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.Long))
+    val measureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.LongValue))
 
     val expectedMeasurementDTO = MeasurementDTO(measureDTO, measureResultDTO)
 
-    val actualMeasurementDTO = SerializationUtils.fromJson[MeasurementDTO](measurementDTOJson)
+    val actualMeasurementDTO = measurementDTOJson.as[MeasurementDTO]
 
     assert(actualMeasurementDTO == expectedMeasurementDTO)
   }
 
   // MeasureResultDTO
-  "asJson" should "serialize MeasureResultDTO into json string" in {
-    val measureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.Long))
+  "asJsonString" should "serialize MeasureResultDTO into json string" in {
+    val measureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.LongValue))
 
     val expectedMeasureResultDTOJson = """{"mainValue":{"value":"1","valueType":"Long"},"supportValues":{}}"""
-    val actualMeasureResultDTOJson = SerializationUtils.asJson(measureResultDTO)
+    val actualMeasureResultDTOJson = measureResultDTO.asJsonString
 
     assert(actualMeasureResultDTOJson == expectedMeasureResultDTOJson)
   }
@@ -308,18 +313,18 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
   "fromJson" should "deserialize MeasureResultDTO from json string" in {
     val measureResultDTOJson = """{"mainValue":{"value":"1","valueType":"Long"},"supportValues":{}}"""
 
-    val expectedMeasureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.Long))
-    val actualMeasureResultDTO = SerializationUtils.fromJson[MeasureResultDTO](measureResultDTOJson)
+    val expectedMeasureResultDTO = MeasureResultDTO(mainValue = TypedValue("1", ResultValueType.LongValue))
+    val actualMeasureResultDTO = measureResultDTOJson.as[MeasureResultDTO]
 
     assert(actualMeasureResultDTO == expectedMeasureResultDTO)
   }
 
   // PartitionDTO
-  "asJson" should "serialize PartitionDTO into json string" in {
+  "asJsonString" should "serialize PartitionDTO into json string" in {
     val partitionDTO = PartitionDTO("key", "val")
 
     val expectedPartitionDTOJson = """{"key":"key","value":"val"}"""
-    val actualPartitionDTOJson = SerializationUtils.asJson(partitionDTO)
+    val actualPartitionDTOJson = partitionDTO.asJsonString
 
     assert(actualPartitionDTOJson == expectedPartitionDTOJson)
   }
@@ -328,13 +333,13 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
     val partitionDTOJson = """{"key":"key","value":"val"}"""
 
     val expectedPartitionDTO = PartitionDTO("key", "val")
-    val actualPartitionDTO = SerializationUtils.fromJson[PartitionDTO](partitionDTOJson)
+    val actualPartitionDTO = partitionDTOJson.as[PartitionDTO]
 
     assert(actualPartitionDTO == expectedPartitionDTO)
   }
 
   // PartitioningDTO
-  "asJson" should "serialize PartitioningDTO into json string" in {
+  "asJsonString" should "serialize PartitioningDTO into json string" in {
     val partitionDTO = PartitionDTO("key", "val")
 
     val partitioningDTO = PartitioningSubmitDTO(
@@ -343,8 +348,8 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
       authorIfNew = "authorTest"
     )
 
-    val expectedPartitioningDTOJson = """{"partitioning":[{"key":"key","value":"val"}],"authorIfNew":"authorTest"}"""
-    val actualPartitioningDTOJson = SerializationUtils.asJson(partitioningDTO)
+    val expectedPartitioningDTOJson = """{"partitioning":[{"key":"key","value":"val"}],"parentPartitioning":null,"authorIfNew":"authorTest"}"""
+    val actualPartitioningDTOJson = partitioningDTO.asJsonString
 
     assert(actualPartitioningDTOJson == expectedPartitioningDTOJson)
   }
@@ -359,12 +364,12 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
       authorIfNew = "authorTest"
     )
 
-    val actualPartitioningDTO = SerializationUtils.fromJson[PartitioningSubmitDTO](partitioningDTOJson)
+    val actualPartitioningDTO = partitioningDTOJson.as[PartitioningSubmitDTO]
 
     assert(actualPartitioningDTO == expectedPartitioningDTO)
   }
 
-  "asJson" 
should "serialize PartitioningDTO with parent partitioning into json string" in { val partitionDTO = PartitionDTO("key", "val") val parentPartitionDTO = PartitionDTO("parentKey", "parentVal") @@ -382,13 +387,12 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { |"authorIfNew":"authorTest" |} |""".linearize - val actualPartitioningDTOJson = SerializationUtils.asJson(partitioningDTO) + val actualPartitioningDTOJson = partitioningDTO.asJsonString assert(actualPartitioningDTOJson == expectedPartitioningDTOJson) } - - "asJson" should "serialize Seq[PartitionDTO] into json string" in { + "asJsonString" should "serialize Seq[PartitionDTO] into json string" in { val partitionDTO = Seq( PartitionDTO("key1", "val1"), PartitionDTO("key2", "val2"), @@ -396,7 +400,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { ) val expectedPartitionDTOJson = """[{"key":"key1","value":"val1"},{"key":"key2","value":"val2"},{"key":"key3","value":"val3"}]""" - val actualPartitionDTOJson = SerializationUtils.asJson(partitionDTO) + val actualPartitionDTOJson = partitionDTO.asJsonString assert(actualPartitionDTOJson == expectedPartitionDTOJson) } @@ -409,7 +413,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { |"parentPartitioning":[{"key":"parentKey","value":"parentVal"}], |"authorIfNew":"authorTest" |} - |""".linearize + |""".stripMargin val expectedPartitionDTO = PartitionDTO("key", "val") val expectedParentPartitionDTO = PartitionDTO("parentKey", "parentVal") @@ -419,7 +423,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike { authorIfNew = "authorTest" ) - val actualPartitioningDTO = SerializationUtils.fromJson[PartitioningSubmitDTO](partitioningDTOJson) + val actualPartitioningDTO = partitioningDTOJson.as[PartitioningSubmitDTO] assert(actualPartitioningDTO == expectedPartitioningDTO) } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index ee4790e6a..0a003bdb7 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -27,9 +27,6 @@ object Dependencies { val scalaLangJava8Compat = "1.0.2" val balta = "0.1.0" - val jacksonModuleScala = "2.14.2" - val circeVersion = "0.14.5" - val specs2 = "4.10.0" val typesafeConfig = "1.4.2" @@ -47,22 +44,6 @@ object Dependencies { val fadb = "0.4.1" - val json4s_spark2 = "3.5.3" - val json4s_spark3 = "3.7.0-M11" - def json4s(scalaVersion: Version): String = { - // TODO done this impractical way until https://github.com/AbsaOSS/commons/issues/134 - val maj2 = Component("2") - val min11 = Component("11") - val min12 = Component("12") - val min13 = Component("13") - scalaVersion.components match { - case Seq(`maj2`, `min11`, _) => json4s_spark2 - case Seq(`maj2`, `min12`, _) => json4s_spark3 - case Seq(`maj2`, `min13`, _) => json4s_spark3 - case _ => throw new IllegalArgumentException("Only Scala 2.11, 2.12, and 2.13 are currently supported.") - } - } - val logback = "1.2.3" val zio = "2.0.19" @@ -74,8 +55,8 @@ object Dependencies { val tapir = "1.9.6" val http4sBlazeBackend = "0.23.15" val http4sPrometheus = "0.23.6" - val playJson = "3.0.1" - val sttpPlayJson = "3.9.3" + val circeJson = "0.14.7" + val sttpCirceJson = "3.9.7" val awssdk = "2.23.15" @@ -106,27 +87,17 @@ object Dependencies { ) } - private def jsonSerdeDependencies(scalaVersion: Version): Seq[ModuleID] = { - val json4sVersion = Versions.json4s(scalaVersion) + private def jsonSerdeDependencies: Seq[ModuleID] = { - lazy val jacksonModuleScala = "com.fasterxml.jackson.module" %% "jackson-module-scala" % Versions.jacksonModuleScala - 
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index ee4790e6a..0a003bdb7 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -27,9 +27,6 @@ object Dependencies {
     val scalaLangJava8Compat = "1.0.2"
     val balta = "0.1.0"
 
-    val jacksonModuleScala = "2.14.2"
-    val circeVersion = "0.14.5"
-
     val specs2 = "4.10.0"
     val typesafeConfig = "1.4.2"
 
@@ -47,22 +44,6 @@ object Dependencies {
 
     val fadb = "0.4.1"
 
-    val json4s_spark2 = "3.5.3"
-    val json4s_spark3 = "3.7.0-M11"
-    def json4s(scalaVersion: Version): String = {
-      // TODO done this impractical way until https://github.com/AbsaOSS/commons/issues/134
-      val maj2 = Component("2")
-      val min11 = Component("11")
-      val min12 = Component("12")
-      val min13 = Component("13")
-      scalaVersion.components match {
-        case Seq(`maj2`, `min11`, _) => json4s_spark2
-        case Seq(`maj2`, `min12`, _) => json4s_spark3
-        case Seq(`maj2`, `min13`, _) => json4s_spark3
-        case _ => throw new IllegalArgumentException("Only Scala 2.11, 2.12, and 2.13 are currently supported.")
-      }
-    }
-
     val logback = "1.2.3"
 
     val zio = "2.0.19"
@@ -74,8 +55,8 @@ object Dependencies {
     val tapir = "1.9.6"
     val http4sBlazeBackend = "0.23.15"
     val http4sPrometheus = "0.23.6"
-    val playJson = "3.0.1"
-    val sttpPlayJson = "3.9.3"
+    val circeJson = "0.14.7"
+    val sttpCirceJson = "3.9.7"
 
     val awssdk = "2.23.15"
 
@@ -106,27 +87,17 @@ object Dependencies {
     )
   }
 
-  private def jsonSerdeDependencies(scalaVersion: Version): Seq[ModuleID] = {
-    val json4sVersion = Versions.json4s(scalaVersion)
+  private def jsonSerdeDependencies: Seq[ModuleID] = {
 
-    lazy val jacksonModuleScala = "com.fasterxml.jackson.module" %% "jackson-module-scala" % Versions.jacksonModuleScala
-
-    lazy val json4sExt = "org.json4s" %% "json4s-ext" % json4sVersion
-    lazy val json4sCore = "org.json4s" %% "json4s-core" % json4sVersion
-    lazy val json4sJackson = "org.json4s" %% "json4s-jackson" % json4sVersion
-    lazy val json4sNative = "org.json4s" %% "json4s-native" % json4sVersion % Provided
-
-    lazy val circeCore = "io.circe" %% "circe-core" % Versions.circeVersion
-    lazy val circeParser = "io.circe" %% "circe-parser" % Versions.circeVersion
+    // Circe dependencies
+    lazy val circeCore = "io.circe" %% "circe-core" % Versions.circeJson
+    lazy val circeParser = "io.circe" %% "circe-parser" % Versions.circeJson
+    lazy val circeGeneric = "io.circe" %% "circe-generic" % Versions.circeJson
 
     Seq(
-      jacksonModuleScala,
-      json4sExt,
-      json4sCore,
-      json4sJackson,
-      json4sNative,
       circeCore,
       circeParser,
+      circeGeneric,
     )
   }
 
@@ -135,7 +106,6 @@ object Dependencies {
     val tapirOrg = "com.softwaremill.sttp.tapir"
     val http4sOrg = "org.http4s"
     val faDbOrg = "za.co.absa.db.fa-db"
-    val playOrg = "org.playframework"
     val sbtOrg = "com.github.sbt"
     val logbackOrg = "ch.qos.logback"
     val awsSdkOrg = "software.amazon.awssdk"
@@ -163,9 +133,17 @@ object Dependencies {
     lazy val tapirPrometheus = tapirOrg %% "tapir-prometheus-metrics" % Versions.tapir
     lazy val tapirStubServer = tapirOrg %% "tapir-sttp-stub-server" % Versions.tapir % Test
 
-    // json
-    lazy val playJson = playOrg %% "play-json" % Versions.playJson
-    lazy val sttpPlayJson = sttpClient3Org %% "play-json" % Versions.sttpPlayJson % Test
+    lazy val tapirCirce = tapirOrg %% "tapir-json-circe" % Versions.tapir
+    lazy val tapirOpenApiDocs = tapirOrg %% "tapir-openapi-docs" % Versions.tapir
+    lazy val tapirOpenApiCirceYaml = tapirOrg %% "tapir-openapi-circe-yaml" % Versions.tapir
+    lazy val tapirHttp4sServer = tapirOrg %% "tapir-http4s-server" % Versions.tapir
+    lazy val tapirCore = tapirOrg %% "tapir-core" % Versions.tapir
+    lazy val tapirSwaggerUi = tapirOrg %% "tapir-swagger-ui-http4s" % Versions.tapir
+
+    // STTP core and Circe integration
+    lazy val sttpCirce = sttpClient3Org %% "circe" % Versions.sttpCirceJson % Test
+    lazy val sttpCore = sttpClient3Org %% "core" % Versions.sttpCirceJson
+    lazy val clientBackend = sttpClient3Org %% "async-http-client-backend-zio" % Versions.sttpCirceJson
 
     // Fa-db
     lazy val faDbDoobie = faDbOrg %% "doobie" % Versions.fadb
@@ -197,10 +175,11 @@ object Dependencies {
       tapirHttp4sZio,
       tapirSwagger,
       tapirPlayJson,
+      tapirCirce,
       tapirPrometheus,
       tapirStubServer,
-      playJson,
-      sttpPlayJson,
+      sttpCirce,
+      sttpCore,
       awsSecretsManagerSdk,
       zioTest,
       zioTestSbt,
@@ -249,7 +228,7 @@ object Dependencies {
       typeSafeConfig
     ) ++ testDependencies ++
-      jsonSerdeDependencies(scalaVersion)
+      jsonSerdeDependencies
   }
 
   def databaseDependencies: Seq[ModuleID] = {
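With play-json gone, tapir's circe integration (tapir-json-circe) supplies the jsonBody mapping on the server. A minimal endpoint sketch under the codecs derived above — the path and shape are illustrative, not taken from this diff:

```scala
import sttp.tapir._
import sttp.tapir.generic.auto._   // derives tapir Schemas for the DTOs
import sttp.tapir.json.circe._     // jsonBody backed by io.circe codecs
import za.co.absa.atum.model.dto.CheckpointDTO

// Illustrative endpoint: the circe Encoder/Decoder from the DTO companions
// plus a derived Schema are all that jsonBody needs.
val createCheckpoint: PublicEndpoint[CheckpointDTO, Unit, CheckpointDTO, Any] =
  endpoint.post
    .in("createCheckpoint")
    .in(jsonBody[CheckpointDTO])
    .out(jsonBody[CheckpointDTO])
```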
za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} import za.co.absa.db.fadb.exceptions.StatusException import zio._ diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointController.scala index 402b67884..6b547c3cb 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointController.scala @@ -17,7 +17,7 @@ package za.co.absa.atum.server.api.controller import za.co.absa.atum.model.dto.CheckpointDTO -import za.co.absa.atum.server.model.ErrorResponse.ErrorResponse +import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.SingleSuccessResponse import zio.IO import zio.macros.accessible diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointControllerImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointControllerImpl.scala index 69a140d55..8815a89c2 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointControllerImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/CheckpointControllerImpl.scala @@ -18,7 +18,7 @@ package za.co.absa.atum.server.api.controller import za.co.absa.atum.model.dto.CheckpointDTO import za.co.absa.atum.server.api.service.CheckpointService -import za.co.absa.atum.server.model.ErrorResponse.ErrorResponse +import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.SingleSuccessResponse import zio._ diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowController.scala index 20783e9f4..d122a93c2 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowController.scala @@ -17,7 +17,7 @@ package za.co.absa.atum.server.api.controller import za.co.absa.atum.model.dto.{CheckpointDTO, CheckpointQueryDTO} -import za.co.absa.atum.server.model.ErrorResponse.ErrorResponse +import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.MultiSuccessResponse import zio.IO import zio.macros.accessible diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowControllerImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowControllerImpl.scala index 23d12d8d2..8382b212c 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowControllerImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/FlowControllerImpl.scala @@ -18,7 +18,7 @@ package za.co.absa.atum.server.api.controller import za.co.absa.atum.model.dto.{CheckpointDTO, CheckpointQueryDTO} import za.co.absa.atum.server.api.service.FlowService -import za.co.absa.atum.server.model.ErrorResponse.ErrorResponse +import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.MultiSuccessResponse import zio._ diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala index c5e7dc737..2dfc53016 100644 --- 
a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningController.scala @@ -23,7 +23,7 @@ import za.co.absa.atum.model.dto.{ CheckpointQueryDTO, PartitioningSubmitDTO } -import za.co.absa.atum.server.model.ErrorResponse.ErrorResponse +import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} import zio.IO import zio.macros.accessible diff --git a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala index 964d57634..d37cc94a1 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/controller/PartitioningControllerImpl.scala @@ -19,7 +19,7 @@ package za.co.absa.atum.server.api.controller import za.co.absa.atum.model.dto._ import za.co.absa.atum.server.api.exception.ServiceError import za.co.absa.atum.server.api.service.PartitioningService -import za.co.absa.atum.server.model.ErrorResponse.{ErrorResponse, InternalServerErrorResponse} +import za.co.absa.atum.server.model.{ErrorResponse, InternalServerErrorResponse} import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} import zio._ diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/DoobieImplicits.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/DoobieImplicits.scala index a1131e65b..28163b264 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/DoobieImplicits.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/DoobieImplicits.scala @@ -18,20 +18,47 @@ package za.co.absa.atum.server.api.database import cats.Show import cats.data.NonEmptyList -import doobie.{Get, Put} import doobie.postgres.implicits._ +import doobie.{Get, Put} +import io.circe.{Json => CirceJson} import org.postgresql.jdbc.PgArray import org.postgresql.util.PGobject +import io.circe.parser._ -import scala.util.{Failure, Success, Try} +import scala.util.Try -object DoobieImplicits { +package object DoobieImplicits { - private implicit val showPGobject: Show[PGobject] = Show.show(_.getValue.take(250)) private implicit val showPgArray: Show[PgArray] = Show.fromToString implicit val getMapWithOptionStringValues: Get[Map[String, Option[String]]] = Get[Map[String, String]] - .tmap(map => map.map { case (k, v) => k -> Option(v) }) + .tmap(map => map.map { case (k, v) => k -> Option(v) }) + + private def circeJsonListToPGJsonArrayString(jsonList: List[CirceJson]): String = { + val arrayElements = jsonList.map { x => + // Convert to compact JSON string and escape inner quotes + val escapedJsonString = x.noSpaces.replace("\"", "\\\"") + // Wrap in double quotes for the array element + s""""$escapedJsonString"""" + } + + arrayElements.mkString("{", ",", "}") + } + + private def pgArrayToListOfCirceJson(pgArray: PgArray): Either[String, List[CirceJson]] = { + Try { + Option(pgArray.getArray) match { + case Some(array: Array[_]) => array.collect { + case str: String => parse(str).toTry.get + case other => parse(other.toString).toTry.get + }.toList + case None => List.empty[CirceJson] + case _ => throw new IllegalArgumentException("Unexpected type encountered.") + } + } + .toEither + .left.map(_.getMessage) + } object Sequence { @@ -42,7 
+69,7 @@ object DoobieImplicits { object Json { - implicit val jsonArrayPutUsingString: Put[List[String]] = { + implicit val jsonArrayPut: Put[List[CirceJson]] = { Put.Advanced .other[PGobject]( NonEmptyList.of("json[]") @@ -50,57 +77,24 @@ object DoobieImplicits { .tcontramap { a => val o = new PGobject o.setType("json[]") - o.setValue(a.mkString("{", ",", "}")) + o.setValue(circeJsonListToPGJsonArrayString(a)) o } } - implicit val jsonArrayGetUsingString: Get[List[String]] = { - def parsePgArray(a: PgArray): Either[String, List[String]] = { - Try(a.getArray.asInstanceOf[List[String]]) match { - case Failure(exception) => Left(exception.toString) - case Success(value) => Right(value) - } - } - + implicit val jsonArrayGet: Get[List[CirceJson]] = { Get.Advanced .other[PgArray]( NonEmptyList.of("json[]") ) - .temap(a => parsePgArray(a)) - } - - implicit val jsonPutUsingString: Put[String] = { - Put.Advanced - .other[PGobject]( - NonEmptyList.of("json") - ) - .tcontramap { a => - val o = new PGobject - o.setType("json") - o.setValue(a) - o - } - } - - implicit val jsonGetUsingString: Get[String] = { - Get.Advanced - .other[PGobject]( - NonEmptyList.of("json") - ) - .temap(a => - Try(a.getValue) match { - case Failure(exception) => Left(exception.toString) - case Success(value) => Right(value) - } - ) + .temap(pgArray => pgArrayToListOfCirceJson(pgArray)) } } object Jsonb { - implicit val jsonbArrayPutUsingString: Put[List[String]] = { + implicit val jsonbArrayPut: Put[List[CirceJson]] = { Put.Advanced .other[PGobject]( NonEmptyList.of("jsonb[]") @@ -108,51 +102,11 @@ object DoobieImplicits { .tcontramap { a => val o = new PGobject o.setType("jsonb[]") - o.setValue(a.mkString("{", ",", "}")) + o.setValue(circeJsonListToPGJsonArrayString(a)) o } } - implicit val jsonbArrayGetUsingString: Get[List[String]] = { - def parsePgArray(a: PgArray): Either[String, List[String]] = { - Try(a.getArray.asInstanceOf[List[String]]) match { - case Failure(exception) => Left(exception.toString) - case Success(value) => Right(value) - } - } - - Get.Advanced - .other[PgArray]( - NonEmptyList.of("jsonb[]") - ) - .temap(a => parsePgArray(a)) - } - - implicit val jsonbPutUsingString: Put[String] = { - Put.Advanced - .other[PGobject]( - NonEmptyList.of("jsonb") - ) - .tcontramap { a => - val o = new PGobject - o.setType("jsonb") - o.setValue(a) - o - } - } - - implicit val jsonbGetUsingString: Get[String] = { - Get.Advanced - .other[PGobject]( - NonEmptyList.of("jsonb") - ) - .temap(a => - Try(a.getValue) match { - case Failure(exception) => Left(exception.toString) - case Success(value) => Right(value) - } - ) - } } } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/flows/functions/GetFlowCheckpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/flows/functions/GetFlowCheckpoints.scala index 723a0530f..49718bce4 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/flows/functions/GetFlowCheckpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/flows/functions/GetFlowCheckpoints.scala @@ -19,7 +19,6 @@ package za.co.absa.atum.server.api.database.flows.functions import doobie.Fragment import doobie.implicits.toSqlInterpolator import doobie.util.Read -import play.api.libs.json.Json import za.co.absa.atum.model.dto.CheckpointQueryDTO import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.flows.Flows @@ -33,9 +32,9 @@ import zio.interop.catz._ import 
za.co.absa.atum.server.api.database.DoobieImplicits.Sequence.get import doobie.postgres.implicits._ -import doobie.postgres.circe.jsonb.implicits._ +import doobie.postgres.circe.jsonb.implicits.jsonbPut +import doobie.postgres.circe.json.implicits.jsonGet import io.circe.syntax.EncoderOps -import io.circe.generic.auto._ class GetFlowCheckpoints(implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) extends DoobieMultipleResultFunction[CheckpointQueryDTO, CheckpointFromDB, Task] { @@ -51,14 +50,11 @@ class GetFlowCheckpoints(implicit schema: DBSchema, dbEngine: DoobieEngine[Task] override def sql(values: CheckpointQueryDTO)(implicit read: Read[CheckpointFromDB]): Fragment = { val partitioning = PartitioningForDB.fromSeqPartitionDTO(values.partitioning) - val partitioningNormalized = Json.toJson(partitioning).toString + val partitioningNormalized = partitioning.asJson sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString - partitioningNormalized - }, + $partitioningNormalized, ${values.limit}, ${values.checkpointName} ) AS ${Fragment.const(alias)};""" diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala index 0a0e1babe..c63717839 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreateOrUpdateAdditionalData.scala @@ -19,7 +19,6 @@ package za.co.absa.atum.server.api.database.runs.functions import doobie.Fragment import doobie.implicits.toSqlInterpolator import doobie.util.Read -import play.api.libs.json.Json import za.co.absa.atum.model.dto.AdditionalDataSubmitDTO import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs @@ -30,25 +29,24 @@ import za.co.absa.db.fadb.doobie.{DoobieEngine, StatusWithData} import za.co.absa.db.fadb.status.handling.implementations.StandardStatusHandling import zio._ import zio.interop.catz._ +import io.circe.syntax._ import doobie.postgres.implicits._ +import doobie.postgres.circe.jsonb.implicits.jsonbPut class CreateOrUpdateAdditionalData(implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) - extends DoobieSingleResultFunctionWithStatus[AdditionalDataSubmitDTO, Unit, Task] + extends DoobieSingleResultFunctionWithStatus[AdditionalDataSubmitDTO, Unit, Task] with StandardStatusHandling { override def sql(values: AdditionalDataSubmitDTO)(implicit read: Read[StatusWithData[Unit]]): Fragment = { val partitioning = PartitioningForDB.fromSeqPartitionDTO(values.partitioning) - val partitioningJsonString = Json.toJson(partitioning).toString + val partitioningJson = partitioning.asJson // implicits from Doobie can't handle Map[String, Option[String]] -> HStore, so we converted None to null basically val additionalDataNormalized = values.additionalData.map{ case (k, v) => (k, v.orNull)} sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString - partitioningJsonString - }, + $partitioningJson, $additionalDataNormalized, ${values.author} ) ${Fragment.const(alias)};""" diff --git 
a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreatePartitioningIfNotExists.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreatePartitioningIfNotExists.scala index 7a83a0031..2bbcb59e3 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreatePartitioningIfNotExists.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/CreatePartitioningIfNotExists.scala @@ -16,10 +16,10 @@ package za.co.absa.atum.server.api.database.runs.functions + import doobie.Fragment import doobie.implicits.toSqlInterpolator import doobie.util.Read -import play.api.libs.json.Json import za.co.absa.atum.model.dto.PartitioningSubmitDTO import za.co.absa.atum.server.model.PartitioningForDB import za.co.absa.db.fadb.DBSchema @@ -30,30 +30,27 @@ import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs import zio._ import zio.interop.catz._ +import io.circe.syntax._ + +import doobie.postgres.circe.jsonb.implicits.jsonbPut class CreatePartitioningIfNotExists(implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) - extends DoobieSingleResultFunctionWithStatus[PartitioningSubmitDTO, Unit, Task] + extends DoobieSingleResultFunctionWithStatus[PartitioningSubmitDTO, Unit, Task] with StandardStatusHandling { override def sql(values: PartitioningSubmitDTO)(implicit read: Read[StatusWithData[Unit]]): Fragment = { val partitioning = PartitioningForDB.fromSeqPartitionDTO(values.partitioning) - val partitioningJsonString = Json.toJson(partitioning).toString + val partitioningJson = partitioning.asJson - val parentPartitioningJsonString = values.parentPartitioning.map { parentPartitioning => + val parentPartitioningJson = values.parentPartitioning.map { parentPartitioning => val parentPartitioningForDB = PartitioningForDB.fromSeqPartitionDTO(parentPartitioning) - Json.toJson(parentPartitioningForDB).toString + parentPartitioningForDB.asJson } sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString - partitioningJsonString - }, + $partitioningJson, ${values.authorIfNew}, - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString - parentPartitioningJsonString - } + $parentPartitioningJson ) ${Fragment.const(alias)};""" } } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningAdditionalData.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningAdditionalData.scala index 54c6671ac..a87b8c91e 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningAdditionalData.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningAdditionalData.scala @@ -19,7 +19,6 @@ package za.co.absa.atum.server.api.database.runs.functions import doobie.Fragment import doobie.implicits.toSqlInterpolator import doobie.util.Read -import play.api.libs.json.Json import za.co.absa.atum.model.dto.PartitioningDTO import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs @@ -29,23 +28,23 @@ import za.co.absa.db.fadb.doobie.DoobieFunction.DoobieMultipleResultFunction import za.co.absa.db.fadb.doobie.DoobieEngine import zio.interop.catz.asyncInstance import zio.{Task, URLayer, ZIO, ZLayer} +import 
io.circe.syntax._ + import za.co.absa.atum.server.api.database.DoobieImplicits.getMapWithOptionStringValues +import doobie.postgres.circe.jsonb.implicits.jsonbPut class GetPartitioningAdditionalData (implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) extends DoobieMultipleResultFunction[PartitioningDTO, (String, Option[String]), Task] { - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString override val fieldsToSelect: Seq[String] = Seq("ad_name", "ad_value") override def sql(values: PartitioningDTO)(implicit read: Read[(String, Option[String])]): Fragment = { val partitioning: PartitioningForDB = PartitioningForDB.fromSeqPartitionDTO(values) - val partitioningJsonString = Json.toJson(partitioning).toString + val partitioningJson = partitioning.asJson sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - partitioningJsonString - } + $partitioningJson ) ${Fragment.const(alias)};""" } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningCheckpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningCheckpoints.scala index de626d1c6..7c3587395 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningCheckpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningCheckpoints.scala @@ -19,7 +19,6 @@ package za.co.absa.atum.server.api.database.runs.functions import doobie.Fragment import doobie.implicits.toSqlInterpolator import doobie.util.Read -import play.api.libs.json.Json import za.co.absa.atum.model.dto.CheckpointQueryDTO import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs @@ -29,14 +28,12 @@ import za.co.absa.db.fadb.doobie.DoobieEngine import za.co.absa.db.fadb.doobie.DoobieFunction.DoobieMultipleResultFunction import zio._ import zio.interop.catz._ +import io.circe.syntax._ import za.co.absa.atum.server.api.database.DoobieImplicits.Sequence.get -import doobie.postgres.circe.jsonb.implicits.jsonbGet import doobie.postgres.implicits._ -import doobie.postgres.circe.jsonb.implicits._ -import io.circe.syntax.EncoderOps -import io.circe.generic.auto._ - +import doobie.postgres.circe.jsonb.implicits.jsonbPut +import doobie.postgres.circe.json.implicits.jsonGet class GetPartitioningCheckpoints (implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) extends DoobieMultipleResultFunction[CheckpointQueryDTO, CheckpointFromDB, Task] { @@ -55,14 +52,11 @@ class GetPartitioningCheckpoints (implicit schema: DBSchema, dbEngine: DoobieEng override def sql(values: CheckpointQueryDTO)(implicit read: Read[CheckpointFromDB]): Fragment = { val partitioning = PartitioningForDB.fromSeqPartitionDTO(values.partitioning) - val partitioningNormalized = Json.toJson(partitioning).toString + val partitioningNormalized = partitioning.asJson sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString - partitioningNormalized - }, + $partitioningNormalized, ${values.limit}, ${values.checkpointName} ) AS ${Fragment.const(alias)};""" diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningMeasures.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningMeasures.scala index 023a38c20..9c7c72029 100644 --- 
a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningMeasures.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/GetPartitioningMeasures.scala @@ -19,7 +19,6 @@ package za.co.absa.atum.server.api.database.runs.functions import doobie.Fragment import doobie.implicits.toSqlInterpolator import doobie.util.Read -import play.api.libs.json.Json import za.co.absa.atum.model.dto.{MeasureDTO, PartitioningDTO} import za.co.absa.atum.server.model.PartitioningForDB import za.co.absa.db.fadb.DBSchema @@ -29,23 +28,23 @@ import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs import zio._ import zio.interop.catz._ +import io.circe.syntax._ + import za.co.absa.atum.server.api.database.DoobieImplicits.Sequence.get +import doobie.postgres.circe.jsonb.implicits.jsonbPut class GetPartitioningMeasures (implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) extends DoobieMultipleResultFunction[PartitioningDTO, MeasureDTO, Task] { - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString override val fieldsToSelect: Seq[String] = Seq("measure_name", "measured_columns") override def sql(values: PartitioningDTO)(implicit read: Read[MeasureDTO]): Fragment = { val partitioning = PartitioningForDB.fromSeqPartitionDTO(values) - val partitioningJsonString = Json.toJson(partitioning).toString + val partitioningJson = partitioning.asJson sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - partitioningJsonString - } + $partitioningJson ) ${Fragment.const(alias)};""" } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpoint.scala b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpoint.scala index 45acefad1..fbd829622 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpoint.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpoint.scala @@ -17,7 +17,7 @@ package za.co.absa.atum.server.api.database.runs.functions import doobie.Fragment -import doobie.implicits._ +import doobie.implicits.toSqlInterpolator import doobie.util.Read import za.co.absa.atum.model.dto.CheckpointDTO import za.co.absa.atum.server.model.PartitioningForDB @@ -29,9 +29,12 @@ import za.co.absa.atum.server.api.database.PostgresDatabaseProvider import za.co.absa.atum.server.api.database.runs.Runs import zio._ import zio.interop.catz._ -import play.api.libs.json.Json -import za.co.absa.atum.server.model.PlayJsonImplicits.writesMeasurementDTO - +import io.circe.syntax._ +import za.co.absa.atum.model.dto.MeasureResultDTO._ +import za.co.absa.atum.server.api.database.DoobieImplicits.Sequence.get +import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbArrayPut +import doobie.postgres.circe.jsonb.implicits.jsonbGet +import doobie.postgres.circe.jsonb.implicits.jsonbPut import doobie.postgres.implicits._ class WriteCheckpoint(implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) @@ -40,26 +43,21 @@ class WriteCheckpoint(implicit schema: DBSchema, dbEngine: DoobieEngine[Task]) override def sql(values: CheckpointDTO)(implicit read: Read[StatusWithData[Unit]]): Fragment = { val partitioning = PartitioningForDB.fromSeqPartitionDTO(values.partitioning) - val partitioningNormalized = Json.toJson(partitioning).toString + val partitioningNormalized = partitioning.asJson + // Measurements are passed as circe Json values; the Jsonb.jsonbArrayPut instance handles the jsonb[] encoding and escaping val measurementsNormalized = { - values.measurements.map(x => s"\"${Json.toJson(x).toString.replaceAll("\"", "\\\\\"")}\"") + values.measurements.toList.map(_.asJson) } sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}( - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString - partitioningNormalized - }, + $partitioningNormalized, ${values.id}, ${values.name}, ${values.processStartTime}, ${values.processEndTime}, - ${ - import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbArrayPutUsingString - measurementsNormalized.toList - }, + $measurementsNormalized, ${values.measuredByAtumAgent}, ${values.author} ) ${Fragment.const(alias)};"""
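The database-function changes above all share one binding pattern: the value is encoded to a circe Json and interpolated directly into the doobie Fragment, and the Put instance in scope (jsonbPut from doobie-postgres-circe for a single jsonb parameter, the project's Jsonb.jsonbArrayPut for a List[Json] bound as jsonb[]) takes care of serialization. A self-contained sketch of the single-value case, assuming a simplified stand-in for PartitioningForDB (the function name runs.write_checkpoint here is only illustrative):

import doobie.implicits.toSqlInterpolator
import doobie.postgres.circe.jsonb.implicits.jsonbPut // Put[Json] targeting the jsonb type
import io.circe.generic.semiauto.deriveEncoder
import io.circe.syntax._
import io.circe.{Encoder, Json}

object JsonbBindingSketch {
  final case class Partitioning(keys: Seq[String], keysToValues: Map[String, String])
  implicit val encoder: Encoder[Partitioning] = deriveEncoder

  val partitioning: Json = Partitioning(Seq("key1"), Map("key1" -> "val1")).asJson

  // With jsonbPut in scope, the Json value binds as a single jsonb parameter;
  // no hand-built strings and no manual quote escaping, unlike the removed
  // replaceAll("\"", "\\\\\"") approach.
  val fragment = sql"SELECT * FROM runs.write_checkpoint($partitioning);"
}

The jsonb[] case still needs the custom instance kept in DoobieImplicits, because a Postgres array literal quotes each element and escapes the quotes inside it: circeJsonListToPGJsonArrayString renders List(Json.obj("k" -> "v".asJson)) as {"{\"k\":\"v\"}"}.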
diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/BaseEndpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/BaseEndpoints.scala index 4f067fbc5..422f044ef 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/BaseEndpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/BaseEndpoints.scala @@ -18,12 +18,12 @@ package za.co.absa.atum.server.api.http import sttp.model.StatusCode import sttp.tapir.generic.auto.schemaForCaseClass -import sttp.tapir.json.play.jsonBody +import sttp.tapir.json.circe.jsonBody +import za.co.absa.atum.server.model.{BadRequestResponse, ErrorResponse, GeneralErrorResponse, InternalServerErrorResponse} import sttp.tapir.typelevel.MatchType import sttp.tapir.ztapir._ import sttp.tapir.{EndpointOutput, PublicEndpoint} import za.co.absa.atum.server.Constants.Endpoints.{Api, V1, V2} -import za.co.absa.atum.server.model.ErrorResponse._ import java.util.UUID diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala index 339a0dded..f86103204 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/Endpoints.scala @@ -16,21 +16,21 @@ package za.co.absa.atum.server.api.http + import sttp.model.StatusCode import sttp.tapir.generic.auto.schemaForCaseClass -import sttp.tapir.json.play.jsonBody import sttp.tapir.ztapir._ -import sttp.tapir.{PublicEndpoint, endpoint} +import sttp.tapir.json.circe.jsonBody import za.co.absa.atum.model.dto._ import za.co.absa.atum.server.Constants.Endpoints._ -import za.co.absa.atum.server.model.ErrorResponse.ErrorResponse -import za.co.absa.atum.server.model.PlayJsonImplicits._ +import za.co.absa.atum.server.model.ErrorResponse import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} +import sttp.tapir.{PublicEndpoint, endpoint} + trait Endpoints extends BaseEndpoints { - protected val createCheckpointEndpointV1 - : PublicEndpoint[CheckpointDTO, ErrorResponse, CheckpointDTO, Any] = { + protected val createCheckpointEndpointV1: PublicEndpoint[CheckpointDTO, ErrorResponse, CheckpointDTO, Any] = { apiV1.post .in(pathToAPIv1CompatibleFormat(CreateCheckpoint)) .in(jsonBody[CheckpointDTO]) @@ -38,8 +38,7 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[CheckpointDTO]) } - protected val createCheckpointEndpointV2 - : PublicEndpoint[CheckpointDTO, ErrorResponse, SingleSuccessResponse[CheckpointDTO], Any] = { + protected val createCheckpointEndpointV2: PublicEndpoint[CheckpointDTO, ErrorResponse, SingleSuccessResponse[CheckpointDTO], Any] = { 
apiV2.post .in(CreateCheckpoint) .in(jsonBody[CheckpointDTO]) @@ -47,8 +46,7 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[SingleSuccessResponse[CheckpointDTO]]) } - protected val createPartitioningEndpointV1 - : PublicEndpoint[PartitioningSubmitDTO, ErrorResponse, AtumContextDTO, Any] = { + protected val createPartitioningEndpointV1: PublicEndpoint[PartitioningSubmitDTO, ErrorResponse, AtumContextDTO, Any] = { apiV1.post .in(pathToAPIv1CompatibleFormat(CreatePartitioning)) .in(jsonBody[PartitioningSubmitDTO]) @@ -56,8 +54,7 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[AtumContextDTO]) } - protected val createPartitioningEndpointV2 - : PublicEndpoint[PartitioningSubmitDTO, ErrorResponse, SingleSuccessResponse[AtumContextDTO], Any] = { + protected val createPartitioningEndpointV2: PublicEndpoint[PartitioningSubmitDTO, ErrorResponse, SingleSuccessResponse[AtumContextDTO], Any] = { apiV2.post .in(CreatePartitioning) .in(jsonBody[PartitioningSubmitDTO]) @@ -65,8 +62,7 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[SingleSuccessResponse[AtumContextDTO]]) } - protected val createOrUpdateAdditionalDataEndpointV2 - : PublicEndpoint[AdditionalDataSubmitDTO, ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO], Any] = { + protected val createOrUpdateAdditionalDataEndpointV2: PublicEndpoint[AdditionalDataSubmitDTO, ErrorResponse, SingleSuccessResponse[AdditionalDataSubmitDTO], Any] = { apiV2.post .in(CreateOrUpdateAdditionalData) .in(jsonBody[AdditionalDataSubmitDTO]) @@ -74,8 +70,7 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[SingleSuccessResponse[AdditionalDataSubmitDTO]]) } - protected val getPartitioningCheckpointsEndpointV2 - : PublicEndpoint[CheckpointQueryDTO, ErrorResponse, MultiSuccessResponse[CheckpointDTO], Any] = { + protected val getPartitioningCheckpointsEndpointV2: PublicEndpoint[CheckpointQueryDTO, ErrorResponse, MultiSuccessResponse[CheckpointDTO], Any] = { apiV2.get .in(GetPartitioningCheckpoints) .in(jsonBody[CheckpointQueryDTO]) @@ -83,8 +78,7 @@ trait Endpoints extends BaseEndpoints { .out(jsonBody[MultiSuccessResponse[CheckpointDTO]]) } - protected val getFlowCheckpointsEndpointV2 - : PublicEndpoint[CheckpointQueryDTO, ErrorResponse, MultiSuccessResponse[CheckpointDTO], Any] = { + protected val getFlowCheckpointsEndpointV2: PublicEndpoint[CheckpointQueryDTO, ErrorResponse, MultiSuccessResponse[CheckpointDTO], Any] = { apiV2.post .in(GetFlowCheckpoints) .in(jsonBody[CheckpointQueryDTO]) @@ -98,5 +92,4 @@ trait Endpoints extends BaseEndpoints { protected val healthEndpoint: PublicEndpoint[Unit, Unit, Unit, Any] = endpoint.get.in(Health) - } diff --git a/server/src/main/scala/za/co/absa/atum/server/api/http/ServerOptions.scala b/server/src/main/scala/za/co/absa/atum/server/api/http/ServerOptions.scala index f6bbe79cd..1eefa98c1 100644 --- a/server/src/main/scala/za/co/absa/atum/server/api/http/ServerOptions.scala +++ b/server/src/main/scala/za/co/absa/atum/server/api/http/ServerOptions.scala @@ -19,7 +19,7 @@ package za.co.absa.atum.server.api.http import sttp.monad.MonadError import sttp.tapir.DecodeResult import sttp.tapir.generic.auto.schemaForCaseClass -import sttp.tapir.json.play.jsonBody +import sttp.tapir.json.circe.jsonBody import sttp.tapir.server.http4s.Http4sServerOptions import sttp.tapir.server.interceptor.DecodeFailureContext import sttp.tapir.server.interceptor.decodefailure.DecodeFailureHandler @@ -27,7 +27,7 @@ import sttp.tapir.server.interceptor.decodefailure.DefaultDecodeFailureHandler.r import 
sttp.tapir.server.interceptor.metrics.MetricsRequestInterceptor import sttp.tapir.server.model.ValuedEndpointOutput import sttp.tapir.ztapir.{headers, statusCode} -import za.co.absa.atum.server.model.ErrorResponse.BadRequestResponse +import za.co.absa.atum.server.model.BadRequestResponse import zio.interop.catz._ trait ServerOptions { diff --git a/server/src/main/scala/za/co/absa/atum/server/model/ErrorResponse.scala b/server/src/main/scala/za/co/absa/atum/server/model/ErrorResponse.scala index 174ff2f2a..bf17272b4 100644 --- a/server/src/main/scala/za/co/absa/atum/server/model/ErrorResponse.scala +++ b/server/src/main/scala/za/co/absa/atum/server/model/ErrorResponse.scala @@ -16,37 +16,41 @@ package za.co.absa.atum.server.model -import play.api.libs.json.{Json, Reads, Writes} + +import io.circe._ +import io.circe.generic.semiauto._ import java.util.UUID object ErrorResponse { + implicit val decodeErrorResponse: Decoder[ErrorResponse] = deriveDecoder + implicit val encodeErrorResponse: Encoder[ErrorResponse] = deriveEncoder +} sealed trait ErrorResponse extends ResponseEnvelope { def message: String } - implicit val reads: Reads[ErrorResponse] = Json.reads[ErrorResponse] - implicit val writes: Writes[ErrorResponse] = Json.writes[ErrorResponse] - final case class BadRequestResponse(message: String, requestId: UUID = UUID.randomUUID()) extends ErrorResponse - implicit val readsBadRequestResponse: Reads[BadRequestResponse] = Json.reads[BadRequestResponse] - implicit val writesBadRequestResponse: Writes[BadRequestResponse] = Json.writes[BadRequestResponse] +object BadRequestResponse { + implicit val decodeBadRequestResponse: Decoder[BadRequestResponse] = deriveDecoder + implicit val encodeBadRequestResponse: Encoder[BadRequestResponse] = deriveEncoder +} final case class GeneralErrorResponse(message: String, requestId: UUID = UUID.randomUUID()) extends ErrorResponse - implicit val readsGeneralErrorResponse: Reads[GeneralErrorResponse] = Json.reads[GeneralErrorResponse] - implicit val writesGeneralErrorResponse: Writes[GeneralErrorResponse] = Json.writes[GeneralErrorResponse] +object GeneralErrorResponse { + implicit val decodeGeneralErrorResponse: Decoder[GeneralErrorResponse] = deriveDecoder + implicit val encodeGeneralErrorResponse: Encoder[GeneralErrorResponse] = deriveEncoder +} final case class InternalServerErrorResponse(message: String, requestId: UUID = UUID.randomUUID()) extends ErrorResponse - implicit val readsInternalServerErrorResponse: Reads[InternalServerErrorResponse] = - Json.reads[InternalServerErrorResponse] - implicit val writesInternalServerErrorResponse: Writes[InternalServerErrorResponse] = - Json.writes[InternalServerErrorResponse] - +object InternalServerErrorResponse { + implicit val decodeInternalServerErrorResponse: Decoder[InternalServerErrorResponse] = deriveDecoder + implicit val encodeInternalServerErrorResponse: Encoder[InternalServerErrorResponse] = deriveEncoder } diff --git a/server/src/main/scala/za/co/absa/atum/server/model/PartitioningForDB.scala b/server/src/main/scala/za/co/absa/atum/server/model/PartitioningForDB.scala index 55a349923..4b0132d13 100644 --- a/server/src/main/scala/za/co/absa/atum/server/model/PartitioningForDB.scala +++ b/server/src/main/scala/za/co/absa/atum/server/model/PartitioningForDB.scala @@ -16,7 +16,8 @@ package za.co.absa.atum.server.model -import play.api.libs.json.{Json, Writes} +import io.circe.generic.semiauto._ +import io.circe.{Decoder, Encoder} import za.co.absa.atum.model.dto.PartitioningDTO private[server] case class 
PartitioningForDB private ( @@ -28,12 +29,12 @@ private[server] case class PartitioningForDB private ( object PartitioningForDB { def fromSeqPartitionDTO(partitioning: PartitioningDTO): PartitioningForDB = { - val allKeys = partitioning.map(_.key) - val mapOfKeysAndValues = partitioning.map(p => p.key -> p.value).toMap[String, String] + val allKeys: Seq[String] = partitioning.map(_.key) + val mapOfKeysAndValues: Map[String, String] = partitioning.map(p => p.key -> p.value).toMap PartitioningForDB(keys = allKeys, keysToValues = mapOfKeysAndValues) } - implicit val writes: Writes[PartitioningForDB] = Json.writes - + implicit val encoder: Encoder[PartitioningForDB] = deriveEncoder + implicit val decoder: Decoder[PartitioningForDB] = deriveDecoder } diff --git a/server/src/main/scala/za/co/absa/atum/server/model/PlayJsonImplicits.scala b/server/src/main/scala/za/co/absa/atum/server/model/PlayJsonImplicits.scala deleted file mode 100644 index a4e32fe0f..000000000 --- a/server/src/main/scala/za/co/absa/atum/server/model/PlayJsonImplicits.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2021 ABSA Group Limited - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package za.co.absa.atum.server.model - -import play.api.libs.functional.syntax.toFunctionalBuilderOps -import play.api.libs.json._ -import za.co.absa.atum.model.dto.MeasureResultDTO.{ResultValueType, TypedValue} -import za.co.absa.atum.model.dto._ -import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} - -object PlayJsonImplicits { - - implicit val optionStringReads: Reads[Option[String]] = new Reads[Option[String]] { - def reads(json: JsValue): JsResult[Option[String]] = json match { - case JsNull => JsSuccess(None) - case JsString(s) => JsSuccess(Some(s)) - case _ => JsError("Expected JsString or JsNull") - } - } - - implicit val optionStringWrites: Writes[Option[String]] = new Writes[Option[String]] { - def writes(opt: Option[String]): JsValue = opt match { - case Some(s) => JsString(s) - case None => JsNull - } - } - - implicit val resultValueTypeReads: Reads[ResultValueType] = new Reads[ResultValueType] { - override def reads(json: JsValue): JsResult[ResultValueType] = json match { - case JsString("String") => JsSuccess(ResultValueType.String) - case JsString("Long") => JsSuccess(ResultValueType.Long) - case JsString("BigDecimal") => JsSuccess(ResultValueType.BigDecimal) - case JsString("Double") => JsSuccess(ResultValueType.Double) - case _ => JsError("Invalid ResultValueType") - } - } - - implicit val resultValueTypeWrites: Writes[ResultValueType] = new Writes[ResultValueType] { - def writes(resultValueType: ResultValueType): JsValue = resultValueType match { - case ResultValueType.String => Json.toJson("String") - case ResultValueType.Long => Json.toJson("Long") - case ResultValueType.BigDecimal => Json.toJson("BigDecimal") - case ResultValueType.Double => Json.toJson("Double") - } - } - - implicit val readsMeasureDTO: Reads[MeasureDTO] = Json.reads[MeasureDTO] - 
implicit val writesMeasureDTO: Writes[MeasureDTO] = Json.writes[MeasureDTO] - - implicit val readsTypedValue: Reads[MeasureResultDTO.TypedValue] = Json.reads[MeasureResultDTO.TypedValue] - implicit val writesTypedValue: Writes[MeasureResultDTO.TypedValue] = Json.writes[MeasureResultDTO.TypedValue] - - implicit val readsMeasureResultDTO: Reads[MeasureResultDTO] = { - ((__ \ "mainValue").read[MeasureResultDTO.TypedValue] and - (__ \ "supportValues").readNullable[Map[String, TypedValue]].map(_.getOrElse(Map.empty)) - )(MeasureResultDTO.apply _) - } - - implicit val writesMeasureResultDTO: Writes[MeasureResultDTO] = Json.writes[MeasureResultDTO] - - implicit val readsMeasurementDTO: Reads[MeasurementDTO] = Json.reads[MeasurementDTO] - implicit val writesMeasurementDTO: Writes[MeasurementDTO] = Json.writes[MeasurementDTO] - - implicit val readsPartitionDTO: Reads[PartitionDTO] = Json.reads[PartitionDTO] - implicit val writesPartitionDTO: Writes[PartitionDTO] = Json.writes[PartitionDTO] - - implicit val readsCheckpointDTO: Reads[CheckpointDTO] = Json.reads[CheckpointDTO] - implicit val writesCheckpointDTO: Writes[CheckpointDTO] = Json.writes[CheckpointDTO] - - implicit val readsPartitioningSubmitDTO: Reads[PartitioningSubmitDTO] = Json.reads[PartitioningSubmitDTO] - implicit val writesPartitioningSubmitDTO: Writes[PartitioningSubmitDTO] = Json.writes[PartitioningSubmitDTO] - - implicit val readsStringMap: Reads[Map[String, Option[String]]] = Reads.mapReads[Option[String]] - implicit val writesStringMap: OWrites[MapWrites.Map[String, Option[String]]] = - Writes.genericMapWrites[Option[String], MapWrites.Map] - - implicit val readsAdditionalDataSubmitDTO: Reads[AdditionalDataSubmitDTO] = Json.reads[AdditionalDataSubmitDTO] - implicit val writesAdditionalDataSubmitDTO: Writes[AdditionalDataSubmitDTO] = Json.writes[AdditionalDataSubmitDTO] - - implicit val readsAtumContextDTO: Reads[AtumContextDTO] = Json.reads[AtumContextDTO] - implicit val writesAtumContextDTO: Writes[AtumContextDTO] = Json.writes[AtumContextDTO] - - implicit def formatSingleSuccessResponse[T: Format]: Format[SingleSuccessResponse[T]] = Json.format[SingleSuccessResponse[T]] - implicit def formatMultiSuccessResponse[T: Format]: Format[MultiSuccessResponse[T]] = Json.format[MultiSuccessResponse[T]] - - implicit val readsCheckpointQueryDTO: Reads[CheckpointQueryDTO] = Json.reads[CheckpointQueryDTO] - implicit val writesCheckpointQueryDTO: Writes[CheckpointQueryDTO] = Json.writes[CheckpointQueryDTO] -} diff --git a/server/src/main/scala/za/co/absa/atum/server/model/SuccessResponse.scala b/server/src/main/scala/za/co/absa/atum/server/model/SuccessResponse.scala index 8ab99255d..f3898cc66 100644 --- a/server/src/main/scala/za/co/absa/atum/server/model/SuccessResponse.scala +++ b/server/src/main/scala/za/co/absa/atum/server/model/SuccessResponse.scala @@ -16,6 +16,9 @@ package za.co.absa.atum.server.model +import io.circe._ +import io.circe.generic.semiauto._ + import java.util.UUID object SuccessResponse { @@ -25,7 +28,17 @@ object SuccessResponse { case class SingleSuccessResponse[T](data: T, requestId: UUID = UUID.randomUUID()) extends SuccessResponse + object SingleSuccessResponse { + implicit def encoder[T: Encoder]: Encoder[SingleSuccessResponse[T]] = deriveEncoder + implicit def decoder[T: Decoder]: Decoder[SingleSuccessResponse[T]] = deriveDecoder + } + case class MultiSuccessResponse[T](data: Seq[T], requestId: UUID = UUID.randomUUID()) extends SuccessResponse + object MultiSuccessResponse { + implicit def encoder[T: Encoder]: 
Encoder[MultiSuccessResponse[T]] = deriveEncoder + implicit def decoder[T: Decoder]: Decoder[MultiSuccessResponse[T]] = deriveDecoder + } + } diff --git a/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala b/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala index 2fa568e28..19acbfdc9 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/TestData.scala @@ -23,7 +23,7 @@ import za.co.absa.atum.server.model.CheckpointFromDB import java.time.ZonedDateTime import java.util.UUID import MeasureResultDTO.TypedValue -import MeasureResultDTO.ResultValueType._ +import za.co.absa.atum.model.ResultValueType trait TestData { @@ -73,17 +73,17 @@ trait TestData { val mainValue: TypedValue = TypedValue( value = "123", - valueType = Long + valueType = ResultValueType.LongValue ) val supportValue1: TypedValue = TypedValue( value = "123456789", - valueType = Long + valueType = ResultValueType.LongValue ) val supportValue2: TypedValue = TypedValue( value = "12345.6789", - valueType = BigDecimal + valueType = ResultValueType.BigDecimalValue ) // Measure Result DTO @@ -194,7 +194,7 @@ trait TestData { author = "author", measuredByAtumAgent = true, measureName = measureDTO1.measureName, - measuredColumns = measureDTO1.measuredColumns, + measuredColumns = measureDTO1.measuredColumns.toIndexedSeq, measurementValue = parser .parse( """ @@ -229,7 +229,7 @@ trait TestData { author = "author2", measuredByAtumAgent = true, measureName = measureDTO2.measureName, - measuredColumns = measureDTO2.measuredColumns, + measuredColumns = measureDTO2.measuredColumns.toIndexedSeq, checkpointStartTime = checkpointDTO2.processStartTime, checkpointEndTime = checkpointDTO2.processEndTime diff --git a/server/src/test/scala/za/co/absa/atum/server/api/controller/FlowControllerUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/controller/FlowControllerUnitTests.scala index 98bd3a0f1..be3f586b7 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/controller/FlowControllerUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/controller/FlowControllerUnitTests.scala @@ -20,7 +20,7 @@ import org.mockito.Mockito.{mock, when} import za.co.absa.atum.server.api.TestData import za.co.absa.atum.server.api.exception.ServiceError import za.co.absa.atum.server.api.service.FlowService -import za.co.absa.atum.server.model.ErrorResponse.InternalServerErrorResponse +import za.co.absa.atum.server.model.InternalServerErrorResponse import zio._ import zio.test.Assertion.failsWithA import zio.test._ diff --git a/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala index 340949d8e..7de77eb40 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/controller/PartitioningControllerUnitTests.scala @@ -21,8 +21,8 @@ import za.co.absa.atum.model.dto.CheckpointDTO import za.co.absa.atum.server.api.TestData import za.co.absa.atum.server.api.exception.ServiceError import za.co.absa.atum.server.api.service.PartitioningService -import za.co.absa.atum.server.model.ErrorResponse.InternalServerErrorResponse -import za.co.absa.atum.server.model.SuccessResponse.{MultiSuccessResponse, SingleSuccessResponse} +import za.co.absa.atum.server.model.InternalServerErrorResponse +import 
za.co.absa.atum.server.model.SuccessResponse.SingleSuccessResponse import zio._ import zio.test.Assertion.{equalTo, failsWithA} import zio.test._ diff --git a/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointIntegrationTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointIntegrationTests.scala index bf2d8386d..6c397dd9f 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointIntegrationTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/database/runs/functions/WriteCheckpointIntegrationTests.scala @@ -16,8 +16,9 @@ package za.co.absa.atum.server.api.database.runs.functions -import za.co.absa.atum.model.dto.MeasureResultDTO.{ResultValueType, TypedValue} -import za.co.absa.atum.model.dto._ +import za.co.absa.atum.model.ResultValueType +import za.co.absa.atum.model.dto.{CheckpointDTO, MeasureDTO, MeasureResultDTO, MeasurementDTO, PartitionDTO} +import za.co.absa.atum.model.dto.MeasureResultDTO.TypedValue import za.co.absa.atum.server.ConfigProviderTest import za.co.absa.atum.server.api.TestTransactorProvider import za.co.absa.atum.server.api.database.PostgresDatabaseProvider @@ -35,15 +36,16 @@ object WriteCheckpointIntegrationTests extends ConfigProviderTest { suite("WriteCheckpointSuite")( test("Returns expected Left with DataNotFoundException as related partitioning is not in the database") { + val checkpointDTO = CheckpointDTO( id = UUID.randomUUID(), name = "name", author = "author", - partitioning = Seq(PartitionDTO("key1", "val1"), PartitionDTO("key2", "val2")), + partitioning = Seq(PartitionDTO("key4", "value4")), processStartTime = ZonedDateTime.now(), - processEndTime = None, + processEndTime = Option(ZonedDateTime.now()), measurements = - Set(MeasurementDTO(MeasureDTO("count", Seq("*")), MeasureResultDTO(TypedValue("1", ResultValueType.Long)))) + Set(MeasurementDTO(MeasureDTO("count", Seq("*")), MeasureResultDTO(TypedValue("1", ResultValueType.LongValue)))) ) for { writeCheckpoint <- ZIO.service[WriteCheckpoint] diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/CreateCheckpointEndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/CreateCheckpointEndpointUnitTests.scala index a232eabf0..0621d7ca4 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/http/CreateCheckpointEndpointUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/CreateCheckpointEndpointUnitTests.scala @@ -17,17 +17,16 @@ package za.co.absa.atum.server.api.http import org.mockito.Mockito.{mock, when} -import sttp.client3._ -import sttp.client3.playJson._ import sttp.client3.testing.SttpBackendStub +import sttp.client3.{UriContext, basicRequest} +import sttp.client3.circe._ import sttp.model.StatusCode import sttp.tapir.server.stub.TapirStubInterpreter import sttp.tapir.ztapir.{RIOMonadError, RichZEndpoint} import za.co.absa.atum.model.dto.CheckpointDTO import za.co.absa.atum.server.api.TestData import za.co.absa.atum.server.api.controller.CheckpointController -import za.co.absa.atum.server.model.ErrorResponse.{GeneralErrorResponse, InternalServerErrorResponse} -import za.co.absa.atum.server.model.PlayJsonImplicits._ +import za.co.absa.atum.server.model.{GeneralErrorResponse, InternalServerErrorResponse} import za.co.absa.atum.server.model.SuccessResponse.SingleSuccessResponse import zio._ import zio.test.Assertion.equalTo @@ -46,8 +45,8 @@ object CreateCheckpointEndpointUnitTests extends 
ZIOSpecDefault with Endpoints w private val checkpointControllerMockLayer = ZLayer.succeed(checkpointControllerMock) - private val createCheckpointServerEndpoint = - createCheckpointEndpointV2.zServerLogic(CheckpointController.createCheckpointV2) + private val createCheckpointServerEndpoint = createCheckpointEndpointV2 + .zServerLogic(CheckpointController.createCheckpointV2) def spec: Spec[TestEnvironment with Scope, Any] = { val backendStub = TapirStubInterpreter(SttpBackendStub.apply(new RIOMonadError[CheckpointController])) @@ -92,5 +91,4 @@ object CreateCheckpointEndpointUnitTests extends ZIOSpecDefault with Endpoints w }.provide( checkpointControllerMockLayer ) - } diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/CreatePartitioningEndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/CreatePartitioningEndpointUnitTests.scala index 9f5a5fe4a..cb5cc7c96 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/http/CreatePartitioningEndpointUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/CreatePartitioningEndpointUnitTests.scala @@ -17,17 +17,16 @@ package za.co.absa.atum.server.api.http import org.mockito.Mockito.{mock, when} -import sttp.client3._ -import sttp.client3.playJson._ import sttp.client3.testing.SttpBackendStub +import sttp.client3.{UriContext, basicRequest} +import sttp.client3.circe._ import sttp.model.StatusCode import sttp.tapir.server.stub.TapirStubInterpreter import sttp.tapir.ztapir.{RIOMonadError, RichZEndpoint} import za.co.absa.atum.model.dto.AtumContextDTO import za.co.absa.atum.server.api.TestData import za.co.absa.atum.server.api.controller.PartitioningController -import za.co.absa.atum.server.model.ErrorResponse.{GeneralErrorResponse, InternalServerErrorResponse} -import za.co.absa.atum.server.model.PlayJsonImplicits._ +import za.co.absa.atum.server.model.{GeneralErrorResponse, InternalServerErrorResponse} import za.co.absa.atum.server.model.SuccessResponse.SingleSuccessResponse import zio._ import zio.test.Assertion.equalTo diff --git a/server/src/test/scala/za/co/absa/atum/server/api/http/GetFlowCheckpointsEndpointUnitTests.scala b/server/src/test/scala/za/co/absa/atum/server/api/http/GetFlowCheckpointsEndpointUnitTests.scala index 699ae3a34..7e8514e7c 100644 --- a/server/src/test/scala/za/co/absa/atum/server/api/http/GetFlowCheckpointsEndpointUnitTests.scala +++ b/server/src/test/scala/za/co/absa/atum/server/api/http/GetFlowCheckpointsEndpointUnitTests.scala @@ -17,17 +17,16 @@ package za.co.absa.atum.server.api.http import org.mockito.Mockito.{mock, when} -import sttp.client3._ -import sttp.client3.playJson._ import sttp.client3.testing.SttpBackendStub +import sttp.client3.{UriContext, basicRequest} +import sttp.client3.circe._ import sttp.model.StatusCode import sttp.tapir.server.stub.TapirStubInterpreter import sttp.tapir.ztapir.{RIOMonadError, RichZEndpoint} import za.co.absa.atum.model.dto.CheckpointDTO import za.co.absa.atum.server.api.TestData import za.co.absa.atum.server.api.controller.FlowController -import za.co.absa.atum.server.model.ErrorResponse.{GeneralErrorResponse, InternalServerErrorResponse} -import za.co.absa.atum.server.model.PlayJsonImplicits._ +import za.co.absa.atum.server.model.{GeneralErrorResponse, InternalServerErrorResponse} import za.co.absa.atum.server.model.SuccessResponse.MultiSuccessResponse import zio._ import zio.test.Assertion.equalTo
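The unit tests in this last group show the client-side half of the switch: sttp.client3.playJson is replaced by sttp.client3.circe, so request bodies are encoded through the DTOs' circe codecs and responses are decoded with asJson. A rough sketch of the request shape these stub-backed tests build (the endpoint URI and payload type are placeholders, not taken from the project):

import io.circe.generic.semiauto.{deriveDecoder, deriveEncoder}
import io.circe.{Decoder, Encoder}
import sttp.client3.circe._ // circe-backed BodySerializer and asJson response handler
import sttp.client3.{UriContext, basicRequest}

object SttpCirceSketch {
  final case class PayloadDTO(message: String)
  implicit val enc: Encoder[PayloadDTO] = deriveEncoder
  implicit val dec: Decoder[PayloadDTO] = deriveDecoder

  val request = basicRequest
    .post(uri"http://test.backend/api/v2/createCheckpoint")
    .body(PayloadDTO("ping"))     // serialized by the implicit circe Encoder
    .response(asJson[PayloadDTO]) // Either[ResponseException[String, io.circe.Error], PayloadDTO]
}

When such a request is sent to the TapirStubInterpreter backend, the decoded body can be asserted with equalTo just as the Play-based version did.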