Skip to content

Commit

Permalink
fixing bugs and using circe instead of Json4s dependency
Browse files Browse the repository at this point in the history
  • Loading branch information
TebaleloS committed Jun 25, 2024
1 parent 935cde0 commit 3c2ca00
Show file tree
Hide file tree
Showing 7 changed files with 65 additions and 52 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import sttp.model.Uri
import za.co.absa.atum.agent.exception.AtumAgentException.HttpException
import za.co.absa.atum.model.dto.{AdditionalDataSubmitDTO, AtumContextDTO, CheckpointDTO, PartitioningSubmitDTO}
import za.co.absa.atum.model.utils.SerializationUtils
import io.circe.generic.auto._

class HttpDispatcher(config: Config) extends Dispatcher(config: Config) with Logging {
import HttpDispatcher._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,70 +16,80 @@

package za.co.absa.atum.model.utils

import org.json4s.JsonAST.JString
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization.{write, writePretty}
import org.json4s.{CustomSerializer, Formats, JNull, NoTypeHints, ext}
import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType
import za.co.absa.atum.model.dto.MeasureResultDTO.ResultValueType._

import io.circe.{Decoder, Encoder}
import io.circe.syntax._
import io.circe.parser._
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
import java.util.UUID

object SerializationUtils {

implicit private val formatsJson: Formats =
Serialization.formats(NoTypeHints).withBigDecimal +
ext.UUIDSerializer +
ZonedDateTimeSerializer +
ResultValueTypeSerializer

// TODO "yyyy-MM-dd'T'hh:mm:ss.SSS'Z'" OR TODO "yyyy-MM-dd HH:mm:ss.SSSSSSX"
val timestampFormat: DateTimeFormatter = DateTimeFormatter.ISO_ZONED_DATE_TIME

/** Encodes a ZonedDateTime as an ISO-8601 zoned date-time string. */
implicit val encodeZonedDateTime: Encoder[ZonedDateTime] =
  Encoder.encodeString.contramap[ZonedDateTime](_.format(timestampFormat))

/** Decodes an ISO-8601 zoned date-time string.
  *
  * A malformed value now yields a decoding failure (Left), as the
  * Decoder.emap contract requires; previously ZonedDateTime.parse would
  * throw out of the decoder instead of failing the decode.
  */
implicit val decodeZonedDateTime: Decoder[ZonedDateTime] = Decoder.decodeString.emap { str =>
  scala.util.Try(ZonedDateTime.parse(str, timestampFormat)).toEither.left
    .map(e => s"Invalid ZonedDateTime '$str': ${e.getMessage}")
}

/** Encodes a UUID via its canonical 36-character string form. */
implicit val encodeUUID: Encoder[UUID] =
  Encoder.encodeString.contramap[UUID](_.toString)

/** Decodes a UUID string.
  *
  * A malformed value now yields a decoding failure (Left) instead of
  * letting UUID.fromString throw IllegalArgumentException out of the decoder.
  */
implicit val decodeUUID: Decoder[UUID] = Decoder.decodeString.emap { str =>
  scala.util.Try(UUID.fromString(str)).toEither.left
    .map(e => s"Invalid UUID '$str': ${e.getMessage}")
}

/**
* The method returns arbitrary object as a Json string.
*
* @return A string representing the object in Json format
*/
def asJson[T <: AnyRef](obj: T): String = {
write[T](obj)
// Summon the encoder explicitly and render the JSON without whitespace.
def asJson[T: Encoder](obj: T): String =
  implicitly[Encoder[T]].apply(obj).noSpaces

/**
* The method returns arbitrary object as a pretty Json string.
*
* @return A string representing the object in Json format
*/
def asJsonPretty[T <: AnyRef](obj: T): String = {
writePretty[T](obj)
// Summon the encoder explicitly and render the JSON with 2-space indentation.
def asJsonPretty[T: Encoder](obj: T): String =
  implicitly[Encoder[T]].apply(obj).spaces2

/**
* The method returns arbitrary object parsed from Json string.
*
* @return An object deserialized from the Json string
*/
def fromJson[T <: AnyRef](jsonStr: String)(implicit m: Manifest[T]): T = {
Serialization.read[T](jsonStr)
def fromJson[T: Decoder](jsonStr: String): T = {
  decode[T](jsonStr) match {
    case Right(value) => value
    // Keep the circe error as the exception cause so the original failure
    // detail (parsing vs. decoding, with its JSON cursor history) is not
    // lost; the thrown type remains RuntimeException for caller compatibility.
    case Left(error) => throw new RuntimeException(s"Failed to decode JSON: $error", error)
  }
}

private case object ResultValueTypeSerializer extends CustomSerializer[ResultValueType](format => (
{
case JString(resultValType) => resultValType match {
case "String" => String
case "Long" => Long
case "BigDecimal" => BigDecimal
case "Double" => Double
}
case JNull => null
},
{
case resultValType: ResultValueType => resultValType match {
case String => JString("String")
case Long => JString("Long")
case BigDecimal => JString("BigDecimal")
case Double => JString("Double")
}
}))
// Closed set of value types a measure result may carry.
sealed trait ResultValueType
object ResultValueType {
  case object String extends ResultValueType
  case object Long extends ResultValueType
  case object BigDecimal extends ResultValueType
  case object Double extends ResultValueType

  // Single source of truth for the JSON names of the variants; both codecs
  // are derived from it so encoder and decoder can never drift apart.
  private val valueToName: Map[ResultValueType, String] = Map(
    String -> "String",
    Long -> "Long",
    BigDecimal -> "BigDecimal",
    Double -> "Double"
  )
  private val nameToValue: Map[String, ResultValueType] = valueToName.map(_.swap)

  /** Encodes a ResultValueType as its plain JSON string name. */
  implicit val encodeResultValueType: Encoder[ResultValueType] =
    Encoder.encodeString.contramap(valueToName)

  /** Decodes a JSON string into a ResultValueType; unknown names fail the decode. */
  implicit val decodeResultValueType: Decoder[ResultValueType] =
    Decoder.decodeString.emap { name =>
      nameToValue.get(name).toRight(s"Cannot decode $name as ResultValueType")
    }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

package za.co.absa.atum.model.utils

import io.circe.generic.auto._
import org.scalatest.flatspec.AnyFlatSpecLike
import za.co.absa.atum.model.dto.MeasureResultDTO.{ResultValueType, TypedValue}
import za.co.absa.atum.model.dto._
Expand All @@ -41,7 +42,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
val expectedAdditionalDataJson =
"""
|{"partitioning":[{"key":"key","value":"val"}],
|"additionalData":{"key1":"val1","key2":"val2"},
|"additionalData":{"key1":"val1","key2":"val2","key3":null},
|"author":"testAuthor"}
|""".linearize
val actualAdditionalDataJson = SerializationUtils.asJson(additionalDataDTO)
Expand Down Expand Up @@ -343,7 +344,7 @@ class SerializationUtilsUnitTests extends AnyFlatSpecLike {
authorIfNew = "authorTest"
)

val expectedPartitioningDTOJson = """{"partitioning":[{"key":"key","value":"val"}],"authorIfNew":"authorTest"}"""
val expectedPartitioningDTOJson = """{"partitioning":[{"key":"key","value":"val"}],"parentPartitioning":null,"authorIfNew":"authorTest"}"""
val actualPartitioningDTOJson = SerializationUtils.asJson(partitioningDTO)

assert(actualPartitioningDTOJson == expectedPartitioningDTOJson)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import cats.Show
import cats.data.NonEmptyList
import doobie.{Get, Put}
import doobie.postgres.implicits._
import io.circe.Json
import org.postgresql.jdbc.PgArray
import org.postgresql.util.PGobject

Expand All @@ -42,15 +43,15 @@ object DoobieImplicits {

object Json {

implicit val jsonArrayPutUsingString: Put[List[String]] = {
/** Writes a List[Json] as a PostgreSQL `json[]` value.
  *
  * Each element is rendered compactly, escaped, and wrapped in double quotes
  * to form a valid array literal like {"{...}","{...}"}.
  */
implicit val jsonArrayPut: Put[List[Json]] = {
  Put.Advanced
    .other[PGobject](
      NonEmptyList.of("json[]")
    )
    .tcontramap { jsonList =>
      val o = new PGobject
      o.setType("json[]")
      // Escape backslashes BEFORE quotes, otherwise the escape characters
      // introduced for quotes would themselves be doubled. String.replace is
      // used deliberately: it is a literal replacement, whereas the previous
      // replaceAll("\"", """\"""") was a no-op because `\"` in a regex
      // replacement string denotes a literal quote, not backslash+quote.
      val elements = jsonList.map { json =>
        val escaped = json.noSpaces.replace("\\", "\\\\").replace("\"", "\\\"")
        s""""$escaped""""
      }
      o.setValue(elements.mkString("{", ",", "}"))
      o
    }
}
Expand Down Expand Up @@ -100,15 +101,15 @@ object DoobieImplicits {

object Jsonb {

implicit val jsonbArrayPutUsingString: Put[List[String]] = {
/** Writes a List[Json] as a PostgreSQL `jsonb[]` value.
  *
  * Each element is rendered compactly, escaped, and wrapped in double quotes
  * to form a valid array literal like {"{...}","{...}"}.
  */
implicit val jsonbArrayPut: Put[List[Json]] = {
  Put.Advanced
    .other[PGobject](
      NonEmptyList.of("jsonb[]")
    )
    .tcontramap { jsonList =>
      val o = new PGobject
      o.setType("jsonb[]")
      // Escape backslashes BEFORE quotes, otherwise the escape characters
      // introduced for quotes would themselves be doubled. String.replace is
      // used deliberately: it is a literal replacement, whereas the previous
      // replaceAll("\"", """\"""") was a no-op because `\"` in a regex
      // replacement string denotes a literal quote, not backslash+quote.
      val elements = jsonList.map { json =>
        val escaped = json.noSpaces.replace("\\", "\\\\").replace("\"", "\\\"")
        s""""$escaped""""
      }
      o.setValue(elements.mkString("{", ",", "}"))
      o
    }
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,8 @@ class CreatePartitioningIfNotExists(implicit schema: DBSchema, dbEngine: DoobieE
import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString
partitioningJsonString
},
${values.authorIfNew},
${
${values.authorIfNew},
${
import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbPutUsingString
parentPartitioningJsonString
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
package za.co.absa.atum.server.api.database.runs.functions

import doobie.Fragment
import doobie.implicits._
import doobie.implicits.toSqlInterpolator
import doobie.util.Read
import za.co.absa.atum.model.dto.CheckpointDTO
import za.co.absa.atum.server.model.PartitioningForDB
Expand All @@ -31,13 +31,12 @@ import zio._
import zio.interop.catz._
import io.circe.syntax._
import io.circe.generic.auto._

import za.co.absa.atum.model.dto.MeasureResultDTO._
import za.co.absa.atum.server.model.CirceJsonImplicits._
import za.co.absa.atum.server.api.database.DoobieImplicits.Sequence.get
import doobie.postgres.circe.jsonb.implicits.jsonbGet
import doobie.postgres.implicits._
import doobie.postgres.circe.jsonb.implicits._
import io.circe.Json

class WriteCheckpoint(implicit schema: DBSchema, dbEngine: DoobieEngine[Task])
extends DoobieSingleResultFunctionWithStatus[CheckpointDTO, Unit, Task]
Expand All @@ -50,7 +49,7 @@ class WriteCheckpoint(implicit schema: DBSchema, dbEngine: DoobieEngine[Task])
// List[String] containing json data has to be properly escaped
// It would be safer to use Json data type and derive Put instance
val measurementsNormalized = {
values.measurements.map(x => x.asJson.noSpaces)
values.measurements.toList.map(_.asJson)
}

val sqlDebug = sql"""SELECT ${Fragment.const(selectEntry)} FROM ${Fragment.const(functionName)}(
Expand All @@ -63,8 +62,8 @@ class WriteCheckpoint(implicit schema: DBSchema, dbEngine: DoobieEngine[Task])
${values.processStartTime},
${values.processEndTime},
${
import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbArrayPutUsingString
measurementsNormalized.toList
import za.co.absa.atum.server.api.database.DoobieImplicits.Jsonb.jsonbArrayPut
measurementsNormalized
},
${values.measuredByAtumAgent},
${values.author}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,12 @@ object WriteCheckpointIntegrationTests extends ConfigProviderTest {

suite("WriteCheckpointSuite")(
test("Returns expected Left with DataNotFoundException as related partitioning is not in the database") {

val checkpointDTO = CheckpointDTO(
id = UUID.randomUUID(),
name = "name",
author = "author",
partitioning = Seq(PartitionDTO("key1", "val1"), PartitionDTO("key2", "val2")),
partitioning = Seq(PartitionDTO("key2", "value2")),
processStartTime = ZonedDateTime.now(),
processEndTime = Option(ZonedDateTime.now()),
measurements =
Expand All @@ -53,7 +54,7 @@ object WriteCheckpointIntegrationTests extends ConfigProviderTest {
).provide(
WriteCheckpoint.layer,
PostgresDatabaseProvider.layer,
TestTransactorProvider.layerWithRollback
TestTransactorProvider.layerWithoutRollback
)
}

Expand Down

0 comments on commit 3c2ca00

Please sign in to comment.