diff --git a/build.sbt b/build.sbt
index b68b4d3cd..7a8668684 100644
--- a/build.sbt
+++ b/build.sbt
@@ -207,9 +207,8 @@ lazy val common = (project in file("common"))
   .settings(name := "nj-common")
   .settings(
     libraryDependencies ++= List(
-      "org.apache.commons" % "commons-lang3" % "3.17.0",
-      "io.dropwizard.metrics" % "metrics-core" % metricsV % Provided,
-      "org.typelevel" %% "log4cats-core" % log4catsV % Provided
+      "org.apache.commons" % "commons-lang3" % "3.17.0",
+      "io.dropwizard.metrics" % "metrics-core" % metricsV % Provided
     ) ++ baseLib ++ testLib
   )
diff --git a/common/src/main/scala/com/github/chenharryhua/nanjin/common/NJLogLevel.scala b/common/src/main/scala/com/github/chenharryhua/nanjin/common/NJLogLevel.scala
deleted file mode 100644
index 62bcf5f5b..000000000
--- a/common/src/main/scala/com/github/chenharryhua/nanjin/common/NJLogLevel.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.github.chenharryhua.nanjin.common
-
-import enumeratum.values.{CatsOrderValueEnum, IntCirceEnum, IntEnum, IntEnumEntry}
-import org.typelevel.log4cats.extras.LogLevel
-
-import scala.collection.immutable
-
-sealed abstract class NJLogLevel(override val value: Int, val logLevel: LogLevel)
-    extends IntEnumEntry with Product with Serializable
-
-object NJLogLevel
-    extends CatsOrderValueEnum[Int, NJLogLevel] with IntEnum[NJLogLevel] with IntCirceEnum[NJLogLevel] {
-  override val values: immutable.IndexedSeq[NJLogLevel] = findValues
-
-  case object ALL extends NJLogLevel(1, LogLevel.Trace)
-  case object TRACE extends NJLogLevel(2, LogLevel.Trace)
-  case object DEBUG extends NJLogLevel(3, LogLevel.Debug)
-  case object INFO extends NJLogLevel(4, LogLevel.Info)
-  case object WARN extends NJLogLevel(5, LogLevel.Warn)
-  case object ERROR extends NJLogLevel(6, LogLevel.Error)
-  case object FATAL extends NJLogLevel(7, LogLevel.Error)
-  case object OFF extends NJLogLevel(8, LogLevel.Error)
-}
diff --git a/common/src/test/scala/mtest/common/NJLogLevelTest.scala b/common/src/test/scala/mtest/common/NJLogLevelTest.scala
deleted file mode 100644
index 0f6bf4634..000000000
--- a/common/src/test/scala/mtest/common/NJLogLevelTest.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package mtest.common
-
-import com.github.chenharryhua.nanjin.common.NJLogLevel
-import io.circe.syntax.EncoderOps
-import org.scalatest.funsuite.AnyFunSuite
-import org.typelevel.log4cats.extras.LogLevel
-
-class NJLogLevelTest extends AnyFunSuite {
-
-  test("should be compared: all < off") {
-    import cats.syntax.order.*
-    val all: NJLogLevel = NJLogLevel.ALL
-    val off: NJLogLevel = NJLogLevel.OFF
-    assert(all < off)
-  }
-
-  test("should be compared: info == info") {
-    assert(NJLogLevel.INFO === NJLogLevel.INFO)
-  }
-
-  test("json") {
-    val l1: NJLogLevel = NJLogLevel.ALL
-    val l2: NJLogLevel = NJLogLevel.TRACE
-    val l3: NJLogLevel = NJLogLevel.DEBUG
-    val l4: NJLogLevel = NJLogLevel.INFO
-    val l5: NJLogLevel = NJLogLevel.WARN
-    val l6: NJLogLevel = NJLogLevel.ERROR
-    val l7: NJLogLevel = NJLogLevel.FATAL
-    val l8: NJLogLevel = NJLogLevel.OFF
-
-    assert(l1.logLevel === LogLevel.Trace)
-    assert(l2.logLevel === LogLevel.Trace)
-    assert(l3.asJson.noSpaces === """ 3 """.trim)
-    assert(l4.asJson.noSpaces === """ 4 """.trim)
-    assert(l5.asJson.noSpaces === """ 5 """.trim)
-    assert(l6.asJson.noSpaces === """ 6 """.trim)
-    assert(l7.asJson.noSpaces === """ 7 """.trim)
-    assert(l8.asJson.noSpaces === """ 8 """.trim)
-    assert(l1.productPrefix === "ALL")
-  }
-}
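[review note, not part of the patch] NJLogLevel is deleted outright, together with its test and the now-unused log4cats-core dependency; nothing in this diff replaces it. For downstream code that still needs the old integer-to-LogLevel table, a minimal stand-in would be the mapping below. `logLevelOf` is a hypothetical helper, not something this PR adds; the table itself is copied from the deleted file:

    import org.typelevel.log4cats.extras.LogLevel

    // same table the deleted NJLogLevel used: 1=ALL, 2=TRACE .. 8=OFF
    def logLevelOf(value: Int): LogLevel = value match {
      case 1 | 2 => LogLevel.Trace // ALL, TRACE
      case 3     => LogLevel.Debug
      case 4     => LogLevel.Info
      case 5     => LogLevel.Warn
      case _     => LogLevel.Error // ERROR, FATAL, OFF
    }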
diff --git a/database/src/main/scala/com/github/chenharryhua/nanjin/database/NJHikari.scala b/database/src/main/scala/com/github/chenharryhua/nanjin/database/DBConfig.scala
similarity index 74%
rename from database/src/main/scala/com/github/chenharryhua/nanjin/database/NJHikari.scala
rename to database/src/main/scala/com/github/chenharryhua/nanjin/database/DBConfig.scala
index b3a7f64ba..5880b524a 100644
--- a/database/src/main/scala/com/github/chenharryhua/nanjin/database/NJHikari.scala
+++ b/database/src/main/scala/com/github/chenharryhua/nanjin/database/DBConfig.scala
@@ -12,10 +12,10 @@ import fs2.Stream
  * @param updateOps
  *   set operations apply to the initial config
  */
-sealed abstract class NJHikari(cfg: HikariConfig, updateOps: List[HikariConfig => Unit]) {
+sealed abstract class DBConfig(cfg: HikariConfig, updateOps: List[HikariConfig => Unit]) {
 
-  final def set(f: HikariConfig => Unit): NJHikari =
-    new NJHikari(cfg, f :: updateOps) {}
+  final def set(f: HikariConfig => Unit): DBConfig =
+    new DBConfig(cfg, f :: updateOps) {}
 
   final lazy val hikariConfig: HikariConfig = {
     updateOps.reverse.foreach(_(cfg))
@@ -23,15 +23,15 @@ sealed abstract class NJHikari(cfg: HikariConfig, updateOps: List[HikariConfig =
     cfg
   }
 
-  final def transactorResource[F[_]: Async]: Resource[F, HikariTransactor[F]] =
+  final def transactorR[F[_]: Async]: Resource[F, HikariTransactor[F]] =
     HikariTransactor.fromHikariConfig[F](hikariConfig)
 
-  final def transactorStream[F[_]: Async]: Stream[F, HikariTransactor[F]] =
-    Stream.resource(transactorResource)
+  final def transactorS[F[_]: Async]: Stream[F, HikariTransactor[F]] =
+    Stream.resource(transactorR)
 }
 
-object NJHikari {
-  def apply(db: Postgres): NJHikari = {
+object DBConfig {
+  def apply(db: Postgres): DBConfig = {
     val initConfig: HikariConfig = {
       val cfg = new HikariConfig
       cfg.setDriverClassName("org.postgresql.Driver")
@@ -40,10 +40,10 @@ object NJHikari {
       cfg.setPassword(db.password.value)
       cfg
     }
-    new NJHikari(initConfig, Nil) {}
+    new DBConfig(initConfig, Nil) {}
   }
 
-  def apply(db: Redshift): NJHikari = {
+  def apply(db: Redshift): DBConfig = {
     val initConfig: HikariConfig = {
       val cfg = new HikariConfig
       cfg.setDriverClassName("com.amazon.redshift.jdbc42.Driver")
@@ -54,10 +54,10 @@ object NJHikari {
       cfg.addDataSourceProperty("sslfactory", "com.amazon.redshift.ssl.NonValidatingFactory")
       cfg
     }
-    new NJHikari(initConfig, Nil) {}
+    new DBConfig(initConfig, Nil) {}
   }
 
-  def apply(db: SqlServer): NJHikari = {
+  def apply(db: SqlServer): DBConfig = {
     val initConfig: HikariConfig = {
       val cfg = new HikariConfig
       cfg.setDriverClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver")
@@ -66,6 +66,6 @@ object NJHikari {
       cfg.setPassword(db.password.value)
       cfg
     }
-    new NJHikari(initConfig, Nil) {}
+    new DBConfig(initConfig, Nil) {}
  }
 }
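[review note, not part of the patch] The rename is behaviour-preserving: `set` still prepends to `updateOps` and `hikariConfig` replays the list in reverse, so updates apply in call order and the last write wins. A minimal sketch of the renamed surface, assuming a `postgres: Postgres` value in scope as in the test below:

    import cats.effect.IO
    import com.github.chenharryhua.nanjin.database.DBConfig

    val db: DBConfig = DBConfig(postgres) // postgres: Postgres, assumed in scope
      .set(_.setMaximumPoolSize(4))
      .set(_.setMaximumPoolSize(10)) // applied last, so this one wins

    val viaResource = db.transactorR[IO] // was transactorResource
    val viaStream   = db.transactorS[IO] // was transactorStream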
diff --git a/database/src/test/scala/mtest/database/DoobieMetaTest.scala b/database/src/test/scala/mtest/database/DoobieMetaTest.scala
index db4789bcd..7232c6aba 100644
--- a/database/src/test/scala/mtest/database/DoobieMetaTest.scala
+++ b/database/src/test/scala/mtest/database/DoobieMetaTest.scala
@@ -4,7 +4,7 @@ import cats.effect.IO
 import cats.effect.unsafe.implicits.global
 import cats.implicits.catsSyntaxApplicativeId
 import com.github.chenharryhua.nanjin.common.database.*
-import com.github.chenharryhua.nanjin.database.NJHikari
+import com.github.chenharryhua.nanjin.database.DBConfig
 import doobie.ConnectionIO
 import fs2.Stream
 import org.scalatest.funsuite.AnyFunSuite
@@ -24,7 +24,7 @@ class DoobieMetaTest extends AnyFunSuite with FunSuiteDiscipline with Configurat
   test("setter") {
     val username = Username("postgres")
     val password = Password("postgres")
-    val nj = NJHikari(postgres)
+    val nj = DBConfig(postgres)
       .set(_.setUsername("superceded by last update"))
       .set(_.setUsername(username.value))
       .set(_.setPassword(password.value))
@@ -33,7 +33,7 @@ class DoobieMetaTest extends AnyFunSuite with FunSuiteDiscipline with Configurat
     assert(nj.hikariConfig.getMaximumPoolSize == 10)
 
     val stream: Stream[IO, Int] = for {
-      tnx <- nj.transactorStream[IO]
+      tnx <- nj.transactorS[IO]
       n <- Stream.eval(tnx.trans.apply(42.pure[ConnectionIO]))
     } yield n
diff --git a/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/NJDateTimeRange.scala b/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/DateTimeRange.scala
similarity index 55%
rename from datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/NJDateTimeRange.scala
rename to datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/DateTimeRange.scala
index 545e99029..0538418dc 100644
--- a/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/NJDateTimeRange.scala
+++ b/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/DateTimeRange.scala
@@ -13,9 +13,9 @@ import java.time.*
 import scala.concurrent.duration.FiniteDuration
 
 // lazy range
-@Lenses final case class NJDateTimeRange(
-  private val start: Option[NJDateTimeRange.TimeTypes],
-  private val end: Option[NJDateTimeRange.TimeTypes],
+@Lenses final case class DateTimeRange(
+  private val start: Option[DateTimeRange.TimeTypes],
+  private val end: Option[DateTimeRange.TimeTypes],
   zoneId: ZoneId) {
 
   private object calcDateTime extends Poly1 {
@@ -50,12 +50,12 @@ import scala.concurrent.duration.FiniteDuration
   def dayStrings: List[String] =
     days.map(d => NJTimestamp(d, zoneId).`Year=yyyy/Month=mm/Day=dd`(zoneId))
 
-  def subranges(interval: FiniteDuration): List[NJDateTimeRange] =
+  def subranges(interval: FiniteDuration): List[DateTimeRange] =
     (startTimestamp, endTimestamp).traverseN { (s, e) =>
       s.milliseconds
         .until(e.milliseconds, interval.toMillis)
         .toList
-        .map(a => NJDateTimeRange(zoneId).withStartTime(a).withEndTime(a + interval.toMillis))
+        .map(a => DateTimeRange(zoneId).withStartTime(a).withEndTime(a + interval.toMillis))
     }.flatten
 
   def period: Option[Period] =
@@ -64,64 +64,64 @@ import scala.concurrent.duration.FiniteDuration
   def javaDuration: Option[java.time.Duration] =
     (zonedStartTime, zonedEndTime).mapN((s, e) => java.time.Duration.between(s, e))
 
-  def withZoneId(zoneId: ZoneId): NJDateTimeRange =
-    NJDateTimeRange.zoneId.replace(zoneId)(this)
+  def withZoneId(zoneId: ZoneId): DateTimeRange =
+    DateTimeRange.zoneId.replace(zoneId)(this)
 
-  def withZoneId(zoneId: String): NJDateTimeRange =
-    NJDateTimeRange.zoneId.replace(ZoneId.of(zoneId))(this)
+  def withZoneId(zoneId: String): DateTimeRange =
+    DateTimeRange.zoneId.replace(ZoneId.of(zoneId))(this)
 
   implicit private def coproductPrism[A](implicit
-    evInject: Inject[NJDateTimeRange.TimeTypes, A],
-    evSelector: Selector[NJDateTimeRange.TimeTypes, A]): Prism[NJDateTimeRange.TimeTypes, A] =
-    Prism[NJDateTimeRange.TimeTypes, A](evSelector.apply)(evInject.apply)
+    evInject: Inject[DateTimeRange.TimeTypes, A],
+    evSelector: Selector[DateTimeRange.TimeTypes, A]): Prism[DateTimeRange.TimeTypes, A] =
+    Prism[DateTimeRange.TimeTypes, A](evSelector.apply)(evInject.apply)
 
-  private def setStart[A](a: A)(implicit prism: Prism[NJDateTimeRange.TimeTypes, A]): NJDateTimeRange =
-    NJDateTimeRange.start.replace(Some(prism.reverseGet(a)))(this)
+  private def setStart[A](a: A)(implicit prism: Prism[DateTimeRange.TimeTypes, A]): DateTimeRange =
+    DateTimeRange.start.replace(Some(prism.reverseGet(a)))(this)
-  private def setEnd[A](a: A)(implicit prism: Prism[NJDateTimeRange.TimeTypes, A]): NJDateTimeRange =
-    NJDateTimeRange.end.replace(Some(prism.reverseGet(a)))(this)
+  private def setEnd[A](a: A)(implicit prism: Prism[DateTimeRange.TimeTypes, A]): DateTimeRange =
+    DateTimeRange.end.replace(Some(prism.reverseGet(a)))(this)
 
   // start
-  def withStartTime(ts: LocalTime): NJDateTimeRange = setStart(toLocalDateTime(ts))
-  def withStartTime(ts: LocalDate): NJDateTimeRange = setStart(toLocalDateTime(ts))
-  def withStartTime(ts: LocalDateTime): NJDateTimeRange = setStart(ts)
-  def withStartTime(ts: OffsetDateTime): NJDateTimeRange = setStart(NJTimestamp(ts))
-  def withStartTime(ts: ZonedDateTime): NJDateTimeRange = setStart(NJTimestamp(ts))
-  def withStartTime(ts: Instant): NJDateTimeRange = setStart(NJTimestamp(ts))
-  def withStartTime(ts: Long): NJDateTimeRange = setStart(NJTimestamp(ts))
-  def withStartTime(ts: Timestamp): NJDateTimeRange = setStart(NJTimestamp(ts))
-  def withStartTime(ts: String): NJDateTimeRange = setStart(ts)
+  def withStartTime(ts: LocalTime): DateTimeRange = setStart(toLocalDateTime(ts))
+  def withStartTime(ts: LocalDate): DateTimeRange = setStart(toLocalDateTime(ts))
+  def withStartTime(ts: LocalDateTime): DateTimeRange = setStart(ts)
+  def withStartTime(ts: OffsetDateTime): DateTimeRange = setStart(NJTimestamp(ts))
+  def withStartTime(ts: ZonedDateTime): DateTimeRange = setStart(NJTimestamp(ts))
+  def withStartTime(ts: Instant): DateTimeRange = setStart(NJTimestamp(ts))
+  def withStartTime(ts: Long): DateTimeRange = setStart(NJTimestamp(ts))
+  def withStartTime(ts: Timestamp): DateTimeRange = setStart(NJTimestamp(ts))
+  def withStartTime(ts: String): DateTimeRange = setStart(ts)
 
   // end
-  def withEndTime(ts: LocalTime): NJDateTimeRange = setEnd(toLocalDateTime(ts))
-  def withEndTime(ts: LocalDate): NJDateTimeRange = setEnd(toLocalDateTime(ts))
-  def withEndTime(ts: LocalDateTime): NJDateTimeRange = setEnd(ts)
-  def withEndTime(ts: OffsetDateTime): NJDateTimeRange = setEnd(NJTimestamp(ts))
-  def withEndTime(ts: ZonedDateTime): NJDateTimeRange = setEnd(NJTimestamp(ts))
-  def withEndTime(ts: Instant): NJDateTimeRange = setEnd(NJTimestamp(ts))
-  def withEndTime(ts: Long): NJDateTimeRange = setEnd(NJTimestamp(ts))
-  def withEndTime(ts: Timestamp): NJDateTimeRange = setEnd(NJTimestamp(ts))
-  def withEndTime(ts: String): NJDateTimeRange = setEnd(ts)
-
-  def withNSeconds(seconds: Long): NJDateTimeRange = {
+  def withEndTime(ts: LocalTime): DateTimeRange = setEnd(toLocalDateTime(ts))
+  def withEndTime(ts: LocalDate): DateTimeRange = setEnd(toLocalDateTime(ts))
+  def withEndTime(ts: LocalDateTime): DateTimeRange = setEnd(ts)
+  def withEndTime(ts: OffsetDateTime): DateTimeRange = setEnd(NJTimestamp(ts))
+  def withEndTime(ts: ZonedDateTime): DateTimeRange = setEnd(NJTimestamp(ts))
+  def withEndTime(ts: Instant): DateTimeRange = setEnd(NJTimestamp(ts))
+  def withEndTime(ts: Long): DateTimeRange = setEnd(NJTimestamp(ts))
+  def withEndTime(ts: Timestamp): DateTimeRange = setEnd(NJTimestamp(ts))
+  def withEndTime(ts: String): DateTimeRange = setEnd(ts)
+
+  def withNSeconds(seconds: Long): DateTimeRange = {
     val now = LocalDateTime.now
     withStartTime(now.minusSeconds(seconds)).withEndTime(now)
   }
 
-  def withTimeRange(start: String, end: String): NJDateTimeRange =
+  def withTimeRange(start: String, end: String): DateTimeRange =
     withStartTime(start).withEndTime(end)
 
-  def withOneDay(ts: LocalDate): NJDateTimeRange =
+  def withOneDay(ts: LocalDate): DateTimeRange =
     withStartTime(ts).withEndTime(ts.plusDays(1))
 
-  def withOneDay(ts: String): NJDateTimeRange =
+  def withOneDay(ts: String): DateTimeRange =
     DateTimeParser.localDateParser.parse(ts).map(withOneDay) match {
       case Left(ex)   => throw ex.parseException(ts)
       case Right(day) => day
     }
 
-  def withToday: NJDateTimeRange = withOneDay(LocalDate.now)
-  def withYesterday: NJDateTimeRange = withOneDay(LocalDate.now.minusDays(1))
+  def withToday: DateTimeRange = withOneDay(LocalDate.now)
+  def withYesterday: DateTimeRange = withOneDay(LocalDate.now.minusDays(1))
 
   def isInBetween(ts: Long): Boolean =
     (startTimestamp, endTimestamp) match {
@@ -136,7 +136,7 @@ import scala.concurrent.duration.FiniteDuration
     duration.map(DurationFormatter.defaultFormatter.format).getOrElse("infinite")
 }
 
-object NJDateTimeRange {
+object DateTimeRange {
 
   final type TimeTypes =
     NJTimestamp :+:
@@ -144,8 +144,8 @@ object NJDateTimeRange {
       String :+: // date-time in string, like "03:12"
       CNil
 
-  implicit final val partialOrderNJDateTimeRange: PartialOrder[NJDateTimeRange] & Show[NJDateTimeRange] =
-    new PartialOrder[NJDateTimeRange] with Show[NJDateTimeRange] {
+  implicit final val partialOrderNJDateTimeRange: PartialOrder[DateTimeRange] & Show[DateTimeRange] =
+    new PartialOrder[DateTimeRange] with Show[DateTimeRange] {
 
       private def lessStart(a: Option[NJTimestamp], b: Option[NJTimestamp]): Boolean =
         (a, b) match {
@@ -161,7 +161,7 @@ object NJDateTimeRange {
          case (Some(x), Some(y)) => x > y
        }
 
-      override def partialCompare(x: NJDateTimeRange, y: NJDateTimeRange): Double =
+      override def partialCompare(x: DateTimeRange, y: DateTimeRange): Double =
        (x, y) match {
          case (a, b) if a.endTimestamp === b.endTimestamp && a.startTimestamp === b.startTimestamp =>
            0.0
@@ -174,10 +174,10 @@ object NJDateTimeRange {
          case _ => Double.NaN
        }
 
-      override def show(x: NJDateTimeRange): String = x.toString
+      override def show(x: DateTimeRange): String = x.toString
     }
 
-  def apply(zoneId: ZoneId): NJDateTimeRange = NJDateTimeRange(None, None, zoneId)
+  def apply(zoneId: ZoneId): DateTimeRange = DateTimeRange(None, None, zoneId)
 }
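[review note, not part of the patch] Only the type's name changes; the contract that `subranges` tiles the range with touching boundaries is untouched. A sketch mirroring the tests further down:

    import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
    import com.github.chenharryhua.nanjin.datetime.DateTimeRange
    import scala.concurrent.duration.*

    val range: DateTimeRange =
      DateTimeRange(sydneyTime).withStartTime("2021-01-01").withEndTime("2021-02-01")

    // 31 day-sized pieces; each piece ends exactly where the next one starts
    val pieces: List[DateTimeRange] = range.subranges(24.hours)
    assert(pieces.size == 31)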
diff --git a/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/NJLocalTime.scala b/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/LocalTimeRange.scala
similarity index 58%
rename from datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/NJLocalTime.scala
rename to datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/LocalTimeRange.scala
index 0ecc7f178..5e31c6709 100644
--- a/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/NJLocalTime.scala
+++ b/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/LocalTimeRange.scala
@@ -1,21 +1,10 @@
 package com.github.chenharryhua.nanjin.datetime
 
-import java.time.{Duration as JavaDuration, Instant, LocalTime, ZoneId, ZonedDateTime}
+import java.time.{Instant, LocalTime, ZoneId, ZonedDateTime}
 import scala.concurrent.duration.{Duration, FiniteDuration}
-import scala.jdk.DurationConverters.{JavaDurationOps, ScalaDurationOps}
+import scala.jdk.DurationConverters.ScalaDurationOps
 
-final case class NJLocalTime(value: LocalTime) {
-
-  def distance(other: LocalTime): FiniteDuration = {
-    val dur = JavaDuration.between(value, other)
-    val res = if (dur.isNegative) {
-      dur.plusHours(24)
-    } else dur
-    res.toScala
-  }
-}
-
-final case class NJLocalTimeRange(start: LocalTime, duration: FiniteDuration, zoneId: ZoneId) {
+final case class LocalTimeRange(start: LocalTime, duration: FiniteDuration, zoneId: ZoneId) {
 
   // start time inclusive, end time exclusive
   def inBetween(instant: Instant): Boolean =
diff --git a/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/datetime.scala b/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/datetime.scala
index f105736e6..bb4609665 100644
--- a/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/datetime.scala
+++ b/datetime/src/main/scala/com/github/chenharryhua/nanjin/datetime/datetime.scala
@@ -1,8 +1,9 @@
 package com.github.chenharryhua.nanjin
 
-import java.time.{LocalDate, LocalDateTime, LocalTime}
+import java.time.{Duration as JavaDuration, LocalDate, LocalDateTime, LocalTime}
 import java.util.concurrent.TimeUnit
 import scala.concurrent.duration.{Duration, FiniteDuration}
+import scala.jdk.DurationConverters.JavaDurationOps
 
 package object datetime {
   object instances extends DateTimeInstances
@@ -10,11 +11,11 @@ package object datetime {
   def toLocalDateTime(ts: LocalTime): LocalDateTime = ts.atDate(LocalDate.now)
   def toLocalDateTime(ts: LocalDate): LocalDateTime = ts.atTime(LocalTime.MIDNIGHT)
 
-  final val oneMillisec: FiniteDuration = Duration(1, TimeUnit.MILLISECONDS)
-  final val oneSecond: FiniteDuration = Duration(1, TimeUnit.SECONDS)
-  final val oneMinute: FiniteDuration = Duration(1, TimeUnit.MINUTES)
-  final val oneHour: FiniteDuration = Duration(1, TimeUnit.HOURS)
-  final val oneDay: FiniteDuration = Duration(1, TimeUnit.DAYS)
+  final val oneMillisecond: FiniteDuration = Duration(1, TimeUnit.MILLISECONDS)
+  final val oneSecond: FiniteDuration = Duration(1, TimeUnit.SECONDS)
+  final val oneMinute: FiniteDuration = Duration(1, TimeUnit.MINUTES)
+  final val oneHour: FiniteDuration = Duration(1, TimeUnit.HOURS)
+  final val oneDay: FiniteDuration = Duration(1, TimeUnit.DAYS)
 
   def dayResolution(localDateTime: LocalDateTime): LocalDate = localDateTime.toLocalDate
 
@@ -24,4 +25,11 @@ package object datetime {
 
   def minuteResolution(localDateTime: LocalDateTime): LocalDateTime =
     localDateTime.withSecond(0).withNano(0)
+
+  def distance(value: LocalTime, other: LocalTime): FiniteDuration = {
+    val dur = JavaDuration.between(value, other)
+    val res = if (dur.isNegative) {
+      dur.plusHours(24)
+    } else dur
+    res.toScala
+  }
 }
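[review note, not part of the patch] `distance` moves from a method on the deleted NJLocalTime wrapper to a plain function in the datetime package object; the wrap-at-midnight rule (a negative java.time duration gets 24 hours added) is carried over verbatim. For example:

    import com.github.chenharryhua.nanjin.datetime.distance
    import java.time.LocalTime
    import scala.concurrent.duration.*

    assert(distance(LocalTime.of(18, 0), LocalTime.of(19, 0)) == 1.hour)
    // going backwards wraps around midnight instead of turning negative
    assert(distance(LocalTime.of(18, 0), LocalTime.of(17, 0)) == 23.hours)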
diff --git a/datetime/src/test/scala/mtest/DateTimeParserTest.scala b/datetime/src/test/scala/mtest/DateTimeParserTest.scala
index dafadef11..954e31449 100644
--- a/datetime/src/test/scala/mtest/DateTimeParserTest.scala
+++ b/datetime/src/test/scala/mtest/DateTimeParserTest.scala
@@ -2,15 +2,15 @@ package mtest
 
 import cats.Alternative
 import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
-import com.github.chenharryhua.nanjin.datetime.{DateTimeParser, NJDateTimeRange, NJTimestamp}
+import com.github.chenharryhua.nanjin.datetime.{DateTimeParser, DateTimeRange, NJTimestamp}
 import org.scalatest.funsuite.AnyFunSuite
 
 import java.time.{LocalDate, LocalTime, ZoneId, ZonedDateTime}
 
 class DateTimeParserTest extends AnyFunSuite {
 
-  val zoneId: ZoneId = sydneyTime
-  val range: NJDateTimeRange = NJDateTimeRange(sydneyTime)
+  val zoneId: ZoneId       = sydneyTime
+  val range: DateTimeRange = DateTimeRange(sydneyTime)
 
   test("Local Date") {
     assert(
       range.withStartTime("2020-01-01").startTimestamp.get === NJTimestamp(
diff --git a/datetime/src/test/scala/mtest/DateTimeRangeTest.scala b/datetime/src/test/scala/mtest/DateTimeRangeTest.scala
index dbef57d72..2bf52d077 100644
--- a/datetime/src/test/scala/mtest/DateTimeRangeTest.scala
+++ b/datetime/src/test/scala/mtest/DateTimeRangeTest.scala
@@ -26,14 +26,14 @@ import scala.util.Random
 
 class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configuration {
 
-  implicit val arbiNJDateTimeRange: Arbitrary[NJDateTimeRange] =
+  implicit val arbiNJDateTimeRange: Arbitrary[DateTimeRange] =
     Arbitrary(for {
       date <- genZonedDateTimeWithZone(None)
       inc <- Gen.choose[Long](1, 50 * 365 * 24 * 3600) // 50 years
       d = date.toLocalDateTime
-    } yield NJDateTimeRange(darwinTime).withStartTime(d).withEndTime(d.plusSeconds(inc)))
+    } yield DateTimeRange(darwinTime).withStartTime(d).withEndTime(d.plusSeconds(inc)))
 
-  implicit val cogen: Cogen[NJDateTimeRange] =
+  implicit val cogen: Cogen[DateTimeRange] =
     Cogen(m => m.startTimestamp.map(_.milliseconds).getOrElse(0))
 
   implicit val arbParser: Arbitrary[DateTimeParser[Instant]] = Arbitrary(
@@ -58,8 +58,8 @@ class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configu
       .function1[Instant, Instant](genZonedDateTime.map(_.toInstant))
       .map(f => DateTimeParser.alternativeDateTimeParser.pure(f)))
 
-  checkAll("NJDateTimeRange-UpperBounded", PartialOrderTests[NJDateTimeRange].partialOrder)
-  checkAll("NJDateTimeRange-PartialOrder", PartialOrderTests[NJDateTimeRange].partialOrder)
+  checkAll("NJDateTimeRange-UpperBounded", PartialOrderTests[DateTimeRange].partialOrder)
+  checkAll("NJDateTimeRange-PartialOrder", PartialOrderTests[DateTimeRange].partialOrder)
   checkAll("NJTimestamp", AlternativeTests[DateTimeParser].alternative[Instant, Instant, Instant])
 
   test("order of applying time data does not matter") {
@@ -67,7 +67,7 @@ class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configu
     val startTime = LocalDateTime.of(2012, 10, 26, 18, 0, 0)
     val endTime = LocalDateTime.of(2012, 10, 26, 23, 0, 0)
 
-    val param = NJDateTimeRange(sydneyTime)
+    val param = DateTimeRange(sydneyTime)
 
     val a = param.withEndTime(endTime).withZoneId(zoneId).withStartTime(startTime)
     val b = param.withStartTime(startTime).withZoneId(zoneId).withEndTime(endTime)
@@ -87,7 +87,7 @@ class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configu
     val d2 = LocalDate.of(2012, 10, 27)
     val d3 = LocalDate.of(2012, 10, 28)
 
-    val dtr = NJDateTimeRange(beijingTime).withStartTime(d1).withEndTime("2012-10-28")
+    val dtr = DateTimeRange(beijingTime).withStartTime(d1).withEndTime("2012-10-28")
 
     assert(dtr.days.eqv(List(d1, d2)))
 
@@ -95,7 +95,7 @@ class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configu
   }
 
   test("infinite range should return empty list") {
-    assert(NJDateTimeRange(cairoTime).days.isEmpty)
+    assert(DateTimeRange(cairoTime).days.isEmpty)
   }
 
   test("days of same day should return empty list") {
@@ -103,13 +103,13 @@ class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configu
     val dt4 = LocalDateTime.of(d3, LocalTime.of(10, 1, 1))
     val dt5 = LocalDateTime.of(d3, LocalTime.of(10, 1, 2))
 
-    val sameDay = NJDateTimeRange(newyorkTime).withStartTime(dt4).withEndTime(dt5)
+    val sameDay = DateTimeRange(newyorkTime).withStartTime(dt4).withEndTime(dt5)
     assert(sameDay.days.isEmpty)
   }
 
   test("days") {
     val dr =
-      NJDateTimeRange(sydneyTime)
+      DateTimeRange(sydneyTime)
         .withStartTime("2020-12-20T23:00:00+11:00")
         .withEndTime("2020-12-29T01:00:00+11:00")
 
@@ -130,7 +130,7 @@ class DateTimeRangeTest extends AnyFunSuite with FunSuiteDiscipline with Configu
   }
 
   test("fluent api") {
-    val dr = NJDateTimeRange(sydneyTime)
+    val dr = DateTimeRange(sydneyTime)
       .withOneDay(LocalDate.now())
       .withOneDay(LocalDate.now().toString)
       .withToday
@@ -186,7 +186,7 @@
   }
 
   test("subranges") {
-    val dr = NJDateTimeRange(sydneyTime).withStartTime("2021-01-01").withEndTime("2021-02-01")
+    val dr = DateTimeRange(sydneyTime).withStartTime("2021-01-01").withEndTime("2021-02-01")
     val sr = dr.subranges(24.hours)
     assert(sr.size == 31)
     assert(sr == dr.subranges(1.day))
@@ -194,7 +194,7 @@
     assert(sr(rd).endTimestamp == sr(rd + 1).startTimestamp)
   }
 
   test("subranges - irregular") {
-    val dr = NJDateTimeRange(sydneyTime).withStartTime("2021-01-01").withEndTime("2021-02-01T08:00")
+    val dr = DateTimeRange(sydneyTime).withStartTime("2021-01-01").withEndTime("2021-02-01T08:00")
     val sr = dr.subranges(12.hours)
     assert(sr.size == 63)
     sr.sliding(2).toList.map {
diff --git a/datetime/src/test/scala/mtest/NJLocalTimeTest.scala b/datetime/src/test/scala/mtest/NJLocalTimeTest.scala
index edb8092ce..6356add3a 100644
--- a/datetime/src/test/scala/mtest/NJLocalTimeTest.scala
+++ b/datetime/src/test/scala/mtest/NJLocalTimeTest.scala
@@ -1,7 +1,7 @@
 package mtest
 
 import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
-import com.github.chenharryhua.nanjin.datetime.{NJLocalTime, NJLocalTimeRange}
+import com.github.chenharryhua.nanjin.datetime.{distance, LocalTimeRange}
 import org.scalatest.funsuite.AnyFunSuite
 
 import java.time.{LocalDateTime, LocalTime}
@@ -10,18 +10,18 @@ import scala.concurrent.duration.*
 
 class NJLocalTimeTest extends AnyFunSuite {
   test("local time distance") {
-    val base = NJLocalTime(LocalTime.of(18, 0))
+    val base = LocalTime.of(18, 0)
 
     val localTime1 = LocalTime.of(19, 0)
     val localTime2 = LocalTime.of(17, 0)
     val localTime3 = LocalTime.of(18, 0)
     val localTime4 = LocalTime.of(17, 59, 59)
 
-    assert(base.distance(localTime1) == FiniteDuration(1, TimeUnit.HOURS))
-    assert(base.distance(localTime2) == FiniteDuration(23, TimeUnit.HOURS))
-    assert(base.distance(localTime3) == FiniteDuration(0, TimeUnit.HOURS))
-    assert(base.distance(localTime4) == FiniteDuration(24, TimeUnit.HOURS).minus(1.second))
+    assert(distance(base, localTime1) == FiniteDuration(1, TimeUnit.HOURS))
+    assert(distance(base, localTime2) == FiniteDuration(23, TimeUnit.HOURS))
+    assert(distance(base, localTime3) == FiniteDuration(0, TimeUnit.HOURS))
+    assert(distance(base, localTime4) == FiniteDuration(24, TimeUnit.HOURS).minus(1.second))
   }
 
   test("local time range - do not cross midnight") {
-    val ltr = NJLocalTimeRange(LocalTime.of(8, 0), FiniteDuration(8, TimeUnit.HOURS), sydneyTime)
+    val ltr = LocalTimeRange(LocalTime.of(8, 0), FiniteDuration(8, TimeUnit.HOURS), sydneyTime)
     val d1 = LocalDateTime.of(2012, 10, 22, 8, 0, 0).atZone(sydneyTime).toInstant
     val d2 = LocalDateTime.of(2015, 7, 25, 16, 0, 0).atZone(sydneyTime).toInstant
     assert(ltr.inBetween(d1))
@@ -35,7 +35,7 @@ class NJLocalTimeTest extends AnyFunSuite {
     assert(!ltr.inBetween(d5))
   }
   test("local time range - cross midnight") {
-    val ltr = NJLocalTimeRange(LocalTime.of(22, 0), FiniteDuration(8, TimeUnit.HOURS), sydneyTime)
+    val ltr = LocalTimeRange(LocalTime.of(22, 0), FiniteDuration(8, TimeUnit.HOURS), sydneyTime)
     val d1 = LocalDateTime.of(2012, 1, 1, 22, 0, 0).atZone(sydneyTime).toInstant
     val d2 = LocalDateTime.of(2013, 2, 2, 6, 0, 0).atZone(sydneyTime).toInstant
     assert(ltr.inBetween(d1))
@@ -51,7 +51,7 @@ class NJLocalTimeTest extends AnyFunSuite {
     assert(!ltr.inBetween(d6))
   }
start time match") { - val ltr = NJLocalTimeRange(LocalTime.of(22, 0), FiniteDuration(2, TimeUnit.HOURS), sydneyTime) + val ltr = LocalTimeRange(LocalTime.of(22, 0), FiniteDuration(2, TimeUnit.HOURS), sydneyTime) val d1 = LocalDateTime.of(2012, 1, 1, 22, 0, 0).atZone(sydneyTime).toInstant val d2 = LocalDateTime.of(2013, 2, 2, 0, 0, 0).atZone(sydneyTime).toInstant assert(ltr.inBetween(d1)) @@ -64,13 +64,13 @@ class NJLocalTimeTest extends AnyFunSuite { } test("duration <= 0") { - val ltr = NJLocalTimeRange(LocalTime.of(22, 0), FiniteDuration(-2, TimeUnit.HOURS), sydneyTime) + val ltr = LocalTimeRange(LocalTime.of(22, 0), FiniteDuration(-2, TimeUnit.HOURS), sydneyTime) val d1 = LocalDateTime.of(2012, 1, 1, 21, 0, 0).atZone(sydneyTime).toInstant assert(!ltr.inBetween(d1)) } test("duration >= 24 hours") { - val ltr = NJLocalTimeRange(LocalTime.of(22, 0), FiniteDuration(24, TimeUnit.HOURS), sydneyTime) + val ltr = LocalTimeRange(LocalTime.of(22, 0), FiniteDuration(24, TimeUnit.HOURS), sydneyTime) val d1 = LocalDateTime.of(2012, 1, 1, 22, 0, 0).atZone(sydneyTime).toInstant assert(ltr.inBetween(d1)) } diff --git a/example/src/main/scala/example/aws_task_template.scala b/example/src/main/scala/example/aws_task_template.scala index b02d63c4d..133f10475 100644 --- a/example/src/main/scala/example/aws_task_template.scala +++ b/example/src/main/scala/example/aws_task_template.scala @@ -7,7 +7,7 @@ import com.github.chenharryhua.nanjin.common.HostName import com.github.chenharryhua.nanjin.common.chrono.Policy.* import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime import com.github.chenharryhua.nanjin.guard.TaskGuard -import com.github.chenharryhua.nanjin.guard.event.NJEvent +import com.github.chenharryhua.nanjin.guard.event.Event import com.github.chenharryhua.nanjin.guard.observers.console import fs2.Stream import io.circe.Json @@ -31,13 +31,13 @@ object aws_task_template { .withRestartThreshold(5.hours) .addBrief(ecs.container_metadata[IO])) - private val service1: Stream[IO, NJEvent] = task + private val service1: Stream[IO, Event] = task .service("s1") .updateConfig(_.addBrief(Json.obj("a" -> 1.asJson))) .updateConfig(_.withHttpServer(_.withPort(port"1026"))) .eventStream(_ => IO.never) - private val service2: Stream[IO, NJEvent] = task + private val service2: Stream[IO, Event] = task .service("s2") .updateConfig(_.addBrief(Json.obj("b" -> 2.asJson))) .updateConfig(_.withHttpServer(_.withPort(port"1027"))) diff --git a/example/src/main/scala/example/kafka_spark.scala b/example/src/main/scala/example/kafka_spark.scala index a60dee433..4d475101c 100644 --- a/example/src/main/scala/example/kafka_spark.scala +++ b/example/src/main/scala/example/kafka_spark.scala @@ -3,7 +3,7 @@ package example import cats.effect.IO import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime import com.github.chenharryhua.nanjin.common.kafka.TopicName -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.spark.{SparKafkaContext, SparkSessionExt, SparkSettings} import eu.timepit.refined.auto.* import fs2.kafka.Acks @@ -21,7 +21,7 @@ object kafka_spark { sparKafka.dump(topic, path) // batch dump a kafka topic with date range - private val dateRange = NJDateTimeRange(sydneyTime).withYesterday + private val dateRange = DateTimeRange(sydneyTime).withYesterday sparKafka.dump(topic, path, dateRange) // load saved data into kafka diff --git 
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/action/Herald.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/action/Herald.scala
index 0268920fb..3d796f0a2 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/action/Herald.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/action/Herald.scala
@@ -4,8 +4,8 @@ import cats.effect.kernel.Sync
 import cats.effect.std.Console
 import cats.syntax.all.*
 import com.github.chenharryhua.nanjin.guard.config.{AlarmLevel, ServiceParams}
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServiceMessage
-import com.github.chenharryhua.nanjin.guard.event.{NJError, NJEvent}
+import com.github.chenharryhua.nanjin.guard.event.Event.ServiceMessage
+import com.github.chenharryhua.nanjin.guard.event.{Error, Event}
 import com.github.chenharryhua.nanjin.guard.translator.{jsonHelper, textHelper, ColorScheme}
 import fs2.concurrent.Channel
 import io.circe.Encoder
@@ -36,14 +36,14 @@ object Herald {
 
   private[guard] class Impl[F[_]](
     serviceParams: ServiceParams,
-    channel: Channel[F, NJEvent]
+    channel: Channel[F, Event]
   )(implicit F: Sync[F])
       extends Herald[F] {
 
     private def toServiceMessage[S: Encoder](
       msg: S,
       level: AlarmLevel,
-      error: Option[NJError]): F[ServiceMessage] =
+      error: Option[Error]): F[ServiceMessage] =
       serviceParams.zonedNow.map(ts =>
         ServiceMessage(
           serviceParams = serviceParams,
@@ -52,7 +52,7 @@ object Herald {
           error = error,
           message = Encoder[S].apply(msg)))
 
-    private def alarm[S: Encoder](msg: S, level: AlarmLevel, error: Option[NJError]): F[Unit] =
+    private def alarm[S: Encoder](msg: S, level: AlarmLevel, error: Option[Error]): F[Unit] =
       toServiceMessage(msg, level, error).flatMap(channel.send).void
 
     override def error[S: Encoder](msg: S): F[Unit] = alarm(msg, AlarmLevel.Error, None)
@@ -61,9 +61,9 @@ object Herald {
     override def done[S: Encoder](msg: S): F[Unit] = alarm(msg, AlarmLevel.Done, None)
 
     override def error[S: Encoder](ex: Throwable)(msg: S): F[Unit] =
-      alarm(msg, AlarmLevel.Error, Some(NJError(ex)))
+      alarm(msg, AlarmLevel.Error, Some(Error(ex)))
     override def warn[S: Encoder](ex: Throwable)(msg: S): F[Unit] =
-      alarm(msg, AlarmLevel.Warn, Some(NJError(ex)))
+      alarm(msg, AlarmLevel.Warn, Some(Error(ex)))
 
     // console
 
@@ -85,8 +85,8 @@ object Herald {
       toServiceMessage(msg, AlarmLevel.Done, None).flatMap(m => cns.println(toText(m)))
 
     override def consoleError[S: Encoder](ex: Throwable)(msg: S)(implicit cns: Console[F]): F[Unit] =
-      toServiceMessage(msg, AlarmLevel.Error, Some(NJError(ex))).flatMap(m => cns.println(toText(m)))
+      toServiceMessage(msg, AlarmLevel.Error, Some(Error(ex))).flatMap(m => cns.println(toText(m)))
 
     override def consoleWarn[S: Encoder](ex: Throwable)(msg: S)(implicit cns: Console[F]): F[Unit] =
-      toServiceMessage(msg, AlarmLevel.Warn, Some(NJError(ex))).flatMap(m => cns.println(toText(m)))
+      toServiceMessage(msg, AlarmLevel.Warn, Some(Error(ex))).flatMap(m => cns.println(toText(m)))
   }
 }
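[review note, not part of the patch] The herald API shape is unchanged: any payload with a circe `Encoder` can be broadcast, and the throwable overloads attach the renamed `Error` (root-cause message plus cleaned stack, see data.scala below). A sketch, assuming a `herald: Herald[IO]` obtained from the agent:

    import cats.effect.IO
    import cats.syntax.all.*
    import io.circe.generic.auto.*

    final case class Payload(job: String, rows: Long)

    val announce: IO[Unit] =
      herald.done(Payload("nightly-load", 42L)) *>
        herald.error(new IllegalStateException("boom"))(Payload("nightly-load", 0L))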
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/NJEvent.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/Event.scala
similarity index 85%
rename from guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/NJEvent.scala
rename to guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/Event.scala
index 76bb3052e..c9f87ab99 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/NJEvent.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/Event.scala
@@ -8,20 +8,20 @@ import io.circe.generic.JsonCodec
 import java.time.{Duration, ZonedDateTime}
 
 @JsonCodec
-sealed trait NJEvent extends Product with Serializable {
+sealed trait Event extends Product with Serializable {
   def timestamp: ZonedDateTime // event timestamp - when the event occurs
   def serviceParams: ServiceParams
 
   final def upTime: Duration = serviceParams.upTime(timestamp)
 }
 
-object NJEvent {
+object Event {
 
-  final case class ServiceStart(serviceParams: ServiceParams, tick: Tick) extends NJEvent {
+  final case class ServiceStart(serviceParams: ServiceParams, tick: Tick) extends Event {
     val timestamp: ZonedDateTime = tick.zonedWakeup
   }
 
-  final case class ServicePanic(serviceParams: ServiceParams, tick: Tick, error: NJError) extends NJEvent {
+  final case class ServicePanic(serviceParams: ServiceParams, tick: Tick, error: Error) extends Event {
     val timestamp: ZonedDateTime = tick.zonedAcquire
   }
 
@@ -29,17 +29,17 @@ object NJEvent {
     serviceParams: ServiceParams,
     timestamp: ZonedDateTime,
     cause: ServiceStopCause)
-      extends NJEvent
+      extends Event
 
   final case class ServiceMessage(
     serviceParams: ServiceParams,
     timestamp: ZonedDateTime,
     level: AlarmLevel,
-    error: Option[NJError],
+    error: Option[Error],
     message: Json
-  ) extends NJEvent
+  ) extends Event
 
-  sealed trait MetricEvent extends NJEvent {
+  sealed trait MetricEvent extends Event {
     def index: MetricIndex
     def snapshot: MetricSnapshot
     def took: Duration // time took to retrieve snapshot
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/data.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/data.scala
index 46ec15e92..6ee2a4fbd 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/data.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/data.scala
@@ -11,11 +11,11 @@ import java.time.ZonedDateTime
 import scala.jdk.CollectionConverters.ListHasAsScala
 
 @JsonCodec
-final case class NJError private (message: String, stack: List[String])
+final case class Error private (message: String, stack: List[String])
 
-object NJError {
-  def apply(ex: Throwable): NJError =
-    NJError(
+object Error {
+  def apply(ex: Throwable): Error =
+    Error(
       ExceptionUtils.getRootCauseMessage(ex),
       ExceptionUtils.getRootCauseStackTraceList(ex).asScala.toList.map(_.replace("\t", "")))
 }
@@ -40,7 +40,7 @@ object ServiceStopCause {
   case object Successfully extends ServiceStopCause(0)
   case object Maintenance extends ServiceStopCause(1)
   case object ByCancellation extends ServiceStopCause(2)
-  final case class ByException(error: NJError) extends ServiceStopCause(3)
+  final case class ByException(error: Error) extends ServiceStopCause(3)
 
   private val SUCCESSFULLY: String = "Successfully"
   private val BY_CANCELLATION: String = "ByCancellation"
@@ -62,6 +62,6 @@ object ServiceStopCause {
         case MAINTENANCE => Right(Maintenance)
         case unknown => Left(DecodingFailure(s"unrecognized: $unknown", Nil))
       }.widen,
-      _.downField(BY_EXCEPTION).as[NJError].map(err => ByException(err)).widen
+      _.downField(BY_EXCEPTION).as[Error].map(err => ByException(err)).widen
     ).reduceLeft(_ or _)
 }
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/eventFilters.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/eventFilters.scala
index a9ca12de2..34afc8f55 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/eventFilters.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/event/eventFilters.scala
@@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.guard.event
 
 import cats.syntax.all.*
 import com.github.chenharryhua.nanjin.common.chrono.crontabs
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.MetricReport
+import com.github.chenharryhua.nanjin.guard.event.Event.MetricReport
 import cron4s.CronExpr
 import cron4s.lib.javatime.javaTemporalInstance
 import cron4s.syntax.all.*
@@ -20,7 +20,7 @@ object eventFilters {
    *
    * in every interval, only one MetricReport is allowed to pass
    */
-  def sampling(interval: FiniteDuration)(evt: NJEvent): Boolean =
+  def sampling(interval: FiniteDuration)(evt: Event): Boolean =
     evt match {
       case MetricReport(mrt, sp, _, _) =>
         mrt match {
@@ -41,7 +41,7 @@ object eventFilters {
    *
    * report index mod divisor === 0
    */
-  def sampling(divisor: Refined[Int, Positive])(evt: NJEvent): Boolean =
+  def sampling(divisor: Refined[Int, Positive])(evt: Event): Boolean =
     evt match {
       case MetricReport(mrt, _, _, _) =>
         mrt match {
@@ -53,7 +53,7 @@ object eventFilters {
 
   /** cron based sampling
    */
-  def sampling(cronExpr: CronExpr)(evt: NJEvent): Boolean =
+  def sampling(cronExpr: CronExpr)(evt: Event): Boolean =
     evt match {
       case MetricReport(mrt, _, _, _) =>
         mrt match {
@@ -64,27 +64,27 @@ object eventFilters {
       case _ => true
     }
 
-  def sampling(f: crontabs.type => CronExpr)(evt: NJEvent): Boolean =
+  def sampling(f: crontabs.type => CronExpr)(evt: Event): Boolean =
     sampling(f(crontabs))(evt)
 
   // mapFilter friendly
-  val metricReport: NJEvent => Option[NJEvent.MetricReport] =
-    GenPrism[NJEvent, NJEvent.MetricReport].getOption(_)
+  val metricReport: Event => Option[Event.MetricReport] =
+    GenPrism[Event, Event.MetricReport].getOption(_)
 
-  val metricReset: NJEvent => Option[NJEvent.MetricReset] =
-    GenPrism[NJEvent, NJEvent.MetricReset].getOption(_)
+  val metricReset: Event => Option[Event.MetricReset] =
+    GenPrism[Event, Event.MetricReset].getOption(_)
 
-  val serviceMessage: NJEvent => Option[NJEvent.ServiceMessage] =
-    GenPrism[NJEvent, NJEvent.ServiceMessage].getOption(_)
+  val serviceMessage: Event => Option[Event.ServiceMessage] =
+    GenPrism[Event, Event.ServiceMessage].getOption(_)
 
-  val serviceStart: NJEvent => Option[NJEvent.ServiceStart] =
-    GenPrism[NJEvent, NJEvent.ServiceStart].getOption(_)
+  val serviceStart: Event => Option[Event.ServiceStart] =
+    GenPrism[Event, Event.ServiceStart].getOption(_)
 
-  val serviceStop: NJEvent => Option[NJEvent.ServiceStop] =
-    GenPrism[NJEvent, NJEvent.ServiceStop].getOption(_)
+  val serviceStop: Event => Option[Event.ServiceStop] =
+    GenPrism[Event, Event.ServiceStop].getOption(_)
 
-  val servicePanic: NJEvent => Option[NJEvent.ServicePanic] =
-    GenPrism[NJEvent, NJEvent.ServicePanic].getOption(_)
+  val servicePanic: Event => Option[Event.ServicePanic] =
+    GenPrism[Event, Event.ServicePanic].getOption(_)
 }
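[review note, not part of the patch] The filters keep their behaviour: `sampling` lets every non-report event through and throttles MetricReport only, while the prism-style values below it suit map-then-unNone pipelines. A sketch against the renamed types:

    import cats.effect.IO
    import com.github.chenharryhua.nanjin.guard.event.{eventFilters, Event}
    import fs2.Stream
    import scala.concurrent.duration.*

    // at most one MetricReport per 10-minute window; other events pass through
    def throttled(events: Stream[IO, Event]): Stream[IO, Event] =
      events.filter(eventFilters.sampling(10.minutes))

    // keep only the reports
    def reports(events: Stream[IO, Event]): Stream[IO, Event.MetricReport] =
      events.map(eventFilters.metricReport).unNone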
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/FinalizeMonitor.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/FinalizeMonitor.scala
index 6ba1452cc..bbcb45a9f 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/FinalizeMonitor.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/FinalizeMonitor.scala
@@ -3,16 +3,16 @@ package com.github.chenharryhua.nanjin.guard.observers
 import cats.Monad
 import cats.effect.kernel.{Clock, Ref}
 import cats.implicits.{toFlatMapOps, toFunctorOps}
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.{ServiceStart, ServiceStop}
-import com.github.chenharryhua.nanjin.guard.event.{NJEvent, ServiceStopCause}
+import com.github.chenharryhua.nanjin.guard.event.Event.{ServiceStart, ServiceStop}
+import com.github.chenharryhua.nanjin.guard.event.{Event, ServiceStopCause}
 import fs2.Chunk
 
 import java.util.UUID
 
 final private class FinalizeMonitor[F[_]: Clock: Monad, A](
-  translate: NJEvent => F[Option[A]],
+  translate: Event => F[Option[A]],
   ref: Ref[F, Map[UUID, ServiceStart]]) {
-  def monitoring(event: NJEvent): F[Unit] = event match {
+  def monitoring(event: Event): F[Unit] = event match {
     case ss: ServiceStart => ref.update(_.updated(ss.serviceParams.serviceId, ss))
     case ss: ServiceStop => ref.update(_.removed(ss.serviceParams.serviceId))
     case _ => Monad[F].unit
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/PrettyJsonTranslator.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/PrettyJsonTranslator.scala
index ced8dfb53..ad9002a45 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/PrettyJsonTranslator.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/PrettyJsonTranslator.scala
@@ -2,8 +2,8 @@ package com.github.chenharryhua.nanjin.guard.observers
 
 import cats.Applicative
 import com.github.chenharryhua.nanjin.common.chrono.Tick
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.*
-import com.github.chenharryhua.nanjin.guard.event.{MetricSnapshot, NJEvent}
+import com.github.chenharryhua.nanjin.guard.event.Event.*
+import com.github.chenharryhua.nanjin.guard.event.{Event, MetricSnapshot}
 import com.github.chenharryhua.nanjin.guard.translator.*
 import io.circe.Json
 
@@ -14,7 +14,7 @@ object PrettyJsonTranslator {
 
   private def took(dur: Duration): (String, Json) = "took" -> Json.fromString(fmt.format(dur))
 
-  private def uptime(evt: NJEvent): (String, Json) =
+  private def uptime(evt: Event): (String, Json) =
     "up_time" -> Json.fromString(fmt.format(evt.upTime))
 
   private def pretty_metrics(ss: MetricSnapshot): (String, Json) =
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/SimpleTextTranslator.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/SimpleTextTranslator.scala
index 66c339e61..1b1152a7c 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/SimpleTextTranslator.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/SimpleTextTranslator.scala
@@ -2,15 +2,15 @@ package com.github.chenharryhua.nanjin.guard.observers
 
 import cats.Applicative
 import cats.syntax.all.*
-import com.github.chenharryhua.nanjin.guard.event.{NJError, NJEvent, ServiceStopCause}
+import com.github.chenharryhua.nanjin.guard.event.{Error, Event, ServiceStopCause}
 import com.github.chenharryhua.nanjin.guard.translator.{textConstants, textHelper, Translator}
 import io.circe.syntax.EncoderOps
 
 object SimpleTextTranslator {
-  import NJEvent.*
+  import Event.*
   import textConstants.*
 
-  private def service_event(se: NJEvent): String = {
+  private def service_event(se: Event): String = {
     val host: String = s"$CONSTANT_HOST:${textHelper.hostText(se.serviceParams)}"
     val sn: String = s"$CONSTANT_SERVICE:${se.serviceParams.serviceName.value}"
     val tn: String = s"$CONSTANT_TASK:${se.serviceParams.taskName.value}"
@@ -20,7 +20,7 @@ object SimpleTextTranslator {
       |  $host, $uptime""".stripMargin
   }
 
-  private def error_str(err: NJError): String =
+  private def error_str(err: Error): String =
     s"""Cause:${err.stack.mkString("\n\t")}"""
 
   private def service_started(evt: ServiceStart): String = {
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/console.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/console.scala
index 56c6082da..96ece6ba6 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/console.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/console.scala
@@ -3,7 +3,7 @@ package com.github.chenharryhua.nanjin.guard.observers
 import cats.effect.std.Console
 import cats.syntax.all.*
 import cats.{Endo, Monad}
-import com.github.chenharryhua.nanjin.guard.event.NJEvent
+import com.github.chenharryhua.nanjin.guard.event.Event
 import com.github.chenharryhua.nanjin.guard.translator.{textHelper, ColorScheme, Translator, UpdateTranslator}
 import io.circe.syntax.EncoderOps
 
@@ -23,9 +23,9 @@ object console {
     apply[F](Translator.idTranslator.map(_.asJson.spaces2))
 
   final class TextConsole[F[_]: Console: Monad](translator: Translator[F, String])
-      extends (NJEvent => F[Unit]) with UpdateTranslator[F, String, TextConsole[F]] {
+      extends (Event => F[Unit]) with UpdateTranslator[F, String, TextConsole[F]] {
     private[this] val C = Console[F]
 
-    private[this] def coloring(evt: NJEvent): String =
+    private[this] def coloring(evt: Event): String =
       ColorScheme.decorate(evt).run(textHelper.consoleColor).value
 
     override def updateTranslator(f: Endo[Translator[F, String]]): TextConsole[F] =
@@ -33,7 +33,7 @@ object console {
 
     private[this] val fmt: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
 
-    override def apply(event: NJEvent): F[Unit] =
+    override def apply(event: Event): F[Unit] =
       translator
        .translate(event)
        .flatMap(_.traverse(evt =>
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/Agent.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/Agent.scala
index dfc4aff26..a2e4c2267 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/Agent.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/Agent.scala
@@ -48,7 +48,7 @@ sealed trait Agent[F[_]] {
 final private class GeneralAgent[F[_]](
   serviceParams: ServiceParams,
   metricRegistry: MetricRegistry,
-  channel: Channel[F, NJEvent],
+  channel: Channel[F, Event],
   measurement: Measurement)(implicit F: Async[F])
     extends Agent[F] {
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/HttpRouter.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/HttpRouter.scala
index 0ffa7bc2c..b8f9e7e03 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/HttpRouter.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/HttpRouter.scala
@@ -6,12 +6,12 @@ import cats.effect.std.AtomicCell
 import cats.syntax.all.*
 import com.codahale.metrics.MetricRegistry
 import com.github.chenharryhua.nanjin.guard.config.ServiceParams
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.{MetricReport, ServicePanic}
+import com.github.chenharryhua.nanjin.guard.event.Event.{MetricReport, ServicePanic}
 import com.github.chenharryhua.nanjin.guard.event.{
   retrieveHealthChecks,
+  Event,
   MetricIndex,
   MetricSnapshot,
-  NJEvent,
   ServiceStopCause
 }
 import com.github.chenharryhua.nanjin.guard.translator.htmlHelper.htmlColoring
@@ -40,7 +40,7 @@ private class HttpRouter[F[_]](
   serviceParams: ServiceParams,
   panicHistory: AtomicCell[F, CircularFifoQueue[ServicePanic]],
   metricsHistory: AtomicCell[F, CircularFifoQueue[MetricReport]],
-  channel: Channel[F, NJEvent])(implicit F: Async[F])
+  channel: Channel[F, Event])(implicit F: Async[F])
     extends Http4sDsl[F] with all {
 
   private val html_header: Text.TypedTag[String] =
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/MetricsReport.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/MetricsReport.scala
index 3ab376bc9..72d5f72fd 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/MetricsReport.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/MetricsReport.scala
@@ -4,11 +4,11 @@ import cats.effect.kernel.Sync
 import cats.implicits.{toFlatMapOps, toFunctorOps}
 import com.codahale.metrics.MetricRegistry
 import com.github.chenharryhua.nanjin.guard.config.ServiceParams
-import com.github.chenharryhua.nanjin.guard.event.{MetricIndex, NJEvent}
+import com.github.chenharryhua.nanjin.guard.event.{Event, MetricIndex}
 import fs2.concurrent.Channel
 
 abstract class MetricsReport[F[_]] private[service] (
-  channel: Channel[F, NJEvent],
+  channel: Channel[F, Event],
   serviceParams: ServiceParams,
   metricRegistry: MetricRegistry)(implicit F: Sync[F]) {
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ReStart.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ReStart.scala
index 4a2194c4d..a5a62455a 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ReStart.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ReStart.scala
@@ -6,8 +6,8 @@ import cats.effect.std.AtomicCell
 import cats.syntax.all.*
 import com.github.chenharryhua.nanjin.common.chrono.TickStatus
 import com.github.chenharryhua.nanjin.guard.config.ServiceParams
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServicePanic
-import com.github.chenharryhua.nanjin.guard.event.{NJError, NJEvent, ServiceStopCause}
+import com.github.chenharryhua.nanjin.guard.event.Event.ServicePanic
+import com.github.chenharryhua.nanjin.guard.event.{Error, Event, ServiceStopCause}
 import fs2.Stream
 import fs2.concurrent.Channel
 import org.apache.commons.collections4.queue.CircularFifoQueue
@@ -17,7 +17,7 @@ import java.time.Duration
 import scala.jdk.DurationConverters.JavaDurationOps
 
 final private class ReStart[F[_]](
-  channel: Channel[F, NJEvent],
+  channel: Channel[F, Event],
   serviceParams: ServiceParams,
   panicHistory: AtomicCell[F, CircularFifoQueue[ServicePanic]],
   theService: F[Unit])(implicit F: Temporal[F])
@@ -35,7 +35,7 @@ final private class ReStart[F[_]](
           case None => status
         }
 
-      val error = NJError(ex)
+      val error = Error(ex)
 
       tickStatus.next(now) match {
         case None =>
@@ -64,7 +64,7 @@ final private class ReStart[F[_]](
       case ExitCase.Succeeded =>
         publisher.serviceStop(channel, serviceParams, ServiceStopCause.Successfully)
       case ExitCase.Errored(e) =>
-        publisher.serviceStop(channel, serviceParams, ServiceStopCause.ByException(NJError(e)))
+        publisher.serviceStop(channel, serviceParams, ServiceStopCause.ByException(Error(e)))
       case ExitCase.Canceled =>
         publisher.serviceStop(channel, serviceParams, ServiceStopCause.ByCancellation)
     }
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ServiceGuard.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ServiceGuard.scala
index 8d66ea093..5a7b0b2a4 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ServiceGuard.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/ServiceGuard.scala
@@ -11,7 +11,7 @@ import com.github.chenharryhua.nanjin.common.UpdateConfig
 import com.github.chenharryhua.nanjin.common.chrono.*
 import com.github.chenharryhua.nanjin.guard.config.*
 import com.github.chenharryhua.nanjin.guard.event.*
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.{MetricReport, ServicePanic}
+import com.github.chenharryhua.nanjin.guard.event.Event.{MetricReport, ServicePanic}
 import fs2.Stream
 import fs2.concurrent.Channel
 import fs2.io.net.Network
@@ -52,14 +52,14 @@ final class ServiceGuard[F[_]: Network] private[guard] (serviceName: ServiceName
       zerothTick = zeroth
     )
 
-  def eventStream(runAgent: Agent[F] => F[Unit]): Stream[F, NJEvent] =
+  def eventStream(runAgent: Agent[F] => F[Unit]): Stream[F, Event] =
     for {
       serviceParams <- Stream.eval(initStatus)
       panicHistory <- Stream.eval(
        AtomicCell[F].of(new CircularFifoQueue[ServicePanic](serviceParams.historyCapacity.panic)))
       metricsHistory <- Stream.eval(
        AtomicCell[F].of(new CircularFifoQueue[MetricReport](serviceParams.historyCapacity.metric)))
-      event <- Stream.eval(Channel.unbounded[F, NJEvent]).flatMap { channel =>
+      event <- Stream.eval(Channel.unbounded[F, Event]).flatMap { channel =>
         val metricRegistry: MetricRegistry = new MetricRegistry()
 
         val metrics_report: Stream[F, Nothing] =
@@ -147,10 +147,10 @@ final class ServiceGuard[F[_]: Network] private[guard] (serviceName: ServiceName
       }
     } yield event
 
-  def eventStreamS[A](runAgent: Agent[F] => Stream[F, A]): Stream[F, NJEvent] =
+  def eventStreamS[A](runAgent: Agent[F] => Stream[F, A]): Stream[F, Event] =
     eventStream(agent => runAgent(agent).compile.drain)
 
-  def eventStreamR[A](runAgent: Agent[F] => Resource[F, A]): Stream[F, NJEvent] =
+  def eventStreamR[A](runAgent: Agent[F] => Resource[F, A]): Stream[F, Event] =
     eventStream(agent => runAgent(agent).use_)
 }
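[review note, not part of the patch] `eventStreamS` and `eventStreamR` are thin adapters over `eventStream`: a Stream-shaped program is drained, a Resource-shaped one is acquired with `use_`. A sketch, assuming a `service: ServiceGuard[IO]`:

    import cats.effect.{IO, Resource}
    import com.github.chenharryhua.nanjin.guard.event.Event
    import fs2.Stream

    val fromStream: Stream[IO, Event] =
      service.eventStreamS(_ => Stream.empty.covary[IO])

    val fromResource: Stream[IO, Event] =
      service.eventStreamR(_ => Resource.unit[IO])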
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/publisher.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/publisher.scala
index 44fdd33d9..c9bdbd919 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/publisher.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/service/publisher.scala
@@ -7,7 +7,7 @@ import com.codahale.metrics.MetricRegistry
 import com.github.chenharryhua.nanjin.common.chrono.Tick
 import com.github.chenharryhua.nanjin.guard.config.ServiceParams
 import com.github.chenharryhua.nanjin.guard.event.*
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.{
+import com.github.chenharryhua.nanjin.guard.event.Event.{
   MetricReport,
   MetricReset,
   ServicePanic,
@@ -20,7 +20,7 @@ import scala.jdk.CollectionConverters.CollectionHasAsScala
 private object publisher {
 
   def metricReport[F[_]: Sync](
-    channel: Channel[F, NJEvent],
+    channel: Channel[F, Event],
     serviceParams: ServiceParams,
     metricRegistry: MetricRegistry,
     index: MetricIndex): F[MetricReport] =
@@ -31,7 +31,7 @@ private object publisher {
     } yield mr
 
   def metricReset[F[_]: Sync](
-    channel: Channel[F, NJEvent],
+    channel: Channel[F, Event],
     serviceParams: ServiceParams,
     metricRegistry: MetricRegistry,
     index: MetricIndex): F[Unit] =
@@ -41,21 +41,18 @@ private object publisher {
       _ <- channel.send(mr)
     } yield metricRegistry.getCounters().values().asScala.foreach(c => c.dec(c.getCount))
 
-  def serviceReStart[F[_]](channel: Channel[F, NJEvent], serviceParams: ServiceParams, tick: Tick)(implicit
+  def serviceReStart[F[_]](channel: Channel[F, Event], serviceParams: ServiceParams, tick: Tick)(implicit
     F: Functor[F]): F[Unit] =
     channel.send(ServiceStart(serviceParams, tick)).void
 
-  def servicePanic[F[_]](
-    channel: Channel[F, NJEvent],
-    serviceParams: ServiceParams,
-    tick: Tick,
-    error: NJError)(implicit F: Functor[F]): F[ServicePanic] = {
+  def servicePanic[F[_]](channel: Channel[F, Event], serviceParams: ServiceParams, tick: Tick, error: Error)(
+    implicit F: Functor[F]): F[ServicePanic] = {
     val panic: ServicePanic = ServicePanic(serviceParams, tick, error)
     channel.send(panic).as(panic)
   }
 
   def serviceStop[F[_]: Clock](
-    channel: Channel[F, NJEvent],
+    channel: Channel[F, Event],
     serviceParams: ServiceParams,
     cause: ServiceStopCause)(implicit F: Monad[F]): F[Unit] =
     serviceParams.zonedNow.flatMap { now =>
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/ColorScheme.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/ColorScheme.scala
index 543fd3968..ff9f2ce6c 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/ColorScheme.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/ColorScheme.scala
@@ -6,8 +6,8 @@ import com.github.chenharryhua.nanjin.guard.config.CategoryKind.CounterKind
 import com.github.chenharryhua.nanjin.guard.config.{AlarmLevel, Category}
 import com.github.chenharryhua.nanjin.guard.event.{
   retrieveHealthChecks,
+  Event,
   MetricSnapshot,
-  NJEvent,
   ServiceStopCause
 }
 import enumeratum.values.{CatsOrderValueEnum, IntEnum, IntEnumEntry}
@@ -15,7 +15,7 @@ import enumeratum.values.{CatsOrderValueEnum, IntEnum, IntEnumEntry}
 sealed abstract class ColorScheme(override val value: Int) extends IntEnumEntry
 
 object ColorScheme extends CatsOrderValueEnum[Int, ColorScheme] with IntEnum[ColorScheme] {
-  import NJEvent.*
+  import Event.*
   case object GoodColor extends ColorScheme(0) // successful-ish
   case object InfoColor extends ColorScheme(1) // fyi
   case object WarnColor extends ColorScheme(2) // well, not so wrong
@@ -43,8 +43,8 @@ object ColorScheme extends CatsOrderValueEnum[Int, ColorScheme] with IntEnum[Col
       counter_color.max(gauge_color)
     }
 
-  def decorate[A](evt: NJEvent): Cont[A, ColorScheme] =
-    Cont.pure[A, NJEvent](evt).map {
+  def decorate[A](evt: Event): Cont[A, ColorScheme] =
+    Cont.pure[A, Event](evt).map {
       case _: ServiceStart => InfoColor
       case _: ServicePanic => ErrorColor
       case ss: ServiceStop =>
diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/Translator.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/Translator.scala
index d2a46381b..800c7b548 100644
--- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/Translator.scala
+++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/Translator.scala
@@ -4,8 +4,8 @@ import alleycats.Pure
 import cats.data.{Kleisli, OptionT}
 import cats.syntax.all.*
 import cats.{Applicative, Endo, Functor, FunctorFilter, Monad, Traverse}
-import com.github.chenharryhua.nanjin.guard.event.NJEvent
-import com.github.chenharryhua.nanjin.guard.event.NJEvent.*
+import com.github.chenharryhua.nanjin.guard.event.Event
+import com.github.chenharryhua.nanjin.guard.event.Event.*
 import monocle.macros.Lenses
 
 trait UpdateTranslator[F[_], A, B] {
@@ -21,7 +21,7 @@ trait UpdateTranslator[F[_], A, B] {
   metricReset: Kleisli[OptionT[F, *], MetricReset, A]
 ) {
 
-  def translate(event: NJEvent): F[Option[A]] = event match {
+  def translate(event: Event): F[Option[A]] = event match {
     case e: ServiceStart => serviceStart.run(e).value
servicePanic.run(e).value case e: ServiceStop => serviceStop.run(e).value @@ -30,7 +30,7 @@ trait UpdateTranslator[F[_], A, B] { case e: ServiceMessage => serviceMessage.run(e).value } - def filter(f: NJEvent => Boolean)(implicit F: Applicative[F]): Translator[F, A] = + def filter(f: Event => Boolean)(implicit F: Applicative[F]): Translator[F, A] = Translator[F, A]( Kleisli(ss => if (f(ss)) serviceStart.run(ss) else OptionT(F.pure(None))), Kleisli(ss => if (f(ss)) servicePanic.run(ss) else OptionT(F.pure(None))), @@ -41,7 +41,7 @@ trait UpdateTranslator[F[_], A, B] { ) // for convenience - def traverse[G[_]](ge: G[NJEvent])(implicit F: Applicative[F], G: Traverse[G]): F[G[Option[A]]] = + def traverse[G[_]](ge: G[Event])(implicit F: Applicative[F], G: Traverse[G]): F[G[Option[A]]] = G.traverse(ge)(translate) def skipServiceStart(implicit F: Applicative[F]): Translator[F, A] = @@ -133,7 +133,7 @@ trait UpdateTranslator[F[_], A, B] { copy(serviceMessage = Kleisli(a => OptionT(F.pure(Some(f(a)))))) def flatMap[B](f: A => Translator[F, B])(implicit F: Monad[F]): Translator[F, B] = { - val go: NJEvent => F[Option[Translator[F, B]]] = { (evt: NJEvent) => translate(evt).map(_.map(f)) } + val go: Event => F[Option[Translator[F, B]]] = { (evt: Event) => translate(evt).map(_.map(f)) } Translator .empty[F, B] .withServiceStart(evt => go(evt).flatMap(_.flatTraverse(_.serviceStart.run(evt).value))) @@ -207,7 +207,7 @@ object Translator { override val functor: Functor[Translator[F, *]] = this override def mapFilter[A, B](fa: Translator[F, A])(f: A => Option[B]): Translator[F, B] = { - def go(e: NJEvent): F[Option[B]] = fa.translate(e).map(_.flatMap(f)) + def go(e: Event): F[Option[B]] = fa.translate(e).map(_.flatMap(f)) Translator .empty[F, B] .withServiceStart(go) @@ -219,7 +219,7 @@ object Translator { } } - def noop[F[_], A](implicit F: Applicative[F]): Kleisli[OptionT[F, *], NJEvent, A] = + def noop[F[_], A](implicit F: Applicative[F]): Kleisli[OptionT[F, *], Event, A] = Kleisli(_ => OptionT(F.pure(None))) def empty[F[_]: Applicative, A]: Translator[F, A] = @@ -232,8 +232,8 @@ object Translator { noop[F, A] ) - def idTranslator[F[_]](implicit F: Applicative[F]): Translator[F, NJEvent] = - Translator[F, NJEvent]( + def idTranslator[F[_]](implicit F: Applicative[F]): Translator[F, Event] = + Translator[F, Event]( Kleisli(x => OptionT(F.pure(Some(x)))), Kleisli(x => OptionT(F.pure(Some(x)))), Kleisli(x => OptionT(F.pure(Some(x)))), diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/htmlHelper.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/htmlHelper.scala index f91c0b536..317cc4762 100644 --- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/htmlHelper.scala +++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/htmlHelper.scala @@ -1,10 +1,10 @@ package com.github.chenharryhua.nanjin.guard.translator import cats.Eval -import com.github.chenharryhua.nanjin.guard.event.NJEvent +import com.github.chenharryhua.nanjin.guard.event.Event object htmlHelper { - def htmlColoring(evt: NJEvent): String = ColorScheme + def htmlColoring(evt: Event): String = ColorScheme .decorate(evt) .run { case ColorScheme.GoodColor => Eval.now("color:darkgreen") diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/jsonHelper.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/jsonHelper.scala index f348ef264..aba9265ad 100644 --- 
a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/jsonHelper.scala +++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/jsonHelper.scala @@ -3,14 +3,14 @@ package com.github.chenharryhua.nanjin.guard.translator import cats.implicits.toShow import com.github.chenharryhua.nanjin.common.chrono.{Policy, Tick} import com.github.chenharryhua.nanjin.guard.config.ServiceParams -import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServiceMessage -import com.github.chenharryhua.nanjin.guard.event.{MetricIndex, NJError, NJEvent, ServiceStopCause} +import com.github.chenharryhua.nanjin.guard.event.Event.ServiceMessage +import com.github.chenharryhua.nanjin.guard.event.{Error, Event, MetricIndex, ServiceStopCause} import io.circe.Json import io.circe.syntax.EncoderOps object jsonHelper { - def timestamp(evt: NJEvent): (String, Json) = "timestamp" -> evt.timestamp.asJson + def timestamp(evt: Event): (String, Json) = "timestamp" -> evt.timestamp.asJson def service_id(sp: ServiceParams): (String, Json) = "service_id" -> sp.serviceId.asJson def service_params(sp: ServiceParams): (String, Json) = "params" -> sp.asJson def exit_code(sc: ServiceStopCause): (String, Json) = "exit_code" -> Json.fromInt(sc.exitCode) @@ -20,7 +20,7 @@ object jsonHelper { def policy(ap: Policy): (String, Json) = "policy" -> Json.fromString(ap.show) - def stack(err: NJError): (String, Json) = "stack" -> err.stack.asJson + def stack(err: Error): (String, Json) = "stack" -> err.stack.asJson def json_service_message(sm: ServiceMessage): Json = sm.error diff --git a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/textHelper.scala b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/textHelper.scala index 7026beb3d..6fa646ea4 100644 --- a/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/textHelper.scala +++ b/guard/src/main/scala/com/github/chenharryhua/nanjin/guard/translator/textHelper.scala @@ -3,8 +3,8 @@ package com.github.chenharryhua.nanjin.guard.translator import cats.Eval import cats.syntax.all.* import com.github.chenharryhua.nanjin.guard.config.ServiceParams -import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServicePanic -import com.github.chenharryhua.nanjin.guard.event.{MetricIndex, MetricSnapshot, NJEvent} +import com.github.chenharryhua.nanjin.guard.event.Event.ServicePanic +import com.github.chenharryhua.nanjin.guard.event.{Event, MetricIndex, MetricSnapshot} import org.typelevel.cats.time.instances.{localdatetime, localtime} import java.time.temporal.ChronoUnit @@ -14,7 +14,7 @@ object textHelper extends localtime with localdatetime { def yamlMetrics(ss: MetricSnapshot): String = new SnapshotPolyglot(ss).toYaml - def uptimeText(evt: NJEvent): String = fmt.format(evt.upTime) + def uptimeText(evt: Event): String = fmt.format(evt.upTime) def tookText(dur: Duration): String = fmt.format(dur) @@ -30,15 +30,15 @@ object textHelper extends localtime with localdatetime { case MetricIndex.Periodic(tick) => show"${tick.index}" } - def eventTitle(evt: NJEvent): String = + def eventTitle(evt: Event): String = evt match { - case ss: NJEvent.ServiceStart => + case ss: Event.ServiceStart => if (ss.tick.index === 0) "Start Service" else "Restart Service" - case _: NJEvent.ServiceStop => "Service Stopped" - case _: NJEvent.ServicePanic => "Service Panic" - case _: NJEvent.ServiceMessage => "Service Message" - case _: NJEvent.MetricReport => "Metric Report" - case _: NJEvent.MetricReset => "Metric Reset" + case _: 
Event.ServiceStop => "Service Stopped" + case _: Event.ServicePanic => "Service Panic" + case _: Event.ServiceMessage => "Service Message" + case _: Event.MetricReport => "Metric Report" + case _: Event.MetricReset => "Metric Reset" } private def localTime_duration(start: ZonedDateTime, end: ZonedDateTime): (String, String) = { diff --git a/guard/src/test/scala/mtest/guard/ConsoleLogTest.scala b/guard/src/test/scala/mtest/guard/ConsoleLogTest.scala index 61702679d..87c18564b 100644 --- a/guard/src/test/scala/mtest/guard/ConsoleLogTest.scala +++ b/guard/src/test/scala/mtest/guard/ConsoleLogTest.scala @@ -5,7 +5,7 @@ import cats.effect.IO import cats.effect.unsafe.implicits.global import cats.implicits.toFunctorFilterOps import com.github.chenharryhua.nanjin.guard.TaskGuard -import com.github.chenharryhua.nanjin.guard.event.{eventFilters, NJEvent} +import com.github.chenharryhua.nanjin.guard.event.{eventFilters, Event} import com.github.chenharryhua.nanjin.guard.observers.* import io.circe.Json import org.scalatest.funsuite.AnyFunSuite @@ -15,7 +15,7 @@ import scala.concurrent.duration.* // sbt "guard/testOnly mtest.guard.ConsoleLogTest" class ConsoleLogTest extends AnyFunSuite { - val service: fs2.Stream[IO, NJEvent] = + val service: fs2.Stream[IO, Event] = TaskGuard[IO]("nanjin") .service("observing") .updateConfig(_.addBrief(Json.fromString("brief"))) diff --git a/guard/src/test/scala/mtest/guard/EventFilterTest.scala b/guard/src/test/scala/mtest/guard/EventFilterTest.scala index 4df837951..4e69527c6 100644 --- a/guard/src/test/scala/mtest/guard/EventFilterTest.scala +++ b/guard/src/test/scala/mtest/guard/EventFilterTest.scala @@ -6,7 +6,7 @@ import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime import com.github.chenharryhua.nanjin.common.chrono.{tickStream, Policy} import com.github.chenharryhua.nanjin.guard.TaskGuard import com.github.chenharryhua.nanjin.guard.event.MetricIndex.Periodic -import com.github.chenharryhua.nanjin.guard.event.NJEvent.{MetricReport, ServiceStart, ServiceStop} +import com.github.chenharryhua.nanjin.guard.event.Event.{MetricReport, ServiceStart, ServiceStop} import com.github.chenharryhua.nanjin.guard.event.eventFilters import com.github.chenharryhua.nanjin.guard.service.ServiceGuard import eu.timepit.refined.auto.* diff --git a/guard/src/test/scala/mtest/guard/ServiceTest.scala b/guard/src/test/scala/mtest/guard/ServiceTest.scala index e1bfb70dd..7415f8cb8 100644 --- a/guard/src/test/scala/mtest/guard/ServiceTest.scala +++ b/guard/src/test/scala/mtest/guard/ServiceTest.scala @@ -8,7 +8,7 @@ import com.github.chenharryhua.nanjin.common.chrono.zones.londonTime import com.github.chenharryhua.nanjin.common.chrono.{crontabs, Policy, Tick} import com.github.chenharryhua.nanjin.guard.* import com.github.chenharryhua.nanjin.guard.event.* -import com.github.chenharryhua.nanjin.guard.event.NJEvent.* +import com.github.chenharryhua.nanjin.guard.event.Event.* import io.circe.Json import org.scalatest.funsuite.AnyFunSuite diff --git a/guard/src/test/scala/mtest/guard/TranslatorMonadTest.scala b/guard/src/test/scala/mtest/guard/TranslatorMonadTest.scala index ba047d662..d81d283c1 100644 --- a/guard/src/test/scala/mtest/guard/TranslatorMonadTest.scala +++ b/guard/src/test/scala/mtest/guard/TranslatorMonadTest.scala @@ -10,7 +10,7 @@ import com.github.chenharryhua.nanjin.common.chrono.{Policy, Tick, TickStatus} import com.github.chenharryhua.nanjin.guard.TaskGuard import com.github.chenharryhua.nanjin.guard.config.ServiceParams import 
com.github.chenharryhua.nanjin.guard.event.* -import com.github.chenharryhua.nanjin.guard.event.NJEvent.* +import com.github.chenharryhua.nanjin.guard.event.Event.* import com.github.chenharryhua.nanjin.guard.service.ServiceGuard import com.github.chenharryhua.nanjin.guard.translator.Translator import munit.DisciplineSuite @@ -20,11 +20,11 @@ import org.scalacheck.{Arbitrary, Gen} object gendata { val service: ServiceGuard[IO] = TaskGuard[IO]("monad").service("tailrecM") val tick: Tick = TickStatus.zeroth[IO](Policy.giveUp, sydneyTime).unsafeRunSync().tick - implicit val exhaustiveCheck: ExhaustiveCheck[NJEvent] = + implicit val exhaustiveCheck: ExhaustiveCheck[Event] = ExhaustiveCheck.instance(List(ServiceStart(null.asInstanceOf[ServiceParams], tick))) implicit def translatorEq: Eq[Translator[Option, Int]] = - Eq.by[Translator[Option, Int], NJEvent => Option[Option[Int]]](_.translate) + Eq.by[Translator[Option, Int], Event => Option[Option[Int]]](_.translate) implicit val arbiTranslator: Arbitrary[Translator[Option, Int]] = Arbitrary( diff --git a/guard/src/test/scala/mtest/guard/package.scala b/guard/src/test/scala/mtest/guard/package.scala index fd42102c8..ebb4880c9 100644 --- a/guard/src/test/scala/mtest/guard/package.scala +++ b/guard/src/test/scala/mtest/guard/package.scala @@ -3,7 +3,7 @@ package mtest import cats.effect.IO import cats.effect.std.Random import com.github.chenharryhua.nanjin.common.DurationFormatter -import com.github.chenharryhua.nanjin.guard.event.NJEvent +import com.github.chenharryhua.nanjin.guard.event.Event import io.circe.jawn.decode import io.circe.syntax.EncoderOps @@ -41,8 +41,8 @@ package object guard { val fmt: DurationFormatter = DurationFormatter.defaultFormatter - def checkJson(evt: NJEvent): NJEvent = - decode[NJEvent](evt.asJson.noSpaces) match { + def checkJson(evt: Event): Event = + decode[Event](evt.asJson.noSpaces) match { case Left(value) => throw value case Right(value) => assert(value == evt, s"${evt.toString} \n-------- ${value.toString}") diff --git a/http/src/main/scala/com/github/chenharryhua/nanjin/http/client/middleware/traceClient.scala b/http/src/main/scala/com/github/chenharryhua/nanjin/http/client/middleware/traceClient.scala index de60b3aa0..80feaa877 100644 --- a/http/src/main/scala/com/github/chenharryhua/nanjin/http/client/middleware/traceClient.scala +++ b/http/src/main/scala/com/github/chenharryhua/nanjin/http/client/middleware/traceClient.scala @@ -8,6 +8,7 @@ import org.http4s.{Header, Headers, Response} object traceClient { // steal from https://github.com/typelevel/natchez-http4s/blob/main/modules/http4s/src/main/scala/natchez/http4s/NatchezMiddleware.scala + // replace . 
with _ def apply[F[_]](parent: Span[F])(client: Client[F])(implicit ev: MonadCancel[F, Throwable]): Client[F] = Client { req => parent.span("http4s-client-request").flatMap { span => diff --git a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaAdminApi.scala b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaAdminApi.scala index 7e0b4d723..b9f49f3b9 100644 --- a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaAdminApi.scala +++ b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaAdminApi.scala @@ -4,7 +4,7 @@ import cats.Id import cats.effect.kernel.{Async, Resource} import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.kafka.TopicName -import com.github.chenharryhua.nanjin.datetime.{NJDateTimeRange, NJTimestamp} +import com.github.chenharryhua.nanjin.datetime.{DateTimeRange, NJTimestamp} import fs2.kafka.{AutoOffsetReset, ConsumerSettings, KafkaAdminClient} import org.apache.kafka.clients.admin.{NewTopic, TopicDescription} import org.apache.kafka.clients.consumer.{ConsumerRecord, OffsetAndMetadata} @@ -26,7 +26,7 @@ sealed trait KafkaAdminApi[F[_]] { def deleteConsumerGroupOffsets(groupId: String): F[Unit] def partitionsFor: F[ListOfTopicPartitions] - def offsetRangeFor(dtr: NJDateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] + def offsetRangeFor(dtr: DateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] def commitSync(groupId: String, offsets: Map[TopicPartition, OffsetAndMetadata]): F[Unit] def commitSync(groupId: String, partition: Int, offset: Long): F[Unit] @@ -115,7 +115,7 @@ object KafkaAdminApi { _ <- adminResource.use(_.deleteConsumerGroupOffsets(groupId, tps.value.toSet)) } yield () - override def offsetRangeFor(dtr: NJDateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] = + override def offsetRangeFor(dtr: DateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] = transientConsumer(initCS).offsetRangeFor(dtr) override def partitionsFor: F[ListOfTopicPartitions] = diff --git a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaContext.scala b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaContext.scala index 850ca9d69..fb44e5f4a 100644 --- a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaContext.scala +++ b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaContext.scala @@ -8,7 +8,7 @@ import cats.effect.std.UUIDGen import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.UpdateConfig import com.github.chenharryhua.nanjin.common.kafka.{TopicName, TopicNameL} -import com.github.chenharryhua.nanjin.kafka.streaming.{KafkaStreamsBuilder, NJStateStore} +import com.github.chenharryhua.nanjin.kafka.streaming.{KafkaStreamsBuilder, StateStores} import com.github.chenharryhua.nanjin.messages.kafka.codec.* import fs2.Stream import fs2.kafka.* @@ -30,9 +30,9 @@ final class KafkaContext[F[_]] private (val settings: KafkaSettings) def asKey[K: SerdeOf]: Serde[K] = SerdeOf[K].asKey(settings.schemaRegistrySettings.config).serde def asValue[V: SerdeOf]: Serde[V] = SerdeOf[V].asValue(settings.schemaRegistrySettings.config).serde - def asKey[K](avro: NJAvroCodec[K]): Serde[K] = + def asKey[K](avro: AvroCodec[K]): Serde[K] = SerdeOf[K](avro).asKey(settings.schemaRegistrySettings.config).serde - def asValue[V](avro: NJAvroCodec[V]): Serde[V] = + def asValue[V](avro: AvroCodec[V]): Serde[V] = SerdeOf[V](avro).asValue(settings.schemaRegistrySettings.config).serde def topic[K, V](topicDef: TopicDef[K, V]): KafkaTopic[F, K, V] = @@ -150,13 
+150,13 @@ final class KafkaContext[F[_]] private (val settings: KafkaSettings) // streams - def store[K: SerdeOf, V: SerdeOf](storeName: TopicName): NJStateStore[K, V] = - NJStateStore[K, V]( + def store[K: SerdeOf, V: SerdeOf](storeName: TopicName): StateStores[K, V] = + StateStores[K, V]( storeName, settings.schemaRegistrySettings, RawKeyValueSerdePair[K, V](SerdeOf[K], SerdeOf[V])) - def store[K: SerdeOf, V: SerdeOf](storeName: TopicNameL): NJStateStore[K, V] = + def store[K: SerdeOf, V: SerdeOf](storeName: TopicNameL): StateStores[K, V] = store(TopicName(storeName)) def buildStreams(applicationId: String, topology: Reader[StreamsBuilder, Unit])(implicit diff --git a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaTopic.scala b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaTopic.scala index 3372a2601..9db890e34 100644 --- a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaTopic.scala +++ b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/KafkaTopic.scala @@ -3,7 +3,7 @@ package com.github.chenharryhua.nanjin.kafka import cats.effect.kernel.{Async, Resource, Sync} import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.kafka.{TopicName, TopicNameL} -import com.github.chenharryhua.nanjin.kafka.streaming.{KafkaStreamingConsumer, NJStateStore} +import com.github.chenharryhua.nanjin.kafka.streaming.{KafkaStreamingConsumer, StateStores} import com.github.chenharryhua.nanjin.messages.kafka.NJConsumerRecord import com.sksamuel.avro4s.AvroInputStream import fs2.Chunk @@ -59,11 +59,11 @@ final class KafkaTopic[F[_], K, V] private[kafka] (val topicDef: TopicDef[K, V], def asProduced: Produced[K, V] = Produced.`with`[K, V](serdePair.key.serde, serdePair.value.serde) - def asStateStore(storeName: TopicName): NJStateStore[K, V] = { + def asStateStore(storeName: TopicName): StateStores[K, V] = { require(storeName.value =!= topicName.value, "should provide a name other than the topic name") - NJStateStore[K, V](storeName, KeyValueSerdePair(serdePair.key, serdePair.value)) + StateStores[K, V](storeName, KeyValueSerdePair(serdePair.key, serdePair.value)) } - def asStateStore(storeName: TopicNameL): NJStateStore[K, V] = + def asStateStore(storeName: TopicNameL): StateStores[K, V] = asStateStore(TopicName(storeName)) // for testing diff --git a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TopicDef.scala b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TopicDef.scala index d1224bf99..7b0b9bbd9 100644 --- a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TopicDef.scala +++ b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TopicDef.scala @@ -4,7 +4,7 @@ import cats.Show import cats.kernel.Eq import cats.syntax.eq.* import com.github.chenharryhua.nanjin.common.kafka.{TopicName, TopicNameL} -import com.github.chenharryhua.nanjin.messages.kafka.codec.{NJAvroCodec, SerdeOf} +import com.github.chenharryhua.nanjin.messages.kafka.codec.{AvroCodec, SerdeOf} import com.github.chenharryhua.nanjin.messages.kafka.{NJConsumerRecord, NJProducerRecord} import com.sksamuel.avro4s.{Record, RecordFormat} import fs2.kafka.{ConsumerRecord, ProducerRecord} @@ -42,10 +42,10 @@ final class TopicDef[K, V] private (val topicName: TopicName, val rawSerdes: Raw def fromRecord(gr: IndexedRecord): NJProducerRecord[K, V] = rf.from(gr) } - lazy val consumerCodec: NJAvroCodec[NJConsumerRecord[K, V]] = + lazy val consumerCodec: AvroCodec[NJConsumerRecord[K, V]] = NJConsumerRecord.avroCodec(rawSerdes.key.avroCodec, 
rawSerdes.value.avroCodec) - lazy val producerCodec: NJAvroCodec[NJProducerRecord[K, V]] = + lazy val producerCodec: AvroCodec[NJProducerRecord[K, V]] = NJProducerRecord.avroCodec(rawSerdes.key.avroCodec, rawSerdes.value.avroCodec) lazy val consumerFormat: ConsumerFormat = new ConsumerFormat(RecordFormat(consumerCodec, consumerCodec)) @@ -63,10 +63,7 @@ object TopicDef { x.rawSerdes.key.avroCodec.schema == y.rawSerdes.key.avroCodec.schema && x.rawSerdes.value.avroCodec.schema == y.rawSerdes.value.avroCodec.schema - def apply[K, V]( - topicName: TopicName, - keySchema: NJAvroCodec[K], - valSchema: NJAvroCodec[V]): TopicDef[K, V] = { + def apply[K, V](topicName: TopicName, keySchema: AvroCodec[K], valSchema: AvroCodec[V]): TopicDef[K, V] = { val sk = SerdeOf(keySchema) val sv = SerdeOf(valSchema) new TopicDef(topicName, RawKeyValueSerdePair(sk, sv)) @@ -78,7 +75,7 @@ object TopicDef { new TopicDef(topicName, RawKeyValueSerdePair(sk, sv)) } - def apply[K: SerdeOf, V](topicName: TopicName, valSchema: NJAvroCodec[V]): TopicDef[K, V] = { + def apply[K: SerdeOf, V](topicName: TopicName, valSchema: AvroCodec[V]): TopicDef[K, V] = { val sk = SerdeOf[K] val sv = SerdeOf(valSchema) new TopicDef(topicName, RawKeyValueSerdePair(sk, sv)) diff --git a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumer.scala b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumer.scala index 4262be892..8377cec10 100644 --- a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumer.scala +++ b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumer.scala @@ -6,7 +6,7 @@ import cats.mtl.Ask import cats.syntax.all.* import cats.{Id, Monad} import com.github.chenharryhua.nanjin.common.kafka.TopicName -import com.github.chenharryhua.nanjin.datetime.{NJDateTimeRange, NJTimestamp} +import com.github.chenharryhua.nanjin.datetime.{DateTimeRange, NJTimestamp} import fs2.kafka.consumer.MkConsumer import fs2.kafka.{ConsumerSettings, KafkaByteConsumer} import org.apache.kafka.clients.consumer.{ConsumerRecord, OffsetAndMetadata} @@ -89,7 +89,7 @@ private object KafkaPrimitiveConsumerApi { } sealed trait TransientConsumer[F[_]] extends KafkaPrimitiveConsumerApi[F] { - def offsetRangeFor(dtr: NJDateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] + def offsetRangeFor(dtr: DateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] def offsetRangeFor(start: NJTimestamp, end: NJTimestamp): F[TopicPartitionMap[Option[OffsetRange]]] def offsetRangeForAll: F[TopicPartitionMap[Option[OffsetRange]]] @@ -122,7 +122,7 @@ private object TransientConsumer { private[this] def execute[A](r: Kleisli[F, KafkaByteConsumer, A]): F[A] = consumer.use(r.run) - override def offsetRangeFor(dtr: NJDateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] = + override def offsetRangeFor(dtr: DateTimeRange): F[TopicPartitionMap[Option[OffsetRange]]] = execute { for { from <- dtr.startTimestamp.fold(kpc.beginningOffsets)(kpc.offsetsForTimes) diff --git a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/streaming/NJStateStore.scala b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/streaming/StateStores.scala similarity index 94% rename from kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/streaming/NJStateStore.scala rename to kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/streaming/StateStores.scala index 3ab417820..f9593f8f9 100644 --- a/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/streaming/NJStateStore.scala +++ 
b/kafka/src/main/scala/com/github/chenharryhua/nanjin/kafka/streaming/StateStores.scala @@ -36,7 +36,7 @@ final class SessionBytesStoreSupplierHelper[K, V] private[streaming] ( Stores.sessionStoreBuilder(supplier, registered.key.serde, registered.value.serde) } -final class NJStateStore[K, V] private (storeName: TopicName, registered: KeyValueSerdePair[K, V]) +final class StateStores[K, V] private (storeName: TopicName, registered: KeyValueSerdePair[K, V]) extends Serializable { def name: String = storeName.value @@ -130,13 +130,13 @@ final class NJStateStore[K, V] private (storeName: TopicName, registered: KeyVal } } -private[kafka] object NJStateStore { - def apply[K, V](storeName: TopicName, registered: KeyValueSerdePair[K, V]): NJStateStore[K, V] = - new NJStateStore[K, V](storeName, registered) +private[kafka] object StateStores { + def apply[K, V](storeName: TopicName, registered: KeyValueSerdePair[K, V]): StateStores[K, V] = + new StateStores[K, V](storeName, registered) def apply[K, V]( storeName: TopicName, srs: SchemaRegistrySettings, - rawSerdes: RawKeyValueSerdePair[K, V]): NJStateStore[K, V] = - apply[K, V](storeName, rawSerdes.register(srs, storeName)) + rawSerde: RawKeyValueSerdePair[K, V]): StateStores[K, V] = + apply[K, V](storeName, rawSerde.register(srs, storeName)) } diff --git a/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/ConsumerApiOffsetRangeTest.scala b/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/ConsumerApiOffsetRangeTest.scala index 33b36355a..b4268d0f3 100644 --- a/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/ConsumerApiOffsetRangeTest.scala +++ b/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/ConsumerApiOffsetRangeTest.scala @@ -4,7 +4,7 @@ import cats.Id import cats.effect.IO import cats.effect.unsafe.implicits.global import com.github.chenharryhua.nanjin.common.chrono.zones.darwinTime -import com.github.chenharryhua.nanjin.datetime.{NJDateTimeRange, NJTimestamp} +import com.github.chenharryhua.nanjin.datetime.{DateTimeRange, NJTimestamp} import eu.timepit.refined.auto.* import fs2.Stream import fs2.kafka.{ConsumerSettings, ProducerRecord, ProducerRecords, ProducerResult} @@ -48,7 +48,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { Map(new TopicPartition("range.test", 0) -> OffsetRange(Offset(1), Offset(2)))) - val r = NJDateTimeRange(darwinTime).withStartTime(110).withEndTime(250) + val r = DateTimeRange(darwinTime).withStartTime(110).withEndTime(250) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -59,7 +59,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { Map(new TopicPartition("range.test", 0) -> OffsetRange(Offset(0), Offset(2)))) - val r = NJDateTimeRange(darwinTime).withStartTime(100).withEndTime(300) + val r = DateTimeRange(darwinTime).withStartTime(100).withEndTime(300) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -70,7 +70,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { Map(new TopicPartition("range.test", 0) -> OffsetRange(Offset(0), Offset(3)))) - val r = NJDateTimeRange(darwinTime).withStartTime(100).withEndTime(310) + val r = DateTimeRange(darwinTime).withStartTime(100).withEndTime(310) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -81,7 +81,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { Map(new TopicPartition("range.test", 0) -> OffsetRange(Offset(1), Offset(3)))) - val r = 
NJDateTimeRange(darwinTime).withStartTime(110).withEndTime(500) + val r = DateTimeRange(darwinTime).withStartTime(110).withEndTime(500) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -92,7 +92,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { Map(new TopicPartition("range.test", 0) -> OffsetRange(Offset(0), Offset(1)))) - val r = NJDateTimeRange(darwinTime).withStartTime(10).withEndTime(110) + val r = DateTimeRange(darwinTime).withStartTime(10).withEndTime(110) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -101,7 +101,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { val expect = TopicPartitionMap(Map(new TopicPartition("range.test", 0) -> None)) - val r = NJDateTimeRange(darwinTime).withStartTime(10).withEndTime(30) + val r = DateTimeRange(darwinTime).withStartTime(10).withEndTime(30) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -110,7 +110,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { val expect = TopicPartitionMap(Map(new TopicPartition("range.test", 0) -> None)) - val r = NJDateTimeRange(darwinTime).withStartTime(500).withEndTime(600) + val r = DateTimeRange(darwinTime).withStartTime(500).withEndTime(600) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -119,7 +119,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { val expect = TopicPartitionMap(Map(new TopicPartition("range.test", 0) -> None)) - val r = NJDateTimeRange(darwinTime).withStartTime(110).withEndTime(120) + val r = DateTimeRange(darwinTime).withStartTime(110).withEndTime(120) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } @@ -130,7 +130,7 @@ class ConsumerApiOffsetRangeTest extends AnyFunSuite { Map(new TopicPartition("range.test", 0) -> OffsetRange(Offset(0), Offset(3)))) - val r = NJDateTimeRange(darwinTime) + val r = DateTimeRange(darwinTime) transientConsumer.offsetRangeFor(r).map(x => assert(x === expect)).unsafeRunSync() } diff --git a/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumerTest.scala b/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumerTest.scala index b55a8c249..c1d52eddd 100644 --- a/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumerTest.scala +++ b/kafka/src/test/scala/com/github/chenharryhua/nanjin/kafka/TransientConsumerTest.scala @@ -4,7 +4,7 @@ import cats.Id import cats.effect.IO import cats.effect.unsafe.implicits.global import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime -import com.github.chenharryhua.nanjin.datetime.{NJDateTimeRange, NJTimestamp} +import com.github.chenharryhua.nanjin.datetime.{DateTimeRange, NJTimestamp} import com.github.chenharryhua.nanjin.kafka.buildConsumer.* import fs2.kafka.ConsumerSettings import fs2.kafka.consumer.MkConsumer @@ -21,7 +21,7 @@ class TransientConsumerTest extends AnyFunSuite { val end: Map[TopicPartition, java.lang.Long] = Map(tp0 -> 10L, tp1 -> 10, tp2 -> 10) implicit val mkConsumer: MkConsumer[IO] = buildConsumer(begin, end, Map.empty) val consumer: TransientConsumer[IO] = TransientConsumer[IO](topicName, pcs) - val res = consumer.offsetRangeFor(NJDateTimeRange(sydneyTime)).unsafeRunSync() + val res = consumer.offsetRangeFor(DateTimeRange(sydneyTime)).unsafeRunSync() println(res) assert(res.value.size == 3) assert(res.value.forall(_._2.forall(_.distance == 10))) @@ -36,7 +36,7 @@ class TransientConsumerTest extends 
AnyFunSuite { tp2 -> new OffsetAndTimestamp(5, 0)) implicit val mkConsumer: MkConsumer[IO] = buildConsumer(begin, end, forTime) val consumer: TransientConsumer[IO] = TransientConsumer[IO](topicName, pcs) - val res = consumer.offsetRangeFor(NJDateTimeRange(sydneyTime)).unsafeRunSync() + val res = consumer.offsetRangeFor(DateTimeRange(sydneyTime)).unsafeRunSync() println(res) assert(res.value.size == 3) assert(res.value.forall(_._2.forall(_.distance == 10))) @@ -53,7 +53,7 @@ class TransientConsumerTest extends AnyFunSuite { implicit val mkConsumer: MkConsumer[IO] = buildConsumer(begin, end, forTime) val consumer: TransientConsumer[IO] = TransientConsumer[IO](topicName, pcs) val res = - consumer.offsetRangeFor(NJDateTimeRange(sydneyTime).withEndTime(LocalDate.now())).unsafeRunSync() + consumer.offsetRangeFor(DateTimeRange(sydneyTime).withEndTime(LocalDate.now())).unsafeRunSync() println(res) assert(res.value.size == 3) assert(res.value.forall(_._2.exists(_.distance == 5))) @@ -66,7 +66,7 @@ class TransientConsumerTest extends AnyFunSuite { implicit val mkConsumer: MkConsumer[IO] = buildConsumer(begin, end, forTime) val consumer: TransientConsumer[IO] = TransientConsumer[IO](topicName, pcs) val res = - consumer.offsetRangeFor(NJDateTimeRange(sydneyTime).withEndTime(LocalDate.now())).unsafeRunSync() + consumer.offsetRangeFor(DateTimeRange(sydneyTime).withEndTime(LocalDate.now())).unsafeRunSync() println(res) assert(res.value.size == 3) assert(res.value.forall(_._2.exists(_.distance == 10))) @@ -83,7 +83,7 @@ class TransientConsumerTest extends AnyFunSuite { implicit val mkConsumer: MkConsumer[IO] = buildConsumer(begin, end, forTime) val consumer: TransientConsumer[IO] = TransientConsumer[IO](topicName, pcs) val res = - consumer.offsetRangeFor(NJDateTimeRange(sydneyTime).withEndTime(LocalDate.now())).unsafeRunSync() + consumer.offsetRangeFor(DateTimeRange(sydneyTime).withEndTime(LocalDate.now())).unsafeRunSync() println(res) assert(res.value.size == 3) assert(res.value.forall(_._2.exists(_.distance == 5))) diff --git a/kafka/src/test/scala/mtest/kafka/AdminApiTest.scala b/kafka/src/test/scala/mtest/kafka/AdminApiTest.scala index bc78806f4..80fb86c1c 100644 --- a/kafka/src/test/scala/mtest/kafka/AdminApiTest.scala +++ b/kafka/src/test/scala/mtest/kafka/AdminApiTest.scala @@ -3,7 +3,7 @@ package mtest.kafka import cats.effect.IO import cats.effect.unsafe.implicits.global import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime -import com.github.chenharryhua.nanjin.datetime.{NJDateTimeRange, NJTimestamp} +import com.github.chenharryhua.nanjin.datetime.{DateTimeRange, NJTimestamp} import com.github.chenharryhua.nanjin.kafka.* import eu.timepit.refined.auto.* import io.circe.syntax.EncoderOps @@ -55,7 +55,7 @@ class AdminApiTest extends AnyFunSuite { admin.resetOffsetsForTimes(gid, NJTimestamp(0)) >> admin.resetOffsetsToEnd(gid) >> admin.lagBehind(gid) >> - admin.offsetRangeFor(NJDateTimeRange(sydneyTime).withToday) >> + admin.offsetRangeFor(DateTimeRange(sydneyTime).withToday) >> admin.partitionsFor >> admin.groups assert(gp.unsafeRunSync().map(_.value).contains(gid)) diff --git a/kafka/src/test/scala/mtest/kafka/MonitorApiTest.scala b/kafka/src/test/scala/mtest/kafka/MonitorApiTest.scala index 50f9aa98a..0e1880b3b 100644 --- a/kafka/src/test/scala/mtest/kafka/MonitorApiTest.scala +++ b/kafka/src/test/scala/mtest/kafka/MonitorApiTest.scala @@ -3,7 +3,7 @@ package mtest.kafka import cats.effect.IO import cats.effect.unsafe.implicits.global import 
com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.kafka.KafkaTopic import eu.timepit.refined.auto.* import fs2.Stream @@ -50,7 +50,7 @@ class MonitorApiTest extends AnyFunSuite { test("cherry pick") { ctx .admin("monitor.test") - .offsetRangeFor(NJDateTimeRange(sydneyTime)) + .offsetRangeFor(DateTimeRange(sydneyTime)) .flatMap { kor => val range = kor.get(new TopicPartition("monitor.test", 0)).flatten.get ctx.cherryPick("monitor.test", 0, range.from.value) diff --git a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJConsumerRecord.scala b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJConsumerRecord.scala index 581522de7..29cfa15ab 100644 --- a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJConsumerRecord.scala +++ b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJConsumerRecord.scala @@ -5,7 +5,7 @@ import cats.data.Cont import cats.syntax.eq.catsSyntaxEq import cats.syntax.semigroup.catsSyntaxSemigroup import cats.kernel.Eq -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.sksamuel.avro4s.* import fs2.kafka.* import io.circe.{Decoder as JsonDecoder, Encoder as JsonEncoder} @@ -59,9 +59,7 @@ object NJConsumerRecord { def apply[K, V](cr: ConsumerRecord[K, V]): NJConsumerRecord[K, V] = cr.transformInto[NJConsumerRecord[K, V]] - def avroCodec[K, V]( - keyCodec: NJAvroCodec[K], - valCodec: NJAvroCodec[V]): NJAvroCodec[NJConsumerRecord[K, V]] = { + def avroCodec[K, V](keyCodec: AvroCodec[K], valCodec: AvroCodec[V]): AvroCodec[NJConsumerRecord[K, V]] = { implicit val schemaForKey: SchemaFor[K] = keyCodec.schemaFor implicit val schemaForVal: SchemaFor[V] = valCodec.schemaFor implicit val keyDecoder: Decoder[K] = keyCodec @@ -71,7 +69,7 @@ object NJConsumerRecord { val s: SchemaFor[NJConsumerRecord[K, V]] = implicitly val d: Decoder[NJConsumerRecord[K, V]] = implicitly val e: Encoder[NJConsumerRecord[K, V]] = implicitly - NJAvroCodec[NJConsumerRecord[K, V]](s, d.withSchema(s), e.withSchema(s)) + AvroCodec[NJConsumerRecord[K, V]](s, d.withSchema(s), e.withSchema(s)) } def schema(keySchema: Schema, valSchema: Schema): Schema = { diff --git a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJProducerRecord.scala b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJProducerRecord.scala index e1252916d..8fed757bf 100644 --- a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJProducerRecord.scala +++ b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/NJProducerRecord.scala @@ -5,7 +5,7 @@ import cats.data.Cont import cats.implicits.catsSyntaxEq import cats.kernel.Eq import com.github.chenharryhua.nanjin.common.kafka.TopicName -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.sksamuel.avro4s.* import fs2.kafka.{Header, Headers, ProducerRecord} import io.circe.{Decoder as JsonDecoder, Encoder as JsonEncoder} @@ -58,9 +58,7 @@ object NJProducerRecord { def apply[K, V](topicName: TopicName, k: K, v: V): NJProducerRecord[K, V] = NJProducerRecord(topicName.value, None, None, None, Option(k), Option(v), Nil) - def avroCodec[K, V]( - keyCodec: 
NJAvroCodec[K], - valCodec: NJAvroCodec[V]): NJAvroCodec[NJProducerRecord[K, V]] = { + def avroCodec[K, V](keyCodec: AvroCodec[K], valCodec: AvroCodec[V]): AvroCodec[NJProducerRecord[K, V]] = { implicit val schemaForKey: SchemaFor[K] = keyCodec.schemaFor implicit val schemaForVal: SchemaFor[V] = valCodec.schemaFor implicit val keyDecoder: Decoder[K] = keyCodec @@ -70,7 +68,7 @@ object NJProducerRecord { val s: SchemaFor[NJProducerRecord[K, V]] = cachedImplicit val d: Decoder[NJProducerRecord[K, V]] = cachedImplicit val e: Encoder[NJProducerRecord[K, V]] = cachedImplicit - NJAvroCodec[NJProducerRecord[K, V]](s, d.withSchema(s), e.withSchema(s)) + AvroCodec[NJProducerRecord[K, V]](s, d.withSchema(s), e.withSchema(s)) } def schema(keySchema: Schema, valSchema: Schema): Schema = { diff --git a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/NJAvroCodec.scala b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/AvroCodec.scala similarity index 65% rename from messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/NJAvroCodec.scala rename to messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/AvroCodec.scala index 4b2f72930..f6cb5e5bc 100644 --- a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/NJAvroCodec.scala +++ b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/AvroCodec.scala @@ -11,7 +11,8 @@ import com.sksamuel.avro4s.{ import eu.timepit.refined.api.Refined import eu.timepit.refined.string.MatchesRegex import org.apache.avro.Schema -final class NJAvroCodec[A] private ( + +final class AvroCodec[A] private ( override val schemaFor: SchemaFor[A], avroDecoder: AvroDecoder[A], avroEncoder: AvroEncoder[A]) @@ -19,12 +20,12 @@ final class NJAvroCodec[A] private ( def idConversion(a: A): A = avroDecoder.decode(avroEncoder.encode(a)) - def withSchema(schema: Schema): NJAvroCodec[A] = - NJAvroCodec(schema)(avroDecoder, avroEncoder, schemaFor) + def withSchema(schema: Schema): AvroCodec[A] = + AvroCodec(schema)(avroDecoder, avroEncoder, schemaFor) - override def withSchema(schemaFor: SchemaFor[A]): NJAvroCodec[A] = withSchema(schemaFor.schema) - override def encode(value: A): AnyRef = avroEncoder.encode(value) - override def decode(value: Any): A = avroDecoder.decode(value) + override def withSchema(schemaFor: SchemaFor[A]): AvroCodec[A] = withSchema(schemaFor.schema) + override def encode(value: A): AnyRef = avroEncoder.encode(value) + override def decode(value: Any): A = avroDecoder.decode(value) /** https://avro.apache.org/docs/current/spec.html the grammar for a namespace is: * @@ -33,23 +34,23 @@ final class NJAvroCodec[A] private ( * empty namespace is not allowed */ private type Namespace = MatchesRegex["^[a-zA-Z0-9_.]+$"] - def withNamespace(namespace: String Refined Namespace): NJAvroCodec[A] = + def withNamespace(namespace: String Refined Namespace): AvroCodec[A] = withSchema(replaceNamespace(schema, namespace.value)) - def withoutNamespace: NJAvroCodec[A] = withSchema(removeNamespace(schema)) - def withoutDefaultField: NJAvroCodec[A] = withSchema(removeDefaultField(schema)) - def withoutDoc: NJAvroCodec[A] = withSchema(removeDocField(schema)) + def withoutNamespace: AvroCodec[A] = withSchema(removeNamespace(schema)) + def withoutDefaultField: AvroCodec[A] = withSchema(removeDefaultField(schema)) + def withoutDoc: AvroCodec[A] = withSchema(removeDocField(schema)) } -object NJAvroCodec { - def apply[A](sf: SchemaFor[A], dc: AvroDecoder[A], 
ec: AvroEncoder[A]): NJAvroCodec[A] = - new NJAvroCodec[A](sf, DecoderHelpers.buildWithSchema(dc, sf), EncoderHelpers.buildWithSchema(ec, sf)) +object AvroCodec { + def apply[A](sf: SchemaFor[A], dc: AvroDecoder[A], ec: AvroEncoder[A]): AvroCodec[A] = + new AvroCodec[A](sf, DecoderHelpers.buildWithSchema(dc, sf), EncoderHelpers.buildWithSchema(ec, sf)) - def apply[A](implicit dc: AvroDecoder[A], ec: AvroEncoder[A], sf: SchemaFor[A]): NJAvroCodec[A] = + def apply[A](implicit dc: AvroDecoder[A], ec: AvroEncoder[A], sf: SchemaFor[A]): AvroCodec[A] = apply[A](sf, dc, ec) def apply[A]( - schema: Schema)(implicit dc: AvroDecoder[A], ec: AvroEncoder[A], sf: SchemaFor[A]): NJAvroCodec[A] = { + schema: Schema)(implicit dc: AvroDecoder[A], ec: AvroEncoder[A], sf: SchemaFor[A]): AvroCodec[A] = { val b = backwardCompatibility(sf.schema, schema) val f = forwardCompatibility(sf.schema, schema) if (b.isEmpty && f.isEmpty) { @@ -60,6 +61,6 @@ object NJAvroCodec { } } - def apply[A: AvroDecoder: AvroEncoder: SchemaFor](schemaText: String): NJAvroCodec[A] = + def apply[A: AvroDecoder: AvroEncoder: SchemaFor](schemaText: String): AvroCodec[A] = apply[A]((new Schema.Parser).parse(schemaText)) } diff --git a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KJson.scala b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KJson.scala index 06750f69a..7f31b5e5a 100644 --- a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KJson.scala +++ b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KJson.scala @@ -80,7 +80,7 @@ object KJson { implicit def jsonSerde[A: JsonEncoder: JsonDecoder]: SerdeOf[KJson[A]] = new SerdeOf[KJson[A]] { - override val avroCodec: NJAvroCodec[KJson[A]] = NJAvroCodec[KJson[A]] + override val avroCodec: AvroCodec[KJson[A]] = AvroCodec[KJson[A]] override val serializer: Serializer[KJson[A]] = new Serializer[KJson[A]] with Serializable { diff --git a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KPB.scala b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KPB.scala index 97a19cd07..d07b8bb36 100644 --- a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KPB.scala +++ b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/KPB.scala @@ -72,7 +72,7 @@ object KPB { implicit def kpbSerde[A <: GeneratedMessage](implicit ev: GeneratedMessageCompanion[A]): SerdeOf[KPB[A]] = new SerdeOf[KPB[A]] { - override val avroCodec: NJAvroCodec[KPB[A]] = { + override val avroCodec: AvroCodec[KPB[A]] = { val kpbCodec: Codec[KPB[A]] = new Codec[KPB[A]] { override def decode(value: Any): KPB[A] = value match { case ab: Array[Byte] => KPB(ev.parseFrom(ab)) @@ -85,7 +85,7 @@ object KPB { override def schemaFor: SchemaFor[KPB[A]] = SchemaFor[Array[Byte]].forType[KPB[A]] } - NJAvroCodec[KPB[A]](kpbCodec.schemaFor, kpbCodec, kpbCodec) + AvroCodec[KPB[A]](kpbCodec.schemaFor, kpbCodec, kpbCodec) } override val serializer: Serializer[KPB[A]] = diff --git a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/SerdeOf.scala b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/SerdeOf.scala index 10e61c99e..dd991af92 100644 --- a/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/SerdeOf.scala +++ b/messages/src/main/scala/com/github/chenharryhua/nanjin/messages/kafka/codec/SerdeOf.scala @@ -42,7 +42,7 @@ sealed abstract class 
RegisteredSerde[A](serdeOf: SerdeOf[A]) extends Serializab } trait SerdeOf[A] extends Serde[A] with Serializable { outer => - def avroCodec: NJAvroCodec[A] + def avroCodec: AvroCodec[A] final def asKey(props: Map[String, String]): RegisteredSerde[A] = new RegisteredSerde(this) { @@ -60,13 +60,13 @@ trait SerdeOf[A] extends Serde[A] with Serializable { outer => private[codec] trait LowerPriority { implicit def avro4sCodec[A: SchemaFor: AvroEncoder: AvroDecoder]: SerdeOf[A] = - SerdeOf(NJAvroCodec[A]) + SerdeOf(AvroCodec[A]) } object SerdeOf extends LowerPriority { def apply[A](implicit ev: SerdeOf[A]): SerdeOf[A] = ev - def apply[A](codec: NJAvroCodec[A]): SerdeOf[A] = + def apply[A](codec: AvroCodec[A]): SerdeOf[A] = new SerdeOf[A] { override val serializer: Serializer[A] = @@ -109,13 +109,13 @@ object SerdeOf extends LowerPriority { case Some(value) => avroCodec.decode(deSer.deserialize(topic, value)) } } - override val avroCodec: NJAvroCodec[A] = codec + override val avroCodec: AvroCodec[A] = codec } // 1: String implicit object StringPrimitiveSerde extends SerdeOf[String] { - override val avroCodec: NJAvroCodec[String] = NJAvroCodec[String] + override val avroCodec: AvroCodec[String] = AvroCodec[String] override val serializer: Serializer[String] = new Serializer[String] with Serializable { @@ -147,7 +147,7 @@ object SerdeOf extends LowerPriority { // 2: Long implicit object LongPrimitiveSerde extends SerdeOf[Long] { - override val avroCodec: NJAvroCodec[Long] = NJAvroCodec[Long] + override val avroCodec: AvroCodec[Long] = AvroCodec[Long] override val serializer: Serializer[Long] = new Serializer[Long] with Serializable { @@ -179,7 +179,7 @@ object SerdeOf extends LowerPriority { // 3: array byte implicit object ByteArrayPrimitiveSerde extends SerdeOf[Array[Byte]] { - override val avroCodec: NJAvroCodec[Array[Byte]] = NJAvroCodec[Array[Byte]] + override val avroCodec: AvroCodec[Array[Byte]] = AvroCodec[Array[Byte]] override val serializer: Serializer[Array[Byte]] = new Serializer[Array[Byte]] with Serializable { @@ -211,7 +211,7 @@ object SerdeOf extends LowerPriority { // 4: byte buffer implicit object ByteBufferPrimitiveSerde extends SerdeOf[ByteBuffer] { - override val avroCodec: NJAvroCodec[ByteBuffer] = NJAvroCodec[ByteBuffer] + override val avroCodec: AvroCodec[ByteBuffer] = AvroCodec[ByteBuffer] override val serializer: Serializer[ByteBuffer] = new Serializer[ByteBuffer] with Serializable { @@ -243,7 +243,7 @@ object SerdeOf extends LowerPriority { // 5: short implicit object ShortPrimitiveSerde extends SerdeOf[Short] { - override val avroCodec: NJAvroCodec[Short] = NJAvroCodec[Short] + override val avroCodec: AvroCodec[Short] = AvroCodec[Short] override val serializer: Serializer[Short] = new Serializer[Short] with Serializable { @@ -273,7 +273,7 @@ object SerdeOf extends LowerPriority { // 6: float implicit object FloatPrimitiveSerde extends SerdeOf[Float] { - override val avroCodec: NJAvroCodec[Float] = NJAvroCodec[Float] + override val avroCodec: AvroCodec[Float] = AvroCodec[Float] override val serializer: Serializer[Float] = new Serializer[Float] with Serializable { @@ -305,7 +305,7 @@ object SerdeOf extends LowerPriority { // 7: double implicit object DoublePrimitiveSerde extends SerdeOf[Double] { - override val avroCodec: NJAvroCodec[Double] = NJAvroCodec[Double] + override val avroCodec: AvroCodec[Double] = AvroCodec[Double] override val serializer: Serializer[Double] = new Serializer[Double] with Serializable { @@ -337,7 +337,7 @@ object SerdeOf extends LowerPriority { 
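/* Editorial note, not part of the patch: NJAvroCodec becomes AvroCodec with an otherwise
 * unchanged API, so only the name changes at call sites. A hypothetical round-trip for a
 * user-defined case class (the User type is illustrative only; the implicit SchemaFor,
 * AvroEncoder and AvroDecoder are assumed to come from avro4s derivation):
 * {{{
 * import com.github.chenharryhua.nanjin.messages.kafka.codec.{AvroCodec, SerdeOf}
 *
 * final case class User(id: Int, name: String)
 *
 * val codec: AvroCodec[User] = AvroCodec[User]                 // derive schema + codec
 * val user: User = codec.idConversion(User(1, "a"))            // encode then decode
 * val serde: SerdeOf[User] = SerdeOf(codec)                    // wrap as a Kafka serde
 * }}}
 */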
// 8: int implicit object IntPrimitiveSerde extends SerdeOf[Int] { - override val avroCodec: NJAvroCodec[Int] = NJAvroCodec[Int] + override val avroCodec: AvroCodec[Int] = AvroCodec[Int] override val serializer: Serializer[Int] = new Serializer[Int] with Serializable { @@ -367,7 +367,7 @@ object SerdeOf extends LowerPriority { // 9: uuid implicit object UUIDPrimitiveSerde extends SerdeOf[UUID] { - override val avroCodec: NJAvroCodec[UUID] = NJAvroCodec[UUID] + override val avroCodec: AvroCodec[UUID] = AvroCodec[UUID] override val serializer: Serializer[UUID] = new Serializer[UUID] with Serializable { diff --git a/messages/src/test/scala/mtest/msg/codec/ManualAvroSchemaTest.scala b/messages/src/test/scala/mtest/msg/codec/ManualAvroSchemaTest.scala index a6f616021..0afb68425 100644 --- a/messages/src/test/scala/mtest/msg/codec/ManualAvroSchemaTest.scala +++ b/messages/src/test/scala/mtest/msg/codec/ManualAvroSchemaTest.scala @@ -1,6 +1,6 @@ package mtest.msg.codec -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import org.scalatest.funsuite.AnyFunSuite object ManualAvroSchemaTestData { @@ -151,25 +151,25 @@ class ManualAvroSchemaTest extends AnyFunSuite { import ManualAvroSchemaTestData.* test("add doc field") { - NJAvroCodec[UnderTest](UnderTest.schema1) + AvroCodec[UnderTest](UnderTest.schema1) } test("became optional a") { - assertThrows[Exception](NJAvroCodec[UnderTest](UnderTest.schema2)) + assertThrows[Exception](AvroCodec[UnderTest](UnderTest.schema2)) } test("add optional c without default") { - assertThrows[Exception](NJAvroCodec[UnderTest](UnderTest.schema3)) + assertThrows[Exception](AvroCodec[UnderTest](UnderTest.schema3)) } test("add optional c with default") { - assertThrows[Exception](NJAvroCodec[UnderTest](UnderTest.schema5)) + assertThrows[Exception](AvroCodec[UnderTest](UnderTest.schema5)) } test("only namespace is different") { - NJAvroCodec[UnderTest](UnderTest.schema4) + AvroCodec[UnderTest](UnderTest.schema4) } test("remove b") { - assertThrows[Exception](NJAvroCodec[UnderTest](UnderTest.schema6)) + assertThrows[Exception](AvroCodec[UnderTest](UnderTest.schema6)) } } diff --git a/messages/src/test/scala/mtest/msg/codec/SchemaChangeTest.scala b/messages/src/test/scala/mtest/msg/codec/SchemaChangeTest.scala index 8883c1cde..b048bf8b3 100644 --- a/messages/src/test/scala/mtest/msg/codec/SchemaChangeTest.scala +++ b/messages/src/test/scala/mtest/msg/codec/SchemaChangeTest.scala @@ -1,6 +1,6 @@ package mtest.msg.codec -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.sksamuel.avro4s.{AvroDoc, AvroNamespace} import eu.timepit.refined.auto.* import org.apache.avro.Schema @@ -21,8 +21,8 @@ object SchemaChangeTestData { """ {"type":"record","name":"UnderTest","namespace":"schema.test.top","doc":"top level case class","fields":[{"name":"a","type":"int"},{"name":"b","type":[{"type":"record","name":"Nest","namespace":"schema.test.nest","fields":[{"name":"a","type":"int"}]},{"type":"record","name":"Nest2","namespace":"schema.test.nest2","doc":"nest-2","fields":[{"name":"b","type":"string"}]}]},{"name":"c","type":["null","int"],"default":null}]} """ - val oldSchema: Schema = (new Schema.Parser).parse(schema) - val codec: NJAvroCodec[UnderTest] = NJAvroCodec[UnderTest](schema) + val oldSchema: Schema = (new Schema.Parser).parse(schema) + val codec: AvroCodec[UnderTest] = 
AvroCodec[UnderTest](schema) } @@ -30,7 +30,7 @@ class SchemaChangeTest extends AnyFunSuite { import SchemaChangeTestData.* test("remove default field") { - val newCodec: NJAvroCodec[UnderTest] = codec.withoutDefaultField + val newCodec: AvroCodec[UnderTest] = codec.withoutDefaultField val s = """ @@ -44,7 +44,7 @@ class SchemaChangeTest extends AnyFunSuite { } test("change namespace") { - val newCodec: NJAvroCodec[UnderTest] = codec.withNamespace("new.namespace") + val newCodec: AvroCodec[UnderTest] = codec.withNamespace("new.namespace") val s = """ {"type":"record","name":"UnderTest","namespace":"new.namespace","doc":"top level case class","fields":[{"name":"a","type":"int"},{"name":"b","type":[{"type":"record","name":"Nest","fields":[{"name":"a","type":"int"}]},{"type":"record","name":"Nest2","doc":"nest-2","fields":[{"name":"b","type":"string"}]}]},{"name":"c","type":["null","int"],"default":null}]}""" @@ -56,7 +56,7 @@ class SchemaChangeTest extends AnyFunSuite { } test("remove namespace") { - val newCodec: NJAvroCodec[UnderTest] = codec.withoutNamespace + val newCodec: AvroCodec[UnderTest] = codec.withoutNamespace val s = """ {"type":"record","name":"UnderTest","doc":"top level case class","fields":[{"name":"a","type":"int"},{"name":"b","type":[{"type":"record","name":"Nest","fields":[{"name":"a","type":"int"}]},{"type":"record","name":"Nest2","doc":"nest-2","fields":[{"name":"b","type":"string"}]}]},{"name":"c","type":["null","int"],"default":null}]} """ @@ -69,14 +69,14 @@ class SchemaChangeTest extends AnyFunSuite { } test("remove namespace - 1") { - val newCodec: NJAvroCodec[UnderTest] = codec.withoutNamespace + val newCodec: AvroCodec[UnderTest] = codec.withoutNamespace val data = UnderTest(1, Coproduct(Nest(1)), Some(1)) val en = newCodec.encode(data) assertThrows[Exception](codec.decode(en)) } test("remove namespace - 2") { - val newCodec: NJAvroCodec[UnderTest] = codec.withoutNamespace + val newCodec: AvroCodec[UnderTest] = codec.withoutNamespace val data = UnderTest(1, Coproduct(Nest(1)), Some(1)) val en = codec.encode(data) @@ -84,7 +84,7 @@ class SchemaChangeTest extends AnyFunSuite { } test("remove doc") { - val newCodec: NJAvroCodec[UnderTest] = codec.withoutDoc + val newCodec: AvroCodec[UnderTest] = codec.withoutDoc val s = """ {"type":"record","name":"UnderTest","namespace":"schema.test.top","fields":[{"name":"a","type":"int"},{"name":"b","type":[{"type":"record","name":"Nest","namespace":"schema.test.nest","fields":[{"name":"a","type":"int"}]},{"type":"record","name":"Nest2","namespace":"schema.test.nest2","fields":[{"name":"b","type":"string"}]}]},{"name":"c","type":["null","int"],"default":null}]}""" diff --git a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/cloudwatch/CloudWatchObserver.scala b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/cloudwatch/CloudWatchObserver.scala index f14a5b11c..c79b10888 100644 --- a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/cloudwatch/CloudWatchObserver.scala +++ b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/cloudwatch/CloudWatchObserver.scala @@ -5,11 +5,11 @@ import cats.syntax.all.* import com.github.chenharryhua.nanjin.aws.CloudWatch import com.github.chenharryhua.nanjin.common.aws.CloudWatchNamespace import com.github.chenharryhua.nanjin.guard.config.{MetricLabel, ServiceParams} -import com.github.chenharryhua.nanjin.guard.event.NJEvent.MetricReport +import 
com.github.chenharryhua.nanjin.guard.event.Event.MetricReport import com.github.chenharryhua.nanjin.guard.event.{ + Event, MetricIndex, MetricSnapshot, - NJEvent, Normalized, UnitNormalization } @@ -135,7 +135,7 @@ final class CloudWatchObserver[F[_]: Concurrent] private ( timer_count ::: meter_count ::: histogram_count ::: timer_histo ::: histograms } - def observe(namespace: CloudWatchNamespace): Pipe[F, NJEvent, NJEvent] = (events: Stream[F, NJEvent]) => { + def observe(namespace: CloudWatchNamespace): Pipe[F, Event, Event] = (events: Stream[F, Event]) => { def publish(cwc: CloudWatch[F], mds: List[MetricDatum]): F[Unit] = mds // https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricData.html .grouped(20) @@ -153,7 +153,7 @@ final class CloudWatchObserver[F[_]: Concurrent] private ( val data = computeDatum(mr, last.getOrElse(sp.serviceId, MetricSnapshot.empty.lookupCount)) publish(cwc, data) } - case NJEvent.ServiceStop(serviceParams, _, _) => + case Event.ServiceStop(serviceParams, _, _) => lookup.update(_.removed(serviceParams.serviceId)) case _ => F.unit } diff --git a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/EmailObserver.scala b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/EmailObserver.scala index ad45d1781..014c08180 100644 --- a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/EmailObserver.scala +++ b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/EmailObserver.scala @@ -10,8 +10,8 @@ import com.github.chenharryhua.nanjin.aws.* import com.github.chenharryhua.nanjin.common.aws.EmailContent import com.github.chenharryhua.nanjin.common.chrono.{tickStream, Policy, Tick} import com.github.chenharryhua.nanjin.common.{ChunkSize, EmailAddr} -import com.github.chenharryhua.nanjin.guard.event.NJEvent.{ServiceStart, ServiceStop} -import com.github.chenharryhua.nanjin.guard.event.{NJEvent, ServiceStopCause} +import com.github.chenharryhua.nanjin.guard.event.Event.{ServiceStart, ServiceStop} +import com.github.chenharryhua.nanjin.guard.event.{Event, ServiceStopCause} import com.github.chenharryhua.nanjin.guard.translator.{ColorScheme, Translator, UpdateTranslator} import fs2.{Chunk, Pipe, Pull, Stream} import scalatags.Text @@ -62,7 +62,7 @@ final class EmailObserver[F[_]: UUIDGen] private ( def withCapacity(cs: ChunkSize): EmailObserver[F] = copy(capacity = cs) def withPolicy(policy: Policy, zoneId: ZoneId): EmailObserver[F] = copy(policy = policy, zoneId = zoneId) - private def translate(evt: NJEvent): F[Option[ColoredTag]] = + private def translate(evt: Event): F[Option[ColoredTag]] = translator.translate(evt).map(_.map(tag => ColoredTag(tag, ColorScheme.decorate(evt).eval.value))) private def compose_letter(tags: Chunk[ColoredTag]): Letter = { @@ -130,12 +130,12 @@ final class EmailObserver[F[_]: UUIDGen] private ( } } - def observe(from: EmailAddr, to: NonEmptyList[EmailAddr], subject: String): Pipe[F, NJEvent, NJEvent] = { + def observe(from: EmailAddr, to: NonEmptyList[EmailAddr], subject: String): Pipe[F, Event, Event] = { def go( - ss: Stream[F, Either[NJEvent, Tick]], + ss: Stream[F, Either[Event, Tick]], send_email: Chunk[ColoredTag] => F[Unit], - cache: Ref[F, Chunk[ColoredTag]]): Pull[F, NJEvent, Unit] = + cache: Ref[F, Chunk[ColoredTag]]): Pull[F, Event, Unit] = ss.pull.uncons1.flatMap { case Some((head, tail)) => head match { @@ -162,7 +162,7 @@ final class EmailObserver[F[_]: UUIDGen] private ( case None => 
Pull.done // leave cache to be handled by finalizer } - (events: Stream[F, NJEvent]) => + (events: Stream[F, Event]) => for { ses <- Stream.resource(client) state <- Stream.eval(F.ref(Map.empty[UUID, ServiceStart])) diff --git a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/HtmlTranslator.scala b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/HtmlTranslator.scala index 6df7e9867..cf63ca125 100644 --- a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/HtmlTranslator.scala +++ b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/ses/HtmlTranslator.scala @@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.guard.observers.ses import cats.Applicative import cats.syntax.all.* -import com.github.chenharryhua.nanjin.guard.event.{NJError, NJEvent, ServiceStopCause} +import com.github.chenharryhua.nanjin.guard.event.{Error, Event, ServiceStopCause} import com.github.chenharryhua.nanjin.guard.translator.{htmlHelper, textConstants, textHelper, Translator} import io.circe.Json import io.circe.syntax.EncoderOps @@ -16,12 +16,12 @@ import java.time.temporal.ChronoUnit /** https://com-lihaoyi.github.io/scalatags/ */ private object HtmlTranslator extends all { - import NJEvent.* + import Event.* import htmlHelper.* import textConstants.* import textHelper.* - private def service_table(evt: NJEvent): generic.Frag[Builder, String] = { + private def service_table(evt: Event): generic.Frag[Builder, String] = { val serviceName: Text.TypedTag[String] = evt.serviceParams.homePage.fold(td(evt.serviceParams.serviceName.value))(hp => td(a(href := hp.value)(evt.serviceParams.serviceName.value))) @@ -44,7 +44,7 @@ private object HtmlTranslator extends all { private def json_text(js: Json): Text.TypedTag[String] = pre(small(js.spaces2)) - private def error_text(c: NJError): Text.TypedTag[String] = + private def error_text(c: Error): Text.TypedTag[String] = p(b(s"$CONSTANT_CAUSE: "), pre(small(c.stack.mkString("\n\t")))) // events diff --git a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackObserver.scala b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackObserver.scala index 3ee8cb1ca..181c19939 100644 --- a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackObserver.scala +++ b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackObserver.scala @@ -5,8 +5,8 @@ import cats.effect.kernel.{Clock, Concurrent, Resource} import cats.syntax.all.* import com.github.chenharryhua.nanjin.aws.SimpleNotificationService import com.github.chenharryhua.nanjin.common.aws.SnsArn -import com.github.chenharryhua.nanjin.guard.event.NJEvent -import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServiceStart +import com.github.chenharryhua.nanjin.guard.event.Event +import com.github.chenharryhua.nanjin.guard.event.Event.ServiceStart import com.github.chenharryhua.nanjin.guard.observers.FinalizeMonitor import com.github.chenharryhua.nanjin.guard.translator.* import fs2.{Pipe, Stream} @@ -51,7 +51,7 @@ final class SlackObserver[F[_]: Clock]( client.publish(req.build()).attempt } - def observe(snsArn: SnsArn): Pipe[F, NJEvent, NJEvent] = (es: Stream[F, NJEvent]) => + def observe(snsArn: SnsArn): Pipe[F, Event, Event] = (es: Stream[F, Event]) => for { sns <- Stream.resource(client) ofm <- Stream.eval( diff --git 
a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackTranslator.scala b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackTranslator.scala index 2124c973a..2a3fa40de 100644 --- a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackTranslator.scala +++ b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sns/SlackTranslator.scala @@ -3,7 +3,7 @@ package com.github.chenharryhua.nanjin.guard.observers.sns import cats.syntax.all.* import cats.{Applicative, Eval} import com.github.chenharryhua.nanjin.guard.config.{AlarmLevel, ServiceParams} -import com.github.chenharryhua.nanjin.guard.event.{MetricSnapshot, NJError, NJEvent, ServiceStopCause} +import com.github.chenharryhua.nanjin.guard.event.{Error, Event, MetricSnapshot, ServiceStopCause} import com.github.chenharryhua.nanjin.guard.translator.textConstants.* import com.github.chenharryhua.nanjin.guard.translator.textHelper.* import com.github.chenharryhua.nanjin.guard.translator.{ColorScheme, SnapshotPolyglot, Translator} @@ -13,9 +13,9 @@ import org.typelevel.cats.time.instances.all import squants.information.{Bytes, Information} private object SlackTranslator extends all { - import NJEvent.* + import Event.* - private def coloring(evt: NJEvent): String = ColorScheme + private def coloring(evt: Event): String = ColorScheme .decorate(evt) .run { case ColorScheme.GoodColor => Eval.now("#36a64f") @@ -38,7 +38,7 @@ private object SlackTranslator extends all { JuxtaposeSection(TextField(CONSTANT_SERVICE, sn), TextField(CONSTANT_HOST, hostText(sp))) } - private def uptime_section(evt: NJEvent): JuxtaposeSection = + private def uptime_section(evt: Event): JuxtaposeSection = JuxtaposeSection( first = TextField(CONSTANT_UPTIME, uptimeText(evt)), second = TextField(CONSTANT_TIMEZONE, evt.serviceParams.zoneId.show)) @@ -66,7 +66,7 @@ private object SlackTranslator extends all { private def brief(json: Json): KeyValueSection = KeyValueSection(CONSTANT_BRIEF, s"```${abbreviate(json.spaces2)}```") - private def stack_trace(err: NJError): String = + private def stack_trace(err: Error): String = abbreviate(err.stack.mkString("\n\t")) // events diff --git a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sqs/SqsObserver.scala b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sqs/SqsObserver.scala index c0e0350b6..be17db0fe 100644 --- a/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sqs/SqsObserver.scala +++ b/observers/aws/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/sqs/SqsObserver.scala @@ -6,8 +6,8 @@ import cats.effect.std.UUIDGen import cats.syntax.all.* import com.github.chenharryhua.nanjin.aws.SimpleQueueService import com.github.chenharryhua.nanjin.common.aws.SqsUrl -import com.github.chenharryhua.nanjin.guard.event.NJEvent -import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServiceStart +import com.github.chenharryhua.nanjin.guard.event.Event +import com.github.chenharryhua.nanjin.guard.event.Event.ServiceStart import com.github.chenharryhua.nanjin.guard.observers.FinalizeMonitor import com.github.chenharryhua.nanjin.guard.translator.{Translator, UpdateTranslator} import fs2.{Pipe, Stream} @@ -24,10 +24,10 @@ object SqsObserver { final class SqsObserver[F[_]: Clock: UUIDGen]( client: Resource[F, SimpleQueueService[F]], - translator: Translator[F, NJEvent])(implicit F: Concurrent[F]) - extends UpdateTranslator[F, 
NJEvent, SqsObserver[F]] { + translator: Translator[F, Event])(implicit F: Concurrent[F]) + extends UpdateTranslator[F, Event, SqsObserver[F]] { - private def translate(evt: NJEvent): F[Option[Json]] = + private def translate(evt: Event): F[Option[Json]] = translator.translate(evt).map(_.map(_.asJson)) private def send( @@ -37,8 +37,8 @@ final class SqsObserver[F[_]: Clock: UUIDGen]( UUIDGen[F].randomUUID.flatMap(uuid => sqs.sendMessage(builder.messageBody(json.noSpaces).messageDeduplicationId(uuid.show).build()).attempt) - private def internal(builder: SendMessageRequest.Builder): Pipe[F, NJEvent, NJEvent] = - (es: Stream[F, NJEvent]) => + private def internal(builder: SendMessageRequest.Builder): Pipe[F, Event, Event] = + (es: Stream[F, Event]) => for { sqs <- Stream.resource(client) ofm <- Stream.eval(F.ref[Map[UUID, ServiceStart]](Map.empty).map(new FinalizeMonitor(translate, _))) @@ -50,12 +50,12 @@ final class SqsObserver[F[_]: Clock: UUIDGen]( .onFinalize(ofm.terminated.flatMap(_.traverse(json => send(sqs, builder, json))).void) } yield event - def observe(builder: SendMessageRequest.Builder): Pipe[F, NJEvent, NJEvent] = internal(builder) + def observe(builder: SendMessageRequest.Builder): Pipe[F, Event, Event] = internal(builder) // events order should be preserved - def observe(url: SqsUrl.Fifo, messageGroupId: String): Pipe[F, NJEvent, NJEvent] = + def observe(url: SqsUrl.Fifo, messageGroupId: String): Pipe[F, Event, Event] = internal(SendMessageRequest.builder().queueUrl(url.value).messageGroupId(messageGroupId)) - override def updateTranslator(f: Endo[Translator[F, NJEvent]]): SqsObserver[F] = + override def updateTranslator(f: Endo[Translator[F, Event]]): SqsObserver[F] = new SqsObserver[F](client, f(translator)) } diff --git a/observers/aws/src/test/scala/mtest/aws/AwsObserverTest.scala b/observers/aws/src/test/scala/mtest/aws/AwsObserverTest.scala index c08b65b02..d7a1cdac5 100644 --- a/observers/aws/src/test/scala/mtest/aws/AwsObserverTest.scala +++ b/observers/aws/src/test/scala/mtest/aws/AwsObserverTest.scala @@ -6,7 +6,7 @@ import cats.effect.unsafe.implicits.global import com.github.chenharryhua.nanjin.common.chrono.Policy import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime import com.github.chenharryhua.nanjin.guard.TaskGuard -import com.github.chenharryhua.nanjin.guard.event.NJEvent +import com.github.chenharryhua.nanjin.guard.event.Event import com.github.chenharryhua.nanjin.guard.observers.cloudwatch.CloudWatchObserver import com.github.chenharryhua.nanjin.guard.observers.ses.EmailObserver import com.github.chenharryhua.nanjin.guard.observers.sqs.SqsObserver @@ -16,7 +16,7 @@ import org.scalatest.funsuite.AnyFunSuite import scala.concurrent.duration.DurationInt class AwsObserverTest extends AnyFunSuite { - private val service: fs2.Stream[IO, NJEvent] = TaskGuard[IO]("aws") + private val service: fs2.Stream[IO, Event] = TaskGuard[IO]("aws") .service("test") .updateConfig(_.addBrief("brief").withRestartPolicy(Policy.fixedDelay(1.second).limited(1))) .eventStream { agent => diff --git a/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/JsonTranslator.scala b/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/JsonTranslator.scala index 4c2ff2b6c..cbb659f20 100644 --- a/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/JsonTranslator.scala +++ 
b/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/JsonTranslator.scala @@ -1,7 +1,7 @@ package com.github.chenharryhua.nanjin.guard.observers.postgres import cats.Applicative -import com.github.chenharryhua.nanjin.guard.event.{MetricSnapshot, NJEvent} +import com.github.chenharryhua.nanjin.guard.event.{Event, MetricSnapshot} import com.github.chenharryhua.nanjin.guard.translator.{jsonHelper, EventName, SnapshotPolyglot, Translator} import io.circe.Json import io.circe.syntax.EncoderOps @@ -9,7 +9,7 @@ import io.circe.syntax.EncoderOps import java.time.Duration private object JsonTranslator { - import NJEvent.* + import Event.* private def took(dur: Duration): (String, Json) = "took" -> dur.asJson diff --git a/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/PostgresObserver.scala b/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/PostgresObserver.scala index 432fbe4bf..760fba836 100644 --- a/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/PostgresObserver.scala +++ b/observers/database/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/postgres/PostgresObserver.scala @@ -4,8 +4,8 @@ import cats.Endo import cats.effect.kernel.{Clock, Concurrent, Resource} import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.database.TableName -import com.github.chenharryhua.nanjin.guard.event.NJEvent -import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServiceStart +import com.github.chenharryhua.nanjin.guard.event.Event +import com.github.chenharryhua.nanjin.guard.event.Event.ServiceStart import com.github.chenharryhua.nanjin.guard.observers.FinalizeMonitor import com.github.chenharryhua.nanjin.guard.translator.{Translator, UpdateTranslator} import fs2.{Pipe, Stream} @@ -38,7 +38,7 @@ final class PostgresObserver[F[_]: Clock](session: Resource[F, Session[F]], tran private def execute(pg: PreparedCommand[F, Json], msg: Json): F[Either[Throwable, Completion]] = pg.execute(msg).attempt - def observe(tableName: TableName): Pipe[F, NJEvent, NJEvent] = (events: Stream[F, NJEvent]) => { + def observe(tableName: TableName): Pipe[F, Event, Event] = (events: Stream[F, Event]) => { val cmd: Command[Json] = sql"INSERT INTO #${tableName.value} VALUES ($json)".command for { pg <- Stream.resource(session.evalMap(_.prepare(cmd))) diff --git a/observers/influxdb/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/influxdb/InfluxdbObserver.scala b/observers/influxdb/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/influxdb/InfluxdbObserver.scala index eab2e4457..69ddb24ab 100644 --- a/observers/influxdb/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/influxdb/InfluxdbObserver.scala +++ b/observers/influxdb/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/influxdb/InfluxdbObserver.scala @@ -4,7 +4,7 @@ import cats.Endo import cats.effect.kernel.{Async, Resource} import cats.implicits.toShow import com.github.chenharryhua.nanjin.guard.config.ServiceParams -import com.github.chenharryhua.nanjin.guard.event.{NJEvent, Snapshot} +import com.github.chenharryhua.nanjin.guard.event.{Event, Snapshot} import com.github.chenharryhua.nanjin.guard.translator.metricConstants import com.github.chenharryhua.nanjin.guard.translator.textConstants.* import com.influxdb.client.domain.WritePrecision @@ -73,11 +73,11 @@ final class InfluxdbObserver[F[_]]( private def dimension(ms: Snapshot): 
Map[String, String] = Map(CONSTANT_LABEL -> ms.metricId.metricLabel.label) - val observe: Pipe[F, NJEvent, NJEvent] = (events: Stream[F, NJEvent]) => + val observe: Pipe[F, Event, Event] = (events: Stream[F, Event]) => for { writer <- Stream.resource(client.map(_.makeWriteApi(writeOptions(WriteOptions.builder()).build()))) event <- events.evalTap { - case mr @ NJEvent.MetricReport(_, sp, snapshot, _) => + case mr @ Event.MetricReport(_, sp, snapshot, _) => val spDimensions: Map[String, String] = dimension(sp) val timers: List[Point] = snapshot.timers.map { timer => val tagToAdd = dimension(timer) ++ spDimensions ++ tags diff --git a/observers/kafka/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/kafka/KafkaObserver.scala b/observers/kafka/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/kafka/KafkaObserver.scala index 602f78bde..7c6c20987 100644 --- a/observers/kafka/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/kafka/KafkaObserver.scala +++ b/observers/kafka/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/kafka/KafkaObserver.scala @@ -4,8 +4,8 @@ import cats.Endo import cats.effect.kernel.Async import cats.implicits.{toFlatMapOps, toFunctorOps, toTraverseOps} import com.github.chenharryhua.nanjin.common.kafka.{TopicName, TopicNameL} -import com.github.chenharryhua.nanjin.guard.event.NJEvent -import com.github.chenharryhua.nanjin.guard.event.NJEvent.ServiceStart +import com.github.chenharryhua.nanjin.guard.event.Event +import com.github.chenharryhua.nanjin.guard.event.Event.ServiceStart import com.github.chenharryhua.nanjin.guard.observers.FinalizeMonitor import com.github.chenharryhua.nanjin.guard.translator.{Translator, UpdateTranslator} import com.github.chenharryhua.nanjin.kafka.KafkaContext @@ -17,31 +17,30 @@ import io.circe.generic.JsonCodec import java.util.UUID @JsonCodec -final case class NJEventKey(task: String, service: String) +final case class EventKey(task: String, service: String) object KafkaObserver { def apply[F[_]: Async](ctx: KafkaContext[F]): KafkaObserver[F] = new KafkaObserver[F](ctx, Translator.idTranslator[F]) } -final class KafkaObserver[F[_]](ctx: KafkaContext[F], translator: Translator[F, NJEvent])(implicit - F: Async[F]) - extends UpdateTranslator[F, NJEvent, KafkaObserver[F]] { +final class KafkaObserver[F[_]](ctx: KafkaContext[F], translator: Translator[F, Event])(implicit F: Async[F]) + extends UpdateTranslator[F, Event, KafkaObserver[F]] { - def observe(topicName: TopicName): Pipe[F, NJEvent, NJEvent] = { - def translate(evt: NJEvent): F[Option[ProducerRecord[KJson[NJEventKey], KJson[NJEvent]]]] = + def observe(topicName: TopicName): Pipe[F, Event, Event] = { + def translate(evt: Event): F[Option[ProducerRecord[KJson[EventKey], KJson[Event]]]] = translator .translate(evt) .map( _.map(evt => ProducerRecord( topicName.value, - KJson(NJEventKey(evt.serviceParams.taskName.value, evt.serviceParams.serviceName.value)), + KJson(EventKey(evt.serviceParams.taskName.value, evt.serviceParams.serviceName.value)), KJson(evt)))) - (ss: Stream[F, NJEvent]) => + (ss: Stream[F, Event]) => for { - client <- ctx.topic[KJson[NJEventKey], KJson[NJEvent]](topicName).produce.client + client <- ctx.topic[KJson[EventKey], KJson[Event]](topicName).produce.client ofm <- Stream.eval(F.ref[Map[UUID, ServiceStart]](Map.empty).map(new FinalizeMonitor(translate, _))) event <- ss .evalTap(ofm.monitoring) @@ -50,9 +49,9 @@ final class KafkaObserver[F[_]](ctx: KafkaContext[F], translator: Translator[F, } yield event } - def 
observe(topicName: TopicNameL): Pipe[F, NJEvent, NJEvent] = + def observe(topicName: TopicNameL): Pipe[F, Event, Event] = observe(TopicName(topicName)) - override def updateTranslator(f: Endo[Translator[F, NJEvent]]): KafkaObserver[F] = + override def updateTranslator(f: Endo[Translator[F, Event]]): KafkaObserver[F] = new KafkaObserver(ctx, f(translator)) } diff --git a/observers/logging/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/logging/log.scala b/observers/logging/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/logging/log.scala index 19e3df791..62f304d4f 100644 --- a/observers/logging/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/logging/log.scala +++ b/observers/logging/src/main/scala/com/github/chenharryhua/nanjin/guard/observers/logging/log.scala @@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.guard.observers.logging import cats.{Endo, Eval} import cats.effect.kernel.Sync -import com.github.chenharryhua.nanjin.guard.event.NJEvent +import com.github.chenharryhua.nanjin.guard.event.Event import com.github.chenharryhua.nanjin.guard.observers.{PrettyJsonTranslator, SimpleTextTranslator} import com.github.chenharryhua.nanjin.guard.translator.{ColorScheme, Translator, UpdateTranslator} import org.typelevel.log4cats.SelfAwareStructuredLogger @@ -17,14 +17,14 @@ object log { def json[F[_]: Sync]: TextLogging[F] = apply(PrettyJsonTranslator[F].map(_.noSpaces)) final class TextLogging[F[_]: Sync](translator: Translator[F, String]) - extends (NJEvent => F[Unit]) with UpdateTranslator[F, String, TextLogging[F]] { + extends (Event => F[Unit]) with UpdateTranslator[F, String, TextLogging[F]] { private[this] lazy val logger: SelfAwareStructuredLogger[F] = Slf4jLogger.getLogger[F] override def updateTranslator(f: Endo[Translator[F, String]]): TextLogging[F] = new TextLogging[F](f(translator)) - override def apply(event: NJEvent): F[Unit] = + override def apply(event: Event): F[Unit] = translator.translate(event).flatMap { case Some(message) => ColorScheme.decorate(event).run(Eval.now).value match { diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoder.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoder.scala index 12bb14019..a8bbbdfcb 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoder.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoder.scala @@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark import com.github.chenharryhua.nanjin.kafka.TopicDef import com.github.chenharryhua.nanjin.messages.kafka.NJConsumerRecord -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.sksamuel.avro4s.{Decoder as AvroDecoder, Encoder as AvroEncoder, SchemaFor} import frameless.{TypedEncoder, TypedExpressionEncoder} import org.apache.spark.rdd.RDD @@ -12,7 +12,7 @@ import org.apache.spark.sql.types.* import scala.reflect.ClassTag -final class AvroTypedEncoder[A] private (val avroCodec: NJAvroCodec[A], val typedEncoder: TypedEncoder[A]) +final class AvroTypedEncoder[A] private (val avroCodec: AvroCodec[A], val typedEncoder: TypedEncoder[A]) extends Serializable { private val avroSchema: StructType = @@ -38,10 +38,10 @@ final class AvroTypedEncoder[A] private (val avroCodec: NJAvroCodec[A], val type object AvroTypedEncoder { - def apply[A](te: TypedEncoder[A], ac: NJAvroCodec[A]): AvroTypedEncoder[A] = + def apply[A](te: 
TypedEncoder[A], ac: AvroCodec[A]): AvroTypedEncoder[A] = new AvroTypedEncoder[A](ac, te) - def apply[A](ac: NJAvroCodec[A])(implicit te: TypedEncoder[A]): AvroTypedEncoder[A] = + def apply[A](ac: AvroCodec[A])(implicit te: TypedEncoder[A]): AvroTypedEncoder[A] = new AvroTypedEncoder[A](ac, te) def apply[A](implicit @@ -49,9 +49,9 @@ object AvroTypedEncoder { dec: AvroDecoder[A], enc: AvroEncoder[A], te: TypedEncoder[A]): AvroTypedEncoder[A] = - new AvroTypedEncoder[A](NJAvroCodec[A](sf, dec, enc), te) + new AvroTypedEncoder[A](AvroCodec[A](sf, dec, enc), te) - def apply[K, V](keyCodec: NJAvroCodec[K], valCodec: NJAvroCodec[V])(implicit + def apply[K, V](keyCodec: AvroCodec[K], valCodec: AvroCodec[V])(implicit tek: TypedEncoder[K], tev: TypedEncoder[V]): AvroTypedEncoder[NJConsumerRecord[K, V]] = { val ote: TypedEncoder[NJConsumerRecord[K, V]] = shapeless.cachedImplicit diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/SparKafkaContext.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/SparKafkaContext.scala index d6b4ef2b2..8d7f39d7a 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/SparKafkaContext.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/SparKafkaContext.scala @@ -5,7 +5,7 @@ import cats.effect.kernel.{Async, Sync} import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.ChunkSize import com.github.chenharryhua.nanjin.common.kafka.{TopicName, TopicNameL} -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.kafka.* import com.github.chenharryhua.nanjin.messages.kafka.codec.{gr2Jackson, SerdeOf} import com.github.chenharryhua.nanjin.messages.kafka.{CRMetaInfo, NJConsumerRecord} @@ -54,7 +54,7 @@ final class SparKafkaContext[F[_]](val sparkSession: SparkSession, val kafkaCont * @param dateRange * datetime range */ - def dump(topicName: TopicName, path: Url, dateRange: NJDateTimeRange)(implicit F: Async[F]): F[Long] = { + def dump(topicName: TopicName, path: Url, dateRange: DateTimeRange)(implicit F: Async[F]): F[Long] = { val grRdd: F[RDD[String]] = for { schemaPair <- kafkaContext.schemaRegistry.fetchAvroSchema(topicName) builder = new PullGenericRecord(kafkaContext.settings.schemaRegistrySettings, topicName, schemaPair) @@ -68,22 +68,22 @@ final class SparKafkaContext[F[_]](val sparkSession: SparkSession, val kafkaCont } def dump(topicName: TopicName, path: Url)(implicit F: Async[F]): F[Long] = - dump(topicName, path, NJDateTimeRange(utils.sparkZoneId(sparkSession))) + dump(topicName, path, DateTimeRange(utils.sparkZoneId(sparkSession))) - def dump(topicName: TopicNameL, path: Url, dateRange: NJDateTimeRange)(implicit F: Async[F]): F[Long] = + def dump(topicName: TopicNameL, path: Url, dateRange: DateTimeRange)(implicit F: Async[F]): F[Long] = dump(TopicName(topicName), path, dateRange) def dump(topicName: TopicNameL, path: Url)(implicit F: Async[F]): F[Long] = - dump(TopicName(topicName), path, NJDateTimeRange(utils.sparkZoneId(sparkSession))) + dump(TopicName(topicName), path, DateTimeRange(utils.sparkZoneId(sparkSession))) - def download[K: SerdeOf, V: SerdeOf](topicName: TopicNameL, path: Url, dateRange: NJDateTimeRange)(implicit + def download[K: SerdeOf, V: SerdeOf](topicName: TopicNameL, path: Url, dateRange: DateTimeRange)(implicit F: Async[F]): F[Long] = topic[K, V](topicName) .fromKafka(dateRange) .flatMap(_.output.jackson(path).withSaveMode(_.Overwrite).runWithCount[F]) 
def download[K: SerdeOf, V: SerdeOf](topicName: TopicNameL, path: Url)(implicit F: Async[F]): F[Long] = - download[K, V](topicName, path, NJDateTimeRange(utils.sparkZoneId(sparkSession))) + download[K, V](topicName, path, DateTimeRange(utils.sparkZoneId(sparkSession))) /** upload data from given folder to a kafka topic. files read in parallel * diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/CrRdd.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/CrRdd.scala index 23fe2832f..11a7dad8d 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/CrRdd.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/CrRdd.scala @@ -4,8 +4,8 @@ import cats.Endo import cats.effect.kernel.Sync import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.ChunkSize -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.datetime.DateTimeRange +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.github.chenharryhua.nanjin.messages.kafka.{CRMetaInfo, NJConsumerRecord, NJProducerRecord} import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder import com.github.chenharryhua.nanjin.spark.persist.RddAvroFileHoarder @@ -17,12 +17,12 @@ import org.apache.spark.sql.SparkSession final class CrRdd[K, V] private[kafka] ( val rdd: RDD[NJConsumerRecord[K, V]], - ack: NJAvroCodec[K], - acv: NJAvroCodec[V], + ack: AvroCodec[K], + acv: AvroCodec[V], ss: SparkSession) extends Serializable { - protected val codec: NJAvroCodec[NJConsumerRecord[K, V]] = NJConsumerRecord.avroCodec(ack, acv) + protected val codec: AvroCodec[NJConsumerRecord[K, V]] = NJConsumerRecord.avroCodec(ack, acv) // transforms @@ -33,7 +33,7 @@ final class CrRdd[K, V] private[kafka] ( def partitionOf(num: Int): CrRdd[K, V] = filter(_.partition === num) def offsetRange(start: Long, end: Long): CrRdd[K, V] = transform(range.cr.offset(start, end)) - def timeRange(dr: NJDateTimeRange): CrRdd[K, V] = transform(range.cr.timestamp(dr)) + def timeRange(dr: DateTimeRange): CrRdd[K, V] = transform(range.cr.timestamp(dr)) def ascendTimestamp: CrRdd[K, V] = transform(sort.ascend.cr.timestamp) def descendTimestamp: CrRdd[K, V] = transform(sort.descend.cr.timestamp) @@ -44,17 +44,17 @@ final class CrRdd[K, V] private[kafka] ( def normalize: CrRdd[K, V] = transform(_.map(codec.idConversion)) - def bimap[K2, V2](k: K => K2, v: V => V2)(ack2: NJAvroCodec[K2], acv2: NJAvroCodec[V2]): CrRdd[K2, V2] = + def bimap[K2, V2](k: K => K2, v: V => V2)(ack2: AvroCodec[K2], acv2: AvroCodec[V2]): CrRdd[K2, V2] = new CrRdd[K2, V2](rdd.map(_.bimap(k, v)), ack2, acv2, ss).normalize def map[K2, V2](f: NJConsumerRecord[K, V] => NJConsumerRecord[K2, V2])( - ack2: NJAvroCodec[K2], - acv2: NJAvroCodec[V2]): CrRdd[K2, V2] = + ack2: AvroCodec[K2], + acv2: AvroCodec[V2]): CrRdd[K2, V2] = new CrRdd[K2, V2](rdd.map(f), ack2, acv2, ss).normalize def flatMap[K2, V2](f: NJConsumerRecord[K, V] => IterableOnce[NJConsumerRecord[K2, V2]])( - ack2: NJAvroCodec[K2], - acv2: NJAvroCodec[V2]): CrRdd[K2, V2] = + ack2: AvroCodec[K2], + acv2: AvroCodec[V2]): CrRdd[K2, V2] = new CrRdd[K2, V2](rdd.flatMap(f), ack2, acv2, ss).normalize def diff(other: RDD[NJConsumerRecord[K, V]]): CrRdd[K, V] = transform(_.subtract(other)) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/LoadTopicFile.scala 
b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/LoadTopicFile.scala index 979fcf954..876433cb6 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/LoadTopicFile.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/LoadTopicFile.scala @@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark.kafka import com.github.chenharryhua.nanjin.kafka.TopicDef import com.github.chenharryhua.nanjin.messages.kafka.NJConsumerRecord -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.github.chenharryhua.nanjin.spark.persist.loaders import com.sksamuel.avro4s.Decoder import io.circe.Decoder as JsonDecoder @@ -12,8 +12,8 @@ import org.apache.spark.sql.SparkSession final class LoadTopicFile[K, V] private[kafka] (topicDef: TopicDef[K, V], ss: SparkSession) extends Serializable { - private val ack: NJAvroCodec[K] = topicDef.rawSerdes.key.avroCodec - private val acv: NJAvroCodec[V] = topicDef.rawSerdes.value.avroCodec + private val ack: AvroCodec[K] = topicDef.rawSerdes.key.avroCodec + private val acv: AvroCodec[V] = topicDef.rawSerdes.value.avroCodec private val decoder: Decoder[NJConsumerRecord[K, V]] = NJConsumerRecord.avroCodec(ack, acv) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/PrRdd.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/PrRdd.scala index d3d9d1463..e540aafc3 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/PrRdd.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/PrRdd.scala @@ -5,9 +5,9 @@ import cats.effect.kernel.Sync import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.ChunkSize import com.github.chenharryhua.nanjin.common.kafka.{TopicName, TopicNameL} -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.messages.kafka.NJProducerRecord -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.github.chenharryhua.nanjin.spark.persist.RddAvroFileHoarder import fs2.Stream import fs2.kafka.ProducerRecords @@ -15,7 +15,7 @@ import org.apache.spark.rdd.RDD final class PrRdd[K, V] private[kafka] ( val rdd: RDD[NJProducerRecord[K, V]], - codec: NJAvroCodec[NJProducerRecord[K, V]] + codec: AvroCodec[NJProducerRecord[K, V]] ) extends Serializable { // transform @@ -26,7 +26,7 @@ final class PrRdd[K, V] private[kafka] ( def partitionOf(num: Int): PrRdd[K, V] = filter(_.partition.exists(_ === num)) def offsetRange(start: Long, end: Long): PrRdd[K, V] = transform(range.pr.offset(start, end)) - def timeRange(dr: NJDateTimeRange): PrRdd[K, V] = transform(range.pr.timestamp(dr)) + def timeRange(dr: DateTimeRange): PrRdd[K, V] = transform(range.pr.timestamp(dr)) def ascendTimestamp: PrRdd[K, V] = transform(sort.ascend.pr.timestamp) def descendTimestamp: PrRdd[K, V] = transform(sort.descend.pr.timestamp) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/SparKafkaTopic.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/SparKafkaTopic.scala index d24ffa1b3..a34021749 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/SparKafkaTopic.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/SparKafkaTopic.scala @@ -4,9 +4,9 @@ 
import cats.Foldable import cats.effect.kernel.Async import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.kafka.TopicName -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.kafka.* -import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec +import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec import com.github.chenharryhua.nanjin.messages.kafka.{NJConsumerRecord, NJProducerRecord} import com.github.chenharryhua.nanjin.spark.{utils, AvroTypedEncoder} import frameless.TypedEncoder @@ -22,10 +22,10 @@ final class SparKafkaTopic[F[_], K, V](val sparkSession: SparkSession, val topic def ate(implicit tek: TypedEncoder[K], tev: TypedEncoder[V]): AvroTypedEncoder[NJConsumerRecord[K, V]] = AvroTypedEncoder(topic.topicDef) - private val avroKeyCodec: NJAvroCodec[K] = topic.topicDef.rawSerdes.key.avroCodec - private val avroValCodec: NJAvroCodec[V] = topic.topicDef.rawSerdes.value.avroCodec + private val avroKeyCodec: AvroCodec[K] = topic.topicDef.rawSerdes.key.avroCodec + private val avroValCodec: AvroCodec[V] = topic.topicDef.rawSerdes.value.avroCodec - private def downloadKafka(dateTimeRange: NJDateTimeRange)(implicit F: Async[F]): F[CrRdd[K, V]] = + private def downloadKafka(dateTimeRange: DateTimeRange)(implicit F: Async[F]): F[CrRdd[K, V]] = sk.kafkaBatch(topic, sparkSession, dateTimeRange).map(crRdd) /** download topic according to datetime @@ -33,13 +33,13 @@ final class SparKafkaTopic[F[_], K, V](val sparkSession: SparkSession, val topic * @param dtr * : datetime */ - def fromKafka(dtr: NJDateTimeRange)(implicit F: Async[F]): F[CrRdd[K, V]] = + def fromKafka(dtr: DateTimeRange)(implicit F: Async[F]): F[CrRdd[K, V]] = downloadKafka(dtr) /** download all topic data, up to now */ def fromKafka(implicit F: Async[F]): F[CrRdd[K, V]] = - fromKafka(NJDateTimeRange(utils.sparkZoneId(sparkSession))) + fromKafka(DateTimeRange(utils.sparkZoneId(sparkSession))) /** download topic according to offset range * @param offsets diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/range.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/range.scala index b02911924..7cacd50c8 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/range.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/range.scala @@ -1,6 +1,6 @@ package com.github.chenharryhua.nanjin.spark.kafka -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.messages.kafka.{NJConsumerRecord, NJProducerRecord} import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset @@ -15,7 +15,7 @@ private[kafka] object range { object cr { - def timestamp[K, V](nd: NJDateTimeRange)(rdd: RDD[NJConsumerRecord[K, V]]): RDD[NJConsumerRecord[K, V]] = + def timestamp[K, V](nd: DateTimeRange)(rdd: RDD[NJConsumerRecord[K, V]]): RDD[NJConsumerRecord[K, V]] = rdd.filter(o => nd.isInBetween(o.timestamp)) def offset[K, V](start: Long, end: Long)(rdd: RDD[NJConsumerRecord[K, V]]): RDD[NJConsumerRecord[K, V]] = @@ -24,14 +24,14 @@ private[kafka] object range { object pr { - def timestamp[K, V](nd: NJDateTimeRange)(rdd: RDD[NJProducerRecord[K, V]]): RDD[NJProducerRecord[K, V]] = + def timestamp[K, V](nd: DateTimeRange)(rdd: RDD[NJProducerRecord[K, V]]): RDD[NJProducerRecord[K, V]] = 
rdd.filter(_.timestamp.exists(nd.isInBetween)) def offset[K, V](start: Long, end: Long)(rdd: RDD[NJProducerRecord[K, V]]): RDD[NJProducerRecord[K, V]] = rdd.filter(_.offset.exists(o => o >= start && o <= end)) } - def timestamp[K, V](nd: NJDateTimeRange)( + def timestamp[K, V](nd: DateTimeRange)( ds: Dataset[NJConsumerRecord[K, V]]): Dataset[NJConsumerRecord[K, V]] = { val f = udf(nd.isInBetween _) ds.filter(f(col("timestamp"))) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/sk.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/sk.scala index f521b0468..95dbff137 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/sk.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/kafka/sk.scala @@ -3,7 +3,7 @@ package com.github.chenharryhua.nanjin.spark.kafka import cats.effect.kernel.Async import cats.syntax.all.* import com.github.chenharryhua.nanjin.common.kafka.TopicName -import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange +import com.github.chenharryhua.nanjin.datetime.DateTimeRange import com.github.chenharryhua.nanjin.kafka.* import com.github.chenharryhua.nanjin.messages.kafka.{NJConsumerRecord, NJHeader} import monocle.function.At.{atMap, remove} @@ -56,7 +56,7 @@ private[spark] object sk { def kafkaBatch[F[_]: Async, K, V]( topic: KafkaTopic[F, K, V], ss: SparkSession, - dateRange: NJDateTimeRange): F[RDD[NJConsumerRecord[K, V]]] = + dateRange: DateTimeRange): F[RDD[NJConsumerRecord[K, V]]] = KafkaContext[F](topic.settings) .admin(topic.topicName) .offsetRangeFor(dateRange) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJAvroKeyOutputFormat.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJAvroKeyOutputFormat.scala index 809edf8f3..3a283c172 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJAvroKeyOutputFormat.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJAvroKeyOutputFormat.scala @@ -20,7 +20,7 @@ import org.apache.hadoop.mapreduce.{JobContext, RecordWriter, TaskAttemptContext import java.io.{DataOutputStream, OutputStream} // avro build-in(AvroKeyOutputFormat) does not support s3, yet -final class NJAvroKeyOutputFormat extends AvroOutputFormatBase[AvroKey[GenericRecord], NullWritable] { +final private class NJAvroKeyOutputFormat extends AvroOutputFormatBase[AvroKey[GenericRecord], NullWritable] { @SuppressWarnings(Array("NullParameter")) override def checkOutputSpecs(job: JobContext): Unit = { diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJBinaryOutputFormat.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJBinaryOutputFormat.scala index 0ec4bdea5..a1a862490 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJBinaryOutputFormat.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJBinaryOutputFormat.scala @@ -14,7 +14,7 @@ import org.apache.hadoop.util.ReflectionUtils import java.io.DataOutputStream -final class NJBinaryOutputFormat extends FileOutputFormat[NullWritable, BytesWritable] { +final private class NJBinaryOutputFormat extends FileOutputFormat[NullWritable, BytesWritable] { @SuppressWarnings(Array("NullParameter")) override def checkOutputSpecs(job: JobContext): Unit = { diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJJacksonKeyOutputFormat.scala 
b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJJacksonKeyOutputFormat.scala index 330c6d287..768c45455 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJJacksonKeyOutputFormat.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJJacksonKeyOutputFormat.scala @@ -20,7 +20,8 @@ import org.apache.hadoop.util.ReflectionUtils import java.io.{DataOutputStream, OutputStream} -final class NJJacksonKeyOutputFormat extends AvroOutputFormatBase[AvroKey[GenericRecord], NullWritable] { +final private class NJJacksonKeyOutputFormat + extends AvroOutputFormatBase[AvroKey[GenericRecord], NullWritable] { @SuppressWarnings(Array("NullParameter")) override def checkOutputSpecs(job: JobContext): Unit = { diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJTextOutputFormat.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJTextOutputFormat.scala index 17806ae9f..04cabccf6 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJTextOutputFormat.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJTextOutputFormat.scala @@ -13,7 +13,7 @@ import org.apache.hadoop.util.ReflectionUtils import java.io.DataOutputStream -final class NJTextOutputFormat extends FileOutputFormat[NullWritable, Text] { +final private class NJTextOutputFormat extends FileOutputFormat[NullWritable, Text] { @SuppressWarnings(Array("NullParameter")) override def checkOutputSpecs(job: JobContext): Unit = { diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveAvro.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveAvro.scala index 0f2d8e47b..18f83b05e 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveAvro.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveAvro.scala @@ -14,8 +14,8 @@ final class SaveAvro[A](rdd: RDD[A], encoder: AvroEncoder[A], cfg: HoarderConfig val params: HoarderParams = cfg.evalConfig - def withSaveMode(sm: SaveMode): SaveAvro[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveAvro[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveAvro[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveAvro[A] = withSaveMode(f(SparkSaveMode)) def withCompression(ac: AvroCompression): SaveAvro[A] = updateConfig(cfg.outputCompression(ac)) def withCompression(f: AvroCompression.type => AvroCompression): SaveAvro[A] = diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveBinaryAvro.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveBinaryAvro.scala index f6fbae417..22a8ce802 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveBinaryAvro.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveBinaryAvro.scala @@ -14,8 +14,8 @@ final class SaveBinaryAvro[A](rdd: RDD[A], encoder: AvroEncoder[A], cfg: Hoarder private def updateConfig(cfg: HoarderConfig): SaveBinaryAvro[A] = new SaveBinaryAvro[A](rdd, encoder, cfg) - def withSaveMode(sm: SaveMode): SaveBinaryAvro[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveBinaryAvro[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveBinaryAvro[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): 
SaveBinaryAvro[A] = withSaveMode(f(SparkSaveMode)) def withCompression(bc: BinaryAvroCompression): SaveBinaryAvro[A] = updateConfig(cfg.outputCompression(bc)) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveCirce.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveCirce.scala index 70a3cfbe2..04dddaae1 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveCirce.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveCirce.scala @@ -16,8 +16,8 @@ final class SaveCirce[A](rdd: RDD[A], cfg: HoarderConfig, isKeepNull: Boolean, e private def updateConfig(cfg: HoarderConfig): SaveCirce[A] = new SaveCirce[A](rdd, cfg, isKeepNull, encoder) - def withSaveMode(sm: SaveMode): SaveCirce[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveCirce[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveCirce[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveCirce[A] = withSaveMode(f(SparkSaveMode)) def withCompression(cc: CirceCompression): SaveCirce[A] = updateConfig(cfg.outputCompression(cc)) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveJackson.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveJackson.scala index 14ffb1d72..5b7c3e139 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveJackson.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveJackson.scala @@ -14,8 +14,8 @@ final class SaveJackson[A](rdd: RDD[A], encoder: AvroEncoder[A], cfg: HoarderCon private def updateConfig(cfg: HoarderConfig): SaveJackson[A] = new SaveJackson[A](rdd, encoder, cfg) - def withSaveMode(sm: SaveMode): SaveJackson[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveJackson[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveJackson[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveJackson[A] = withSaveMode(f(SparkSaveMode)) def withCompression(jc: JacksonCompression): SaveJackson[A] = updateConfig(cfg.outputCompression(jc)) def withCompression(f: JacksonCompression.type => JacksonCompression): SaveJackson[A] = diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveKantanCsv.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveKantanCsv.scala index 37af2a2ce..82c37ee6d 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveKantanCsv.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveKantanCsv.scala @@ -18,8 +18,8 @@ final class SaveKantanCsv[A]( private def updateConfig(cfg: HoarderConfig): SaveKantanCsv[A] = new SaveKantanCsv[A](rdd, csvConfiguration, cfg, encoder) - def withSaveMode(sm: SaveMode): SaveKantanCsv[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveKantanCsv[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveKantanCsv[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveKantanCsv[A] = withSaveMode(f(SparkSaveMode)) def withCompression(kc: KantanCompression): SaveKantanCsv[A] = updateConfig(cfg.outputCompression(kc)) def withCompression(f: KantanCompression.type => KantanCompression): SaveKantanCsv[A] = diff --git 
a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveObjectFile.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveObjectFile.scala index a42713341..de46d516f 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveObjectFile.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveObjectFile.scala @@ -12,8 +12,8 @@ final class SaveObjectFile[A](rdd: RDD[A], cfg: HoarderConfig) extends Serializa private def updateConfig(cfg: HoarderConfig): SaveObjectFile[A] = new SaveObjectFile[A](rdd, cfg) - def withSaveMode(sm: SaveMode): SaveObjectFile[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveObjectFile[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveObjectFile[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveObjectFile[A] = withSaveMode(f(SparkSaveMode)) def run[F[_]](implicit F: Sync[F]): F[Unit] = new SaveModeAware[F](params.saveMode, params.outPath, rdd.sparkContext.hadoopConfiguration) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveParquet.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveParquet.scala index 68fd08093..527e76a87 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveParquet.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveParquet.scala @@ -13,8 +13,8 @@ final class SaveParquet[A](rdd: RDD[A], encoder: AvroEncoder[A], cfg: HoarderCon private def updateConfig(cfg: HoarderConfig): SaveParquet[A] = new SaveParquet[A](rdd, encoder, cfg) - def withSaveMode(sm: SaveMode): SaveParquet[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveParquet[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveParquet[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveParquet[A] = withSaveMode(f(SparkSaveMode)) def withCompression(pc: ParquetCompression): SaveParquet[A] = updateConfig(cfg.outputCompression(pc)) def withCompression(f: ParquetCompression.type => ParquetCompression): SaveParquet[A] = diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveProtobuf.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveProtobuf.scala index f0824fe57..c98648937 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveProtobuf.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveProtobuf.scala @@ -13,8 +13,8 @@ final class SaveProtobuf[A](rdd: RDD[A], cfg: HoarderConfig, evidence: A <:< Gen private def updateConfig(cfg: HoarderConfig): SaveProtobuf[A] = new SaveProtobuf[A](rdd, cfg, evidence) - def withSaveMode(sm: SaveMode): SaveProtobuf[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveProtobuf[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveProtobuf[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveProtobuf[A] = withSaveMode(f(SparkSaveMode)) def run[F[_]](implicit F: Sync[F]): F[Unit] = new SaveModeAware[F](params.saveMode, params.outPath, rdd.sparkContext.hadoopConfiguration) diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveText.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveText.scala index 
acd606c21..7d2dd6750 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveText.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SaveText.scala @@ -20,8 +20,8 @@ final class SaveText[A](rdd: RDD[A], cfg: HoarderConfig, show: Show[A], suffix: private def updateConfig(cfg: HoarderConfig): SaveText[A] = new SaveText[A](rdd, cfg, show, suffix) - def withSaveMode(sm: SaveMode): SaveText[A] = updateConfig(cfg.saveMode(sm)) - def withSaveMode(f: NJSaveMode.type => SaveMode): SaveText[A] = withSaveMode(f(NJSaveMode)) + def withSaveMode(sm: SaveMode): SaveText[A] = updateConfig(cfg.saveMode(sm)) + def withSaveMode(f: SparkSaveMode.type => SaveMode): SaveText[A] = withSaveMode(f(SparkSaveMode)) def withCompression(tc: TextCompression): SaveText[A] = updateConfig(cfg.outputCompression(tc)) def withCompression(f: TextCompression.type => TextCompression): SaveText[A] = diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJSaveMode.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SparkSaveMode.scala similarity index 92% rename from spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJSaveMode.scala rename to spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SparkSaveMode.scala index 80f5ce844..596a9c69e 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/NJSaveMode.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/persist/SparkSaveMode.scala @@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark.persist import org.apache.spark.sql.SaveMode -object NJSaveMode { +object SparkSaveMode { val Append: SaveMode = SaveMode.Append val Overwrite: SaveMode = SaveMode.Overwrite val ErrorIfExists: SaveMode = SaveMode.ErrorIfExists diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJConsoleSink.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkConsoleSink.scala similarity index 51% rename from spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJConsoleSink.scala rename to spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkConsoleSink.scala index 9a0aa9bc7..9307dd29a 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJConsoleSink.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkConsoleSink.scala @@ -7,28 +7,28 @@ import com.github.chenharryhua.nanjin.common.utils.random4d import fs2.Stream import org.apache.spark.sql.streaming.{DataStreamWriter, OutputMode, StreamingQueryProgress, Trigger} -final class NJConsoleSink[F[_], A]( +final class SparkConsoleSink[F[_], A]( dsw: DataStreamWriter[A], cfg: SStreamConfig, numRows: Int = 20, isTruncate: Boolean = false) - extends NJStreamSink[F] with UpdateConfig[SStreamConfig, NJConsoleSink[F, A]] { + extends SparkStreamSink[F] with UpdateConfig[SStreamConfig, SparkConsoleSink[F, A]] { override val params: SStreamParams = cfg.evalConfig - def rows(num: Int): NJConsoleSink[F, A] = new NJConsoleSink[F, A](dsw, cfg, num, isTruncate) - def truncate: NJConsoleSink[F, A] = new NJConsoleSink[F, A](dsw, cfg, numRows, true) - def untruncate: NJConsoleSink[F, A] = new NJConsoleSink[F, A](dsw, cfg, numRows, false) + def rows(num: Int): SparkConsoleSink[F, A] = new SparkConsoleSink[F, A](dsw, cfg, num, isTruncate) + def truncate: SparkConsoleSink[F, A] = new SparkConsoleSink[F, A](dsw, cfg, numRows, true) + def untruncate: SparkConsoleSink[F, A] = 
new SparkConsoleSink[F, A](dsw, cfg, numRows, false) - override def updateConfig(f: Endo[SStreamConfig]): NJConsoleSink[F, A] = - new NJConsoleSink[F, A](dsw, f(cfg), numRows, isTruncate) + override def updateConfig(f: Endo[SStreamConfig]): SparkConsoleSink[F, A] = + new SparkConsoleSink[F, A](dsw, f(cfg), numRows, isTruncate) - def trigger(trigger: Trigger): NJConsoleSink[F, A] = updateConfig(_.triggerMode(trigger)) + def trigger(trigger: Trigger): SparkConsoleSink[F, A] = updateConfig(_.triggerMode(trigger)) // https://spark.apache.org/docs/latest/structured-streaming-programming-guide.html#output-sinks - def append: NJConsoleSink[F, A] = updateConfig(_.appendMode) - def update: NJConsoleSink[F, A] = updateConfig(_.updateMode) - def complete: NJConsoleSink[F, A] = updateConfig(_.completeMode) - def queryName(name: String): NJConsoleSink[F, A] = updateConfig(_.queryName(name)) + def append: SparkConsoleSink[F, A] = updateConfig(_.appendMode) + def update: SparkConsoleSink[F, A] = updateConfig(_.updateMode) + def complete: SparkConsoleSink[F, A] = updateConfig(_.completeMode) + def queryName(name: String): SparkConsoleSink[F, A] = updateConfig(_.queryName(name)) override def stream(implicit F: Async[F]): Stream[F, StreamingQueryProgress] = ss.queryStream( diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJFileSink.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkFileSink.scala similarity index 57% rename from spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJFileSink.scala rename to spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkFileSink.scala index 60c52097b..50c86bea3 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJFileSink.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkFileSink.scala @@ -9,27 +9,27 @@ import org.apache.spark.sql.streaming.{DataStreamWriter, OutputMode, StreamingQu import scala.concurrent.duration.FiniteDuration -final class NJFileSink[F[_], A](dsw: DataStreamWriter[A], cfg: SStreamConfig, path: Url) - extends NJStreamSink[F] { +final class SparkFileSink[F[_], A](dsw: DataStreamWriter[A], cfg: SStreamConfig, path: Url) + extends SparkStreamSink[F] { override val params: SStreamParams = cfg.evalConfig - private def updateCfg(f: Endo[SStreamConfig]): NJFileSink[F, A] = - new NJFileSink[F, A](dsw, f(cfg), path) + private def updateCfg(f: Endo[SStreamConfig]): SparkFileSink[F, A] = + new SparkFileSink[F, A](dsw, f(cfg), path) - def parquet: NJFileSink[F, A] = updateCfg(_.parquetFormat) - def avro: NJFileSink[F, A] = updateCfg(_.avroFormat) + def parquet: SparkFileSink[F, A] = updateCfg(_.parquetFormat) + def avro: SparkFileSink[F, A] = updateCfg(_.avroFormat) - def triggerEvery(duration: FiniteDuration): NJFileSink[F, A] = + def triggerEvery(duration: FiniteDuration): SparkFileSink[F, A] = updateCfg(_.triggerMode(Trigger.ProcessingTime(duration))) - def withOptions(f: Endo[DataStreamWriter[A]]): NJFileSink[F, A] = - new NJFileSink(f(dsw), cfg, path) + def withOptions(f: Endo[DataStreamWriter[A]]): SparkFileSink[F, A] = + new SparkFileSink(f(dsw), cfg, path) - def queryName(name: String): NJFileSink[F, A] = updateCfg(_.queryName(name)) + def queryName(name: String): SparkFileSink[F, A] = updateCfg(_.queryName(name)) - def partitionBy(colNames: String*): NJFileSink[F, A] = - new NJFileSink[F, A](dsw.partitionBy(colNames*), cfg, path) + def partitionBy(colNames: String*): SparkFileSink[F, A] = + new 
SparkFileSink[F, A](dsw.partitionBy(colNames*), cfg, path) override def stream(implicit F: Async[F]): Stream[F, StreamingQueryProgress] = { val ps = toHadoopPath(path).toString diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJMemorySink.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkMemorySink.scala similarity index 64% rename from spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJMemorySink.scala rename to spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkMemorySink.scala index 3b4b11487..57703a197 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJMemorySink.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkMemorySink.scala @@ -6,18 +6,19 @@ import com.github.chenharryhua.nanjin.common.utils.random4d import fs2.Stream import org.apache.spark.sql.streaming.{DataStreamWriter, StreamingQueryProgress, Trigger} -final class NJMemorySink[F[_], A](dsw: DataStreamWriter[A], cfg: SStreamConfig) extends NJStreamSink[F] { +final class SparkMemorySink[F[_], A](dsw: DataStreamWriter[A], cfg: SStreamConfig) + extends SparkStreamSink[F] { override val params: SStreamParams = cfg.evalConfig - private def updateCfg(f: Endo[SStreamConfig]): NJMemorySink[F, A] = - new NJMemorySink[F, A](dsw, f(cfg)) + private def updateCfg(f: Endo[SStreamConfig]): SparkMemorySink[F, A] = + new SparkMemorySink[F, A](dsw, f(cfg)) // https://spark.apache.org/docs/latest/structured-streaming-programming-guide.html#output-sinks - def append: NJMemorySink[F, A] = updateCfg(_.appendMode) - def complete: NJMemorySink[F, A] = updateCfg(_.completeMode) + def append: SparkMemorySink[F, A] = updateCfg(_.appendMode) + def complete: SparkMemorySink[F, A] = updateCfg(_.completeMode) - def trigger(trigger: Trigger): NJMemorySink[F, A] = updateCfg(_.triggerMode(trigger)) + def trigger(trigger: Trigger): SparkMemorySink[F, A] = updateCfg(_.triggerMode(trigger)) override def stream(implicit F: Async[F]): Stream[F, StreamingQueryProgress] = ss.queryStream( diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkSStream.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkSStream.scala index da7624db1..3fea45fb1 100644 --- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkSStream.scala +++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkSStream.scala @@ -36,16 +36,16 @@ final class SparkSStream[F[_], A](val dataset: Dataset[A], cfg: SStreamConfig) e // sinks - def consoleSink: NJConsoleSink[F, A] = - new NJConsoleSink[F, A](dataset.writeStream, cfg) + def consoleSink: SparkConsoleSink[F, A] = + new SparkConsoleSink[F, A](dataset.writeStream, cfg) - def fileSink(path: Url): NJFileSink[F, A] = - new NJFileSink[F, A](dataset.writeStream, cfg, path) + def fileSink(path: Url): SparkFileSink[F, A] = + new SparkFileSink[F, A](dataset.writeStream, cfg, path) - def memorySink(queryName: String): NJMemorySink[F, A] = - new NJMemorySink[F, A](dataset.writeStream, cfg.queryName(queryName)) + def memorySink(queryName: String): SparkMemorySink[F, A] = + new SparkMemorySink[F, A](dataset.writeStream, cfg.queryName(queryName)) - def datePartitionSink(path: Url): NJFileSink[F, Row] = { + def datePartitionSink(path: Url): SparkFileSink[F, Row] = { val year = udf((ts: Long) => NJTimestamp(ts).atZone(params.zoneId).toLocalDate.getYear) val month = udf((ts: Long) => 
diff --git a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJStreamSink.scala b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkStreamSink.scala
similarity index 89%
rename from spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJStreamSink.scala
rename to spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkStreamSink.scala
index 6150402d1..c8d6241ea 100644
--- a/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/NJStreamSink.scala
+++ b/spark/src/main/scala/com/github/chenharryhua/nanjin/spark/sstream/SparkStreamSink.scala
@@ -5,7 +5,7 @@ import cats.syntax.all.*
 import fs2.Stream
 import org.apache.spark.sql.streaming.StreamingQueryProgress

-trait NJStreamSink[F[_]] extends Serializable {
+trait SparkStreamSink[F[_]] extends Serializable {
   def params: SStreamParams

   def stream(implicit F: Async[F]): Stream[F, StreamingQueryProgress]
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoderTest.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoderTest.scala
index fe746f084..fab305be6 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoderTest.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/AvroTypedEncoderTest.scala
@@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark
 import cats.effect.IO
 import cats.effect.unsafe.implicits.global
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.persist.{loaders, saveRDD}
 import com.github.chenharryhua.nanjin.terminals.{Compression, Hadoop}
@@ -54,7 +54,7 @@ object AvroTypedEncoderTestData {
   implicit val roundingMode: BigDecimal.RoundingMode.Value = RoundingMode.HALF_UP

-  val codec: NJAvroCodec[Lion] = NJAvroCodec[Lion](schemaText)
+  val codec: AvroCodec[Lion] = AvroCodec[Lion](schemaText)

   implicit val encoder: TypedEncoder[Lion] = shapeless.cachedImplicit
   val ate: AvroTypedEncoder[Lion] = AvroTypedEncoder[Lion](codec)
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/kafka/CrPrTest.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/kafka/CrPrTest.scala
index dbca6e35f..ed67ea396 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/kafka/CrPrTest.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/kafka/CrPrTest.scala
@@ -2,10 +2,10 @@ package com.github.chenharryhua.nanjin.spark.kafka
 import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
 import com.github.chenharryhua.nanjin.common.kafka.TopicName
-import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange
+import com.github.chenharryhua.nanjin.datetime.DateTimeRange
 import com.github.chenharryhua.nanjin.kafka.TopicDef
 import com.github.chenharryhua.nanjin.messages.kafka.{CRMetaInfo, NJConsumerRecord}
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder
 import com.github.chenharryhua.nanjin.spark.persist.RoosterData.{instant, timestamp}
 import com.github.chenharryhua.nanjin.spark.persist.{Rooster, RoosterData}
@@ -40,10 +40,10 @@ class CrPrTest extends AnyFunSuite {
   val roosterATE = AvroTypedEncoder(rooster)

   val roosterLike =
-    TopicDef[Long, RoosterLike](TopicName("roosterLike"), NJAvroCodec[RoosterLike])
+    TopicDef[Long, RoosterLike](TopicName("roosterLike"), AvroCodec[RoosterLike])

   val roosterLike2 =
-    TopicDef[Long, RoosterLike2](TopicName("roosterLike2"), NJAvroCodec[RoosterLike2])
+    TopicDef[Long, RoosterLike2](TopicName("roosterLike2"), AvroCodec[RoosterLike2])

   val crRdd: CrRdd[Long, Rooster] = sparKafka
     .topic(rooster)
@@ -86,7 +86,7 @@ class CrPrTest extends AnyFunSuite {
   test("time range") {
     val dr =
-      NJDateTimeRange(sydneyTime)
+      DateTimeRange(sydneyTime)
         .withStartTime(Instant.now.minusSeconds(50))
         .withEndTime(Instant.now().plusSeconds(10))
     assert(crRdd.timeRange(dr).rdd.collect().length == 4)
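The DateTimeRange builder carries over from NJDateTimeRange verbatim, as the time-range test shows. Standalone, the pattern reads (imports taken from the same test file):

    import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
    import com.github.chenharryhua.nanjin.datetime.DateTimeRange
    import java.time.Instant

    // a window from 50 seconds ago to 10 seconds from now, interpreted in Sydney time
    val dr: DateTimeRange =
      DateTimeRange(sydneyTime)
        .withStartTime(Instant.now.minusSeconds(50))
        .withEndTime(Instant.now().plusSeconds(10))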
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Ant.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Ant.scala
index f8201dd5a..81631b4e6 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Ant.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Ant.scala
@@ -1,6 +1,6 @@
 package com.github.chenharryhua.nanjin.spark.persist

-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder
 import com.sksamuel.avro4s.{Decoder, Encoder}
 import frameless.TypedEncoder
@@ -54,7 +54,7 @@ object Ant {
   val avroEncoder: Encoder[Ant] = shapeless.cachedImplicit
   val avroDecoder: Decoder[Ant] = shapeless.cachedImplicit

-  val avroCodec: NJAvroCodec[Ant] = NJAvroCodec[Ant](schemaText)
+  val avroCodec: AvroCodec[Ant] = AvroCodec[Ant](schemaText)
   implicit val typedEncoder: TypedEncoder[Ant] = shapeless.cachedImplicit
   val ate: AvroTypedEncoder[Ant] = AvroTypedEncoder[Ant](avroCodec)
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Bee.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Bee.scala
index c99e86770..4a1b0dcb2 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Bee.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Bee.scala
@@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark.persist
 import cats.Eq
 import cats.syntax.all.*
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder
 import com.sksamuel.avro4s.{Avro4sDecodingException, Decoder, Encoder, SchemaFor}
 import frameless.TypedEncoder
@@ -71,7 +71,7 @@ object Bee {
   val avroEncoder: Encoder[Bee] = shapeless.cachedImplicit
   val avroDecoder: Decoder[Bee] = shapeless.cachedImplicit

-  val avroCodec: NJAvroCodec[Bee] = NJAvroCodec[Bee](schemaText)
+  val avroCodec: AvroCodec[Bee] = AvroCodec[Bee](schemaText)
   implicit val typedEncoder: TypedEncoder[Bee] = shapeless.cachedImplicit
   implicit val jsonCodec: Codec[Bee] = io.circe.generic.semiauto.deriveCodec[Bee]
   val ate: AvroTypedEncoder[Bee] = AvroTypedEncoder[Bee](avroCodec)
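The Ant and Bee fixtures show the schema-pinned form of the renamed codec. Condensed, the pattern is (schemaText is the fixture's hand-written schema; that construction checks it against the derived one is inferred from usage, not stated in this diff):

    // derive avro4s machinery but hold it to the explicit schema text
    val avroCodec: AvroCodec[Bee] = AvroCodec[Bee](schemaText)
    // frameless encoder for the Dataset side, cached to tame implicit search
    implicit val typedEncoder: TypedEncoder[Bee] = shapeless.cachedImplicit
    // the bridge between the Avro schema and the Spark schema
    val ate: AvroTypedEncoder[Bee] = AvroTypedEncoder[Bee](avroCodec)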
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Cop.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Cop.scala
index 87cb9fb62..abc77e303 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Cop.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Cop.scala
@@ -1,7 +1,7 @@
 package com.github.chenharryhua.nanjin.spark.persist

 import com.github.chenharryhua.nanjin.common.transformers.*
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder
 import com.github.chenharryhua.nanjin.spark.injection.*
 import frameless.TypedEncoder
@@ -32,7 +32,7 @@ object CoproductCop {
 final case class CoCop(index: Int, cop: CaseObjectCop)

 object CoCop {
-  val avroCodec: NJAvroCodec[CoCop] = NJAvroCodec[CoCop]
+  val avroCodec: AvroCodec[CoCop] = AvroCodec[CoCop]
   implicit val circe: Codec[CoCop] = deriveCodec[CoCop]
   import frameless.TypedEncoder.injections.*
@@ -44,7 +44,7 @@ object CoCop {
 final case class EmCop(index: Int, cop: EnumCoproduct.Value)

 object EmCop {
-  val avroCodec: NJAvroCodec[EmCop] = NJAvroCodec[EmCop]
+  val avroCodec: AvroCodec[EmCop] = AvroCodec[EmCop]
   implicit val te: TypedEncoder[EmCop] = shapeless.cachedImplicit
   val ate: AvroTypedEncoder[EmCop] = AvroTypedEncoder(te, avroCodec)
   implicit val circe: Codec[EmCop] = deriveCodec[EmCop]
@@ -56,8 +56,8 @@ object EmCop {
 final case class CpCop(index: Int, cop: CoproductCop.Cop)

 object CpCop {
-  val avroCodec: NJAvroCodec[CpCop] = NJAvroCodec[CpCop]
-  implicit val circe: Codec[CpCop] = deriveCodec[CpCop]
+  val avroCodec: AvroCodec[CpCop] = AvroCodec[CpCop]
+  implicit val circe: Codec[CpCop] = deriveCodec[CpCop]
   import frameless.TypedEncoder.injections.*
   implicit val te: TypedEncoder[CpCop] = shapeless.cachedImplicit
   val ate: AvroTypedEncoder[CpCop] = AvroTypedEncoder(te, avroCodec)
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Fractual.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Fractual.scala
index b96e93349..22c65b70f 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Fractual.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Fractual.scala
@@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark.persist
 import cats.Applicative
 import cats.syntax.all.*
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import io.circe.Codec
 import io.circe.shapes.*
 import monocle.Traversal
@@ -16,7 +16,7 @@ final case class Fractual(value: Option[Fractual.FType])
 object Fractual {
   type FType = Int :+: String :+: List[Fractual] :+: Map[String, Fractual] :+: CNil

-  val avroCodec: NJAvroCodec[Fractual] = NJAvroCodec[Fractual]
+  val avroCodec: AvroCodec[Fractual] = AvroCodec[Fractual]

   implicit val json: Codec[Fractual] = io.circe.generic.semiauto.deriveCodec[Fractual]
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Jacket.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Jacket.scala
index 1bc82125e..a36c11c3b 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Jacket.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Jacket.scala
@@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark.persist
 import com.github.chenharryhua.nanjin.common.transformers.*
 import com.github.chenharryhua.nanjin.datetime.instances.*
-import com.github.chenharryhua.nanjin.messages.kafka.codec.{KJson, NJAvroCodec}
+import com.github.chenharryhua.nanjin.messages.kafka.codec.{AvroCodec, KJson}
 import io.circe.generic.JsonCodec
 import io.circe.{Codec, Json}
 import io.circe.generic.auto.*
@@ -24,8 +24,8 @@ final case class Neck(d: Date, t: Timestamp, j: Json)
 final case class Jacket(a: Int, p: Pocket.Value, neck: KJson[Neck])

 object Jacket {
-  val avroCodec: NJAvroCodec[Jacket] = NJAvroCodec[Jacket]
-  val circe: Codec[Jacket] = io.circe.generic.semiauto.deriveCodec[Jacket]
+  val avroCodec: AvroCodec[Jacket] = AvroCodec[Jacket]
+  val circe: Codec[Jacket] = io.circe.generic.semiauto.deriveCodec[Jacket]
 }

 object JacketData {
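For coproduct-shaped fixtures the codec is derived straight from the type, with the frameless injections import supplying the Dataset encoder; condensed from CpCop above:

    import frameless.TypedEncoder.injections.*

    val avroCodec: AvroCodec[CpCop] = AvroCodec[CpCop] // schema derived from the case class
    implicit val te: TypedEncoder[CpCop] = shapeless.cachedImplicit
    val ate: AvroTypedEncoder[CpCop] = AvroTypedEncoder(te, avroCodec)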
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Rooster.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Rooster.scala
index 15db55ae3..49cb06efc 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Rooster.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Rooster.scala
@@ -2,7 +2,7 @@ package com.github.chenharryhua.nanjin.spark.persist
 import cats.Show
 import com.github.chenharryhua.nanjin.datetime.instances.*
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder
 import frameless.TypedEncoder
 import io.circe.Codec
@@ -76,7 +76,7 @@ object Rooster {
   implicit val typedEncoder: TypedEncoder[Rooster] = shapeless.cachedImplicit

-  val avroCodec: NJAvroCodec[Rooster] = NJAvroCodec[Rooster](schema)
+  val avroCodec: AvroCodec[Rooster] = AvroCodec[Rooster](schema)

   val ate: AvroTypedEncoder[Rooster] = AvroTypedEncoder[Rooster](TypedEncoder[Rooster], avroCodec)
diff --git a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Tablet.scala b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Tablet.scala
index 4869396a4..a405d03dc 100644
--- a/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Tablet.scala
+++ b/spark/src/test/scala/com/github/chenharryhua/nanjin/spark/persist/Tablet.scala
@@ -1,7 +1,7 @@
 package com.github.chenharryhua.nanjin.spark.persist

 import cats.Show
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.AvroTypedEncoder
 import com.github.chenharryhua.nanjin.spark.injection.*
 import frameless.TypedEncoder
@@ -18,7 +18,7 @@ import scala.util.Random
 final case class Tablet(a: Int, b: Long, c: Float, d: LocalDate, e: Instant, f: String)

 object Tablet {
-  val avroCodec: NJAvroCodec[Tablet] = NJAvroCodec[Tablet]
+  val avroCodec: AvroCodec[Tablet] = AvroCodec[Tablet]
   implicit val te: TypedEncoder[Tablet] = shapeless.cachedImplicit
   val ate: AvroTypedEncoder[Tablet] = AvroTypedEncoder(avroCodec)
   implicit val re: RowEncoder[Tablet] = shapeless.cachedImplicit
diff --git a/spark/src/test/scala/mtest/spark/kafka/DecimalTopicTest.scala b/spark/src/test/scala/mtest/spark/kafka/DecimalTopicTest.scala
index a623e8325..cad998e3a 100644
--- a/spark/src/test/scala/mtest/spark/kafka/DecimalTopicTest.scala
+++ b/spark/src/test/scala/mtest/spark/kafka/DecimalTopicTest.scala
@@ -4,7 +4,7 @@ import cats.effect.IO
 import com.github.chenharryhua.nanjin.common.kafka.TopicName
 import com.github.chenharryhua.nanjin.kafka.{KafkaTopic, TopicDef}
 import com.github.chenharryhua.nanjin.messages.kafka.NJProducerRecord
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.kafka.SparKafkaTopic
 import frameless.TypedEncoder
 import io.circe.Codec
@@ -64,7 +64,7 @@ object DecimalTopicTestCase {
   }

   implicit val roundingMode: BigDecimal.RoundingMode.Value = RoundingMode.HALF_UP
-  val codec: NJAvroCodec[HasDecimal] = NJAvroCodec[HasDecimal](schemaText)
+  val codec: AvroCodec[HasDecimal] = AvroCodec[HasDecimal](schemaText)

   val topicDef: TopicDef[Int, HasDecimal] =
     TopicDef[Int, HasDecimal](TopicName("kafka.decimal.test"), codec)
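A topic definition takes the renamed codec unchanged; restated from the decimal fixture (only the value codec is passed explicitly here; the Int key side is presumably derived implicitly, which this diff does not show):

    val codec: AvroCodec[HasDecimal] = AvroCodec[HasDecimal](schemaText)

    val topicDef: TopicDef[Int, HasDecimal] =
      TopicDef[Int, HasDecimal](TopicName("kafka.decimal.test"), codec)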
diff --git a/spark/src/test/scala/mtest/spark/kafka/KJsonTest.scala b/spark/src/test/scala/mtest/spark/kafka/KJsonTest.scala
index 1605349d7..9e8cdb098 100644
--- a/spark/src/test/scala/mtest/spark/kafka/KJsonTest.scala
+++ b/spark/src/test/scala/mtest/spark/kafka/KJsonTest.scala
@@ -3,7 +3,7 @@ package mtest.spark.kafka
 import cats.effect.IO
 import cats.effect.unsafe.implicits.global
 import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
-import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange
+import com.github.chenharryhua.nanjin.datetime.DateTimeRange
 import com.github.chenharryhua.nanjin.kafka.KafkaTopic
 import com.github.chenharryhua.nanjin.messages.kafka.NJProducerRecord
 import com.github.chenharryhua.nanjin.messages.kafka.codec.KJson
@@ -35,6 +35,6 @@ class KJsonTest extends AnyFunSuite {
       .unsafeRunSync()
     ctx.schemaRegistry.register(topic.topicDef).unsafeRunSync()
     sparKafka.topic(topic).fromKafka.flatMap(_.output.circe(root / "circe").run[IO]).unsafeRunSync()
-    sparKafka.dump(topic.topicName, root / "jackson", NJDateTimeRange(sydneyTime)).unsafeRunSync()
+    sparKafka.dump(topic.topicName, root / "jackson", DateTimeRange(sydneyTime)).unsafeRunSync()
   }
 }
diff --git a/spark/src/test/scala/mtest/spark/kafka/PushPullGRTest.scala b/spark/src/test/scala/mtest/spark/kafka/PushPullGRTest.scala
index 0c599fb22..e7fbaac64 100644
--- a/spark/src/test/scala/mtest/spark/kafka/PushPullGRTest.scala
+++ b/spark/src/test/scala/mtest/spark/kafka/PushPullGRTest.scala
@@ -4,7 +4,7 @@ import cats.effect.IO
 import cats.effect.unsafe.implicits.global
 import com.github.chenharryhua.nanjin.common.kafka.TopicName
 import com.github.chenharryhua.nanjin.kafka.TopicDef
-import com.github.chenharryhua.nanjin.messages.kafka.codec.{immigrate, NJAvroCodec}
+import com.github.chenharryhua.nanjin.messages.kafka.codec.{immigrate, AvroCodec}
 import com.sksamuel.avro4s.Record
 import eu.timepit.refined.auto.*
 import fs2.Stream
@@ -25,9 +25,9 @@ class PushPullGRTest extends AnyFunSuite {
   val root = "./data/test/spark/kafka/push_pull"

   val baseTopic: TopicDef[Int, version1.Tiger] =
-    TopicDef[Int, version1.Tiger](topicName, NJAvroCodec[version1.Tiger])
+    TopicDef[Int, version1.Tiger](topicName, AvroCodec[version1.Tiger])

   val evolveTopic: TopicDef[Int, version2.Tiger] =
-    TopicDef[Int, version2.Tiger](topicName, NJAvroCodec[version2.Tiger])
+    TopicDef[Int, version2.Tiger](topicName, AvroCodec[version2.Tiger])

   val baseData: Stream[IO, Record] =
     Stream.range(0, 10).map(a => baseTopic.producerFormat.toRecord(a, version1.Tiger(a))).covary[IO]
diff --git a/spark/src/test/scala/mtest/spark/kafka/SparKafkaTest.scala b/spark/src/test/scala/mtest/spark/kafka/SparKafkaTest.scala
index dd2751813..11637bb5b 100644
--- a/spark/src/test/scala/mtest/spark/kafka/SparKafkaTest.scala
+++ b/spark/src/test/scala/mtest/spark/kafka/SparKafkaTest.scala
@@ -4,7 +4,7 @@ import cats.effect.IO
 import cats.effect.unsafe.implicits.global
 import com.github.chenharryhua.nanjin.common.kafka.TopicName
 import com.github.chenharryhua.nanjin.kafka.{KafkaTopic, NJKafkaByteConsume, TopicDef}
-import com.github.chenharryhua.nanjin.messages.kafka.codec.{gr2BinAvro, gr2Circe, gr2Jackson, NJAvroCodec}
+import com.github.chenharryhua.nanjin.messages.kafka.codec.{gr2BinAvro, gr2Circe, gr2Jackson, AvroCodec}
 import com.github.chenharryhua.nanjin.messages.kafka.{NJConsumerRecord, NJProducerRecord}
 import com.sksamuel.avro4s.SchemaFor
 import eu.timepit.refined.auto.*
@@ -87,7 +87,7 @@ class SparKafkaTest extends AnyFunSuite {
     sparKafka
       .topic(src.topicDef)
       .crRdd(ds.rdd)
-      .bimap(_.toString, _ + 1)(NJAvroCodec[String], NJAvroCodec[Int])
+      .bimap(_.toString, _ + 1)(AvroCodec[String], AvroCodec[Int])
       .rdd
       .collect()
       .toSet
@@ -110,9 +110,7 @@ class SparKafkaTest extends AnyFunSuite {
     sparKafka
       .topic(src.topicDef)
      .crRdd(ds.rdd)
-      .flatMap(m => m.value.map(x => m.focus(_.value).replace(Some(x - 1))))(
-        NJAvroCodec[Int],
-        NJAvroCodec[Int])
+      .flatMap(m => m.value.map(x => m.focus(_.value).replace(Some(x - 1))))(AvroCodec[Int], AvroCodec[Int])
       .rdd
       .collect()
       .toSet
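One wrinkle for migrators: the RDD combinators take the result codecs in an explicit second parameter list, so the old name appears twice per call site and both occurrences must be renamed. Condensed from the bimap test above (crRdd as built in that test):

    crRdd
      .bimap(_.toString, _ + 1)(AvroCodec[String], AvroCodec[Int]) // value functions, then codecs
      .rdd
      .collect()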
diff --git a/spark/src/test/scala/mtest/spark/kafka/kafka.scala b/spark/src/test/scala/mtest/spark/kafka/kafka.scala
index 8f1a56391..17edbbc46 100644
--- a/spark/src/test/scala/mtest/spark/kafka/kafka.scala
+++ b/spark/src/test/scala/mtest/spark/kafka/kafka.scala
@@ -2,13 +2,13 @@ package mtest.spark
 import cats.effect.IO
 import com.github.chenharryhua.nanjin.common.chrono.zones.beijingTime
-import com.github.chenharryhua.nanjin.datetime.NJDateTimeRange
+import com.github.chenharryhua.nanjin.datetime.DateTimeRange
 import com.github.chenharryhua.nanjin.kafka.{KafkaContext, KafkaSettings}
 import com.github.chenharryhua.nanjin.spark.*
 import com.github.chenharryhua.nanjin.terminals.Hadoop

 package object kafka {
-  val range: NJDateTimeRange = NJDateTimeRange(beijingTime)
+  val range: DateTimeRange = DateTimeRange(beijingTime)
   val ctx: KafkaContext[IO] = KafkaContext[IO](KafkaSettings.local)
   val sparKafka: SparKafkaContext[IO] = sparkSession.alongWith(ctx)
diff --git a/spark/src/test/scala/mtest/spark/pipe/ReadWriteTest.scala b/spark/src/test/scala/mtest/spark/pipe/ReadWriteTest.scala
index fe5eec227..ab506fce5 100644
--- a/spark/src/test/scala/mtest/spark/pipe/ReadWriteTest.scala
+++ b/spark/src/test/scala/mtest/spark/pipe/ReadWriteTest.scala
@@ -3,7 +3,7 @@ package mtest.spark.pipe
 import cats.effect.IO
 import cats.effect.unsafe.implicits.global
 import com.github.chenharryhua.nanjin.common.chrono.Policy
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.table.LoadTable
 import com.github.chenharryhua.nanjin.spark.{AvroTypedEncoder, SparkSessionExt}
 import com.github.chenharryhua.nanjin.terminals.Hadoop
@@ -34,7 +34,7 @@ object ReadWriteTestData {
   implicit val hd: RowEncoder[TestData] = shapeless.cachedImplicit
   implicit val ri: RowDecoder[TestData] = shapeless.cachedImplicit
-  val codec: NJAvroCodec[TestData] = NJAvroCodec[TestData]
+  val codec: AvroCodec[TestData] = AvroCodec[TestData]
   val toRecord: ToRecord[TestData] = ToRecord(codec)

   val loader: LoadTable[TestData] = sparkSession.loadTable(AvroTypedEncoder[TestData](codec))
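The same codec value feeds both the avro4s record side and the Spark table loader, which keeps the rename in fixture files like this one to the import line plus the codec declaration:

    val codec: AvroCodec[TestData] = AvroCodec[TestData]        // one derived codec...
    val toRecord: ToRecord[TestData] = ToRecord(codec)          // ...builds avro4s records
    val loader: LoadTable[TestData] =
      sparkSession.loadTable(AvroTypedEncoder[TestData](codec)) // ...and loads typed tables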
diff --git a/spark/src/test/scala/mtest/spark/table/SparkTableTest.scala b/spark/src/test/scala/mtest/spark/table/SparkTableTest.scala
index 40e9ebc3f..677e43d09 100644
--- a/spark/src/test/scala/mtest/spark/table/SparkTableTest.scala
+++ b/spark/src/test/scala/mtest/spark/table/SparkTableTest.scala
@@ -5,8 +5,8 @@ import cats.effect.kernel.Resource
 import cats.effect.unsafe.implicits.global
 import cats.syntax.all.*
 import com.github.chenharryhua.nanjin.common.chrono.zones.sydneyTime
-import com.github.chenharryhua.nanjin.database.NJHikari
-import com.github.chenharryhua.nanjin.messages.kafka.codec.NJAvroCodec
+import com.github.chenharryhua.nanjin.database.DBConfig
+import com.github.chenharryhua.nanjin.messages.kafka.codec.AvroCodec
 import com.github.chenharryhua.nanjin.spark.listeners.SparkContextListener
 import com.github.chenharryhua.nanjin.spark.table.LoadTable
 import com.github.chenharryhua.nanjin.spark.{AvroTypedEncoder, SparkSessionExt}
@@ -69,7 +69,7 @@ class SparkTableTest extends AnyFunSuite {
   implicit val ss: SparkSession = sparkSession

-  val codec: NJAvroCodec[DBTable] = NJAvroCodec[DBTable]
+  val codec: AvroCodec[DBTable] = AvroCodec[DBTable]
   implicit val te: TypedEncoder[DBTable] = shapeless.cachedImplicit
   implicit val te2: TypedEncoder[PartialDBTable] = shapeless.cachedImplicit
   implicit val re: RowEncoder[DBTable] = shapeless.cachedImplicit
@@ -90,11 +90,11 @@ class SparkTableTest extends AnyFunSuite {
   val dbData: DBTable = sample.toDB

   val pg: Resource[IO, HikariTransactor[IO]] =
-    HikariTransactor.fromHikariConfig[IO](NJHikari(postgres).set(_.setMaximumPoolSize(4)).hikariConfig)
+    HikariTransactor.fromHikariConfig[IO](DBConfig(postgres).set(_.setMaximumPoolSize(4)).hikariConfig)

   pg.use(txn => (DBTable.drop *> DBTable.create).transact(txn)).unsafeRunSync()

-  val hikari: HikariConfig = NJHikari(postgres).hikariConfig
+  val hikari: HikariConfig = DBConfig(postgres).hikariConfig

   val loader: LoadTable[DBTable] = ss.loadTable(ate)

   test("load data") {
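On the database side, call sites swap NJHikari for DBConfig one-for-one, exactly as the test above does. A minimal sketch (postgres is that test's fixture value; the rest appears verbatim above):

    // a pooled doobie transactor from the renamed config builder
    val pg: Resource[IO, HikariTransactor[IO]] =
      HikariTransactor.fromHikariConfig[IO](
        DBConfig(postgres).set(_.setMaximumPoolSize(4)).hikariConfig)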