Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove scalalogging library #4495

Merged
merged 4 commits into from
Nov 13, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 2 additions & 5 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,6 @@ val munitVersion = "1.0.0-M10"
val nimbusJoseJwtVersion = "9.37.1"
val postgresJdbcVersion = "42.6.0"
val pureconfigVersion = "0.17.4"
val scalaLoggingVersion = "3.9.5"
val scalaTestVersion = "3.2.17"
val scalaXmlVersion = "2.2.0"
val topBraidVersion = "1.3.2" // 1.4.1 fails to validate some test schemas
Expand Down Expand Up @@ -115,7 +114,7 @@ lazy val munit = "org.scalameta" %% "munit"
lazy val nimbusJoseJwt = "com.nimbusds" % "nimbus-jose-jwt" % nimbusJoseJwtVersion
lazy val pureconfig = "com.github.pureconfig" %% "pureconfig" % pureconfigVersion
lazy val pureconfigCats = "com.github.pureconfig" %% "pureconfig-cats" % pureconfigVersion
lazy val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion
lazy val scalaReflect = "org.scala-lang" % "scala-reflect" % scalaCompilerVersion
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I had to introduce this dependency explicitly because it was previously pulled in transitively through scala-logging, and it is still required by Magnolia.

lazy val scalaTest = "org.scalatest" %% "scalatest" % scalaTestVersion
lazy val scalaXml = "org.scala-lang.modules" %% "scala-xml" % scalaXmlVersion
lazy val topBraidShacl = "org.topbraid" % "shacl" % topBraidVersion
Expand Down Expand Up @@ -214,7 +213,6 @@ lazy val kernel = project
log4cats,
pureconfig,
pureconfigCats,
scalaLogging,
munit % Test,
scalaTest % Test
),
Expand Down Expand Up @@ -289,6 +287,7 @@ lazy val rdf = project
jenaArq,
jsonldjava,
magnolia,
scalaReflect,
topBraidShacl,
akkaSlf4j % Test,
akkaTestKit % Test,
Expand Down Expand Up @@ -762,7 +761,6 @@ lazy val storage = project
circeGenericExtras,
logback,
pureconfig,
scalaLogging,
akkaHttpTestKit % Test,
akkaTestKit % Test,
mockito % Test,
Expand Down Expand Up @@ -796,7 +794,6 @@ lazy val tests = project
circeGenericExtras,
fs2,
logback,
scalaLogging,
akkaTestKit % Test,
akkaHttpTestKit % Test,
awsSdk % Test,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@ package ch.epfl.bluebrain.nexus.delta.plugin
import cats.data.NonEmptyList
import cats.effect.IO
import cats.syntax.all._
import ch.epfl.bluebrain.nexus.delta.kernel.Logger
import ch.epfl.bluebrain.nexus.delta.plugin.PluginsLoader.PluginLoaderConfig
import ch.epfl.bluebrain.nexus.delta.sdk.error.PluginError
import ch.epfl.bluebrain.nexus.delta.sdk.error.PluginError.{ClassNotFoundError, MultiplePluginDefClassesFound, PluginLoadErrors}
import ch.epfl.bluebrain.nexus.delta.sdk.plugin.PluginDef
import com.typesafe.scalalogging.Logger
import io.github.classgraph.ClassGraph

import java.io.{File, FilenameFilter}
Expand All @@ -24,7 +24,7 @@ import scala.jdk.CollectionConverters._
* [[PluginsLoader]] configuration
*/
class PluginsLoader(loaderConfig: PluginLoaderConfig) {
private val logger: Logger = Logger[PluginsLoader]
private val logger = Logger[PluginsLoader]

private val parentClassLoader = this.getClass.getClassLoader

Expand Down Expand Up @@ -98,8 +98,7 @@ class PluginsLoader(loaderConfig: PluginLoaderConfig) {
value => IO.pure(value)
)
case Nil =>
logger.warn(s"Jar file '$jar' does not contain a 'PluginDef' implementation.")
IO.none
logger.warn(s"Jar file '$jar' does not contain a 'PluginDef' implementation.").as(None)
case multiple =>
IO.raiseError(MultiplePluginDefClassesFound(jar, multiple.toSet))

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package ch.epfl.bluebrain.nexus.delta.kernel

import cats.effect.{IO, Timer}
import com.typesafe.scalalogging.{Logger => ScalaLoggingLogger}
import org.typelevel.log4cats.Logger
import pureconfig.ConfigReader
import pureconfig.error.{CannotConvert, ConfigReaderFailures, ConvertFailure}
Expand Down Expand Up @@ -51,18 +50,6 @@ object RetryStrategy {
}
)

/**
* Log errors when retrying
*/
def logError[E](logger: ScalaLoggingLogger, action: String): (E, RetryDetails) => IO[Unit] = {
case (err, WillDelayAndRetry(nextDelay, retriesSoFar, _)) =>
val message = s"""Error $err while $action: retrying in ${nextDelay.toMillis}ms (retries so far: $retriesSoFar)"""
IO.delay(logger.warn(message))
case (err, GivingUp(totalRetries, _)) =>
val message = s"""Error $err while $action, giving up (total retries: $totalRetries)"""
IO.delay(logger.error(message))
}

/**
* Log errors when retrying
*/
Expand Down Expand Up @@ -106,27 +93,6 @@ object RetryStrategy {
onError
)

/**
* Retry strategy which retries on all non fatal errors and just outputs a log when an error occurs
*
* @param config
* the retry configuration
* @param logger
* the logger to use
* @param action
* the action that was performed
*/
def retryOnNonFatal(
config: RetryStrategyConfig,
logger: ScalaLoggingLogger,
action: String
): RetryStrategy[Throwable] =
RetryStrategy(
config,
(t: Throwable) => NonFatal(t),
(t: Throwable, d: RetryDetails) => logError(logger, action)(t, d)
)

def retryOnNonFatal(
config: RetryStrategyConfig,
logger: Logger[IO],
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing

import cats.effect.IO
import ch.epfl.bluebrain.nexus.delta.kernel.Logger
import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent
import ch.epfl.bluebrain.nexus.delta.kernel.syntax.kamonSyntax
import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViews
Expand All @@ -14,7 +15,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri
import ch.epfl.bluebrain.nexus.delta.sourcing.config.BatchConfig
import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem
import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Operation.Sink
import com.typesafe.scalalogging.Logger
import fs2.Chunk
import shapeless.Typeable

Expand Down Expand Up @@ -62,8 +62,8 @@ final class BlazegraphSink(
.bulk(namespace, bulk.queries)
.redeemWith(
err =>
IO
.delay(logger.error(s"Indexing in blazegraph namespace $namespace failed", err))
logger
.error(err)(s"Indexing in blazegraph namespace $namespace failed")
.as(elements.map { _.failed(err) }),
_ => IO.pure(markInvalidIdsAsFailed(elements, bulk.invalidIds))
)
Expand All @@ -83,7 +83,7 @@ final class BlazegraphSink(

object BlazegraphSink {

private val logger: Logger = Logger[BlazegraphSink]
private val logger = Logger[BlazegraphSink]

def apply(client: BlazegraphClient, batchConfig: BatchConfig, namespace: String)(implicit base: BaseUri) =
new BlazegraphSink(client, batchConfig.maxElements, batchConfig.maxInterval, namespace = namespace)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing
import cats.data.NonEmptyChain
import cats.effect.{ContextShift, IO, Timer}
import cats.syntax.all._
import ch.epfl.bluebrain.nexus.delta.kernel.Logger
import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.BlazegraphViews
import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphViewState
import ch.epfl.bluebrain.nexus.delta.sdk.stream.GraphResourceStream
Expand All @@ -13,7 +14,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter
import ch.epfl.bluebrain.nexus.delta.sourcing.state.GraphResource
import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Operation.Sink
import ch.epfl.bluebrain.nexus.delta.sourcing.stream._
import com.typesafe.scalalogging.Logger

/**
* Definition of a Blazegraph view to build a projection
Expand All @@ -26,7 +26,7 @@ sealed trait IndexingViewDef extends Product with Serializable {

object IndexingViewDef {

private val logger: Logger = Logger[IndexingViewDef]
private val logger = Logger[IndexingViewDef]

/**
* Active view eligible to be run as a projection by the supervisor
Expand Down Expand Up @@ -118,7 +118,7 @@ object IndexingViewDef {
} yield projection

IO.fromEither(compiled).onError { e =>
IO.delay(logger.error(s"View '${v.ref}' could not be compiled.", e))
logger.error(e)(s"View '${v.ref}' could not be compiled.")
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes}
import akka.stream.alpakka.sse.scaladsl.EventSource
import cats.effect.{ContextShift, IO}
import cats.implicits.{catsSyntaxApplicativeError, catsSyntaxFlatMapOps}
import ch.epfl.bluebrain.nexus.delta.kernel.Logger
import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.RemoteProjectSource
import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.stream.CompositeBranch
import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
Expand All @@ -24,7 +25,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.ElemStream
import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset
import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset.Start
import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{Elem, RemainingElems}
import com.typesafe.scalalogging.Logger
import io.circe.parser.decode
import fs2._

Expand Down Expand Up @@ -74,7 +74,7 @@ trait DeltaClient {

object DeltaClient {

private val logger: Logger = Logger[DeltaClient.type]
private val logger = Logger[DeltaClient.type]

private val accept = Accept(`application/json`.mediaType, RdfMediaTypes.`application/ld+json`)

Expand Down Expand Up @@ -144,8 +144,8 @@ object DeltaClient {
decode[Elem[Unit]](sse.data) match {
case Right(elem) => Stream.emit(elem)
case Left(err) =>
logger.error(s"Failed to decode sse event '$sse'", err)
Stream.empty
Stream.eval(logger.error(err)(s"Failed to decode sse event '$sse'")) >>
Stream.empty
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@ import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.BasicHttpCredentials
import cats.effect.{ContextShift, IO, Timer}
import cats.syntax.all._
import ch.epfl.bluebrain.nexus.delta.kernel.RetryStrategy
import ch.epfl.bluebrain.nexus.delta.kernel.RetryStrategy.logError
import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination
import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils
import ch.epfl.bluebrain.nexus.delta.kernel.{Logger, RetryStrategy}
import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.client.ElasticSearchClient.BulkResponse.MixedOutcomes.Outcome
import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.client.ElasticSearchClient._
import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.{EmptyResults, ResourcesSearchParams}
Expand All @@ -25,7 +25,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchResults.{ScoredSearc
import ch.epfl.bluebrain.nexus.delta.sdk.model.search.{AggregationResult, ResultEntry, SearchResults, SortList}
import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, Name}
import ch.epfl.bluebrain.nexus.delta.sdk.syntax._
import com.typesafe.scalalogging.Logger
import io.circe._
import io.circe.syntax._

Expand All @@ -43,7 +42,7 @@ class ElasticSearchClient(client: HttpClient, endpoint: Uri, maxIndexPathLength:
cs: ContextShift[IO]
) {
import as.dispatcher
private val logger: Logger = Logger[ElasticSearchClient]
private val logger = Logger[ElasticSearchClient]
private val serviceName = Name.unsafe("elasticsearch")
private val scriptPath = "_scripts"
private val docPath = "_doc"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.indexing
import cats.data.NonEmptyChain
import cats.effect.{ContextShift, IO, Timer}
import cats.syntax.all._
import ch.epfl.bluebrain.nexus.delta.kernel.Logger
import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.ElasticSearchViews
import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.client.IndexLabel
import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.{contexts, DefaultMapping, DefaultSettings, ElasticSearchViewState}
Expand All @@ -16,7 +17,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter
import ch.epfl.bluebrain.nexus.delta.sourcing.state.GraphResource
import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Operation.Sink
import ch.epfl.bluebrain.nexus.delta.sourcing.stream._
import com.typesafe.scalalogging.Logger
import io.circe.JsonObject

/**
Expand All @@ -32,7 +32,7 @@ sealed trait IndexingViewDef extends Product with Serializable {

object IndexingViewDef {

private val logger: Logger = Logger[IndexingViewDef]
private val logger = Logger[IndexingViewDef]

private val defaultContext = ContextValue(contexts.elasticsearchIndexing, contexts.indexingMetadata)

Expand Down Expand Up @@ -132,7 +132,7 @@ object IndexingViewDef {
} yield projection

IO.fromEither(compiled).onError { e =>
IO.delay(logger.error(s"View '${v.ref}' could not be compiled.", e))
logger.error(e)(s"View '${v.ref}' could not be compiled.")
}
}
}
Loading