From 27dcb96f5d8a9d4a899e45d7037e60f34d95ffde Mon Sep 17 00:00:00 2001 From: Simon Date: Tue, 12 Sep 2023 15:20:10 +0200 Subject: [PATCH 01/13] Update Delta dependencies (#4267) Co-authored-by: Simon Dumas --- build.sbt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index 83857c6cf5..46022e43e8 100755 --- a/build.sbt +++ b/build.sbt @@ -20,7 +20,7 @@ val akkaHttpCirceVersion = "1.39.2" val akkaCorsVersion = "1.2.0" val akkaVersion = "2.6.21" val alpakkaVersion = "3.0.4" -val apacheCompressVersion = "1.23.0" +val apacheCompressVersion = "1.24.0" val apacheIoVersion = "1.3.2" val awsSdkVersion = "2.17.184" val byteBuddyAgentVersion = "1.10.17" @@ -29,7 +29,7 @@ val caffeineVersion = "3.1.8" val catsEffectVersion = "2.5.5" val catsRetryVersion = "2.1.1" val catsVersion = "2.10.0" -val circeVersion = "0.14.5" +val circeVersion = "0.14.6" val circeOpticsVersion = "0.14.1" val circeExtrasVersions = "0.14.3" val classgraphVersion = "4.8.162" @@ -46,8 +46,8 @@ val kanelaAgentVersion = "1.0.17" val kindProjectorVersion = "0.13.2" val log4catsVersion = "1.7.0" val logbackVersion = "1.4.11" -val magnoliaVersion = "1.1.4" -val mockitoVersion = "1.17.14" +val magnoliaVersion = "1.1.6" +val mockitoVersion = "1.17.22" val monixVersion = "3.4.1" val monixBioVersion = "1.2.0" val munitVersion = "1.0.0-M8" @@ -55,7 +55,7 @@ val nimbusJoseJwtVersion = "9.31" val postgresJdbcVersion = "42.6.0" val pureconfigVersion = "0.17.4" val scalaLoggingVersion = "3.9.5" -val scalaTestVersion = "3.2.16" +val scalaTestVersion = "3.2.17" val scalaXmlVersion = "2.1.0" val topBraidVersion = "1.3.2" // 1.4.1 fails to validate some test schemas val testContainersVersion = "1.19.0" From 288460e5d85709bec7ff972311728d93df9bdd95 Mon Sep 17 00:00:00 2001 From: Daniel Bell Date: Thu, 14 Sep 2023 12:42:24 +0100 Subject: [PATCH 02/13] Update the authentication process for remote storages (#4241) * authenticate using keycloak * refactor AuthTokenProvider * use specific token error * scalafmt * clarify test usage for AuthTokenProvider * cache the access tokens * change migration code which was broken when merging master branch * use label for realm * rename config class to 'Credentials' * use the realm http client * add anonymous as an authentication method * add legacy auth token method to auth options * add docs * rename variable * credentials are not optional * rename AuthMethod to Credentials * TokenError -> AuthTokenError * KeycloakAuthService -> OpenIdAuthService * add logging to caching open id auth token provider * fix comment * pr feedback * add release notes * fix storage config * check realm is not deprecated * use ParsedToken * remove KeyValueStore.create * change AuthTokenProvider interface * move docs * docs change * add scaladoc * scalafmt --- .../storage/src/main/resources/storage.conf | 6 +- .../plugins/storage/StoragePluginModule.scala | 21 +++-- .../storage/storages/StoragesConfig.scala | 3 +- .../operations/remote/AuthTokenProvider.scala | 26 ------ .../client/RemoteDiskStorageClient.scala | 16 ++-- .../plugins/storage/files/FilesSpec.scala | 6 +- .../files/routes/FilesRoutesSpec.scala | 6 +- .../storage/storages/StorageFixtures.scala | 3 +- .../remote/RemoteDiskStorageAccessSpec.scala | 6 +- .../remote/RemoteStorageLinkFileSpec.scala | 6 +- .../RemoteStorageSaveAndFetchFileSpec.scala | 6 +- .../client/RemoteStorageClientSpec.scala | 6 +- .../delta/sdk/auth/AuthTokenProvider.scala | 84 +++++++++++++++++++ .../nexus/delta/sdk/auth/Credentials.scala | 48 +++++++++++ 
.../delta/sdk/auth/OpenIdAuthService.scala | 76 +++++++++++++++++ .../delta/sdk/error/AuthTokenError.scala | 57 +++++++++++++ .../nexus/delta/sdk/error/IdentityError.scala | 2 +- .../delta/sdk/identities/ParsedToken.scala | 2 +- .../sdk/identities/model/TokenRejection.scala | 7 +- .../sdk/marshalling/RdfExceptionHandler.scala | 3 +- .../identities/model/TokenRejectionSpec.scala | 8 +- .../paradox/docs/delta/api/storages-api.md | 2 + .../running-nexus/configuration/index.md | 31 ++++++- .../docs/releases/v1.9-release-notes.md | 2 + tests/docker/config/delta-postgres.conf | 7 +- .../kg/storages/remote-disk-response.json | 6 +- .../nexus/tests/kg/RemoteStorageSpec.scala | 4 +- 27 files changed, 375 insertions(+), 75 deletions(-) delete mode 100644 delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/AuthTokenProvider.scala create mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala create mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/Credentials.scala create mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala create mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala diff --git a/delta/plugins/storage/src/main/resources/storage.conf b/delta/plugins/storage/src/main/resources/storage.conf index ed404f3431..6f159cfd84 100644 --- a/delta/plugins/storage/src/main/resources/storage.conf +++ b/delta/plugins/storage/src/main/resources/storage.conf @@ -52,7 +52,9 @@ plugins.storage { # the default endpoint default-endpoint = "http://localhost:8084/v1" # the default credentials for the endpoint - default-credentials = null + credentials { + type: "anonymous" + } # the default digest algorithm digest-algorithm = "SHA-256" # the default permission required in order to download a file from a remote disk storage @@ -61,7 +63,7 @@ plugins.storage { default-write-permission = "files/write" # flag to decide whether or not to show the absolute location of the files in the metadata response show-location = true - # the default maximum allowed file size (in bytes) for uploaded files. 10 GB + # the default maximum allowed file size (in bytes) for uploaded files. 
10 GB default-max-file-size = 10737418240 # Retry strategy for digest computation digest-computation = ${app.defaults.retry-strategy} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala index b4a4b4f53a..768939edc3 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala @@ -15,7 +15,6 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.Sto import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.contexts.{storages => storageCtxId, storagesMetadata => storageMetaCtxId} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageAccess -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.AuthTokenProvider import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.routes.StoragesRoutes import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.schemas.{storage => storagesSchemaId} @@ -25,6 +24,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteCon import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials, OpenIdAuthService} import ch.epfl.bluebrain.nexus.delta.sdk.deletion.ProjectDeletionTask import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig @@ -37,6 +37,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings +import ch.epfl.bluebrain.nexus.delta.sdk.realms.Realms import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.sse.SseEncoder import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors @@ -146,8 +147,12 @@ class StoragePluginModule(priority: Int) extends ModuleDef { many[ResourceShift[_, _, _]].ref[Storage.Shift] - make[AuthTokenProvider].from { (cfg: StorageTypeConfig) => - AuthTokenProvider(cfg) + make[OpenIdAuthService].from { (httpClient: HttpClient @Id("realm"), realms: Realms) => + new OpenIdAuthService(httpClient, realms) + } + + make[AuthTokenProvider].fromEffect { (authService: OpenIdAuthService) => + AuthTokenProvider(authService) } make[Files] @@ -226,8 +231,14 @@ class StoragePluginModule(priority: Int) extends ModuleDef { ( client: HttpClient @Id("storage"), as: ActorSystem[Nothing], - authTokenProvider: AuthTokenProvider - ) => new RemoteDiskStorageClient(client, authTokenProvider)(as.classicSystem) + authTokenProvider: AuthTokenProvider, + cfg: StorageTypeConfig + ) => + new RemoteDiskStorageClient( + client, + authTokenProvider, + cfg.remoteDisk.map(_.credentials).getOrElse(Credentials.Anonymous) + )(as.classicSystem) } many[ServiceDependency].addSet { diff --git 
a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesConfig.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesConfig.scala index 25b333f454..13dcd2e3d6 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesConfig.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesConfig.scala @@ -5,6 +5,7 @@ import cats.implicits.toBifunctorOps import ch.epfl.bluebrain.nexus.delta.kernel.{RetryStrategyConfig, Secret} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{AbsolutePath, DigestAlgorithm, StorageType} +import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import ch.epfl.bluebrain.nexus.delta.sdk.model.search.PaginationConfig import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission @@ -198,7 +199,7 @@ object StoragesConfig { final case class RemoteDiskStorageConfig( digestAlgorithm: DigestAlgorithm, defaultEndpoint: BaseUri, - defaultCredentials: Option[Secret[String]], + credentials: Credentials, defaultReadPermission: Permission, defaultWritePermission: Permission, showLocation: Boolean, diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/AuthTokenProvider.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/AuthTokenProvider.scala deleted file mode 100644 index 248dab973d..0000000000 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/AuthTokenProvider.scala +++ /dev/null @@ -1,26 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote - -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken -import monix.bio.UIO - -/** - * Provides an auth token for the service account, for use when comunicating with remote storage - */ -trait AuthTokenProvider { - def apply(): UIO[Option[AuthToken]] -} - -object AuthTokenProvider { - def apply(config: StorageTypeConfig): AuthTokenProvider = new AuthTokenProvider { - override def apply(): UIO[Option[AuthToken]] = - UIO.pure(config.remoteDisk.flatMap(_.defaultCredentials).map(secret => AuthToken(secret.value))) - } - def test(fixed: Option[AuthToken]): AuthTokenProvider = new AuthTokenProvider { - override def apply(): UIO[Option[AuthToken]] = UIO.pure(fixed) - } - - def test(implicit config: StorageTypeConfig): AuthTokenProvider = { - test(config.remoteDisk.flatMap(_.defaultCredentials).map(secret => AuthToken(secret.value))) - } -} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteDiskStorageClient.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteDiskStorageClient.scala index 1416422de8..02f3faa8a1 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteDiskStorageClient.scala +++ 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteDiskStorageClient.scala @@ -10,10 +10,10 @@ import akka.http.scaladsl.model.Uri.Path import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection.UnexpectedFetchError import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.MoveFileRejection.UnexpectedMoveError import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.{FetchFileRejection, MoveFileRejection, SaveFileRejection} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.AuthTokenProvider import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.model.RemoteDiskStorageFileAttributes import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceMarshalling._ import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError._ import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientError} @@ -32,8 +32,8 @@ import scala.concurrent.duration._ /** * The client to communicate with the remote storage service */ -final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenProvider)(implicit - as: ActorSystem +final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenProvider, credentials: Credentials)( + implicit as: ActorSystem ) { import as.dispatcher @@ -58,7 +58,7 @@ final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenP * the storage bucket name */ def exists(bucket: Label)(implicit baseUri: BaseUri): IO[HttpClientError, Unit] = { - getAuthToken().flatMap { authToken => + getAuthToken(credentials).flatMap { authToken => val endpoint = baseUri.endpoint / "buckets" / bucket.value val req = Head(endpoint).withCredentials(authToken) client(req) { @@ -82,7 +82,7 @@ final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenP relativePath: Path, entity: BodyPartEntity )(implicit baseUri: BaseUri): IO[SaveFileRejection, RemoteDiskStorageFileAttributes] = { - getAuthToken().flatMap { authToken => + getAuthToken(credentials).flatMap { authToken => val endpoint = baseUri.endpoint / "buckets" / bucket.value / "files" / relativePath val filename = relativePath.lastSegment.getOrElse("filename") val multipartForm = FormData(BodyPart("file", entity, Map("filename" -> filename))).toEntity() @@ -106,7 +106,7 @@ final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenP * the relative path to the file location */ def getFile(bucket: Label, relativePath: Path)(implicit baseUri: BaseUri): IO[FetchFileRejection, AkkaSource] = { - getAuthToken().flatMap { authToken => + getAuthToken(credentials).flatMap { authToken => val endpoint = baseUri.endpoint / "buckets" / bucket.value / "files" / relativePath client.toDataBytes(Get(endpoint).withCredentials(authToken)).mapError { case error @ HttpClientStatusError(_, `NotFound`, _) if !bucketNotFoundType(error) => @@ -129,7 +129,7 @@ final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenP bucket: Label, relativePath: Path )(implicit baseUri: BaseUri): IO[FetchFileRejection, RemoteDiskStorageFileAttributes] = { - getAuthToken().flatMap { authToken => + 
getAuthToken(credentials).flatMap { authToken => val endpoint = baseUri.endpoint / "buckets" / bucket.value / "attributes" / relativePath client.fromJsonTo[RemoteDiskStorageFileAttributes](Get(endpoint).withCredentials(authToken)).mapError { case error @ HttpClientStatusError(_, `NotFound`, _) if !bucketNotFoundType(error) => @@ -156,7 +156,7 @@ final class RemoteDiskStorageClient(client: HttpClient, getAuthToken: AuthTokenP sourceRelativePath: Path, destRelativePath: Path )(implicit baseUri: BaseUri): IO[MoveFileRejection, RemoteDiskStorageFileAttributes] = { - getAuthToken().flatMap { authToken => + getAuthToken(credentials).flatMap { authToken => val endpoint = baseUri.endpoint / "buckets" / bucket.value / "files" / destRelativePath val payload = Json.obj("source" -> sourceRelativePath.toString.asJson) client.fromJsonTo[RemoteDiskStorageFileAttributes](Put(endpoint, payload).withCredentials(authToken)).mapError { diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala index 60da88be89..9b01a64594 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala @@ -14,13 +14,13 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejec import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageType.{RemoteDiskStorage => RemoteStorageType} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{StorageRejection, StorageStatEntry} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.AkkaSourceHelpers -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.AuthTokenProvider import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.{StorageFixtures, Storages, StoragesConfig, StoragesStatistics} import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclSimpleCheck import ch.epfl.bluebrain.nexus.delta.sdk.acls.model.AclAddress +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.directives.FileResponse import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{Caller, ServiceAccount} @@ -69,8 +69,8 @@ class FilesSpec(docker: RemoteStorageDocker) implicit val typedSystem: typed.ActorSystem[Nothing] = system.toTyped implicit val httpClient: HttpClient = HttpClient()(httpClientConfig, system, sc) implicit val caller: Caller = Caller(bob, Set(bob, Group("mygroup", realm), Authenticated(realm))) - implicit val authTokenProvider: AuthTokenProvider = AuthTokenProvider.test - val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider) + implicit val authTokenProvider: AuthTokenProvider = AuthTokenProvider.anonymousForTest + val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider, Credentials.Anonymous) val tag = UserTag.unsafe("tag") val otherRead = Permission.unsafe("other/read") diff --git 
a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala index bb76ca313a..b22a4106d8 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala @@ -12,7 +12,6 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{FileAttributes import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes.FilesRoutesSpec.fileMetadata import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.{contexts => fileContexts, permissions, FileFixtures, Files, FilesConfig} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{StorageRejection, StorageStatEntry, StorageType} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.AuthTokenProvider import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.{contexts => storageContexts, permissions => storagesPermissions, StorageFixtures, Storages, StoragesConfig, StoragesStatistics} import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri @@ -23,6 +22,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteCon import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclSimpleCheck import ch.epfl.bluebrain.nexus.delta.sdk.acls.model.AclAddress +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient import ch.epfl.bluebrain.nexus.delta.sdk.identities.IdentitiesDummy @@ -53,8 +53,8 @@ class FilesRoutesSpec import akka.actor.typed.scaladsl.adapter._ implicit val typedSystem: typed.ActorSystem[Nothing] = system.toTyped val httpClient: HttpClient = HttpClient()(httpClientConfig, system, s) - val authTokenProvider: AuthTokenProvider = AuthTokenProvider.test - val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider) + val authTokenProvider: AuthTokenProvider = AuthTokenProvider.anonymousForTest + val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider, Credentials.Anonymous) // TODO: sort out how we handle this in tests implicit override def rcr: RemoteContextResolution = diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StorageFixtures.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StorageFixtures.scala index e95661694d..22b27b659d 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StorageFixtures.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StorageFixtures.scala @@ -5,6 +5,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.{Di import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageFields.{DiskStorageFields, RemoteDiskStorageFields, S3StorageFields} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{AbsolutePath, DigestAlgorithm} import 
ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv +import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials.Anonymous import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ @@ -29,7 +30,7 @@ trait StorageFixtures extends TestHelpers with CirceLiteral { implicit val config: StorageTypeConfig = StorageTypeConfig( disk = DiskStorageConfig(diskVolume, Set(diskVolume,tmpVolume), DigestAlgorithm.default, permissions.read, permissions.write, showLocation = false, Some(5000), 50), amazon = Some(S3StorageConfig(DigestAlgorithm.default, Some("localhost"), Some(Secret(MinioDocker.RootUser)), Some(Secret(MinioDocker.RootPassword)), permissions.read, permissions.write, showLocation = false, 60)), - remoteDisk = Some(RemoteDiskStorageConfig(DigestAlgorithm.default, BaseUri("http://localhost", Label.unsafe("v1")), None, permissions.read, permissions.write, showLocation = false, 70, RetryStrategyConfig.AlwaysGiveUp)), + remoteDisk = Some(RemoteDiskStorageConfig(DigestAlgorithm.default, BaseUri("http://localhost", Label.unsafe("v1")), Anonymous, permissions.read, permissions.write, showLocation = false, 70, RetryStrategyConfig.AlwaysGiveUp)), ) val diskFields = DiskStorageFields(Some("diskName"), Some("diskDescription"), default = true, Some(tmpVolume), Some(Permission.unsafe("disk/read")), Some(Permission.unsafe("disk/write")), Some(1000), Some(50)) val diskVal = diskFields.toValue(config).get diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala index cf220d0f8f..40ff841563 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala @@ -9,6 +9,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.permissions._ import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientConfig} import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ @@ -37,8 +38,9 @@ class RemoteDiskStorageAccessSpec(docker: RemoteStorageDocker) implicit private val sc: Scheduler = Scheduler.global implicit private val httpConfig: HttpClientConfig = httpClientConfig private val httpClient: HttpClient = HttpClient() - private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.test - private val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider) + private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.anonymousForTest + private val remoteDiskStorageClient = + new RemoteDiskStorageClient(httpClient, authTokenProvider, Credentials.Anonymous) private val access = new RemoteDiskStorageAccess(remoteDiskStorageClient) diff --git 
a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageLinkFileSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageLinkFileSpec.scala index a8b5b3bab1..0819bed9ae 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageLinkFileSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageLinkFileSpec.scala @@ -16,6 +16,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.Storage import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.permissions.{read, write} import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientConfig} import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, Tags} import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ @@ -47,8 +48,9 @@ class RemoteStorageLinkFileSpec(docker: RemoteStorageDocker) implicit val ec: ExecutionContext = system.dispatcher implicit private val httpConfig: HttpClientConfig = httpClientConfig private val httpClient: HttpClient = HttpClient() - private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.test - private val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider) + private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.anonymousForTest + private val remoteDiskStorageClient = + new RemoteDiskStorageClient(httpClient, authTokenProvider, Credentials.Anonymous) private val iri = iri"http://localhost/remote" private val uuid = UUID.fromString("8049ba90-7cc6-4de5-93a1-802c04200dcc") diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSaveAndFetchFileSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSaveAndFetchFileSpec.scala index 977b97f3dc..4dd5a0705b 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSaveAndFetchFileSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSaveAndFetchFileSpec.scala @@ -17,6 +17,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.Storage import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.permissions.{read, write} import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientConfig} import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, Tags} import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ @@ -51,8 +52,9 @@ class RemoteStorageSaveAndFetchFileSpec(docker: RemoteStorageDocker) implicit val ec: ExecutionContext = system.dispatcher implicit private val httpConfig: HttpClientConfig = httpClientConfig private val httpClient: HttpClient = HttpClient() - 
private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.test - private val remoteDiskStorageClient = new RemoteDiskStorageClient(httpClient, authTokenProvider) + private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.anonymousForTest + private val remoteDiskStorageClient = + new RemoteDiskStorageClient(httpClient, authTokenProvider, Credentials.Anonymous) private val iri = iri"http://localhost/remote" private val uuid = UUID.fromString("8049ba90-7cc6-4de5-93a1-802c04200dcc") diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteStorageClientSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteStorageClientSpec.scala index e7b9057a5c..6d859a2088 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteStorageClientSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/client/RemoteStorageClientSpec.scala @@ -8,9 +8,9 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.Digest.{Compute import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.DigestAlgorithm import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.AkkaSourceHelpers import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.{FetchFileRejection, MoveFileRejection} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.AuthTokenProvider import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.model.RemoteDiskStorageFileAttributes import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpClientStatusError import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientConfig} import ch.epfl.bluebrain.nexus.delta.sdk.model.ComponentDescription.ServiceDescription @@ -43,7 +43,7 @@ class RemoteStorageClientSpec(docker: RemoteStorageDocker) private var client: RemoteDiskStorageClient = _ private var baseUri: BaseUri = _ - private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.test(None) + private val authTokenProvider: AuthTokenProvider = AuthTokenProvider.anonymousForTest private val bucket: Label = Label.unsafe(BucketName) override protected def beforeAll(): Unit = { @@ -51,7 +51,7 @@ class RemoteStorageClientSpec(docker: RemoteStorageDocker) val httpConfig: HttpClientConfig = httpClientConfig implicit val httpClient: HttpClient = HttpClient()(httpConfig, system, Scheduler.global) - client = new RemoteDiskStorageClient(httpClient, authTokenProvider) + client = new RemoteDiskStorageClient(httpClient, authTokenProvider, Credentials.Anonymous) baseUri = BaseUri(docker.hostConfig.endpoint).rightValue } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala new file mode 100644 index 0000000000..5d2dbd5e67 --- /dev/null +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala @@ -0,0 +1,84 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.auth + +import cats.effect.Clock +import ch.epfl.bluebrain.nexus.delta.kernel.Logger +import 
ch.epfl.bluebrain.nexus.delta.kernel.cache.KeyValueStore
+import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.MigrateEffectSyntax
+import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOUtils
+import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials.ClientCredentials
+import ch.epfl.bluebrain.nexus.delta.sdk.identities.ParsedToken
+import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken
+import monix.bio.UIO
+
+import java.time.{Duration, Instant}
+
+/**
+ * Provides an auth token for the service account, for use when communicating with remote storage
+ */
+trait AuthTokenProvider {
+ def apply(credentials: Credentials): UIO[Option[AuthToken]]
+}
+
+object AuthTokenProvider {
+ def apply(authService: OpenIdAuthService): UIO[AuthTokenProvider] = {
+ KeyValueStore[ClientCredentials, ParsedToken]().map(cache => new CachingOpenIdAuthTokenProvider(authService, cache))
+ }
+ def anonymousForTest: AuthTokenProvider = new AnonymousAuthTokenProvider
+}
+
+private class AnonymousAuthTokenProvider extends AuthTokenProvider {
+ override def apply(credentials: Credentials): UIO[Option[AuthToken]] = UIO.pure(None)
+}
+
+/**
+ * Uses the supplied credentials to get an auth token from an open id service. This token is cached until near-expiry
+ * to speed up operations
+ */
+private class CachingOpenIdAuthTokenProvider(
+ service: OpenIdAuthService,
+ cache: KeyValueStore[ClientCredentials, ParsedToken]
+)(implicit
+ clock: Clock[UIO]
+) extends AuthTokenProvider
+ with MigrateEffectSyntax {
+
+ private val logger = Logger.cats[CachingOpenIdAuthTokenProvider]
+
+ override def apply(credentials: Credentials): UIO[Option[AuthToken]] = {
+
+ credentials match {
+ case Credentials.Anonymous => UIO.pure(None)
+ case Credentials.JWTToken(token) => UIO.pure(Some(AuthToken(token)))
+ case credentials: ClientCredentials => clientCredentialsFlow(credentials)
+ }
+ }
+
+ private def clientCredentialsFlow(credentials: ClientCredentials) = {
+ for {
+ existingValue <- cache.get(credentials)
+ now <- IOUtils.instant
+ finalValue <- existingValue match {
+ case None =>
+ logger.info("Fetching auth token, no initial value.").toUIO >>
+ fetchValue(credentials)
+ case Some(value) if isExpired(value, now) =>
+ logger.info("Fetching new auth token, current value near expiry.").toUIO >>
+ fetchValue(credentials)
+ case Some(value) => UIO.pure(value)
+ }
+ } yield {
+ Some(AuthToken(finalValue.rawToken))
+ }
+ }
+
+ private def fetchValue(credentials: ClientCredentials) = {
+ cache.getOrElseUpdate(credentials, service.auth(credentials))
+ }
+
+ private def isExpired(value: ParsedToken, now: Instant): Boolean = {
+ // minus 10 seconds to account for transport / processing time
+ val cutoffTime = value.expirationTime.minus(Duration.ofSeconds(10))
+
+ now.isAfter(cutoffTime)
+ }
+}
diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/Credentials.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/Credentials.scala
new file mode 100644
index 0000000000..bf4a486b39
--- /dev/null
+++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/Credentials.scala
@@ -0,0 +1,48 @@
+package ch.epfl.bluebrain.nexus.delta.sdk.auth
+
+import ch.epfl.bluebrain.nexus.delta.kernel.Secret
+import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label
+import pureconfig.ConfigReader
+import pureconfig.error.CannotConvert
+import pureconfig.generic.semiauto.deriveReader
+
+import scala.annotation.nowarn
+
+/**
+ * Enumerates the different ways to obtain an auth token for making
requests to a remote service + */ +sealed trait Credentials + +object Credentials { + + /** + * When no auth token should be used + */ + case object Anonymous extends Credentials { + implicit val configReader: ConfigReader[Anonymous.type] = deriveReader[Anonymous.type] + } + + /** + * When a long-lived auth token should be used (legacy, not recommended) + */ + case class JWTToken(token: String) extends Credentials + case object JWTToken { + implicit val configReader: ConfigReader[JWTToken] = deriveReader[JWTToken] + } + + /** + * When client credentials should be exchanged with an OpenId service to obtain an auth token + * @param realm + * the realm which defines the OpenId service + */ + case class ClientCredentials(user: String, password: Secret[String], realm: Label) extends Credentials + object ClientCredentials { + @nowarn("cat=unused") + implicit private val labelConfigReader: ConfigReader[Label] = ConfigReader.fromString(str => + Label(str).left.map(e => CannotConvert(str, classOf[Label].getSimpleName, e.getMessage)) + ) + implicit val configReader: ConfigReader[ClientCredentials] = deriveReader[ClientCredentials] + } + + implicit val configReader: ConfigReader[Credentials] = deriveReader[Credentials] +} diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala new file mode 100644 index 0000000000..e1dc547bb4 --- /dev/null +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala @@ -0,0 +1,76 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.auth + +import akka.http.javadsl.model.headers.HttpCredentials +import akka.http.scaladsl.model.HttpMethods.POST +import akka.http.scaladsl.model.headers.Authorization +import akka.http.scaladsl.model.{HttpRequest, Uri} +import ch.epfl.bluebrain.nexus.delta.kernel.Secret +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.MigrateEffectSyntax +import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials.ClientCredentials +import ch.epfl.bluebrain.nexus.delta.sdk.error.AuthTokenError.{AuthTokenHttpError, AuthTokenNotFoundInResponse, RealmIsDeprecated} +import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient +import ch.epfl.bluebrain.nexus.delta.sdk.identities.ParsedToken +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken +import ch.epfl.bluebrain.nexus.delta.sdk.realms.Realms +import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.Realm +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label +import io.circe.Json +import monix.bio.{IO, UIO} + +/** + * Exchanges client credentials for an auth token with a remote OpenId service, as defined in the specified realm + */ +class OpenIdAuthService(httpClient: HttpClient, realms: Realms) extends MigrateEffectSyntax { + + /** + * Exchanges client credentials for an auth token with a remote OpenId service, as defined in the specified realm + */ + def auth(credentials: ClientCredentials): UIO[ParsedToken] = { + for { + realm <- findRealm(credentials.realm) + response <- requestToken(realm.tokenEndpoint, credentials.user, credentials.password) + parsedToken <- parseResponse(response) + } yield { + parsedToken + } + } + + private def findRealm(id: Label): UIO[Realm] = { + for { + realm <- realms.fetch(id).toUIO + _ <- UIO.when(realm.deprecated)(UIO.terminate(RealmIsDeprecated(realm.value))) + } yield realm.value + } + + private def requestToken(tokenEndpoint: Uri, user: String, password: Secret[String]): UIO[Json] = { + 
httpClient + .toJson( + HttpRequest( + method = POST, + uri = tokenEndpoint, + headers = Authorization(HttpCredentials.createBasicHttpCredentials(user, password.value)) :: Nil, + entity = akka.http.scaladsl.model + .FormData( + Map( + "scope" -> "openid", + "grant_type" -> "client_credentials" + ) + ) + .toEntity + ) + ) + .hideErrorsWith(AuthTokenHttpError) + } + + private def parseResponse(json: Json): UIO[ParsedToken] = { + for { + rawToken <- json.hcursor.get[String]("access_token") match { + case Left(failure) => IO.terminate(AuthTokenNotFoundInResponse(failure)) + case Right(value) => UIO.pure(value) + } + parsedToken <- IO.fromEither(ParsedToken.fromToken(AuthToken(rawToken))).hideErrors + } yield { + parsedToken + } + } +} diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala new file mode 100644 index 0000000000..2f1bd3e3f8 --- /dev/null +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala @@ -0,0 +1,57 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.error + +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder +import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError +import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.Realm +import io.circe.syntax.{EncoderOps, KeyOps} +import io.circe.{DecodingFailure, Encoder, JsonObject} + +sealed abstract class AuthTokenError(reason: String) extends SDKError { + override def getMessage: String = reason +} + +object AuthTokenError { + + /** + * Signals that an HTTP error occurred when fetching the token + */ + final case class AuthTokenHttpError(cause: HttpClientError) + extends AuthTokenError(s"HTTP error when requesting auth token: ${cause.reason}") + + /** + * Signals that the token was missing from the authentication response + */ + final case class AuthTokenNotFoundInResponse(failure: DecodingFailure) + extends AuthTokenError(s"Auth token not found in auth response: ${failure.reason}") + + /** + * Signals that the expiry was missing from the authentication response + */ + final case class ExpiryNotFoundInResponse(failure: DecodingFailure) + extends AuthTokenError(s"Expiry not found in auth response: ${failure.reason}") + + /** + * Signals that the realm specified for authentication is deprecated + */ + final case class RealmIsDeprecated(realm: Realm) + extends AuthTokenError(s"Realm for authentication is deprecated: ${realm.label}") + + implicit val identityErrorEncoder: Encoder.AsObject[AuthTokenError] = { + Encoder.AsObject.instance[AuthTokenError] { + case AuthTokenHttpError(r) => + JsonObject(keywords.tpe := "AuthTokenHttpError", "reason" := r.reason) + case AuthTokenNotFoundInResponse(r) => + JsonObject(keywords.tpe -> "AuthTokenNotFoundInResponse".asJson, "reason" := r.message) + case ExpiryNotFoundInResponse(r) => + JsonObject(keywords.tpe -> "ExpiryNotFoundInResponse".asJson, "reason" := r.message) + case r: RealmIsDeprecated => + JsonObject(keywords.tpe := "RealmIsDeprecated", "reason" := r.getMessage) + } + } + + implicit val identityErrorJsonLdEncoder: JsonLdEncoder[AuthTokenError] = + JsonLdEncoder.computeFromCirce(ContextValue(contexts.error)) +} diff --git 
a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala index a753683306..598139952b 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala @@ -32,7 +32,7 @@ object IdentityError { * @param rejection * the specific reason why the token is invalid */ - final case class InvalidToken(rejection: TokenRejection) extends IdentityError(rejection.reason) + final case class InvalidToken(rejection: TokenRejection) extends IdentityError(rejection.getMessage) implicit val identityErrorEncoder: Encoder.AsObject[IdentityError] = Encoder.AsObject.instance[IdentityError] { diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala index 7d96bdb876..13e13e00b5 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala @@ -28,7 +28,7 @@ object ParsedToken { * @param token * the raw token */ - private[identities] def fromToken(token: AuthToken): Either[TokenRejection, ParsedToken] = { + def fromToken(token: AuthToken): Either[TokenRejection, ParsedToken] = { def parseJwt: Either[TokenRejection, SignedJWT] = Either diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala index 499e4e6472..e4855bafcb 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala @@ -7,6 +7,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder +import ch.epfl.bluebrain.nexus.delta.sdk.error.SDKError import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields import io.circe.syntax._ import io.circe.{Encoder, JsonObject} @@ -17,7 +18,9 @@ import io.circe.{Encoder, JsonObject} * @param reason * a descriptive message for reasons why a token is rejected by the system */ -sealed abstract class TokenRejection(val reason: String) extends Product with Serializable +sealed abstract class TokenRejection(reason: String) extends SDKError with Product with Serializable { + override def getMessage: String = reason +} object TokenRejection { @@ -62,7 +65,7 @@ object TokenRejection { implicit val tokenRejectionEncoder: Encoder.AsObject[TokenRejection] = Encoder.AsObject.instance { r => val tpe = ClassUtils.simpleName(r) - val json = JsonObject.empty.add(keywords.tpe, tpe.asJson).add("reason", r.reason.asJson) + val json = JsonObject.empty.add(keywords.tpe, tpe.asJson).add("reason", r.getMessage.asJson) r match { case InvalidAccessToken(_, _, error) => json.add("details", error.asJson) case _ => json diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfExceptionHandler.scala 
b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfExceptionHandler.scala index 873815cb12..f6696132ea 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfExceptionHandler.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfExceptionHandler.scala @@ -11,7 +11,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.AuthorizationFailed -import ch.epfl.bluebrain.nexus.delta.sdk.error.{IdentityError, ServiceError} +import ch.epfl.bluebrain.nexus.delta.sdk.error.{AuthTokenError, IdentityError, ServiceError} import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import com.typesafe.scalalogging.Logger import io.circe.syntax._ @@ -33,6 +33,7 @@ object RdfExceptionHandler { ): ExceptionHandler = ExceptionHandler { case err: IdentityError => discardEntityAndForceEmit(err) + case err: AuthTokenError => discardEntityAndForceEmit(err) case AuthorizationFailed => discardEntityAndForceEmit(AuthorizationFailed: ServiceError) case err: RdfError => discardEntityAndForceEmit(err) case err: Throwable => diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala index 4a1fd4a19e..b6dc1cf571 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala @@ -25,8 +25,8 @@ class TokenRejectionSpec "be converted to compacted JSON-LD" in { val list = List( - noIssuer -> json"""{"@type": "AccessTokenDoesNotContainSubject", "reason": "${noIssuer.reason}"}""", - invalidFormat -> json"""{"@type": "InvalidAccessTokenFormat", "reason": "${invalidFormat.reason}"}""" + noIssuer -> json"""{"@type": "AccessTokenDoesNotContainSubject", "reason": "${noIssuer.getMessage}"}""", + invalidFormat -> json"""{"@type": "InvalidAccessTokenFormat", "reason": "${invalidFormat.getMessage}"}""" ) forAll(list) { case (rejection, json) => rejection.toCompactedJsonLd.accepted.json shouldEqual json.addContext(contexts.error) @@ -35,8 +35,8 @@ class TokenRejectionSpec "be converted to expanded JSON-LD" in { val list = List( - noIssuer -> json"""[{"@type": ["${nxv + "AccessTokenDoesNotContainSubject"}"], "${nxv + "reason"}": [{"@value": "${noIssuer.reason}"} ] } ]""", - invalidFormat -> json"""[{"@type": ["${nxv + "InvalidAccessTokenFormat"}"], "${nxv + "reason"}": [{"@value": "${invalidFormat.reason}"} ] } ]""" + noIssuer -> json"""[{"@type": ["${nxv + "AccessTokenDoesNotContainSubject"}"], "${nxv + "reason"}": [{"@value": "${noIssuer.getMessage}"} ] } ]""", + invalidFormat -> json"""[{"@type": ["${nxv + "InvalidAccessTokenFormat"}"], "${nxv + "reason"}": [{"@value": "${invalidFormat.getMessage}"} ] } ]""" ) forAll(list) { case (rejection, json) => rejection.toExpandedJsonLd.accepted.json shouldEqual json diff --git a/docs/src/main/paradox/docs/delta/api/storages-api.md b/docs/src/main/paradox/docs/delta/api/storages-api.md index 97623cc149..3fbbbb05b8 100644 --- a/docs/src/main/paradox/docs/delta/api/storages-api.md +++ b/docs/src/main/paradox/docs/delta/api/storages-api.md @@ -64,6 +64,8 @@ While there's no formal specification for 
this service, you can check out or dep @link:[Nexus remote storage service](https://github.com/BlueBrain/nexus/tree/$git.branch$/storage){ open=new }. In order to be able to use this storage, the configuration flag `plugins.storage.storages.remote-disk.enabled` should be set to `true`. +@ref:[More information about configuration](../../getting-started/running-nexus/configuration/index.md#remote-storage-configuration) + ```json { diff --git a/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md b/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md index cfff3d84ae..4021e5ce7c 100644 --- a/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md +++ b/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md @@ -129,7 +129,36 @@ Nexus Delta supports 3 types of storages: 'disk', 'amazon' (s3 compatible) and ' - For disk storages the most relevant configuration flag is `plugins.storage.storages.disk.default-volume`, which defines the default location in the Nexus Delta filesystem where the files using that storage are going to be saved. - For S3 compatible storages the most relevant configuration flags are the ones related to the S3 settings: `plugins.storage.storages.amazon.default-endpoint`, `plugins.storage.storages.amazon.default-access-key` and `plugins.storage.storages.amazon.default-secret-key`. -- For remote disk storages the most relevant configuration flags are `plugins.storage.storages.remote-disk.default-endpoint` (the endpoint where the remote storage service is running) and `plugins.storage.storages.remote-disk.default-credentials` (the Bearer token to authenticate to the remote storage service). +- For remote disk storages the most relevant configuration flags are `plugins.storage.storages.remote-disk.default-endpoint` (the endpoint where the remote storage service is running) and `plugins.storage.storages.remote-disk.credentials` (the method to authenticate to the remote storage service). + +#### Remote storage configuration + +Authentication for remote storage can be specified in three different ways. The value of `plugins.storage.storages.remote-disk.credentials` can be: + +##### Recommended: client credentials (OpenId authentication) +```hocon +{ + type: "client-credentials" + user: "username" + password: "password" + realm: "internal" +} +``` +This configuration tells Delta to log into the `internal` realm (which should have already been defined) with the `user` and `password` credentials, which will give Delta an access token to use when making requests to remote storage + +##### Anonymous +```hocon +{ + type: "anonymous" +} +``` +##### Long-living auth token (legacy) +```hocon +{ + type: "jwt-token" + token: "long-living-auth-token" +} +``` ### Archive plugin configuration diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index 2d73831af8..164b35fc4a 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -114,6 +114,8 @@ Storages can no longer be created with credentials that would get stored: These should instead be defined in the Delta configuration. +@ref:[More information](../getting-started/running-nexus/configuration/index.md#remote-storage-configuration) + ### Graph Analytics The Elasticsearch views behind Graph Analytics can now be queried using the `_search` endpoint. 
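As a hedged illustration of the `credentials` options documented in the remote storage configuration section above (and exercised by the test configuration below), the sketch shows how the three documented HOCON variants could be decoded through the pureconfig readers introduced in this patch. The object name, the `read` helper and the literal HOCON strings are invented for the example, and it assumes this branch's `delta-sdk` module on the classpath together with pureconfig's default `type` discriminator:

```scala
// Hypothetical, self-contained sketch: decode each documented `credentials`
// variant from a HOCON string using the readers defined in Credentials.scala.
import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials
import pureconfig.ConfigSource

object CredentialsConfigSketch extends App {

  // helper assumed for the example; relies on the implicit ConfigReader[Credentials]
  private def read(hocon: String): Credentials =
    ConfigSource.string(hocon).loadOrThrow[Credentials]

  val anonymous = read("""{ type: "anonymous" }""")
  val jwt       = read("""{ type: "jwt-token", token: "long-living-auth-token" }""")
  val client    = read(
    """{ type: "client-credentials", user: "delta", password: "shhh", realm: "internal" }"""
  )

  // Expected variants: Anonymous, JWTToken(...), ClientCredentials(...)
  println(List(anonymous, jwt, client))
}
```

The same decoding is presumably what happens when `plugins.storage.storages.remote-disk.credentials` is loaded at startup, as in the `delta-postgres.conf` change that follows.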
diff --git a/tests/docker/config/delta-postgres.conf b/tests/docker/config/delta-postgres.conf index 08cc2a33be..17f686a802 100644 --- a/tests/docker/config/delta-postgres.conf +++ b/tests/docker/config/delta-postgres.conf @@ -107,8 +107,13 @@ plugins { remote-disk { enabled = true + credentials { + type: "client-credentials" + user: "delta" + password: "shhh" + realm: "internal" + } default-endpoint = "http://storage-service:8080/v1" - default-credentials = "" } amazon { diff --git a/tests/src/test/resources/kg/storages/remote-disk-response.json b/tests/src/test/resources/kg/storages/remote-disk-response.json index b1a151c95d..21d6be9456 100644 --- a/tests/src/test/resources/kg/storages/remote-disk-response.json +++ b/tests/src/test/resources/kg/storages/remote-disk-response.json @@ -15,9 +15,9 @@ "readPermission": "{{read}}", "writePermission": "{{write}}", "_algorithm": "SHA-256", - "_incoming": "{{deltaUri}}/storages/{{project}}/nxv:{{id}}/incoming", - "_outgoing": "{{deltaUri}}/storages/{{project}}/nxv:{{id}}/outgoing", - "_self": "{{deltaUri}}/storages/{{project}}/nxv:{{id}}", + "_incoming": "{{self}}/incoming", + "_outgoing": "{{self}}/outgoing", + "_self": "{{self}}", "_constrainedBy": "https://bluebrain.github.io/nexus/schemas/storages.json", "_project": "{{deltaUri}}/projects/{{project}}", "_rev": 1, diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala index 02f0e44143..d9f7d4019e 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala @@ -11,13 +11,11 @@ import io.circe.generic.semiauto.deriveDecoder import io.circe.{Decoder, Json} import monix.bio.Task import org.scalactic.source.Position -import org.scalatest.{Assertion, Ignore} +import org.scalatest.Assertion import scala.annotation.nowarn import scala.sys.process._ -// Ignore while https://github.com/BlueBrain/nexus/issues/4063 is ongoing -@Ignore class RemoteStorageSpec extends StorageSpec { override def storageName: String = "external" From a69dc9ee42bce587409011198f3973bc6b5c8396 Mon Sep 17 00:00:00 2001 From: Simon Date: Thu, 14 Sep 2023 14:03:03 +0200 Subject: [PATCH 03/13] Run docker system prune after tests (#4272) Co-authored-by: Simon Dumas --- .github/workflows/ci-integration-tests.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-integration-tests.yml b/.github/workflows/ci-integration-tests.yml index b0e0c89125..74de8c7157 100644 --- a/.github/workflows/ci-integration-tests.yml +++ b/.github/workflows/ci-integration-tests.yml @@ -35,4 +35,6 @@ jobs: run: sbt -Dsbt.color=always -Dsbt.supershell=false "project tests" test - name: Stop & clean Docker if: ${{ always() }} - run: docker-compose -f tests/docker/docker-compose.yml down --rmi "local" --volumes \ No newline at end of file + run: | + docker-compose -f tests/docker/docker-compose.yml down --rmi "local" --volumes + docker system prune --force --volumes \ No newline at end of file From 93ac73df7766745edb35e9921a41ce875ce89b59 Mon Sep 17 00:00:00 2001 From: Simon Date: Mon, 18 Sep 2023 09:55:07 +0200 Subject: [PATCH 04/13] Update to sbt 1.9.6 (#4273) * Update to sbt 1.9.6 --------- Co-authored-by: Simon Dumas --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 06969a3771..303541e505 
100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.9.4 +sbt.version = 1.9.6 From 86a4463a79447bd865a46a1d47420331821d587c Mon Sep 17 00:00:00 2001 From: Simon Date: Mon, 18 Sep 2023 13:50:51 +0200 Subject: [PATCH 05/13] Improve Json-LD encoder based on Circe (#4278) * Improve Json-LD encoder based on Circe --------- Co-authored-by: Simon Dumas --- .../rdf/jsonld/encoder/JsonLdEncoder.scala | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/delta/rdf/src/main/scala/ch/epfl/bluebrain/nexus/delta/rdf/jsonld/encoder/JsonLdEncoder.scala b/delta/rdf/src/main/scala/ch/epfl/bluebrain/nexus/delta/rdf/jsonld/encoder/JsonLdEncoder.scala index 0c658de4c5..f310ec0487 100644 --- a/delta/rdf/src/main/scala/ch/epfl/bluebrain/nexus/delta/rdf/jsonld/encoder/JsonLdEncoder.scala +++ b/delta/rdf/src/main/scala/ch/epfl/bluebrain/nexus/delta/rdf/jsonld/encoder/JsonLdEncoder.scala @@ -7,7 +7,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteCon import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.{CompactedJsonLd, ExpandedJsonLd} import ch.epfl.bluebrain.nexus.delta.rdf.syntax._ import ch.epfl.bluebrain.nexus.delta.rdf.{IriOrBNode, RdfError} -import io.circe.Encoder +import io.circe.{Encoder, Json} import io.circe.syntax._ import monix.bio.IO @@ -137,21 +137,31 @@ object JsonLdEncoder { value: A )(implicit opts: JsonLdOptions, api: JsonLdApi, rcr: RemoteContextResolution): IO[RdfError, CompactedJsonLd] = for { - expanded <- expand(value) - compacted <- expanded.toCompacted(context(value)) + (expanded, context) <- expandAndExtractContext(value) + compacted <- expanded.toCompacted(context) } yield compacted override def expand( value: A - )(implicit opts: JsonLdOptions, api: JsonLdApi, rcr: RemoteContextResolution): IO[RdfError, ExpandedJsonLd] = { - val json = value.asJson.replaceContext(context(value).contextObj) - ExpandedJsonLd(json).map { - case expanded if fId(value).isBNode && expanded.rootId.isIri => expanded - case expanded => expanded.replaceId(fId(value)) - } + )(implicit opts: JsonLdOptions, api: JsonLdApi, rcr: RemoteContextResolution): IO[RdfError, ExpandedJsonLd] = + expandAndExtractContext(value).map(_._1) + + private def expandAndExtractContext( + value: A + )(implicit opts: JsonLdOptions, api: JsonLdApi, rcr: RemoteContextResolution) = { + val json = value.asJson + val context = contextFromJson(json) + ExpandedJsonLd(json.replaceContext(context.contextObj)) + .map { + case expanded if fId(value).isBNode && expanded.rootId.isIri => expanded + case expanded => expanded.replaceId(fId(value)) + } + .map(_ -> context) } - override def context(value: A): ContextValue = value.asJson.topContextValueOrEmpty merge ctx + override def context(value: A): ContextValue = contextFromJson(value.asJson) + + private def contextFromJson(json: Json): ContextValue = json.topContextValueOrEmpty merge ctx } implicit val jsonLdEncoderUnit: JsonLdEncoder[Unit] = new JsonLdEncoder[Unit] { From 46d110a6b1102ebdf9110a525d242d0be66dc4da Mon Sep 17 00:00:00 2001 From: Simon Date: Tue, 19 Sep 2023 09:40:48 +0200 Subject: [PATCH 06/13] Enable compression for outgoing requests (#4279) * Enable compression for outgoing requests --------- Co-authored-by: Simon Dumas --- delta/app/src/main/resources/app.conf | 12 +++++++++- .../nexus/delta/wiring/RealmsModule.scala | 2 +- .../src/main/resources/blazegraph.conf | 4 ++-- .../blazegraph/BlazegraphClientSetup.scala | 2 +- .../routes/BlazegraphViewsQuerySpec.scala | 2 +- 
.../src/main/resources/composite-views.conf | 2 +- .../src/main/resources/elasticsearch.conf | 2 +- .../ElasticSearchClientSetup.scala | 2 +- .../plugins/storage/StoragePluginModule.scala | 2 +- .../nexus/delta/sdk/http/HttpClient.scala | 17 ++++++++----- .../delta/sdk/http/HttpClientConfig.scala | 7 +++--- .../nexus/delta/sdk/ConfigFixtures.scala | 2 +- .../delta/sdk/http/HttpClientSetup.scala | 4 ++-- .../nexus/delta/sdk/http/HttpClientSpec.scala | 24 +++++++++++-------- .../docs/releases/v1.9-release-notes.md | 4 ++++ 15 files changed, 55 insertions(+), 33 deletions(-) diff --git a/delta/app/src/main/resources/app.conf b/delta/app/src/main/resources/app.conf index 0dba7f4b8c..687d13f912 100644 --- a/delta/app/src/main/resources/app.conf +++ b/delta/app/src/main/resources/app.conf @@ -310,7 +310,7 @@ app { } defaults { - http-client { + http-client-compression { # the retry strategy for the http client retry = ${app.defaults.constant-retry-strategy} # the strategy to decide if it is worth retrying when an Http error occurs. @@ -320,6 +320,16 @@ app { compression = true } + http-client-no-compression { + # the retry strategy for the http client + retry = ${app.defaults.constant-retry-strategy} + # the strategy to decide if it is worth retrying when an Http error occurs. + # allowed strategies are 'always', 'never' or 'onServerError'. + is-worth-retrying = "onServerError" + # Flag to decide whether or not to support compression + compression = false + } + # default query configuration query { batch-size = 30 diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/RealmsModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/RealmsModule.scala index 6893653285..189ed27c95 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/RealmsModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/RealmsModule.scala @@ -54,7 +54,7 @@ object RealmsModule extends ModuleDef { } make[HttpClient].named("realm").from { (as: ActorSystem[Nothing], sc: Scheduler) => - HttpClient.noRetry()(as.classicSystem, sc) + HttpClient.noRetry(compression = false)(as.classicSystem, sc) } many[SseEncoder[_]].add { base: BaseUri => RealmEvent.sseEncoder(base) } diff --git a/delta/plugins/blazegraph/src/main/resources/blazegraph.conf b/delta/plugins/blazegraph/src/main/resources/blazegraph.conf index e9f11ae05c..85bc4e4432 100644 --- a/delta/plugins/blazegraph/src/main/resources/blazegraph.conf +++ b/delta/plugins/blazegraph/src/main/resources/blazegraph.conf @@ -12,10 +12,10 @@ plugins.blazegraph { #} # configuration of the indexing Blazegraph client - indexing-client = ${app.defaults.http-client} + indexing-client = ${app.defaults.http-client-no-compression} # configuration of the query Blazegraph client - query-client = ${app.defaults.http-client} + query-client = ${app.defaults.http-client-no-compression} query-client.is-worth-retrying = "never" # Blazegraph query timeout diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala index db147c8030..176b02aa9f 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphClientSetup.scala @@ -16,7 +16,7 @@ object BlazegraphClientSetup 
{ def resource()(implicit s: Scheduler): Resource[Task, BlazegraphClient] = { for { - (httpClient, actorSystem) <- HttpClientSetup() + (httpClient, actorSystem) <- HttpClientSetup(compression = false) container <- BlazegraphContainer.resource() } yield { implicit val as: ActorSystem = actorSystem diff --git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala index a132b6b374..1b0a4332fa 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsQuerySpec.scala @@ -72,7 +72,7 @@ class BlazegraphViewsQuerySpec(docker: BlazegraphDocker) } implicit private val sc: Scheduler = Scheduler.global - implicit private val httpConfig: HttpClientConfig = HttpClientConfig(AlwaysGiveUp, HttpClientWorthRetry.never, true) + implicit private val httpConfig: HttpClientConfig = HttpClientConfig(AlwaysGiveUp, HttpClientWorthRetry.never, false) implicit private val baseUri: BaseUri = BaseUri("http://localhost", Label.unsafe("v1")) implicit private val uuidF: UUIDF = UUIDF.random diff --git a/delta/plugins/composite-views/src/main/resources/composite-views.conf b/delta/plugins/composite-views/src/main/resources/composite-views.conf index c691e14bd3..efd83f4928 100644 --- a/delta/plugins/composite-views/src/main/resources/composite-views.conf +++ b/delta/plugins/composite-views/src/main/resources/composite-views.conf @@ -27,7 +27,7 @@ plugins.composite-views { pagination = ${app.defaults.pagination} # the HTTP client configuration for a remote source remote-source-client { - http = ${app.defaults.http-client} + http = ${app.defaults.http-client-compression} retry-delay = 1 minute # the maximum batching size, corresponding to the maximum number of Blazegraph documents uploaded on a bulk request. 
# in this window, duplicated persistence ids are discarded diff --git a/delta/plugins/elasticsearch/src/main/resources/elasticsearch.conf b/delta/plugins/elasticsearch/src/main/resources/elasticsearch.conf index 475c8f33db..92e4e6bf86 100644 --- a/delta/plugins/elasticsearch/src/main/resources/elasticsearch.conf +++ b/delta/plugins/elasticsearch/src/main/resources/elasticsearch.conf @@ -11,7 +11,7 @@ plugins.elasticsearch { # password= "password" # } # configuration of the Elasticsearch client - client = ${app.defaults.http-client} + client = ${app.defaults.http-client-compression} # the elasticsearch event log configuration event-log = ${app.defaults.event-log} # the elasticsearch pagination config diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchClientSetup.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchClientSetup.scala index 4513849b82..fad6c61676 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchClientSetup.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchClientSetup.scala @@ -28,7 +28,7 @@ object ElasticSearchClientSetup extends CirceLiteral { def resource()(implicit s: Scheduler): Resource[Task, ElasticSearchClient] = { for { - (httpClient, actorSystem) <- HttpClientSetup() + (httpClient, actorSystem) <- HttpClientSetup(compression = true) container <- ElasticSearchContainer.resource() } yield { implicit val as: ActorSystem = actorSystem diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala index 768939edc3..537f7b93ac 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala @@ -60,7 +60,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef { make[StorageTypeConfig].from { cfg: StoragePluginConfig => cfg.storages.storageTypeConfig } make[HttpClient].named("storage").from { (as: ActorSystem[Nothing], sc: Scheduler) => - HttpClient.noRetry()(as.classicSystem, sc) + HttpClient.noRetry(compression = false)(as.classicSystem, sc) } make[Storages] diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala index 6e7c2953e1..fb90ce2fdc 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClient.scala @@ -89,8 +89,8 @@ object HttpClient { /** * Construct an Http client using an underlying akka http client which will not retry on failures */ - final def noRetry()(implicit as: ActorSystem, scheduler: Scheduler): HttpClient = { - implicit val config: HttpClientConfig = HttpClientConfig.noRetry + final def noRetry(compression: Boolean)(implicit as: ActorSystem, scheduler: Scheduler): HttpClient = { + implicit val config: HttpClientConfig = HttpClientConfig.noRetry(compression) apply() } @@ -119,11 +119,16 @@ object HttpClient { override def apply[A]( req: HttpRequest )(handleResponse: PartialFunction[HttpResponse, 
HttpResult[A]]): HttpResult[A] = { - val reqCompressionSupport = if (httpConfig.compression) req.addHeader(acceptEncoding) else req + val reqCompressionSupport = + if (httpConfig.compression) { + Coders.Gzip.encodeMessage(req).addHeader(acceptEncoding) + } else + req.addHeader(acceptEncoding) + for { - encodedResp <- client.execute(reqCompressionSupport).mapError(toHttpError(req)) - resp <- decodeResponse(req, encodedResp) - a <- handleResponse.applyOrElse(resp, resp => consumeEntity[A](req, resp)) + encodedResp <- client.execute(reqCompressionSupport).mapError(toHttpError(reqCompressionSupport)) + resp <- decodeResponse(reqCompressionSupport, encodedResp) + a <- handleResponse.applyOrElse(resp, resp => consumeEntity[A](reqCompressionSupport, resp)) } yield a }.retry(httpConfig.strategy) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientConfig.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientConfig.scala index 528690f523..f8c0a3fe15 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientConfig.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientConfig.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.sdk.http -import ch.epfl.bluebrain.nexus.delta.kernel.{RetryStrategy, RetryStrategyConfig} -import com.typesafe.scalalogging.Logger +import ch.epfl.bluebrain.nexus.delta.kernel.{Logger, RetryStrategy, RetryStrategyConfig} import pureconfig.ConfigReader import pureconfig.error.CannotConvert import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientConfig.logger @@ -38,8 +37,8 @@ object HttpClientConfig { private[http] val logger: Logger = Logger[HttpClientConfig] - val noRetry: HttpClientConfig = - HttpClientConfig(RetryStrategyConfig.AlwaysGiveUp, HttpClientWorthRetry.never, compression = true) + def noRetry(compression: Boolean): HttpClientConfig = + HttpClientConfig(RetryStrategyConfig.AlwaysGiveUp, HttpClientWorthRetry.never, compression = compression) @nowarn("cat=unused") implicit private val httpClientWorthRetryConverter: ConfigReader[HttpClientWorthRetry] = diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ConfigFixtures.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ConfigFixtures.scala index 93605f1273..37aad0f789 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ConfigFixtures.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ConfigFixtures.scala @@ -28,7 +28,7 @@ trait ConfigFixtures { ) def httpClientConfig: HttpClientConfig = - HttpClientConfig(RetryStrategyConfig.AlwaysGiveUp, HttpClientWorthRetry.never, true) + HttpClientConfig(RetryStrategyConfig.AlwaysGiveUp, HttpClientWorthRetry.never, false) def fusionConfig: FusionConfig = FusionConfig(Uri("https://bbp.epfl.ch/nexus/web/"), enableRedirects = true) diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSetup.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSetup.scala index ab525921a4..ac794c53cb 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSetup.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSetup.scala @@ -8,9 +8,9 @@ import monix.execution.Scheduler object HttpClientSetup { - def apply()(implicit s: Scheduler): Resource[Task, (HttpClient, ActorSystem)] = { + def apply(compression: Boolean)(implicit s: Scheduler): Resource[Task, (HttpClient, ActorSystem)] 
= { implicit val httpConfig: HttpClientConfig = - HttpClientConfig(RetryStrategyConfig.AlwaysGiveUp, HttpClientWorthRetry.never, compression = true) + HttpClientConfig(RetryStrategyConfig.AlwaysGiveUp, HttpClientWorthRetry.never, compression = compression) Resource .make[Task, ActorSystem](Task.delay(ActorSystem()))((as: ActorSystem) => Task.delay(as.terminate()).void) .map { implicit as => diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSpec.scala index 99bd9e7567..a1e2779055 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/http/HttpClientSpec.scala @@ -47,11 +47,15 @@ class HttpClientSpec private val value2 = Value("second", 2, deprecated = true) private val baseUri = Uri("http://localhost/v1") - private val reqGetValue = HttpRequest(uri = baseUri / s"values/first") + private val getUri = baseUri / s"values/first" + private val reqGetValue = HttpRequest(uri = getUri) private val count = Count() - private val reqStreamValues = HttpRequest(uri = baseUri / "values/events") - private val reqClientError = HttpRequest(uri = baseUri / "values/errors/client") - private val reqServerError = HttpRequest(uri = baseUri / "values/errors/server") + private val streamUri = baseUri / "values/events" + private val reqStreamValues = HttpRequest(uri = streamUri) + private val clientErrorUri = baseUri / "values/errors/client" + private val reqClientError = HttpRequest(uri = clientErrorUri) + private val serverErrorUri = baseUri / "values/errors/server" + private val reqServerError = HttpRequest(uri = serverErrorUri) private def toSource(values: List[Json]): AkkaSource = Source(values.map(j => ByteString(j.noSpaces))) @@ -63,22 +67,22 @@ class HttpClientSpec val httpSingleReq = new HttpSingleRequest { override def execute(request: HttpRequest): Task[HttpResponse] = - request match { - case `reqGetValue` => + request.uri match { + case `getUri` => Task.delay(count.reqGetValue.incrementAndGet()) >> Task(response(HttpEntity(`application/json`, value1.asJson.noSpaces))) - case `reqStreamValues` => + case `streamUri` => Task.delay(count.reqStreamValues.incrementAndGet()) >> Task(response(HttpEntity(`application/octet-stream`, toSource(List(value1.asJson, value2.asJson))))) - case `reqClientError` => + case `clientErrorUri` => Task.delay(count.reqClientError.incrementAndGet()) >> Task(response(HttpEntity(`application/json`, json"""{"error": "client"}""".noSpaces), BadRequest)) - case `reqServerError` => + case `serverErrorUri` => Task.delay(count.reqServerError.incrementAndGet()) >> Task( response(HttpEntity(`application/json`, json"""{"error": "server"}""".noSpaces), InternalServerError) ) - case _ => + case _ => Task.delay(count.reqOtherError.incrementAndGet()) >> Task.raiseError(new IllegalArgumentException("wrong request")) } diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index 164b35fc4a..8e6ae52233 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -65,6 +65,10 @@ To improve indexing performance, the types defined in the are filtered in PostgreSQL rather than in Nexus Delta. This avoids querying for data just to discard it straight away. 
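When `compression` is enabled, the HTTP client change above wraps every outgoing request with Akka HTTP's gzip coder before attaching the `Accept-Encoding` header. A minimal, standalone sketch of that mechanism (illustrative only, not Delta code; the Elasticsearch URL is made up for the example):

```scala
// Standalone illustration of gzip-compressing an outgoing request with Akka HTTP's Coders.
import akka.http.scaladsl.coding.Coders
import akka.http.scaladsl.model._

object GzipRequestSketch extends App {
  val request = HttpRequest(
    method = HttpMethods.POST,
    uri    = Uri("http://localhost:9200/_bulk"), // hypothetical Elasticsearch bulk endpoint
    entity = HttpEntity(ContentTypes.`application/json`, """{"index":{}}""" + "\n")
  )

  // Compresses the entity and adds the `Content-Encoding: gzip` header; the server
  // then receives a smaller payload, which is what reduces I/O during indexing.
  val compressed: HttpRequest = Coders.Gzip.encodeMessage(request)

  println(compressed.headers)
}
```

Responses travel the opposite way through the existing `decodeResponse` step, so callers keep working with plain JSON regardless of the compression setting.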
+#### Compressing requests to Elasticsearch + +The different requests to Elasticsearch are now compressed by default allowing to reduce the I/Os especially during indexing. + ### Composite views To enhance performance of indexing of composite views, Nexus Delta introduces the following features. From 32f1b6c5aaf6c86e93fc5e455ea39c85c975a70a Mon Sep 17 00:00:00 2001 From: Simon Date: Tue, 19 Sep 2023 14:34:13 +0200 Subject: [PATCH 07/13] Better document the use of the schema segment (#4280) * Better document the use of the schema segment --------- Co-authored-by: Simon Dumas --- .../paradox/docs/delta/api/resources-api.md | 24 ++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/docs/src/main/paradox/docs/delta/api/resources-api.md b/docs/src/main/paradox/docs/delta/api/resources-api.md index 4686a05724..c15e325974 100644 --- a/docs/src/main/paradox/docs/delta/api/resources-api.md +++ b/docs/src/main/paradox/docs/delta/api/resources-api.md @@ -23,8 +23,8 @@ Please visit @ref:[Authentication & authorization](authentication.md) section to @@@ note { .warning title="Remote contexts" } -From Delta v1.5, remote contexts are only resolved during creates and updates. -That means that when those get updated, the resources importing them must be also updated to take them into account the new version. +Remote contexts are only resolved during creates and updates. +That means that when those get updated, the resources importing them must be also updated to take them into account in a new version. @@@ @@ -68,6 +68,12 @@ The json payload: - If the `@id` value is not found on the payload, an @id will be generated as follows: `base:{UUID}`. The `base` is the `prefix` defined on the resource's project (`{project_label}`). +The `{schema_id}` segment allows to define an existing SHACL schema to validate the resource with: + +- If `_` is provided, no SHACL validation will be performed +- If another value is provided, Nexus will attempt to resolve the schema then validate the expanded JSON-LD value generated +from the provided payload. + **Example** Request @@ -88,7 +94,9 @@ to specify one. The @id will be specified in the last segment of the endpoint UR PUT /v1/resources/{org_label}/{project_label}/{schema_id}/{resource_id} {...} ``` - + +The `{schema_id}` has the same behaviour as @ref:[the creation using post operation](#create-using-post). + Note that if the payload contains an @id different from the `{resource_id}`, the request will fail. **Example** @@ -116,6 +124,11 @@ PUT /v1/resources/{org_label}/{project_label}/{schema_id}/{resource_id}?rev={pre ``` ... where `{previous_rev}` is the last known revision number for the resource. +The `{schema_id}` segment allows to define an existing SHACL schema to validate the resource with: + +- If `_` is provided, no SHACL validation will be performed with the latest version of its current schema +- If another value is provided, it has to match the identifier of the current schema as changing the schema of a +resource is not currently supported. A different revision or tag of this schema can be provided though. **Example** @@ -252,6 +265,11 @@ where ... `{rev}` and `{tag}` fields cannot be simultaneously present. +The `{schema_id}` segment allows to pass the resource schema: + +- If `_` is provided, the value is ignored +- If another value is provided, it must match the identifier of the resource schema. 
+ **Example** Request From b0f70f906e8f5711415d09a95a73c3116a1fc536 Mon Sep 17 00:00:00 2001 From: dantb Date: Wed, 20 Sep 2023 09:55:00 +0200 Subject: [PATCH 08/13] Update ElasticSearch version to 8.10.1 (#4283) --- .gitignore | 1 + .../testkit/elasticsearch/ElasticSearchContainer.scala | 2 +- .../main/paradox/docs/delta/api/assets/version.json | 2 +- .../running-nexus/docker/docker-compose.yaml | 2 +- tests/README.md | 10 +++++++++- tests/docker/docker-compose.yml | 2 +- 6 files changed, 14 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 7b96776db2..cc70a32e0a 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ native target/ .bloop .metals +metals.sbt project/.bloop .bsp diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/elasticsearch/ElasticSearchContainer.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/elasticsearch/ElasticSearchContainer.scala index 2c2673edbb..a74759a68c 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/elasticsearch/ElasticSearchContainer.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/elasticsearch/ElasticSearchContainer.scala @@ -27,7 +27,7 @@ class ElasticSearchContainer(password: String) } object ElasticSearchContainer { - private val Version = "8.9.1" + private val Version = "8.10.1" val ElasticSearchUser = "elastic" val ElasticSearchPassword = "password" diff --git a/docs/src/main/paradox/docs/delta/api/assets/version.json b/docs/src/main/paradox/docs/delta/api/assets/version.json index 5cdefb61d1..f026c80706 100644 --- a/docs/src/main/paradox/docs/delta/api/assets/version.json +++ b/docs/src/main/paradox/docs/delta/api/assets/version.json @@ -4,7 +4,7 @@ "dependencies": { "blazegraph": "2.1.6-RC", "postgresql": "15.4", - "elasticsearch": "8.9.1", + "elasticsearch": "8.10.1", "remoteStorage": "1.9.0" }, "plugins": { diff --git a/docs/src/main/paradox/docs/getting-started/running-nexus/docker/docker-compose.yaml b/docs/src/main/paradox/docs/getting-started/running-nexus/docker/docker-compose.yaml index f1ef094d64..8eb4289c58 100644 --- a/docs/src/main/paradox/docs/getting-started/running-nexus/docker/docker-compose.yaml +++ b/docs/src/main/paradox/docs/getting-started/running-nexus/docker/docker-compose.yaml @@ -23,7 +23,7 @@ services: memory: 1024M elasticsearch: - image: "docker.elastic.co/elasticsearch/elasticsearch:8.9.1" + image: "docker.elastic.co/elasticsearch/elasticsearch:8.10.1" environment: discovery.type: "single-node" bootstrap.memory_lock: "true" diff --git a/tests/README.md b/tests/README.md index f7bb897d9d..b77777b2c5 100644 --- a/tests/README.md +++ b/tests/README.md @@ -7,7 +7,15 @@ First, run: docker-compose -f docker/docker-compose.yml up -d ``` -To run the all the tests: +Add the following local domains to your `/etc/hosts` file for `S3StorageAccessSpec`: +``` +127.0.0.1 bucket.my-domain.com +127.0.0.1 other.my-domain.com +127.0.0.1 bucket2.my-domain.com +127.0.0.1 bucket3.my-domain.com +``` + +To run all the tests: ```sbtshell test ``` diff --git a/tests/docker/docker-compose.yml b/tests/docker/docker-compose.yml index ee13aa1b8d..91e5828e8e 100644 --- a/tests/docker/docker-compose.yml +++ b/tests/docker/docker-compose.yml @@ -78,7 +78,7 @@ services: - 9090:8080 elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.9.1 + image: docker.elastic.co/elasticsearch/elasticsearch:8.10.1 environment: ES_JAVA_OPTS: "-Xmx2G" discovery.type: "single-node" From 1bf50fd82ab2fce87aff6cd5b7d581d283e50159 Mon Sep 
17 00:00:00 2001 From: dantb Date: Wed, 20 Sep 2023 16:44:59 +0200 Subject: [PATCH 09/13] Replace uses of 'archives/write' permission with 'resources/read' (#4285) Replace uses of 'archives/write' permission with 'resources/read' --- delta/app/src/main/resources/app.conf | 2 -- .../nexus/delta/plugins/archive/model/package.scala | 2 +- .../nexus/delta/plugins/archive/ArchiveRoutesSpec.scala | 4 +--- docs/src/main/paradox/docs/delta/api/archives-api.md | 5 +---- .../delta/api/assets/permissions/permissions-get.json | 1 - docs/src/main/paradox/docs/delta/api/permissions-api.md | 3 --- .../src/main/paradox/docs/releases/v1.9-release-notes.md | 4 ++++ .../bluebrain/nexus/tests/iam/types/AclListing.scala | 9 --------- 8 files changed, 7 insertions(+), 23 deletions(-) diff --git a/delta/app/src/main/resources/app.conf b/delta/app/src/main/resources/app.conf index 687d13f912..53aabd91a0 100644 --- a/delta/app/src/main/resources/app.conf +++ b/delta/app/src/main/resources/app.conf @@ -112,7 +112,6 @@ app { "schemas/write", "files/write", "storages/write", - "archives/write", "version/read", "quotas/read", "supervision/read" @@ -139,7 +138,6 @@ app { "schemas/write", "files/write", "storages/write", - "archives/write", "version/read", "quotas/read" ] diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/package.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/package.scala index 9ff0ac635d..4767391d08 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/package.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/package.scala @@ -38,6 +38,6 @@ package object model { */ object permissions { final val read: Permission = Permissions.resources.read - final val write: Permission = Permission.unsafe("archives/write") + final val write: Permission = Permissions.resources.read } } diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala index 1081083c4a..1755b9d56a 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala @@ -81,9 +81,7 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with TryValue private val perms = Seq( Permissions.resources.write, - Permissions.resources.read, - model.permissions.read, - model.permissions.write + Permissions.resources.read ) private val asSubject = addCredentials(OAuth2BearerToken("user")) diff --git a/docs/src/main/paradox/docs/delta/api/archives-api.md b/docs/src/main/paradox/docs/delta/api/archives-api.md index 6318e0c3f7..a0b71f2f39 100644 --- a/docs/src/main/paradox/docs/delta/api/archives-api.md +++ b/docs/src/main/paradox/docs/delta/api/archives-api.md @@ -10,10 +10,7 @@ Each archive... @@@ note { .tip title="Authorization notes" } -When modifying archives, the caller must have `archives/write` permissions on the current path of the project or the -ancestor paths. 
- -When reading archives, the caller must have `resources/read` permissions on the current path of the project or the +For both reading and modifying archives, the caller must have `resources/read` permissions on the current path of the project or the ancestor paths. Please visit @ref:[Authentication & authorization](authentication.md) section to learn more about it. diff --git a/docs/src/main/paradox/docs/delta/api/assets/permissions/permissions-get.json b/docs/src/main/paradox/docs/delta/api/assets/permissions/permissions-get.json index 740a9e173f..40916aea0c 100644 --- a/docs/src/main/paradox/docs/delta/api/assets/permissions/permissions-get.json +++ b/docs/src/main/paradox/docs/delta/api/assets/permissions/permissions-get.json @@ -18,7 +18,6 @@ "acls/read", "projects/read", "permissions/read", - "archives/write", "organizations/create", "views/query", "storages/write", diff --git a/docs/src/main/paradox/docs/delta/api/permissions-api.md b/docs/src/main/paradox/docs/delta/api/permissions-api.md index 55243b432f..a0a26bfca8 100644 --- a/docs/src/main/paradox/docs/delta/api/permissions-api.md +++ b/docs/src/main/paradox/docs/delta/api/permissions-api.md @@ -66,9 +66,6 @@ Currently, the following permissions are required: - default permissions for storages - `storages/write` -- default permissions for archives - - `archives/write` - - default permissions for the version endpoint - `version/read` diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index 8e6ae52233..4819525804 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -109,6 +109,10 @@ Annotated source is now available as an output format when creating an archive. @ref:[More information](../delta/api/archives-api.md#payload) +#### Require only `resources/read` permission for archive creation + +Creating an archive now requires only the `resources/read` permission instead of `archives/write`. 
+ ### Storages Storages can no longer be created with credentials that would get stored: diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/types/AclListing.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/types/AclListing.scala index d82bd23f6e..5429e8d51f 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/types/AclListing.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/types/AclListing.scala @@ -143,13 +143,6 @@ object Permission { val list: List[Permission] = Write :: Nil } - object Archives { - val name = "archives" - val Write: Permission = Permission(name, "write") - - val list: List[Permission] = Write :: Nil - } - object Quotas { val name = "quotas" val Read: Permission = Permission(name, "read") @@ -178,7 +171,6 @@ object Permission { Schemas.list ++ Views.list ++ Storages.list ++ - Archives.list ++ Quotas.list ++ Supervision.list).toSet @@ -193,7 +185,6 @@ object Permission { Schemas.list ++ Views.list ++ Storages.list ++ - Archives.list ++ Quotas.list).toSet } From d1492e9286647ce9bc67fb5f9fc75397f2837565 Mon Sep 17 00:00:00 2001 From: Daniel Bell Date: Wed, 20 Sep 2023 17:34:29 +0100 Subject: [PATCH 10/13] Update the authentication process for composite views (#4284) * Update the authentication process for composite views includes removing postgres storage encryption as we no longer use this * fix PR issues * test fixes * more fixes * docs * more docs --- delta/app/src/main/resources/app.conf | 6 - .../nexus/delta/config/AppConfig.scala | 2 - .../nexus/delta/wiring/DeltaModule.scala | 2 - .../nexus/delta/wiring/IdentitiesModule.scala | 13 ++- .../src/main/resources/composite-views.conf | 4 + .../compositeviews/CompositeViews.scala | 10 +- .../CompositeViewsPluginModule.scala | 28 ++--- .../ValidateCompositeView.scala | 15 +-- .../compositeviews/client/DeltaClient.scala | 106 +++++++++--------- .../config/CompositeViewsConfig.scala | 4 +- .../compositeviews/model/CompositeView.scala | 3 +- .../model/CompositeViewEvent.scala | 13 +-- .../model/CompositeViewRejection.scala | 8 -- .../model/CompositeViewSource.scala | 15 +-- .../model/CompositeViewSourceFields.scala | 11 +- .../model/CompositeViewState.scala | 5 +- .../model/CompositeViewValue.scala | 15 +-- .../database/named-view-created.json | 3 - .../database/named-view-updated.json | 3 - .../database/view-created.json | 3 - .../composite-views/database/view-state.json | 3 - .../database/view-updated.json | 3 - .../CompositeViewDecodingSpec.scala | 7 +- .../CompositeViewFactorySuite.scala | 5 +- .../CompositeViewsFixture.scala | 16 +-- .../compositeviews/CompositeViewsSpec.scala | 1 - .../client/DeltaClientSpec.scala | 32 +----- .../indexing/CompositeIndexingSuite.scala | 3 +- .../MigrateCompositeViewsSuite.scala | 6 +- .../CompositeViewsSerializationSuite.scala | 8 +- .../routes/CompositeViewsRoutesSpec.scala | 1 - .../SearchScopeInitializationSpec.scala | 1 - .../plugins/storage/StoragePluginModule.scala | 11 +- .../storages/model/StorageRejection.scala | 30 ++--- .../delta/sdk/auth/AuthTokenProvider.scala | 5 +- .../nexus/delta/sdk/crypto/Crypto.scala | 76 ------------- .../delta/sdk/crypto/EncryptionConfig.scala | 22 ---- .../nexus/delta/sdk/crypto/CryptoSpec.scala | 25 ----- .../running-nexus/configuration/index.md | 8 +- docs/src/main/paradox/docs/releases/index.md | 2 +- .../docs/releases/v1.8-to-v1.9-migration.md | 8 ++ .../docs/releases/v1.9-release-notes.md | 10 +- tests/docker/config/delta-postgres.conf | 6 + .../kg/views/composite/composite-view.json 
| 3 +- .../nexus/tests/kg/CompositeViewsSpec.scala | 31 ----- 45 files changed, 170 insertions(+), 422 deletions(-) delete mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/Crypto.scala delete mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/EncryptionConfig.scala delete mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/CryptoSpec.scala diff --git a/delta/app/src/main/resources/app.conf b/delta/app/src/main/resources/app.conf index 53aabd91a0..4bbb20f559 100644 --- a/delta/app/src/main/resources/app.conf +++ b/delta/app/src/main/resources/app.conf @@ -277,12 +277,6 @@ app { } } - # secrets encryption configuration - encryption { - password = "changeme" - salt = "salt" - } - # projection configuration projections { cluster { diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/config/AppConfig.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/config/AppConfig.scala index 6721bc5e89..262108943e 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/config/AppConfig.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/config/AppConfig.scala @@ -3,7 +3,6 @@ package ch.epfl.bluebrain.nexus.delta.config import ch.epfl.bluebrain.nexus.delta.kernel.cache.CacheConfig import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApiConfig import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclsConfig -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.EncryptionConfig import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig import ch.epfl.bluebrain.nexus.delta.sdk.model.ServiceAccountConfig import ch.epfl.bluebrain.nexus.delta.sdk.organizations.OrganizationsConfig @@ -47,7 +46,6 @@ final case class AppConfig( schemas: SchemasConfig, serviceAccount: ServiceAccountConfig, sse: SseConfig, - encryption: EncryptionConfig, projections: ProjectionConfig, fusion: FusionConfig ) diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala index c1988efd43..08c7f0408c 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala @@ -20,7 +20,6 @@ import ch.epfl.bluebrain.nexus.delta.routes.ErrorRoutes import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.Acls -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig import ch.epfl.bluebrain.nexus.delta.sdk.http.StrictEntity import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.ServiceAccount @@ -61,7 +60,6 @@ class DeltaModule(appCfg: AppConfig, config: Config)(implicit classLoader: Class make[BaseUri].from { appCfg.http.baseUri } make[StrictEntity].from { appCfg.http.strictEntityTimeout } make[ServiceAccount].from { appCfg.serviceAccount.value } - make[Crypto].from { appCfg.encryption.crypto } make[Transactors].fromResource { Transactors.init(appCfg.database) diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/IdentitiesModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/IdentitiesModule.scala index 8c4f99737c..8c32951dbf 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/IdentitiesModule.scala +++ 
b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/IdentitiesModule.scala @@ -5,15 +5,17 @@ import akka.http.scaladsl.model.{HttpRequest, Uri} import ch.epfl.bluebrain.nexus.delta.Main.pluginsMaxPriority import ch.epfl.bluebrain.nexus.delta.config.AppConfig import ch.epfl.bluebrain.nexus.delta.kernel.cache.CacheConfig +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.routes.IdentitiesRoutes import ch.epfl.bluebrain.nexus.delta.sdk.PriorityRoute import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, OpenIdAuthService} import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientError} import ch.epfl.bluebrain.nexus.delta.sdk.identities.{Identities, IdentitiesImpl} -import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchParams.RealmSearchParams import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, ResourceF} import ch.epfl.bluebrain.nexus.delta.sdk.realms.Realms @@ -22,7 +24,6 @@ import io.circe.Json import izumi.distage.model.definition.{Id, ModuleDef} import monix.bio.{IO, UIO} import monix.execution.Scheduler -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ /** * Identities module wiring config. @@ -46,6 +47,14 @@ object IdentitiesModule extends ModuleDef { IdentitiesImpl(findActiveRealm, getUserInfo, config) } + make[OpenIdAuthService].from { (httpClient: HttpClient @Id("realm"), realms: Realms) => + new OpenIdAuthService(httpClient, realms) + } + + make[AuthTokenProvider].fromEffect { (authService: OpenIdAuthService) => + AuthTokenProvider(authService) + } + many[RemoteContextResolution].addEffect(ContextValue.fromFile("contexts/identities.json").map { ctx => RemoteContextResolution.fixed(contexts.identities -> ctx) }) diff --git a/delta/plugins/composite-views/src/main/resources/composite-views.conf b/delta/plugins/composite-views/src/main/resources/composite-views.conf index efd83f4928..f4971bd402 100644 --- a/delta/plugins/composite-views/src/main/resources/composite-views.conf +++ b/delta/plugins/composite-views/src/main/resources/composite-views.conf @@ -35,6 +35,10 @@ plugins.composite-views { # the maximum batching duration. 
In this window, duplicated persistence ids are discarded max-time-window = 500 millis } + # the credentials for comminication with the remote source + remote-source-credentials { + type: "anonymous" + } # the minimum allowed value for periodic rebuild strategy min-interval-rebuild = 30 minutes # the interval at which a view will look for requested restarts diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala index f6b08bcfab..7c7e2c650c 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViews.scala @@ -17,7 +17,6 @@ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model._ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.serialization.CompositeViewFieldsJsonLdSourceDecoder import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.ExpandIri @@ -478,15 +477,15 @@ object CompositeViews { } } - def definition(validate: ValidateCompositeView, crypto: Crypto)(implicit + def definition(validate: ValidateCompositeView)(implicit clock: Clock[UIO], uuidF: UUIDF ): ScopedEntityDefinition[Iri, CompositeViewState, CompositeViewCommand, CompositeViewEvent, CompositeViewRejection] = ScopedEntityDefinition( entityType, StateMachine(None, evaluate(validate), next), - CompositeViewEvent.serializer(crypto), - CompositeViewState.serializer(crypto), + CompositeViewEvent.serializer, + CompositeViewState.serializer, Tagger[CompositeViewEvent]( { case r: CompositeViewTagAdded => Some(r.tag -> r.targetRev) @@ -515,7 +514,6 @@ object CompositeViews { fetchContext: FetchContext[CompositeViewRejection], contextResolution: ResolverContextResolution, validate: ValidateCompositeView, - crypto: Crypto, config: CompositeViewsConfig, xas: Transactors )(implicit @@ -530,7 +528,7 @@ object CompositeViews { .map { sourceDecoder => new CompositeViews( ScopedEventLog( - definition(validate, crypto), + definition(validate), config.eventLog, xas ), diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala index 72f9c25294..3f8c506708 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsPluginModule.scala @@ -7,7 +7,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.client.DeltaClient import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.config.CompositeViewsConfig import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.deletion.CompositeViewsDeletionTask -import 
ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing.{CompositeProjectionLifeCycle, CompositeSinks, CompositeSpaces, CompositeViewsCoordinator, MetadataPredicates} +import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing._ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.migration.MigrateCompositeViews import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewRejection.ProjectContextRejection import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model._ @@ -22,7 +22,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, JsonLdCon import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto +import ch.epfl.bluebrain.nexus.delta.sdk.auth.AuthTokenProvider import ch.epfl.bluebrain.nexus.delta.sdk.deletion.ProjectDeletionTask import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig @@ -51,9 +51,15 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { make[CompositeViewsConfig].fromEffect { cfg => CompositeViewsConfig.load(cfg) } - make[DeltaClient].from { (cfg: CompositeViewsConfig, as: ActorSystem[Nothing], sc: Scheduler) => - val httpClient = HttpClient()(cfg.remoteSourceClient.http, as.classicSystem, sc) - DeltaClient(httpClient, cfg.remoteSourceClient.retryDelay)(as, sc) + make[DeltaClient].from { + ( + cfg: CompositeViewsConfig, + as: ActorSystem[Nothing], + sc: Scheduler, + authTokenProvider: AuthTokenProvider + ) => + val httpClient = HttpClient()(cfg.remoteSourceClient.http, as.classicSystem, sc) + DeltaClient(httpClient, authTokenProvider, cfg.remoteSourceCredentials, cfg.remoteSourceClient.retryDelay)(as, sc) } make[BlazegraphClient].named("blazegraph-composite-indexing-client").from { @@ -91,7 +97,6 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { permissions: Permissions, client: ElasticSearchClient, deltaClient: DeltaClient, - crypto: Crypto, config: CompositeViewsConfig, baseUri: BaseUri ) => @@ -101,7 +106,6 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { permissions.fetchPermissionSet, client, deltaClient, - crypto, config.prefix, config.sources.maxSources, config.maxProjections @@ -113,7 +117,6 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { fetchContext: FetchContext[ContextRejection], contextResolution: ResolverContextResolution, validate: ValidateCompositeView, - crypto: Crypto, config: CompositeViewsConfig, xas: Transactors, api: JsonLdApi, @@ -124,7 +127,6 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { fetchContext.mapRejection(ProjectContextRejection), contextResolution, validate, - crypto, config, xas )( @@ -319,15 +321,15 @@ class CompositeViewsPluginModule(priority: Int) extends ModuleDef { )(baseUri, config.pagination, s, cr, ordering) } - make[CompositeView.Shift].from { (views: CompositeViews, base: BaseUri, crypto: Crypto) => - CompositeView.shift(views)(base, crypto) + make[CompositeView.Shift].from { (views: CompositeViews, base: BaseUri) => + CompositeView.shift(views)(base) } many[ResourceShift[_, _, _]].ref[CompositeView.Shift] - many[SseEncoder[_]].add { (crypto: Crypto, base: BaseUri) => CompositeViewEvent.sseEncoder(crypto)(base) } + many[SseEncoder[_]].add { (base: BaseUri) => CompositeViewEvent.sseEncoder(base) } - 
many[ScopedEventMetricEncoder[_]].add { (crypto: Crypto) => CompositeViewEvent.compositeViewMetricEncoder(crypto) } + many[ScopedEventMetricEncoder[_]].add { () => CompositeViewEvent.compositeViewMetricEncoder } many[PriorityRoute].add { ( diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/ValidateCompositeView.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/ValidateCompositeView.scala index c0b9d159c0..924492ab1d 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/ValidateCompositeView.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/ValidateCompositeView.scala @@ -4,12 +4,11 @@ import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.client.DeltaClient import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.indexing.projectionIndex import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjection.{ElasticSearchProjection, SparqlProjection} -import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewRejection.{CrossProjectSourceForbidden, CrossProjectSourceProjectNotFound, DuplicateIds, InvalidElasticSearchProjectionPayload, InvalidEncryptionSecrets, InvalidRemoteProjectSource, PermissionIsNotDefined, TooManyProjections, TooManySources, WrappedElasticSearchClientError} -import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{AccessToken, CrossProjectSource, ProjectSource, RemoteProjectSource} +import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewRejection.{CrossProjectSourceForbidden, CrossProjectSourceProjectNotFound, DuplicateIds, InvalidElasticSearchProjectionPayload, InvalidRemoteProjectSource, PermissionIsNotDefined, TooManyProjections, TooManySources, WrappedElasticSearchClientError} +import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{CrossProjectSource, ProjectSource, RemoteProjectSource} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.{CompositeViewProjection, CompositeViewRejection, CompositeViewSource, CompositeViewValue} import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.client.{ElasticSearchClient, IndexLabel} import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpClientStatusError import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri @@ -37,7 +36,6 @@ object ValidateCompositeView { fetchPermissions: UIO[Set[Permission]], client: ElasticSearchClient, deltaClient: DeltaClient, - crypto: Crypto, prefix: String, maxSources: Int, maxProjections: Int @@ -49,13 +47,6 @@ object ValidateCompositeView { projects.fetch(cpSource.project).mapError(_ => CrossProjectSourceProjectNotFound(cpSource)).void } - def validateCrypto(token: Option[AccessToken]): IO[InvalidEncryptionSecrets.type, Unit] = token match { - case Some(AccessToken(value)) => - IO.fromEither(crypto.encrypt(value.value).flatMap(crypto.decrypt).toEither.void) - .mapError(_ => InvalidEncryptionSecrets) - case None => IO.unit - } - def validatePermission(permission: Permission) = fetchPermissions.flatMap { perms => IO.when(!perms.contains(permission))(IO.raiseError(PermissionIsNotDefined(permission))) @@ -76,7 +67,7 @@ 
object ValidateCompositeView { case _: ProjectSource => IO.unit case cpSource: CrossProjectSource => validateAcls(cpSource) >> validateProject(cpSource) case rs: RemoteProjectSource => - checkRemoteEvent(rs).mapError(InvalidRemoteProjectSource(rs, _)) >> validateCrypto(rs.token) + checkRemoteEvent(rs).mapError(InvalidRemoteProjectSource(rs, _)) } val validateProjection: CompositeViewProjection => IO[CompositeViewRejection, Unit] = { diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClient.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClient.scala index f387d47fcc..abce4fdd32 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClient.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClient.scala @@ -5,17 +5,17 @@ import akka.http.scaladsl.client.RequestBuilding.{Get, Head} import akka.http.scaladsl.model.ContentTypes.`application/json` import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model.headers.{`Last-Event-ID`, Accept} -import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes, Uri} +import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes} import akka.stream.alpakka.sse.scaladsl.EventSource import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.RemoteProjectSource import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.stream.CompositeBranch import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.RdfMediaTypes import ch.epfl.bluebrain.nexus.delta.rdf.graph.NQuads +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient.HttpResult import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpClientStatusError -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectStatistics import ch.epfl.bluebrain.nexus.delta.sdk.stream.StreamConverter import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ @@ -24,7 +24,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset.Start import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{Elem, RemainingElems} import com.typesafe.scalalogging.Logger -import io.circe.Json import io.circe.parser.decode import fs2._ import monix.bio.{IO, UIO} @@ -72,12 +71,6 @@ trait DeltaClient { * Fetches a resource with a given id in n-quads format. */ def resourceAsNQuads(source: RemoteProjectSource, id: Iri): HttpResult[Option[NQuads]] - - /** - * Fetches a resource with a given id in n-quads format. 
- */ - def resourceAsJson(source: RemoteProjectSource, id: Iri): HttpResult[Option[Json]] - } object DeltaClient { @@ -86,35 +79,47 @@ object DeltaClient { private val accept = Accept(`application/json`.mediaType, RdfMediaTypes.`application/ld+json`) - final private class DeltaClientImpl(client: HttpClient, retryDelay: FiniteDuration)(implicit + final private class DeltaClientImpl( + client: HttpClient, + authTokenProvider: AuthTokenProvider, + credentials: Credentials, + retryDelay: FiniteDuration + )(implicit as: ActorSystem[Nothing], scheduler: Scheduler ) extends DeltaClient { override def projectStatistics(source: RemoteProjectSource): HttpResult[ProjectStatistics] = { - implicit val cred: Option[AuthToken] = token(source) - val statisticsEndpoint: HttpRequest = - Get( - source.endpoint / "projects" / source.project.organization.value / source.project.project.value / "statistics" - ).addHeader(accept).withCredentials - client.fromJsonTo[ProjectStatistics](statisticsEndpoint) + for { + authToken <- authTokenProvider(credentials) + request = + Get( + source.endpoint / "projects" / source.project.organization.value / source.project.project.value / "statistics" + ).addHeader(accept).withCredentials(authToken) + result <- client.fromJsonTo[ProjectStatistics](request) + } yield { + result + } } override def remaining(source: RemoteProjectSource, offset: Offset): HttpResult[RemainingElems] = { - implicit val cred: Option[AuthToken] = token(source) - val remainingEndpoint: HttpRequest = - Get(elemAddress(source) / "remaining") - .addHeader(accept) - .addHeader(`Last-Event-ID`(offset.value.toString)) - .withCredentials - client.fromJsonTo[RemainingElems](remainingEndpoint) + for { + authToken <- authTokenProvider(credentials) + request = Get(elemAddress(source) / "remaining") + .addHeader(accept) + .addHeader(`Last-Event-ID`(offset.value.toString)) + .withCredentials(authToken) + result <- client.fromJsonTo[RemainingElems](request) + } yield result } override def checkElems(source: RemoteProjectSource): HttpResult[Unit] = { - implicit val cred: Option[AuthToken] = token(source) - client(Head(elemAddress(source)).withCredentials) { - case resp if resp.status.isSuccess() => UIO.delay(resp.discardEntityBytes()) >> IO.unit - } + for { + authToken <- authTokenProvider(credentials) + result <- client(Head(elemAddress(source)).withCredentials(authToken)) { + case resp if resp.status.isSuccess() => UIO.delay(resp.discardEntityBytes()) >> IO.unit + } + } yield result } override def elems(source: RemoteProjectSource, run: CompositeBranch.Run, offset: Offset): ElemStream[Unit] = { @@ -123,10 +128,11 @@ object DeltaClient { case Offset.At(value) => Some(value.toString) } - implicit val cred: Option[AuthToken] = token(source) - def send(request: HttpRequest): Future[HttpResponse] = { - client[HttpResponse](request.withCredentials)(IO.pure(_)).runToFuture + (for { + authToken <- authTokenProvider(credentials) + result <- client[HttpResponse](request.withCredentials(authToken))(IO.pure(_)) + } yield result).runToFuture } val suffix = run match { @@ -155,37 +161,31 @@ object DeltaClient { .withQuery(typeQuery(source.selectFilter.types)) override def resourceAsNQuads(source: RemoteProjectSource, id: Iri): HttpResult[Option[NQuads]] = { - implicit val cred: Option[AuthToken] = token(source) - val resourceUrl: Uri = + val resourceUrl = source.endpoint / "resources" / source.project.organization.value / source.project.project.value / "_" / id.toString - val req = Get( - source.resourceTag.fold(resourceUrl)(t => 
resourceUrl.withQuery(Query("tag" -> t.value))) - ).addHeader(Accept(RdfMediaTypes.`application/n-quads`)).withCredentials - client.fromEntityTo[String](req).map(nq => Some(NQuads(nq, id))).onErrorRecover { - case HttpClientStatusError(_, StatusCodes.NotFound, _) => None - } + for { + authToken <- authTokenProvider(credentials) + req = Get( + source.resourceTag.fold(resourceUrl)(t => resourceUrl.withQuery(Query("tag" -> t.value))) + ).addHeader(Accept(RdfMediaTypes.`application/n-quads`)).withCredentials(authToken) + result <- client.fromEntityTo[String](req).map(nq => Some(NQuads(nq, id))).onErrorRecover { + case HttpClientStatusError(_, StatusCodes.NotFound, _) => None + } + } yield result } - - override def resourceAsJson(source: RemoteProjectSource, id: Iri): HttpResult[Option[Json]] = { - implicit val cred: Option[AuthToken] = token(source) - val req = Get( - source.endpoint / "resources" / source.project.organization.value / source.project.project.value / "_" / id.toString - ).addHeader(accept).withCredentials - client.toJson(req).map(Some(_)).onErrorRecover { case HttpClientStatusError(_, StatusCodes.NotFound, _) => - None - } - } - - private def token(source: RemoteProjectSource) = - source.token.map { token => AuthToken(token.value.value) } } /** * Factory method for delta clients. */ - def apply(client: HttpClient, retryDelay: FiniteDuration)(implicit + def apply( + client: HttpClient, + authTokenProvider: AuthTokenProvider, + credentials: Credentials, + retryDelay: FiniteDuration + )(implicit as: ActorSystem[Nothing], sc: Scheduler ): DeltaClient = - new DeltaClientImpl(client, retryDelay) + new DeltaClientImpl(client, authTokenProvider, credentials, retryDelay) } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/config/CompositeViewsConfig.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/config/CompositeViewsConfig.scala index 5e89ea192a..3fc003b23e 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/config/CompositeViewsConfig.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/config/CompositeViewsConfig.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.Uri import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.config.BlazegraphViewsConfig.Credentials import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.config.CompositeViewsConfig.SinkConfig.SinkConfig import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.config.CompositeViewsConfig.{BlazegraphAccess, RemoteSourceClientConfig, SourcesConfig} +import ch.epfl.bluebrain.nexus.delta.sdk.auth import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientConfig import ch.epfl.bluebrain.nexus.delta.sdk.instances._ import ch.epfl.bluebrain.nexus.delta.sdk.model.search.PaginationConfig @@ -60,7 +61,8 @@ final case class CompositeViewsConfig( elasticsearchBatch: BatchConfig, restartCheckInterval: FiniteDuration, indexingEnabled: Boolean, - sinkConfig: SinkConfig + sinkConfig: SinkConfig, + remoteSourceCredentials: auth.Credentials ) object CompositeViewsConfig { diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeView.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeView.scala index c9dd433390..ccfabdea33 100644 --- 
a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeView.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeView.scala @@ -12,7 +12,6 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteCon import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.{CompactedJsonLd, ExpandedJsonLd} import ch.epfl.bluebrain.nexus.delta.sdk.ResourceShift -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.JsonLdContent import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegmentRef, Tags} import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ @@ -168,7 +167,7 @@ object CompositeView { type Shift = ResourceShift[CompositeViewState, CompositeView, Metadata] - def shift(views: CompositeViews)(implicit baseUri: BaseUri, crypto: Crypto): Shift = + def shift(views: CompositeViews)(implicit baseUri: BaseUri): Shift = ResourceShift.withMetadata[CompositeViewState, CompositeView, Metadata]( CompositeViews.entityType, (ref, project) => views.fetch(IdSegmentRef(ref), project), diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewEvent.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewEvent.scala index a0d9aa1877..b34900d288 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewEvent.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewEvent.scala @@ -6,7 +6,6 @@ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.instances._ import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.IriEncoder import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri @@ -170,17 +169,17 @@ object CompositeViewEvent { ) extends CompositeViewEvent @nowarn("cat=unused") - def serializer(crypto: Crypto): Serializer[Iri, CompositeViewEvent] = { + val serializer: Serializer[Iri, CompositeViewEvent] = { import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Database._ implicit val configuration: Configuration = Serializer.circeConfiguration - implicit val compositeViewValueCodec: Codec[CompositeViewValue] = CompositeViewValue.databaseCodec(crypto) + implicit val compositeViewValueCodec: Codec[CompositeViewValue] = CompositeViewValue.databaseCodec() implicit val codec: Codec.AsObject[CompositeViewEvent] = deriveConfiguredCodec[CompositeViewEvent] Serializer.dropNulls() } - def compositeViewMetricEncoder(crypto: Crypto): ScopedEventMetricEncoder[CompositeViewEvent] = + val compositeViewMetricEncoder: ScopedEventMetricEncoder[CompositeViewEvent] = new ScopedEventMetricEncoder[CompositeViewEvent] { - override def databaseDecoder: Decoder[CompositeViewEvent] = serializer(crypto).codec + override def databaseDecoder: Decoder[CompositeViewEvent] = serializer.codec override def entityType: EntityType = CompositeViews.entityType @@ -199,9 +198,9 @@ object CompositeViewEvent { ) } - def 
sseEncoder(crypto: Crypto)(implicit base: BaseUri): SseEncoder[CompositeViewEvent] = + def sseEncoder(implicit base: BaseUri): SseEncoder[CompositeViewEvent] = new SseEncoder[CompositeViewEvent] { - override val databaseDecoder: Decoder[CompositeViewEvent] = serializer(crypto: Crypto).codec + override val databaseDecoder: Decoder[CompositeViewEvent] = serializer.codec override def entityType: EntityType = CompositeViews.entityType diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewRejection.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewRejection.scala index fbf83d593c..20930cc620 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewRejection.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewRejection.scala @@ -204,14 +204,6 @@ object CompositeViewRejection { s"RemoteProjectSource ${remoteProjectSource.tpe} is invalid: either provided endpoint '${remoteProjectSource.endpoint}' is invalid or there are insufficient permissions to access this endpoint. " ) - /** - * Signals a rejection caused by the failure to encrypt/decrypt sensitive data (credentials) - */ - final case object InvalidEncryptionSecrets - extends CompositeViewSourceRejection( - s"Composite view plugin is using incorrect system secrets. Please contact the system administrator." - ) - /** * Rejection signalling that a projection is invalid. */ diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSource.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSource.scala index 42223c1bc6..50afa7a41a 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSource.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSource.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.delta.kernel.Secret import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSourceFields.{CrossProjectSourceFields, ProjectSourceFields, RemoteProjectSourceFields} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.SourceType._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri @@ -14,7 +13,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.{Latest, UserTag} import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter import ch.epfl.bluebrain.nexus.delta.sourcing.stream.PipeChain -import io.circe.{Encoder, Json} +import io.circe.Encoder import java.util.UUID import scala.annotation.nowarn @@ -202,8 +201,7 @@ object CompositeViewSource { resourceTag: Option[UserTag], includeDeprecated: Boolean, project: ProjectRef, - endpoint: Uri, - token: Option[AccessToken] + endpoint: Uri ) extends CompositeViewSource { override def tpe: SourceType = RemoteProjectSourceType @@ -213,7 +211,6 @@ object CompositeViewSource { Some(id), project, endpoint, - token.map(_.value), resourceSchemas, resourceTypes, resourceTag, @@ -221,11 +218,6 @@ object 
CompositeViewSource { ) } - final case class AccessToken(value: Secret[String]) - - @nowarn("cat=unused") - implicit private val accessTokenEncoder: Encoder[AccessToken] = Encoder.instance(_ => Json.Null) - @nowarn("cat=unused") implicit final def sourceEncoder(implicit base: BaseUri): Encoder.AsObject[CompositeViewSource] = { import io.circe.generic.extras.Configuration @@ -250,8 +242,7 @@ object CompositeViewSource { @nowarn("cat=unused") implicit final val sourceLdDecoder: JsonLdDecoder[CompositeViewSource] = { - implicit val identityLdDecoder: JsonLdDecoder[Identity] = deriveDefaultJsonLdDecoder[Identity] - implicit val accessTokenLdDecoder: JsonLdDecoder[AccessToken] = deriveDefaultJsonLdDecoder[AccessToken] + implicit val identityLdDecoder: JsonLdDecoder[Identity] = deriveDefaultJsonLdDecoder[Identity] deriveDefaultJsonLdDecoder[CompositeViewSource] } } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSourceFields.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSourceFields.scala index 246d73648f..cc108102e3 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSourceFields.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewSourceFields.scala @@ -1,8 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.delta.kernel.Secret -import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{AccessToken, CrossProjectSource, ProjectSource, RemoteProjectSource} +import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{CrossProjectSource, ProjectSource, RemoteProjectSource} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.SourceType._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords @@ -15,7 +14,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity import io.circe.Encoder -import io.circe.generic.semiauto.deriveEncoder import java.util.UUID import scala.annotation.nowarn @@ -125,7 +123,6 @@ object CompositeViewSourceFields { id: Option[Iri] = None, project: ProjectRef, endpoint: Uri, - token: Option[Secret[String]] = None, resourceSchemas: Set[Iri] = Set.empty, resourceTypes: Set[Iri] = Set.empty, resourceTag: Option[UserTag] = None, @@ -141,14 +138,10 @@ object CompositeViewSourceFields { resourceTag, includeDeprecated, project, - endpoint, - token.map(AccessToken) + endpoint ) } - @nowarn("cat=unused") - implicit private val accessTokenEncoder: Encoder[AccessToken] = deriveEncoder[AccessToken] - @nowarn("cat=unused") implicit final def sourceEncoder(implicit base: BaseUri): Encoder.AsObject[CompositeViewSourceFields] = { import io.circe.generic.extras.Configuration diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewState.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewState.scala index a7afe90638..0d66b0bd59 100644 --- 
a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewState.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewState.scala @@ -3,7 +3,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.model.{ResourceF, ResourceUris, Tags} import ch.epfl.bluebrain.nexus.delta.sourcing.Serializer import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject @@ -98,10 +97,10 @@ final case class CompositeViewState( object CompositeViewState { @nowarn("cat=unused") - implicit def serializer(implicit crypto: Crypto): Serializer[Iri, CompositeViewState] = { + implicit val serializer: Serializer[Iri, CompositeViewState] = { import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Database._ implicit val configuration: Configuration = Serializer.circeConfiguration - implicit val compositeViewValueCodec: Codec[CompositeViewValue] = CompositeViewValue.databaseCodec(crypto) + implicit val compositeViewValueCodec: Codec[CompositeViewValue] = CompositeViewValue.databaseCodec() implicit val codec: Codec.AsObject[CompositeViewState] = deriveConfiguredCodec[CompositeViewState] Serializer.dropNullsInjectType() } diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewValue.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewValue.scala index 67c6872582..3062181e27 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewValue.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewValue.scala @@ -1,12 +1,9 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model import cats.data.NonEmptyMap -import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.delta.kernel.Secret +//import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeView.RebuildStrategy -import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.AccessToken import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.views.IndexingRev import io.circe.generic.extras.Configuration @@ -39,16 +36,8 @@ object CompositeViewValue { @SuppressWarnings(Array("TryGet")) @nowarn("cat=unused") - def databaseCodec(crypto: Crypto)(implicit configuration: Configuration): Codec[CompositeViewValue] = { + def databaseCodec()(implicit configuration: Configuration): Codec[CompositeViewValue] = { import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Database._ - implicit val stringSecretEncryptEncoder: Encoder[Secret[String]] = Encoder.encodeString.contramap { - case Secret(value) => crypto.encrypt(value).get - } - implicit val stringSecretDecryptDecoder: Decoder[Secret[String]] = - Decoder.decodeString.emap(str => crypto.decrypt(str).map(Secret(_)).toEither.leftMap(_.getMessage)) - - implicit val accessTokenCodec: 
Codec.AsObject[AccessToken] = deriveConfiguredCodec[AccessToken] - implicit val finiteDurationEncoder: Encoder[FiniteDuration] = Encoder.encodeString.contramap(_.toString()) implicit val finiteDurationDecoder: Decoder[FiniteDuration] = Decoder.decodeString.emap { s => Duration(s) match { diff --git a/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-created.json b/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-created.json index 59402092db..eab09732c1 100644 --- a/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-created.json +++ b/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-created.json @@ -143,9 +143,6 @@ ], "resourceTypes": [ ], - "token": { - "value": "vv/MQBHmWaNm+TX/EigReQ==" - }, "uuid": "f8468909-a797-4b10-8b5f-000cba337bfa" }, { diff --git a/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-updated.json b/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-updated.json index 3f5899facf..d9e1fc851e 100644 --- a/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-updated.json +++ b/delta/plugins/composite-views/src/test/resources/composite-views/database/named-view-updated.json @@ -143,9 +143,6 @@ ], "resourceTypes": [ ], - "token": { - "value": "vv/MQBHmWaNm+TX/EigReQ==" - }, "uuid": "f8468909-a797-4b10-8b5f-000cba337bfa" }, { diff --git a/delta/plugins/composite-views/src/test/resources/composite-views/database/view-created.json b/delta/plugins/composite-views/src/test/resources/composite-views/database/view-created.json index 76eca6aac5..2d57981259 100644 --- a/delta/plugins/composite-views/src/test/resources/composite-views/database/view-created.json +++ b/delta/plugins/composite-views/src/test/resources/composite-views/database/view-created.json @@ -141,9 +141,6 @@ ], "resourceTypes": [ ], - "token": { - "value": "vv/MQBHmWaNm+TX/EigReQ==" - }, "uuid": "f8468909-a797-4b10-8b5f-000cba337bfa" }, { diff --git a/delta/plugins/composite-views/src/test/resources/composite-views/database/view-state.json b/delta/plugins/composite-views/src/test/resources/composite-views/database/view-state.json index 2f835e6ef7..4c26c2ad28 100644 --- a/delta/plugins/composite-views/src/test/resources/composite-views/database/view-state.json +++ b/delta/plugins/composite-views/src/test/resources/composite-views/database/view-state.json @@ -154,9 +154,6 @@ ], "resourceTypes": [ ], - "token": { - "value": "vv/MQBHmWaNm+TX/EigReQ==" - }, "uuid": "f8468909-a797-4b10-8b5f-000cba337bfa" }, { diff --git a/delta/plugins/composite-views/src/test/resources/composite-views/database/view-updated.json b/delta/plugins/composite-views/src/test/resources/composite-views/database/view-updated.json index 2d937a8985..010c5f7b94 100644 --- a/delta/plugins/composite-views/src/test/resources/composite-views/database/view-updated.json +++ b/delta/plugins/composite-views/src/test/resources/composite-views/database/view-updated.json @@ -141,9 +141,6 @@ ], "resourceTypes": [ ], - "token": { - "value": "vv/MQBHmWaNm+TX/EigReQ==" - }, "uuid": "f8468909-a797-4b10-8b5f-000cba337bfa" }, { diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala index 01f8e4c8bc..ff2f89174b 
100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala @@ -2,7 +2,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews import akka.http.scaladsl.model.Uri import cats.data.NonEmptyList -import ch.epfl.bluebrain.nexus.delta.kernel.Secret import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeView.Interval import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjectionFields.{ElasticSearchProjectionFields, SparqlProjectionFields} @@ -112,8 +111,7 @@ class CompositeViewDecodingSpec RemoteProjectSourceFields( Some(iri"http://music.com/sources/songs"), ProjectGen.project("remote_demo", "songs").ref, - Uri("https://example2.nexus.com"), - Some(Secret("mytoken")) + Uri("https://example2.nexus.com") ) ), NonEmptyList.of( @@ -146,8 +144,7 @@ class CompositeViewDecodingSpec RemoteProjectSourceFields( None, ProjectGen.project("remote_demo", "songs").ref, - Uri("https://example2.nexus.com"), - Some(Secret("mytoken")) + Uri("https://example2.nexus.com") ) ), NonEmptyList.of( diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewFactorySuite.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewFactorySuite.scala index d291ad3bac..cc42d2b4e3 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewFactorySuite.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewFactorySuite.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.delta.kernel.Secret import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjection._ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjectionFields.{ElasticSearchProjectionFields, SparqlProjectionFields} @@ -60,7 +59,6 @@ class CompositeViewFactorySuite extends BioSuite { Some(remoteSourceId), ProjectRef.unsafe("org", "remoteproject"), Uri("http://example.com/remote-endpoint"), - Some(Secret("secret token")), schemas, types, tag, @@ -140,8 +138,7 @@ class CompositeViewFactorySuite extends BioSuite { tag, includeDeprecated, remoteSourceFields.project, - remoteSourceFields.endpoint, - remoteSourceFields.token.map(s => AccessToken(s)) + remoteSourceFields.endpoint ) ) } diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsFixture.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsFixture.scala index 0239c9ccc1..e11fb62765 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsFixture.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsFixture.scala @@ -2,20 +2,19 @@ package ch.epfl.bluebrain.nexus.delta.plugins.compositeviews import akka.http.scaladsl.model.Uri import cats.data.NonEmptyList 
-import ch.epfl.bluebrain.nexus.delta.kernel.Secret import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.config.CompositeViewsConfig import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.config.CompositeViewsConfig.{BlazegraphAccess, RemoteSourceClientConfig, SinkConfig, SourcesConfig} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeView.Interval import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjection.{ElasticSearchProjection, SparqlProjection} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewProjectionFields.{ElasticSearchProjectionFields, SparqlProjectionFields} -import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{AccessToken, CrossProjectSource, ProjectSource, RemoteProjectSource} +import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{CrossProjectSource, ProjectSource, RemoteProjectSource} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSourceFields.{CrossProjectSourceFields, ProjectSourceFields, RemoteProjectSourceFields} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.{permissions, CompositeViewFields, TemplateSparqlConstructQuery} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue.ContextObject import ch.epfl.bluebrain.nexus.delta.rdf.syntax._ import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto +import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials import ch.epfl.bluebrain.nexus.delta.sdk.generators.ProjectGen import ch.epfl.bluebrain.nexus.delta.sdk.views.IndexingRev import ch.epfl.bluebrain.nexus.delta.sourcing.config.BatchConfig @@ -32,8 +31,6 @@ import scala.concurrent.duration._ trait CompositeViewsFixture extends ConfigFixtures with EitherValuable { - val crypto: Crypto = Crypto("changeme", "salt") - val alwaysValidate: ValidateCompositeView = (_, _) => IO.unit val query = @@ -70,8 +67,7 @@ trait CompositeViewsFixture extends ConfigFixtures with EitherValuable { val remoteProjectFields = RemoteProjectSourceFields( Some(iri"http://example.com/remote-project-source"), ProjectRef(Label.unsafe("org"), Label.unsafe("remoteproject")), - Uri("http://example.com/remote-endpoint"), - Some(Secret("secret token")) + Uri("http://example.com/remote-endpoint") ) val esProjectionFields = ElasticSearchProjectionFields( @@ -125,8 +121,7 @@ trait CompositeViewsFixture extends ConfigFixtures with EitherValuable { None, false, ProjectRef(Label.unsafe("org"), Label.unsafe("remoteproject")), - Uri("http://example.com/remote-endpoint"), - Some(AccessToken(Secret("secret token"))) + Uri("http://example.com/remote-endpoint") ) val esProjection = ElasticSearchProjection( @@ -182,7 +177,8 @@ trait CompositeViewsFixture extends ConfigFixtures with EitherValuable { batchConfig, 3.seconds, false, - SinkConfig.Batch + SinkConfig.Batch, + Credentials.Anonymous ) } diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsSpec.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsSpec.scala index 47a97b864a..68602a63fe 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsSpec.scala +++ 
b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewsSpec.scala @@ -59,7 +59,6 @@ class CompositeViewsSpec fetchContext, ResolverContextResolution(rcr), alwaysValidate, - crypto, config, xas ).accepted diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClientSpec.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClientSpec.scala index 89884a9d4f..2403661675 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClientSpec.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/client/DeltaClientSpec.scala @@ -11,12 +11,12 @@ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.RouteResult import akka.stream.scaladsl.Source import akka.testkit.TestKit -import ch.epfl.bluebrain.nexus.delta.kernel.Secret -import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.{AccessToken, RemoteProjectSource} +import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewSource.RemoteProjectSource import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.stream.CompositeBranch import ch.epfl.bluebrain.nexus.delta.rdf.RdfMediaTypes import ch.epfl.bluebrain.nexus.delta.rdf.graph.NQuads import ch.epfl.bluebrain.nexus.delta.sdk.ConfigFixtures +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.http.{HttpClient, HttpClientConfig} import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.QueryParamsUnmarshalling import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectStatistics @@ -141,7 +141,8 @@ class DeltaClientSpec } implicit private val httpCfg: HttpClientConfig = httpClientConfig - private val deltaClient = DeltaClient(HttpClient(), 1.second) + private val deltaClient = + DeltaClient(HttpClient(), AuthTokenProvider.fixedForTest(token), Credentials.Anonymous, 1.second) private val source = RemoteProjectSource( iri"http://example.com/remote-project-source", @@ -151,14 +152,11 @@ class DeltaClientSpec None, includeDeprecated = false, project, - Uri("http://localhost:8080/v1"), - Some(AccessToken(Secret(token))) + Uri("http://localhost:8080/v1") ) private val unknownProjectSource = source.copy(project = ProjectRef.unsafe("org", "unknown")) - private val unknownToken = source.copy(token = Some(AccessToken(Secret("invalid")))) - "Getting project statistics" should { "work" in { @@ -168,10 +166,6 @@ class DeltaClientSpec "fail if project is unknown" in { deltaClient.projectStatistics(unknownProjectSource).rejected.errorCode.value shouldEqual StatusCodes.NotFound } - - "fail if token is invalid" in { - deltaClient.projectStatistics(unknownToken).rejected.errorCode.value shouldEqual StatusCodes.Forbidden - } } "Getting remaining information" should { @@ -187,10 +181,6 @@ class DeltaClientSpec .errorCode .value shouldEqual StatusCodes.NotFound } - - "fail if token is invalid" in { - deltaClient.remaining(unknownToken, Offset.Start).rejected.errorCode.value shouldEqual StatusCodes.Forbidden - } } "Getting elems" should { @@ -215,23 +205,11 @@ class DeltaClientSpec "return None if tag doesn't exist" in { deltaClient.resourceAsNQuads(source.copy(resourceTag = invalidTag), resourceId).accepted shouldEqual None } - - "fail if token is invalid" in { - deltaClient 
- .resourceAsNQuads(unknownToken, resourceId) - .rejected - .errorCode - .value shouldEqual StatusCodes.Forbidden - } } "Checking elems" should { "work" in { deltaClient.checkElems(source).accepted } - "fail if token is invalid" in { - deltaClient.checkElems(unknownToken).rejected.errorCode.value shouldEqual StatusCodes.Forbidden - } } - } diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala index f7acf381ce..75029dc674 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/indexing/CompositeIndexingSuite.scala @@ -279,8 +279,7 @@ abstract class CompositeIndexingSuite(sinkConfig: SinkConfig, query: SparqlConst None, includeDeprecated = false, project3, - Uri("https://bbp.epfl.ch/nexus"), - None + Uri("https://bbp.epfl.ch/nexus") ) private val contextJson = jsonContentOf("indexing/music-context.json") diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/migration/MigrateCompositeViewsSuite.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/migration/MigrateCompositeViewsSuite.scala index 02de03ce67..ba57456b0f 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/migration/MigrateCompositeViewsSuite.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/migration/MigrateCompositeViewsSuite.scala @@ -8,7 +8,6 @@ import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.migration.MigrateCom import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.CompositeViewEvent.{CompositeViewCreated, CompositeViewUpdated} import ch.epfl.bluebrain.nexus.delta.plugins.compositeviews.model.{CompositeViewEvent, CompositeViewState, CompositeViewValue} import ch.epfl.bluebrain.nexus.delta.rdf.syntax.iriStringContextSyntax -import ch.epfl.bluebrain.nexus.delta.sdk.crypto.Crypto import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, Tag} import ch.epfl.bluebrain.nexus.delta.sourcing.implicits._ @@ -33,10 +32,9 @@ class MigrateCompositeViewsSuite extends BioSuite with Doobie.Fixture with Class implicit private lazy val xas: Transactors = doobie() - private val crypto: Crypto = Crypto("changeme", "salt") - private val eventSerializer = CompositeViewEvent.serializer(crypto) + private val eventSerializer = CompositeViewEvent.serializer implicit val eventGet: Get[CompositeViewEvent] = eventSerializer.getValue - private val stateSerializer = CompositeViewState.serializer(crypto) + private val stateSerializer = CompositeViewState.serializer implicit val stateGet: Get[CompositeViewState] = stateSerializer.getValue private def assertMigratedValue(value: CompositeViewValue, rev: Int)(implicit loc: Location): Unit = { diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewsSerializationSuite.scala 
b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewsSerializationSuite.scala index e3de54b30c..5fe7b05f33 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewsSerializationSuite.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/model/CompositeViewsSerializationSuite.scala @@ -33,9 +33,9 @@ class CompositeViewsSerializationSuite extends SerializationSuite with Composite // format: on ) - private val eventSerializer = CompositeViewEvent.serializer(crypto) - private val sseEncoder = CompositeViewEvent.sseEncoder(crypto) - private val metricEncoder = CompositeViewEvent.compositeViewMetricEncoder(crypto) + private val eventSerializer = CompositeViewEvent.serializer + private val sseEncoder = CompositeViewEvent.sseEncoder + private val metricEncoder = CompositeViewEvent.compositeViewMetricEncoder eventsMapping.foreach { case (event, (database, sse)) => test(s"Correctly serialize ${event.getClass.getName}") { @@ -91,7 +91,7 @@ class CompositeViewsSerializationSuite extends SerializationSuite with Composite private val jsonState = jsonContentOf("/composite-views/database/view-state.json") - private val stateSerializer = CompositeViewState.serializer(crypto) + private val stateSerializer = CompositeViewState.serializer test(s"Correctly serialize a CompositeViewState") { stateSerializer.codec(state).equalsIgnoreArrayOrder(jsonState) diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala index 3883c51a7e..3d7203d638 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/routes/CompositeViewsRoutesSpec.scala @@ -60,7 +60,6 @@ class CompositeViewsRoutesSpec extends CompositeViewsRoutesFixtures { fetchContext, ResolverContextResolution(rcr), alwaysValidate, - crypto, config, xas ).accepted diff --git a/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchScopeInitializationSpec.scala b/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchScopeInitializationSpec.scala index 3e4cfd70f2..1ab8afcb6b 100644 --- a/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchScopeInitializationSpec.scala +++ b/delta/plugins/search/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/search/SearchScopeInitializationSpec.scala @@ -40,7 +40,6 @@ class SearchScopeInitializationSpec fetchContext, ResolverContextResolution(rcr), alwaysValidate, - crypto, config, xas ).accepted diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala index 537f7b93ac..03f4609bfd 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala @@ -24,7 +24,7 @@ import 
ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteCon import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck -import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials, OpenIdAuthService} +import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} import ch.epfl.bluebrain.nexus.delta.sdk.deletion.ProjectDeletionTask import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig @@ -37,7 +37,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings -import ch.epfl.bluebrain.nexus.delta.sdk.realms.Realms import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.sse.SseEncoder import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors @@ -147,14 +146,6 @@ class StoragePluginModule(priority: Int) extends ModuleDef { many[ResourceShift[_, _, _]].ref[Storage.Shift] - make[OpenIdAuthService].from { (httpClient: HttpClient @Id("realm"), realms: Realms) => - new OpenIdAuthService(httpClient, realms) - } - - make[AuthTokenProvider].fromEffect { (authService: OpenIdAuthService) => - AuthTokenProvider(authService) - } - make[Files] .fromEffect { ( diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala index c849084912..e7dbd44bfe 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/model/StorageRejection.scala @@ -195,17 +195,6 @@ object StorageRejection { s"The provided permissions '${permissions.mkString(",")}' are not defined in the collection of allowed permissions." ) - /** - * Signals a rejection caused by the failure to encrypt/decrypt sensitive data (credentials) - */ - final case class InvalidEncryptionSecrets(tpe: StorageType, details: String) - extends StorageRejection( - s"Storage type '$tpe' is using incorrect system secrets. Please contact the system administrator.", - Some( - s"Encryption/decryption for storage type '$tpe' fails due to wrong configuration for password or salt. Details '$details'." 
- ) - ) - /** * Signals a rejection caused when interacting with other APIs when fetching a resource */ @@ -241,16 +230,15 @@ object StorageRejection { implicit final val storageRejectionHttpResponseFields: HttpResponseFields[StorageRejection] = HttpResponseFields { - case RevisionNotFound(_, _) => StatusCodes.NotFound - case TagNotFound(_) => StatusCodes.NotFound - case StorageNotFound(_, _) => StatusCodes.NotFound - case DefaultStorageNotFound(_) => StatusCodes.NotFound - case ResourceAlreadyExists(_, _) => StatusCodes.Conflict - case IncorrectRev(_, _) => StatusCodes.Conflict - case ProjectContextRejection(rej) => rej.status - case StorageNotAccessible(_, _) => StatusCodes.BadRequest - case InvalidEncryptionSecrets(_, _) => StatusCodes.InternalServerError - case _ => StatusCodes.BadRequest + case RevisionNotFound(_, _) => StatusCodes.NotFound + case TagNotFound(_) => StatusCodes.NotFound + case StorageNotFound(_, _) => StatusCodes.NotFound + case DefaultStorageNotFound(_) => StatusCodes.NotFound + case ResourceAlreadyExists(_, _) => StatusCodes.Conflict + case IncorrectRev(_, _) => StatusCodes.Conflict + case ProjectContextRejection(rej) => rej.status + case StorageNotAccessible(_, _) => StatusCodes.BadRequest + case _ => StatusCodes.BadRequest } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala index 5d2dbd5e67..8d3057a074 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala @@ -23,7 +23,10 @@ object AuthTokenProvider { def apply(authService: OpenIdAuthService): UIO[AuthTokenProvider] = { KeyValueStore[ClientCredentials, ParsedToken]().map(cache => new CachingOpenIdAuthTokenProvider(authService, cache)) } - def anonymousForTest: AuthTokenProvider = new AnonymousAuthTokenProvider + def anonymousForTest: AuthTokenProvider = new AnonymousAuthTokenProvider + def fixedForTest(token: String): AuthTokenProvider = new AuthTokenProvider { + override def apply(credentials: Credentials): UIO[Option[AuthToken]] = UIO.pure(Some(AuthToken(token))) + } } private class AnonymousAuthTokenProvider extends AuthTokenProvider { diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/Crypto.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/Crypto.scala deleted file mode 100644 index d4659f7aea..0000000000 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/Crypto.scala +++ /dev/null @@ -1,76 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.crypto - -import java.nio.charset.StandardCharsets.UTF_8 -import java.util.Base64 -import javax.crypto.Cipher._ -import javax.crypto.spec.{PBEKeySpec, SecretKeySpec} -import javax.crypto.{Cipher, SecretKey, SecretKeyFactory} -import scala.util.Try - -/** - * Provides encryption and decryption functionality - */ -final class Crypto private (derivedKey: SecretKey) { - - /** - * @return - * the key in its primary encoded format - */ - private[crypto] def encoded: Array[Byte] = derivedKey.getEncoded - - /** - * Encrypts the given input with the provided AES secret key. 
- * - * @return - * a right with the encrypted string in base64 encoding or a left with the error message - */ - def encrypt(input: String): Try[String] = - Try { - val cipher = Cipher.getInstance(Crypto.transformation) - cipher.init(ENCRYPT_MODE, derivedKey) - val bytes = cipher.doFinal(input.getBytes(UTF_8)) - Base64.getEncoder.encodeToString(bytes) - } - - /** - * Decrypts the given base64 encoded input with the provided AES secret key. - * - * @return - * a right with the decrypted string or a left with the error message - */ - def decrypt(input: String): Try[String] = - Try { - val cipher = Cipher.getInstance(Crypto.transformation) - cipher.init(DECRYPT_MODE, derivedKey) - val bytes = cipher.doFinal(Base64.getDecoder.decode(input)) - new String(bytes, UTF_8) - } - - override def toString: String = "SECRET" -} - -object Crypto { - private val transformation = "AES" - - /** - * Derives a suitable AES-256 secret key from a given password and a salt. - */ - private def deriveKey(password: String, salt: String): SecretKey = { - val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512") - val spec = new PBEKeySpec(password.toCharArray, salt.getBytes(UTF_8), 1000, 256) - val key = factory.generateSecret(spec) - new SecretKeySpec(key.getEncoded, "AES") - } - - /** - * Creates a [[Crypto]] for AES-256 - * - * @param password - * the password to use for encryption - * @param salt - * the salt to use for encryption - */ - final def apply(password: String, salt: String): Crypto = - new Crypto(deriveKey(password, salt)) - -} diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/EncryptionConfig.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/EncryptionConfig.scala deleted file mode 100644 index c260fd0585..0000000000 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/EncryptionConfig.scala +++ /dev/null @@ -1,22 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.crypto - -import ch.epfl.bluebrain.nexus.delta.kernel.Secret -import pureconfig.ConfigReader -import pureconfig.generic.semiauto.deriveReader - -/** - * The encryption of sensitive fields configuration - * - * @param password - * the password for the symmetric-key cyphering algorithm - * @param salt - * the salt value - */ -final case class EncryptionConfig(password: Secret[String], salt: Secret[String]) { - val crypto: Crypto = Crypto(password.value, salt.value) -} - -object EncryptionConfig { - implicit final val encryptionConfigReader: ConfigReader[EncryptionConfig] = - deriveReader[EncryptionConfig] -} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/CryptoSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/CryptoSpec.scala deleted file mode 100644 index 5e2dc15d59..0000000000 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/crypto/CryptoSpec.scala +++ /dev/null @@ -1,25 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.crypto - -import ch.epfl.bluebrain.nexus.testkit.{EitherValuable, TestHelpers} -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike - -import java.util.Base64 - -class CryptoSpec extends AnyWordSpecLike with Matchers with TestHelpers with EitherValuable { - - private val secretKey = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" - - "The Crypto object" should { - - "always generate the same key for a given password" in { - val crypto = Crypto("changeme", "salt") - Base64.getEncoder.encodeToString(crypto.encoded) shouldEqual 
"FB4G2MHn/q6PXqpNkE1F5wBG7Ndsd9FtyeLcNQL0G40=" - } - - "encode and decode secrets" in { - val crypto = Crypto(genString(32), genString(16)) - crypto.decrypt(crypto.encrypt(secretKey).get).get shouldEqual secretKey - } - } -} diff --git a/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md b/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md index 4021e5ce7c..efe5bcea4a 100644 --- a/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md +++ b/docs/src/main/paradox/docs/getting-started/running-nexus/configuration/index.md @@ -69,12 +69,6 @@ This feature can be turned on via the flag `app.automatic-provisioning.enabled`. @link:[The `automatic-provisioning` section](https://github.com/BlueBrain/nexus/blob/$git.branch$/delta/app/src/main/resources/app.conf#L197){ open=new } of the configuration defines the project provisioning configuration. -## Encryption configuration - -Nexus Delta uses symmetric encryption to secure sensitive data information (tokens and passwords). - -@link:[The `encryption` section](https://github.com/BlueBrain/nexus/blob/$git.branch$/delta/app/src/main/resources/app.conf#L276){ open=new } of the configuration defines the encryption configuration. - ## Fusion configuration When fetching a resource, Nexus Delta allows to return a redirection to its representation in Fusion by providing `text/html` in the `Accept` header. @@ -121,6 +115,8 @@ The composite views plugin configuration can be found @link:[here](https://githu There are several configuration flags related to tweaking the range of values allowed for sources, projections and rebuild interval. +Authentication for remote sources can be specified in three different ways. The value of `plugins.composite-views.remote-source-credentials` should be speficied in the same way as remote storages, as shown @ref:[here](#remote-storage-configuration) + ### Storage plugin configuration The storage plugin configuration can be found @link:[here](https://github.com/BlueBrain/nexus/blob/$git.branch$/delta/plugins/storage/src/main/resources/storage.conf){ open=new }. diff --git a/docs/src/main/paradox/docs/releases/index.md b/docs/src/main/paradox/docs/releases/index.md index ed1ad237a6..7d672fcd30 100644 --- a/docs/src/main/paradox/docs/releases/index.md +++ b/docs/src/main/paradox/docs/releases/index.md @@ -27,7 +27,7 @@ The latest stable release is **v1.8.0** released on **14.06.2023**. - @ref:[Resource payloads can't contain fields starting by `_` anymore](../delta/api/resources-api.md) - @ref:[The endpoint for fetching indexing errors as SSEs changed](../delta/api/views/index.md#fetch-indexing-failures-as-sses) -- @ref:[Credentials for storages can now only be defined at the configuration level](../releases/v1.9-release-notes.md#storages) +- @ref:[Credentials for storages can now only be defined at the configuration level](../releases/v1.9-release-notes.md#remote-storages) ### New features / enhancements diff --git a/docs/src/main/paradox/docs/releases/v1.8-to-v1.9-migration.md b/docs/src/main/paradox/docs/releases/v1.8-to-v1.9-migration.md index 2092f0f458..235a7f896c 100644 --- a/docs/src/main/paradox/docs/releases/v1.8-to-v1.9-migration.md +++ b/docs/src/main/paradox/docs/releases/v1.8-to-v1.9-migration.md @@ -41,3 +41,11 @@ The following logs should appear in Delta: ``` After this completes, the environment variable can be removed and Delta restarted. 
+ +## Remote authentication + +Currently, both remote storages and composite views with remote sources can hold auth tokens for communication with these remote endpoints. In v1.9, authentication is instead expected to be defined in the Delta configuration. + +A remote storage might currently have a token stored in the storage definition. In v1.9, this should be moved to `plugins.storage.storages.remote-disk.credentials`, as shown @ref:[here](../getting-started/running-nexus/configuration/index.md#remote-storage-configuration). + +A composite view might currently have a token stored in the view definition. In v1.9, this should be moved to `plugins.composite-views.remote-source-credentials`, as shown @ref:[here](../getting-started/running-nexus/configuration/index.md#composite-views-plugin-configuration). diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index 4819525804..d6366685a2 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -71,7 +71,7 @@ The different requests to Elasticsearch are now compressed by default allowing t ### Composite views -To enhance performance of indexing of composite views, Nexus Delta introduces the following features. +New features improve the indexing performance of composite views and change how authentication works for views which incorporate projects on remote Delta instances. #### Batching queries to the intermediate space @@ -91,6 +91,12 @@ Preserving the intermediate space also reduces the fragmentation of the Blazegra @ref:[More information](../delta/api/views/composite-view-api.md#batching-queries-to-the-intermediate-space) +#### Remote authentication + +Rather than storing an auth token in the composite view, an authentication method is now expected to be specified in the Delta configuration. + +@ref:[More information](../getting-started/running-nexus/configuration/index.md#composite-views-plugin-configuration) + ### Elasticsearch Views The Elasticsearch mapping of an Elasticsearch View can be retrieved. @@ -113,7 +119,7 @@ Annotated source is now available as an output format when creating an archive. Creating an archive now requires only the `resources/read` permission instead of `archives/write`.
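For the remote storage side of the migration described above, a hedged sketch of the relocated credentials follows. The `plugins.storage.storages.remote-disk.credentials` path comes from the migration note; the field names are assumed to mirror the composite views block shown earlier and should be checked against `storage.conf` in this patch series. All values are placeholders.

```hocon
plugins.storage.storages.remote-disk {
  # Assumed to accept the same credential types as remote composite view sources.
  credentials {
    type: "client-credentials"
    user: "delta"          # placeholder service account
    password: "changeme"   # placeholder
    realm: "internal"
  }
}
```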
-### Storages +### Remote Storages Storages can no longer be created with credentials that would get stored: diff --git a/tests/docker/config/delta-postgres.conf b/tests/docker/config/delta-postgres.conf index 17f686a802..f142eaa3e7 100644 --- a/tests/docker/config/delta-postgres.conf +++ b/tests/docker/config/delta-postgres.conf @@ -68,6 +68,12 @@ plugins { composite-views { min-interval-rebuild = 5 seconds sink-config = batch + remote-source-credentials { + type: "client-credentials" + user: "delta" + password: "shhh" + realm: "internal" + } } elasticsearch { diff --git a/tests/src/test/resources/kg/views/composite/composite-view.json b/tests/src/test/resources/kg/views/composite/composite-view.json index 58a1e0b9b8..4d5a650e97 100644 --- a/tests/src/test/resources/kg/views/composite/composite-view.json +++ b/tests/src/test/resources/kg/views/composite/composite-view.json @@ -18,8 +18,7 @@ "@id": "https://music.example.com/sources/songs", "@type": "RemoteProjectEventStream", "project": "{{org2}}/songs", - "endpoint": "{{remoteEndpoint}}", - "token": "{{token}}" + "endpoint": "{{remoteEndpoint}}" } ], "projections": [ diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala index 9d5a255b4d..f40a8305c4 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala @@ -126,8 +126,6 @@ class CompositeViewsSpec extends BaseSpec { "creating the view" should { - def jerryToken = tokensMap.get(Jerry).credentials.token() - "create a composite view" in { val view = jsonContentOf( "/kg/views/composite/composite-view.json", @@ -136,7 +134,6 @@ class CompositeViewsSpec extends BaseSpec { "org" -> orgId, "org2" -> orgId, "remoteEndpoint" -> "http://delta:8080/v1", - "token" -> jerryToken, "bandQuery" -> bandQuery, "albumQuery" -> albumQuery ): _* @@ -162,7 +159,6 @@ class CompositeViewsSpec extends BaseSpec { "org" -> orgId, "org2" -> orgId, "remoteEndpoint" -> "http://delta:8080/v1/other", - "token" -> jerryToken, "bandQuery" -> bandQuery, "albumQuery" -> albumQuery ): _* @@ -173,32 +169,6 @@ class CompositeViewsSpec extends BaseSpec { } } - "reject creating a composite view with wrong remote source token" in { - val view = jsonContentOf( - "/kg/views/composite/composite-view.json", - replacements( - Jerry, - "org" -> orgId, - "org2" -> orgId, - "remoteEndpoint" -> "http://delta:8080/v1", - "token" -> s"${jerryToken}wrong", - "bandQuery" -> bandQuery, - "albumQuery" -> albumQuery - ): _* - ) - - deltaClient.put[Json](s"/views/$orgId/bands/composite2", view, Jerry) { (json, response) => - response.status shouldEqual StatusCodes.BadRequest - json shouldEqual jsonContentOf( - "/kg/views/composite/composite-source-token-reject.json", - replacements( - Jerry, - "project" -> s"$orgId/songs" - ): _* - ) - } - } - "reject creating a composite view with remote source endpoint with a wrong hostname" in { val view = jsonContentOf( "/kg/views/composite/composite-view.json", @@ -207,7 +177,6 @@ class CompositeViewsSpec extends BaseSpec { "org" -> orgId, "org2" -> orgId, "remoteEndpoint" -> "http://fail.does.not.exist.at.all.asndkajbskhabsdfjhabsdfjkh/v1", - "token" -> jerryToken, "bandQuery" -> bandQuery, "albumQuery" -> albumQuery ): _* From dfd58f22f8431806b38d8d4061579495e452f239 Mon Sep 17 00:00:00 2001 From: dantb Date: Thu, 21 Sep 2023 10:55:43 +0200 Subject: [PATCH 11/13] Remove 
Tar archiving (#4286) Remove Tar archiving --- .../plugins/archive/ArchiveDownload.scala | 91 ++++++++---------- .../delta/plugins/archive/Archives.scala | 3 +- .../plugins/archive/model/ArchiveFormat.scala | 94 ------------------- .../archive/model/ArchiveRejection.scala | 6 -- .../delta/plugins/archive/model/Zip.scala | 29 ++++++ .../archive/routes/ArchiveRoutes.scala | 46 ++++----- .../plugins/archive/ArchiveDownloadSpec.scala | 68 +++++--------- .../plugins/archive/ArchiveRoutesSpec.scala | 32 +------ .../delta/plugins/archive/ArchivesSpec.scala | 10 +- .../plugins/archive/TarDownloadSpec.scala | 15 --- .../plugins/archive/ZipDownloadSpec.scala | 12 --- .../testkit/archive/ArchiveHelpers.scala | 36 ++----- .../paradox/docs/delta/api/archives-api.md | 19 +--- .../docs/delta/api/assets/archives/fetch.sh | 4 +- .../delta/api/assets/archives/fetched.json | 2 +- docs/src/main/paradox/docs/delta/api/index.md | 2 +- .../docs/releases/v1.9-release-notes.md | 6 ++ .../nexus/tests/kg/ArchiveSpec.scala | 24 ----- 18 files changed, 142 insertions(+), 357 deletions(-) delete mode 100644 delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveFormat.scala create mode 100644 delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala delete mode 100644 delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/TarDownloadSpec.scala delete mode 100644 delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ZipDownloadSpec.scala diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala index 8ea91c610d..43a7115c8c 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala @@ -35,6 +35,7 @@ import monix.execution.Scheduler import java.nio.ByteBuffer import java.nio.charset.StandardCharsets +import akka.stream.alpakka.file.ArchiveMetadata /** * Archive download functionality. 
@@ -55,10 +56,9 @@ trait ArchiveDownload { * @param caller * the caller to be used for checking for access */ - def apply[M]( + def apply( value: ArchiveValue, project: ProjectRef, - format: ArchiveFormat[M], ignoreNotFound: Boolean )(implicit caller: Caller, scheduler: Scheduler): IO[ArchiveRejection, AkkaSource] @@ -96,18 +96,17 @@ object ArchiveDownload { private val printer = Printer.spaces2.copy(dropNullValues = true) private val sourcePrinter = Printer.spaces2.copy(dropNullValues = false) - override def apply[M]( + override def apply( value: ArchiveValue, project: ProjectRef, - format: ArchiveFormat[M], ignoreNotFound: Boolean )(implicit caller: Caller, scheduler: Scheduler): IO[ArchiveRejection, AkkaSource] = { for { references <- value.resources.toList.traverse(toFullReference) _ <- checkResourcePermissions(references, project) - contentStream <- resolveReferencesAsStream(references, project, ignoreNotFound, format) + contentStream <- resolveReferencesAsStream(references, project, ignoreNotFound) } yield { - Source.fromGraph(StreamConverter(contentStream)).via(format.writeFlow) + Source.fromGraph(StreamConverter(contentStream)).via(Zip.writeFlow) } } @@ -124,34 +123,29 @@ object ArchiveDownload { } } - private def resolveReferencesAsStream[M]( + private def resolveReferencesAsStream( references: List[FullArchiveReference], project: ProjectRef, - ignoreNotFound: Boolean, - format: ArchiveFormat[M] - )(implicit caller: Caller): IO[ArchiveRejection, Stream[Task, (M, AkkaSource)]] = { + ignoreNotFound: Boolean + )(implicit caller: Caller): IO[ArchiveRejection, Stream[Task, (ArchiveMetadata, AkkaSource)]] = { references .traverseFilter { - case ref: FileReference => fileEntry(ref, project, format, ignoreNotFound) - case ref: ResourceReference => resourceEntry(ref, project, format, ignoreNotFound) + case ref: FileReference => fileEntry(ref, project, ignoreNotFound) + case ref: ResourceReference => resourceEntry(ref, project, ignoreNotFound) } - .map(sortWith(format)) + .map(sortWith) .map(asStream) } - private def sortWith[M]( - format: ArchiveFormat[M] - )(list: List[(M, Task[AkkaSource])]): List[(M, Task[AkkaSource])] = { - list.sortBy { case (entry, _) => - entry - }(format.ordering) - } + private def sortWith(list: List[(ArchiveMetadata, Task[AkkaSource])]): List[(ArchiveMetadata, Task[AkkaSource])] = + list.sortBy { case (entry, _) => entry }(Zip.ordering) - private def asStream[M](list: List[(M, Task[AkkaSource])]) = { - Stream.iterable(list).evalMap[Task, (M, AkkaSource)] { case (metadata, source) => + private def asStream( + list: List[(ArchiveMetadata, Task[AkkaSource])] + ): Stream[Task, (ArchiveMetadata, AkkaSource)] = + Stream.iterable(list).evalMap { case (metadata, source) => source.map(metadata -> _) } - } private def checkResourcePermissions( refs: List[FullArchiveReference], @@ -166,14 +160,13 @@ object ArchiveDownload { ) .void - private def fileEntry[Metadata]( + private def fileEntry( ref: FileReference, project: ProjectRef, - format: ArchiveFormat[Metadata], ignoreNotFound: Boolean )(implicit caller: Caller - ): IO[ArchiveRejection, Option[(Metadata, Task[AkkaSource])]] = { + ): IO[ArchiveRejection, Option[(ArchiveMetadata, Task[AkkaSource])]] = { val refProject = ref.project.getOrElse(project) // the required permissions are checked for each file content fetch val entry = fetchFileContent(ref.ref, refProject, caller) @@ -184,21 +177,19 @@ object ArchiveDownload { case FileRejection.AuthorizationFailed(addr, perm) => AuthorizationFailed(addr, perm) case other => 
WrappedFileRejection(other) } - .flatMap { case FileResponse(fileMetadata, content) => - IO.fromEither( - pathOf(ref, project, format, fileMetadata.filename).map { path => - val archiveMetadata = format.metadata(path, fileMetadata.bytes) - val contentTask: Task[AkkaSource] = content - .tapError(response => - UIO.delay( - logger - .error(s"Error streaming file '${fileMetadata.filename}' for archive: ${response.value.value}") - ) - ) - .mapError(response => ArchiveDownloadError(fileMetadata.filename, response)) - Some((archiveMetadata, contentTask)) - } - ) + .map { case FileResponse(fileMetadata, content) => + val path = pathOf(ref, project, fileMetadata.filename) + val archiveMetadata = Zip.metadata(path) + val contentTask: Task[AkkaSource] = content + .tapError(response => + UIO.delay( + logger + .error(s"Error streaming file '${fileMetadata.filename}' for archive: ${response.value.value}") + ) + ) + .mapError(response => ArchiveDownloadError(fileMetadata.filename, response)) + Some((archiveMetadata, contentTask)) + } if (ignoreNotFound) entry.onErrorRecover { case _: ResourceNotFound => None } else entry @@ -207,27 +198,21 @@ object ArchiveDownload { private def pathOf( ref: FileReference, project: ProjectRef, - format: ArchiveFormat[_], filename: String - ): Either[FilenameTooLong, String] = - ref.path.map { p => Right(p.value.toString) }.getOrElse { + ): String = + ref.path.map(_.value.toString).getOrElse { val p = ref.project.getOrElse(project) - Either.cond( - format != ArchiveFormat.Tar || filename.length < 100, - s"$p/file/$filename", - FilenameTooLong(ref.ref.original, p, filename) - ) + s"$p/file/$filename" } - private def resourceEntry[Metadata]( + private def resourceEntry( ref: ResourceReference, project: ProjectRef, - format: ArchiveFormat[Metadata], ignoreNotFound: Boolean - ): IO[ArchiveRejection, Option[(Metadata, Task[AkkaSource])]] = { + ): IO[ArchiveRejection, Option[(ArchiveMetadata, Task[AkkaSource])]] = { val archiveEntry = resourceRefToByteString(ref, project).map { content => val path = pathOf(ref, project) - val metadata = format.metadata(path, content.length.toLong) + val metadata = Zip.metadata(path) Some((metadata, Task.pure(Source.single(content)))) } if (ignoreNotFound) archiveEntry.onErrorHandle { _: ResourceNotFound => None } diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala index e451e6fdde..c99b3d5a04 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala @@ -166,13 +166,12 @@ class Archives( def download( id: IdSegment, project: ProjectRef, - format: ArchiveFormat[_], ignoreNotFound: Boolean )(implicit caller: Caller, scheduler: Scheduler): IO[ArchiveRejection, AkkaSource] = (for { resource <- fetch(id, project) value = resource.value - source <- archiveDownload(value.value, project, format, ignoreNotFound) + source <- archiveDownload(value.value, project, ignoreNotFound) } yield source).span("downloadArchive") private def eval(cmd: CreateArchive): IO[ArchiveRejection, ArchiveResource] = diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveFormat.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveFormat.scala deleted file mode 
100644 index 378e39942f..0000000000 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveFormat.scala +++ /dev/null @@ -1,94 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.plugins.archive.model - -import akka.NotUsed -import akka.http.scaladsl.model.{ContentType, HttpRequest, MediaTypes} -import akka.stream.alpakka.file.scaladsl.Archive -import akka.stream.alpakka.file.{ArchiveMetadata, TarArchiveMetadata} -import akka.stream.scaladsl.{Flow, Source} -import akka.util.ByteString -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveFormat.WriteFlow -import ch.epfl.bluebrain.nexus.delta.sdk.utils.HeadersUtils - -/** - * Available format to download the archive - */ -sealed trait ArchiveFormat[Metadata] extends Product with Serializable { - - /** - * Content type - */ - def contentType: ContentType - - /** - * File extension - */ - def fileExtension: String - - /** - * How to build the metadata for the archive entry - */ - def metadata(filename: String, size: Long): Metadata - - /** - * Ordering for the archive entries - */ - def ordering: Ordering[Metadata] = Ordering.by(filePath) - - /** - * How to extract the file path from the archive metadata - */ - def filePath(metadata: Metadata): String - - /** - * Flow to create an archive - */ - def writeFlow: WriteFlow[Metadata] -} - -object ArchiveFormat { - - type WriteFlow[Metadata] = Flow[(Metadata, Source[ByteString, _]), ByteString, NotUsed] - - /** - * Tar format - * @see - * https://en.wikipedia.org/wiki/Tar_(computing)#Limitations for the limitations - */ - final case object Tar extends ArchiveFormat[TarArchiveMetadata] { - override def contentType: ContentType = MediaTypes.`application/x-tar` - - override def fileExtension: String = "tar" - - override def metadata(filename: String, size: Long): TarArchiveMetadata = - TarArchiveMetadata.create(filename, size) - - override def filePath(metadata: TarArchiveMetadata): String = metadata.filePath - - override def writeFlow: WriteFlow[TarArchiveMetadata] = Archive.tar() - } - - /** - * Zip format - * - * @see - * https://en.wikipedia.org/wiki/ZIP_(file_format)#Limits for the limitations - */ - final case object Zip extends ArchiveFormat[ArchiveMetadata] { - override def contentType: ContentType = MediaTypes.`application/zip` - - override def fileExtension: String = "zip" - - override def metadata(filename: String, size: Long): ArchiveMetadata = - ArchiveMetadata.create(filename) - - override def filePath(metadata: ArchiveMetadata): String = metadata.filePath - - override def writeFlow: WriteFlow[ArchiveMetadata] = Archive.zip() - } - - private val availableFormats = List(Tar, Zip) - - def apply(req: HttpRequest): Option[ArchiveFormat[_]] = availableFormats.find { format => - HeadersUtils.matches(req.headers, format.contentType.mediaType) - } -} diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala index f278888d2d..49637d95b6 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala @@ -69,11 +69,6 @@ object ArchiveRejection { )).mkString("\n") ) - final case class FilenameTooLong(id: Iri, project: ProjectRef, fileName: String) - extends ArchiveRejection( - s"File '$id' 
in project '$project' has a file name '$fileName' exceeding the 100 character limit for a tar file." - ) - /** * Rejection returned when an archive doesn't exist. * @@ -201,7 +196,6 @@ object ArchiveRejection { HttpResponseFields { case ResourceAlreadyExists(_, _) => StatusCodes.Conflict case InvalidResourceCollection(_, _, _) => StatusCodes.BadRequest - case FilenameTooLong(_, _, _) => StatusCodes.BadRequest case ArchiveNotFound(_, _) => StatusCodes.NotFound case InvalidArchiveId(_) => StatusCodes.BadRequest case ProjectContextRejection(rejection) => rejection.status diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala new file mode 100644 index 0000000000..7477eb6a19 --- /dev/null +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala @@ -0,0 +1,29 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.archive.model + +import akka.NotUsed +import akka.http.scaladsl.model.{ContentType, HttpRequest, MediaTypes} +import akka.stream.alpakka.file.scaladsl.Archive +import akka.stream.alpakka.file.ArchiveMetadata +import akka.stream.scaladsl.{Flow, Source} +import akka.util.ByteString +import ch.epfl.bluebrain.nexus.delta.sdk.utils.HeadersUtils + +/** + * Zip archive format + * + * @see + * https://en.wikipedia.org/wiki/ZIP_(file_format)#Limits for the limitations + */ +object Zip { + type WriteFlow[Metadata] = Flow[(Metadata, Source[ByteString, _]), ByteString, NotUsed] + + lazy val contentType: ContentType = MediaTypes.`application/zip` + + lazy val writeFlow: WriteFlow[ArchiveMetadata] = Archive.zip() + + lazy val ordering: Ordering[ArchiveMetadata] = Ordering.by(md => md.filePath) + + def metadata(filename: String): ArchiveMetadata = ArchiveMetadata.create(filename) + + def checkHeader(req: HttpRequest): Boolean = HeadersUtils.matches(req.headers, Zip.contentType.mediaType) +} diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala index 7153dcc6ce..f4cba81633 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala @@ -2,9 +2,10 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive.routes import akka.http.scaladsl.model.StatusCodes.{Created, SeeOther} import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.{Directive1, Route} +import akka.http.scaladsl.server.Route import ch.epfl.bluebrain.nexus.delta.plugins.archive.Archives -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{permissions, ArchiveFormat} +import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.permissions +import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.Zip import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource @@ -53,10 +54,10 @@ class ArchiveRoutes( (post & entity(as[Json]) & pathEndOrSingleSlash) { json => operationName(s"$prefix/archives/{org}/{project}") { authorizeFor(ref, permissions.write).apply { - archiveResponse { - case Some(_) => emitRedirect(SeeOther, 
archives.create(ref, json).map(_.uris.accessUri)) - case None => emit(Created, archives.create(ref, json).mapValue(_.metadata)) - } + archiveResponse( + emitRedirect(SeeOther, archives.create(ref, json).map(_.uris.accessUri)), + emit(Created, archives.create(ref, json).mapValue(_.metadata)) + ) } } }, @@ -66,25 +67,24 @@ class ArchiveRoutes( // create an archive with an id (put & entity(as[Json]) & pathEndOrSingleSlash) { json => authorizeFor(ref, permissions.write).apply { - archiveResponse { - case Some(_) => emitRedirect(SeeOther, archives.create(id, ref, json).map(_.uris.accessUri)) - case None => emit(Created, archives.create(id, ref, json).mapValue(_.metadata)) - } + archiveResponse( + emitRedirect(SeeOther, archives.create(id, ref, json).map(_.uris.accessUri)), + emit(Created, archives.create(id, ref, json).mapValue(_.metadata)) + ) } }, // fetch or download an archive (get & pathEndOrSingleSlash) { authorizeFor(ref, permissions.read).apply { - archiveResponse { - case Some(format) => - parameter("ignoreNotFound".as[Boolean] ? false) { ignoreNotFound => - val response = archives.download(id, ref, format, ignoreNotFound).map { source => - sourceToFileResponse(source, format) - } - emit(response) + archiveResponse( + parameter("ignoreNotFound".as[Boolean] ? false) { ignoreNotFound => + val response = archives.download(id, ref, ignoreNotFound).map { source => + sourceToFileResponse(source) } - case None => emit(archives.fetch(id, ref)) - } + emit(response) + }, + emit(archives.fetch(id, ref)) + ) } } ) @@ -96,9 +96,9 @@ class ArchiveRoutes( } } - private def sourceToFileResponse(source: AkkaSource, format: ArchiveFormat[_]): FileResponse = - FileResponse(s"archive.${format.fileExtension}", format.contentType, 0L, source) + private def archiveResponse(validResp: Route, invalidResp: Route): Route = + extractRequest.map(Zip.checkHeader(_)).apply(valid => if (valid) validResp else invalidResp) - private def archiveResponse: Directive1[Option[ArchiveFormat[_]]] = - extractRequest.map(ArchiveFormat(_)) + private def sourceToFileResponse(source: AkkaSource): FileResponse = + FileResponse(s"archive.zip", Zip.contentType, 0L, source) } diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala index a549e747de..3d75c62a63 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala @@ -10,9 +10,9 @@ import cats.data.NonEmptySet import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils.encode import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveReference.{FileReference, FileSelfReference, ResourceReference} -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection.{AuthorizationFailed, FilenameTooLong, InvalidFileSelf, ResourceNotFound} +import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection.{AuthorizationFailed, InvalidFileSelf, ResourceNotFound} import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceRepresentation.{CompactedJsonLd, Dot, ExpandedJsonLd, NQuads, NTriples, SourceJson} -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{ArchiveFormat, ArchiveRejection, ArchiveValue} +import 
ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{ArchiveRejection, ArchiveValue} import ch.epfl.bluebrain.nexus.delta.plugins.storage.RemoteContextResolutionFixture import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin.Client import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.FileNotFound @@ -50,7 +50,7 @@ import java.util.UUID import scala.concurrent.ExecutionContext import scala.reflect.ClassTag -abstract class ArchiveDownloadSpec +class ArchiveDownloadSpec extends TestKit(ActorSystem()) with AnyWordSpecLike with Inspectors @@ -81,9 +81,7 @@ abstract class ArchiveDownloadSpec private val permissions = Set(Permissions.resources.read) private val aclCheck = AclSimpleCheck((subject, AclAddress.Root, permissions)).accepted - def format: ArchiveFormat[_] - - def sourceToMap(source: AkkaSource): Map[String, String] + def sourceToMap(source: AkkaSource): Map[String, String] = fromZip(source).map { case (k, v) => k -> v.utf8String } "An ArchiveDownload" should { val storageRef = ResourceRef.Revision(iri"http://localhost/${genString()}", 5) @@ -147,20 +145,20 @@ abstract class ArchiveDownloadSpec ) def downloadAndExtract(value: ArchiveValue, ignoreNotFound: Boolean) = { - archiveDownload(value, project.ref, format, ignoreNotFound).map(sourceToMap).accepted + archiveDownload(value, project.ref, ignoreNotFound).map(sourceToMap).accepted } def failToDownload[R <: ArchiveRejection: ClassTag](value: ArchiveValue, ignoreNotFound: Boolean) = { - archiveDownload(value, project.ref, format, ignoreNotFound).rejectedWith[R] + archiveDownload(value, project.ref, ignoreNotFound).rejectedWith[R] } def rejectedAccess(value: ArchiveValue) = { archiveDownload - .apply(value, project.ref, format, ignoreNotFound = true)(Caller.Anonymous, global) + .apply(value, project.ref, ignoreNotFound = true)(Caller.Anonymous, global) .rejectedWith[AuthorizationFailed] } - s"provide a ${format.fileExtension} for both resources and files" in { + s"provide a zip for both resources and files" in { val value = ArchiveValue.unsafe( NonEmptySet.of( ResourceReference(Latest(id1), None, None, None), @@ -175,7 +173,7 @@ abstract class ArchiveDownloadSpec result shouldEqual expected } - s"provide a ${format.fileExtension} for file selfs" in { + s"provide a zip for file selfs" in { val value = ArchiveValue.unsafe( NonEmptySet.of( FileSelfReference(file1Self, None) @@ -188,12 +186,12 @@ abstract class ArchiveDownloadSpec result shouldEqual expected } - s"fail to provide a ${format.fileExtension} for file selfs which do not resolve" in { + s"fail to provide a zip for file selfs which do not resolve" in { val value = ArchiveValue.unsafe(NonEmptySet.of(FileSelfReference("http://wrong.file/self", None))) failToDownload[InvalidFileSelf](value, ignoreNotFound = false) } - s"provide a ${format.fileExtension} for both resources and files with different paths and formats" in { + s"provide a zip for both resources and files with different paths and formats" in { val list = List( SourceJson -> file1.value.asJson.sort.spaces2, CompactedJsonLd -> file1.toCompactedJsonLd.accepted.json.sort.spaces2, @@ -226,39 +224,17 @@ abstract class ArchiveDownloadSpec } } - if (format == ArchiveFormat.Tar) { - "fail to provide a tar if the file name is too long and no path is provided" in { - val value = ArchiveValue.unsafe( - NonEmptySet.of( - FileReference(Latest(id2), None, None) - ) - ) - failToDownload[FilenameTooLong](value, ignoreNotFound = false) - } - - "provide a tar if the file 
name is too long but a path is provided" in { - val filePath = AbsolutePath.apply(s"/${genString()}/file.txt").rightValue - val value = ArchiveValue.unsafe( - NonEmptySet.of( - FileReference(Latest(id2), None, Some(filePath)) - ) - ) - - downloadAndExtract(value, ignoreNotFound = false) should contain key filePath.value.toString - } - } else { - "provide a zip if the file name is long" in { - val value = ArchiveValue.unsafe( - NonEmptySet.of( - FileReference(Latest(id2), None, None) - ) + "provide a zip if the file name is long" in { + val value = ArchiveValue.unsafe( + NonEmptySet.of( + FileReference(Latest(id2), None, None) ) - val file2Path = s"${project.ref.toString}/file/${file2.value.attributes.filename}" - downloadAndExtract(value, ignoreNotFound = false) should contain key file2Path - } + ) + val file2Path = s"${project.ref.toString}/file/${file2.value.attributes.filename}" + downloadAndExtract(value, ignoreNotFound = false) should contain key file2Path } - s"fail to provide a ${format.fileExtension} when a resource is not found" in { + s"fail to provide a zip when a resource is not found" in { val value = ArchiveValue.unsafe( NonEmptySet.of( ResourceReference(Latest(iri"http://localhost/${genString()}"), None, None, None), @@ -268,7 +244,7 @@ abstract class ArchiveDownloadSpec failToDownload[ResourceNotFound](value, ignoreNotFound = false) } - s"fail to provide a ${format.fileExtension} when a file is not found" in { + s"fail to provide a zip when a file is not found" in { val value = ArchiveValue.unsafe( NonEmptySet.of( ResourceReference(Latest(id1), None, None, None), @@ -306,14 +282,14 @@ abstract class ArchiveDownloadSpec result shouldEqual expected } - s"fail to provide a ${format.fileExtension} when access to a resource is not found" in { + s"fail to provide a zip when access to a resource is not found" in { val value = ArchiveValue.unsafe( NonEmptySet.of(ResourceReference(Latest(id1), None, None, None)) ) rejectedAccess(value) } - s"fail to provide a ${format.fileExtension} when access to a file is not found" in { + s"fail to provide a zip when access to a file is not found" in { val value = ArchiveValue.unsafe( NonEmptySet.of(FileReference(Latest(id1), None, None)) ) diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala index 1755b9d56a..0e061ee440 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala @@ -2,7 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` import akka.http.scaladsl.model.MediaRanges.`*/*` -import akka.http.scaladsl.model.MediaTypes.{`application/x-tar`, `application/zip`} +import akka.http.scaladsl.model.MediaTypes.`application/zip` import akka.http.scaladsl.model.headers.{`Content-Type`, Accept, Location, OAuth2BearerToken} import akka.http.scaladsl.model.{ContentTypes, StatusCodes, Uri} import akka.http.scaladsl.server.Route @@ -284,36 +284,6 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with TryValue } } - "fetch a tar archive ignoring not found" in { - forAll(List(Accept(`application/x-tar`), acceptAll)) { accept => - Get(s"/v1/archives/$projectRef/$uuid?ignoreNotFound=true") ~> asSubject ~> accept ~> 
routes ~> check { - status shouldEqual StatusCodes.OK - header[`Content-Type`].value.value() shouldEqual `application/x-tar`.value - val result = fromTar(responseEntity.dataBytes) - - result.keySet shouldEqual Set( - s"${project.ref}/file/file.txt", - s"${project.ref}/compacted/${encode(fileId.toString)}.json" - ) - - val expectedContent = fileContent - val actualContent = result.entryAsString(s"${project.ref}/file/file.txt") - actualContent shouldEqual expectedContent - - val expectedMetadata = FilesRoutesSpec.fileMetadata( - projectRef, - fileId, - file.value.attributes, - storageRef, - createdBy = subject, - updatedBy = subject - ) - val actualMetadata = result.entryAsJson(s"${project.ref}/compacted/${encode(fileId.toString)}.json") - actualMetadata shouldEqual expectedMetadata - } - } - } - "fetch a zip archive ignoring not found" in { Get(s"/v1/archives/$projectRef/$uuid?ignoreNotFound=true") ~> asSubject ~> Accept( `application/zip` diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala index 5d1c49abc5..273d3ee9dd 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala @@ -5,7 +5,7 @@ import cats.data.NonEmptySet import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveReference.{FileReference, ResourceReference} import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection.{ArchiveNotFound, ProjectContextRejection} -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{Archive, ArchiveFormat, ArchiveRejection, ArchiveValue} +import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{Archive, ArchiveRejection, ArchiveValue} import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{nxv, schema} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.{JsonLdApi, JsonLdJavaApi} import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource @@ -63,8 +63,7 @@ class ArchivesSpec private val cfg = ArchivePluginConfig(1, EphemeralLogConfig(5.seconds, 5.hours)) private val download = new ArchiveDownload { - override def apply[M](value: ArchiveValue, project: ProjectRef, format: ArchiveFormat[M], ignoreNotFound: Boolean)( - implicit + override def apply(value: ArchiveValue, project: ProjectRef, ignoreNotFound: Boolean)(implicit caller: Caller, scheduler: Scheduler ): IO[ArchiveRejection, AkkaSource] = @@ -250,7 +249,7 @@ class ArchivesSpec resource.value shouldEqual Archive(id, project.ref, value.resources, 5.hours.toSeconds) } - "download an existing archive as zip and tar" in { + "download an existing archive as zip" in { val id = iri"http://localhost/base/${genString()}" val resourceId = iri"http://localhost/${genString()}" val fileId = iri"http://localhost/${genString()}" @@ -261,8 +260,7 @@ class ArchivesSpec ) ) archives.create(id, project.ref, value).accepted - archives.download(id, project.ref, ArchiveFormat.Tar, ignoreNotFound = true).accepted - archives.download(id, project.ref, ArchiveFormat.Zip, ignoreNotFound = true).accepted + archives.download(id, project.ref, ignoreNotFound = true).accepted } "return not found for unknown archives" in { diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/TarDownloadSpec.scala 
b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/TarDownloadSpec.scala deleted file mode 100644 index 456b4bc266..0000000000 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/TarDownloadSpec.scala +++ /dev/null @@ -1,15 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.plugins.archive -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveFormat -import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource - -import scala.concurrent.duration.DurationInt - -class TarDownloadSpec extends ArchiveDownloadSpec { - - implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds, 10.millis) - override def format: ArchiveFormat[_] = ArchiveFormat.Tar - - override def sourceToMap(source: AkkaSource): Map[String, String] = - fromTar(source).map { case (k, v) => k -> v.utf8String } - -} diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ZipDownloadSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ZipDownloadSpec.scala deleted file mode 100644 index dafffbb2c9..0000000000 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ZipDownloadSpec.scala +++ /dev/null @@ -1,12 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.plugins.archive - -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveFormat -import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource - -class ZipDownloadSpec extends ArchiveDownloadSpec { - override def format: ArchiveFormat[_] = ArchiveFormat.Zip - - override def sourceToMap(source: AkkaSource): Map[String, String] = - fromZip(source).map { case (k, v) => k -> v.utf8String } - -} diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/archive/ArchiveHelpers.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/archive/ArchiveHelpers.scala index 7f281024d6..e65d69497a 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/archive/ArchiveHelpers.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/archive/ArchiveHelpers.scala @@ -13,8 +13,12 @@ import org.scalatest.concurrent.ScalaFutures import java.nio.file.{Files => JFiles} import scala.concurrent.ExecutionContext +import scala.concurrent.duration._ import java.security.MessageDigest +import org.scalatest.concurrent.PatienceConfiguration +import org.scalatest.time.Span +import org.scalatest.time.Seconds trait ArchiveHelpers extends ScalaFutures with EitherValuable with OptionValues { @@ -30,37 +34,15 @@ trait ArchiveHelpers extends ScalaFutures with EitherValuable with OptionValues } } - def fromTar(byteString: ByteString)(implicit m: Materializer, e: ExecutionContext): ArchiveContent = - fromTar(Source.single(byteString)) - - def fromTar(source: Source[ByteString, Any])(implicit m: Materializer, e: ExecutionContext): ArchiveContent = { - val path = JFiles.createTempFile("test", ".tar") - source.runWith(FileIO.toPath(path)).futureValue - val result = FileIO - .fromPath(path) - .via(Archive.tarReader()) - .mapAsync(1) { case (metadata, source) => - source - .runFold(ByteString.empty) { case (bytes, elem) => - bytes ++ elem - } - .map { bytes => - (metadata.filePath, bytes) - } - } - .runFold(Map.empty[String, ByteString]) { case (map, elem) => - map + elem - } - .futureValue - result - } - def fromZip(byteString: ByteString)(implicit m: Materializer, e: ExecutionContext): ArchiveContent = fromZip(Source.single(byteString)) def fromZip(source: 
Source[ByteString, Any])(implicit m: Materializer, e: ExecutionContext): ArchiveContent = { - val path = JFiles.createTempFile("test", ".tar") - source.runWith(FileIO.toPath(path)).futureValue + val path = JFiles.createTempFile("test", ".zip") + source + .completionTimeout(10.seconds) + .runWith(FileIO.toPath(path)) + .futureValue(PatienceConfiguration.Timeout(Span(10, Seconds))) val result = Archive .zipReader(path.toFile) .mapAsync(1) { case (metadata, source) => diff --git a/docs/src/main/paradox/docs/delta/api/archives-api.md b/docs/src/main/paradox/docs/delta/api/archives-api.md index a0b71f2f39..8e19a7f150 100644 --- a/docs/src/main/paradox/docs/delta/api/archives-api.md +++ b/docs/src/main/paradox/docs/delta/api/archives-api.md @@ -1,7 +1,7 @@ # Archives An archive is a collection of resources stored inside an archive file. The archiving format chosen for this purpose is -tar (or tarball). Archive resources are rooted in the `/v1/archives/{org_label}/{project_label}/` collection. +ZIP. Archive resources are rooted in the `/v1/archives/{org_label}/{project_label}/` collection. Each archive... @@ -101,7 +101,7 @@ The json payload: - If the `@id` value is not found on the payload, an @id will be generated as follows: `base:{UUID}`. The `base` is the `prefix` defined on the resource's project (`{project_label}`). -The response will be an HTTP 303 Location redirect, which will point to the url where to consume the archive (tarball). +The response will be an HTTP 303 Location redirect, which will point to the url where to consume the archive (ZIP). The following diagram can help to understand the HTTP exchange ![post-redirect-get](assets/archives/post-redirect-get.png "Post/Redirect/Get archive") @@ -109,7 +109,7 @@ The following diagram can help to understand the HTTP exchange **Example** The following example shows how to create an archive containing 3 files. 2 of them are resources and the other is a file. -As a response, the tarball will be offered. +As a response, the ZIP file will be offered. Request : @@snip [archive.sh](assets/archives/create.sh) @@ -147,16 +147,7 @@ Note that if the payload contains an @id different from the `{archive_id}`, the When fetching an archive, the response format can be chosen through HTTP content negotiation. In order to fetch the archive metadata, the client can use any of the @ref:[following MIME types](content-negotiation.md#supported-mime-types). -However, in order to fetch the archive content, the HTTP `Accept` header should be provided: - -* `*/*` or `application/x-tar` will return a tar archive (or tarball) -* `application/zip` will return a zip archive - -@@@ note { .warning } - -@link:[The limitations of the tar format](https://en.wikipedia.org/wiki/Tar_(computing)) -makes the usage of archives difficult (among other things, the maximum file name is limited to 100 characters), -so its support will be removed in a future release. +However, in order to fetch the archive content, the HTTP `Accept` header should be provided as `application/zip`. 
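
As an illustration of the fetch call described above, here is a minimal Akka HTTP client sketch, assuming a Delta instance on `localhost:8080`; the org, project and archive id are placeholders and the `Authorization` header a real call would need is omitted:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.MediaTypes.`application/zip`
import akka.http.scaladsl.model.headers.Accept
import akka.stream.scaladsl.FileIO

import java.nio.file.Paths

object FetchArchiveExample extends App {
  implicit val system: ActorSystem = ActorSystem("fetch-archive")
  import system.dispatcher

  // Hypothetical archive address; substitute your own org, project and archive id.
  val request = HttpRequest(
    uri = "http://localhost:8080/v1/archives/myorg/myproject/myarchive?ignoreNotFound=true",
    headers = List(Accept(`application/zip`))
  )

  // Stream the ZIP entity straight to disk instead of buffering it in memory.
  Http()
    .singleRequest(request)
    .flatMap(_.entity.dataBytes.runWith(FileIO.toPath(Paths.get("output.zip"))))
    .onComplete(_ => system.terminate())
}
```

Requesting one of the metadata MIME types from the content negotiation page instead returns the archive resource itself rather than its content.
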
@@@ @@ -170,7 +161,7 @@ GET /v1/archives/{org_label}/{project_label}/{archive_id}?ignoreNotFound=true **Example** -Request (tarball) +Request (ZIP) : @@snip [fetch.sh](assets/archives/fetch.sh) Request (metadata) diff --git a/docs/src/main/paradox/docs/delta/api/assets/archives/fetch.sh b/docs/src/main/paradox/docs/delta/api/assets/archives/fetch.sh index 46fc4031d9..6961af6e50 100644 --- a/docs/src/main/paradox/docs/delta/api/assets/archives/fetch.sh +++ b/docs/src/main/paradox/docs/delta/api/assets/archives/fetch.sh @@ -1,3 +1,3 @@ curl "http://localhost:8080/v1/archives/myorg/myproject/myarchive" \ - -H "Accept: application/x-tar" \ - -o output.tar \ No newline at end of file + -H "Accept: application/x-zip" \ + -o output.zip \ No newline at end of file diff --git a/docs/src/main/paradox/docs/delta/api/assets/archives/fetched.json b/docs/src/main/paradox/docs/delta/api/assets/archives/fetched.json index de3ab9aab3..5d742add13 100644 --- a/docs/src/main/paradox/docs/delta/api/assets/archives/fetched.json +++ b/docs/src/main/paradox/docs/delta/api/assets/archives/fetched.json @@ -32,6 +32,6 @@ "_createdAt": "2021-05-17T14:54:42.939Z", "_createdBy": "http://localhost:8080/v1/realms/myrealm/users/john", "_updatedAt": "2021-05-17T14:54:42.939Z", - "_updatedBy": "http://localhost:8080/v1/realms/myrealm/users/john" + "_updatedBy": "http://localhost:8080/v1/realms/myrealm/users/john", "_expiresInSeconds": 17530 } \ No newline at end of file diff --git a/docs/src/main/paradox/docs/delta/api/index.md b/docs/src/main/paradox/docs/delta/api/index.md index 57ab90a186..6f988d6f1d 100644 --- a/docs/src/main/paradox/docs/delta/api/index.md +++ b/docs/src/main/paradox/docs/delta/api/index.md @@ -121,7 +121,7 @@ A file is a binary attachment resource. ## Archives -An archive is a collection of resources stored inside an archive file. The archiving format chosen for this purpose is tar (or tarball). +An archive is a collection of resources stored inside an archive file. The archiving format chosen for this purpose is ZIP file. @ref:[Operations on archives](archives-api.md) diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index d6366685a2..d5d71099e5 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -119,6 +119,12 @@ Annotated source is now available as an output format when creating an archive. Creating an archive now requires only the `resources/read` permission instead of `archives/write`. +#### Remove support for Tarball archives + +Tarball archives are no longer supported due to unnecessary restrictions. ZIP is now the only allowed format and clients should send `application/zip` in the `Accept` header when creating archives. 
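
With tar gone, the archive plugin writes its output exclusively through the Alpakka ZIP flow exposed as `Zip.writeFlow`. The following is a minimal standalone sketch of that flow outside Delta; the entry paths and contents are invented for illustration:

```scala
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.alpakka.file.ArchiveMetadata
import akka.stream.alpakka.file.scaladsl.Archive
import akka.stream.scaladsl.{FileIO, Source}
import akka.util.ByteString

import java.nio.file.Paths

object ZipWriteFlowExample extends App {
  implicit val system: ActorSystem = ActorSystem("zip-write-flow")
  import system.dispatcher

  // Each archive entry pairs the metadata carrying its path inside the ZIP with a byte source
  // for its content, mirroring the (ArchiveMetadata, AkkaSource) pairs built by ArchiveDownload.
  val entries: Source[(ArchiveMetadata, Source[ByteString, Any]), NotUsed] = Source(
    List(
      ArchiveMetadata.create("myorg/myproject/file/hello.txt")          -> Source.single(ByteString("hello")),
      ArchiveMetadata.create("myorg/myproject/compacted/resource.json") -> Source.single(ByteString("{}"))
    )
  )

  // Archive.zip() is the Alpakka flow behind Zip.writeFlow: it folds the entry pairs into
  // a single stream of ZIP-encoded bytes, written here to a local file.
  entries
    .via(Archive.zip())
    .runWith(FileIO.toPath(Paths.get("archive.zip")))
    .onComplete(_ => system.terminate())
}
```

Because ZIP entries are written in the order the pairs arrive, `ArchiveDownload` sorts them by file path with `Zip.ordering` before streaming.
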
+ +### Storages + ### Remote Storages Storages can no longer be created with credentials that would get stored: diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ArchiveSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ArchiveSpec.scala index 6c5b5df548..90a41cec9a 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ArchiveSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ArchiveSpec.scala @@ -231,25 +231,6 @@ class ArchiveSpec extends BaseSpec with ArchiveHelpers with CirceEq { } } - "succeed returning tar" in { - val prefix = "https%3A%2F%2Fdev.nexus.test.com%2Fsimplified-resource%2F" - deltaClient.get[ByteString](s"/archives/$fullId/test-resource:archive", Tweety, acceptAll) { - (byteString, response) => - contentType(response) shouldEqual MediaTypes.`application/x-tar`.toContentType - response.status shouldEqual StatusCodes.OK - - val result = fromTar(byteString) - - val actualContent1 = result.entryAsJson(s"$fullId/compacted/${prefix}1%3Frev%3D1.json") - val actualContent2 = result.entryAsJson(s"$fullId2/compacted/${prefix}2.json") - val actualDigest3 = result.entryDigest("/some/other/nexus-logo.png") - - filterMetadataKeys(actualContent1) should equalIgnoreArrayOrder(payloadResponse1) - filterMetadataKeys(actualContent2) should equalIgnoreArrayOrder(payloadResponse2) - actualDigest3 shouldEqual nexusLogoDigest - } - } - "succeed returning zip" in { val prefix = "https%3A%2F%2Fdev.nexus.test.com%2Fsimplified-resource%2F" deltaClient.get[ByteString](s"/archives/$fullId/test-resource:archive", Tweety, acceptZip) { @@ -300,11 +281,6 @@ class ArchiveSpec extends BaseSpec with ArchiveHelpers with CirceEq { response.status shouldEqual StatusCodes.Created } downloadLink = s"/archives/$fullId/test-resource:archive-not-found?ignoreNotFound=true" - _ <- deltaClient.get[ByteString](downloadLink, Tweety, acceptAll) { (byteString, response) => - contentType(response) shouldEqual MediaTypes.`application/x-tar`.toContentType - response.status shouldEqual StatusCodes.OK - assertContent(fromTar(byteString)) - } _ <- deltaClient.get[ByteString](downloadLink, Tweety, acceptZip) { (byteString, response) => contentType(response) shouldEqual MediaTypes.`application/zip`.toContentType response.status shouldEqual StatusCodes.OK From 3e84402688f9d0785a95a49d3ac322685fc6bf27 Mon Sep 17 00:00:00 2001 From: Daniel Bell Date: Thu, 21 Sep 2023 12:00:57 +0100 Subject: [PATCH 12/13] Fix JSON logs with missing string interpolation values. (#4287) we don't use log formatting, however the scala logger we use utilises a macro to re-write log calls to use log formatting. 
this resulted in logs appearing in logstash with missing interpolation values --- .../ch/epfl/bluebrain/nexus/delta/logback/JsonLayout.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/logback/JsonLayout.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/logback/JsonLayout.scala index cf0a4daaef..cc3f37007f 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/logback/JsonLayout.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/logback/JsonLayout.scala @@ -14,7 +14,7 @@ class JsonLayout extends LayoutBase[ILoggingEvent] { .fromFields( Map( "@timestamp" := event.getInstant, - "message" := event.getMessage, + "message" := event.getFormattedMessage, "log.level" := event.getLevel.toString, "log.logger" := event.getLoggerName ) ++ stackTraceFields From 80f591b6f064c8d0a7d192f835b355fe97482e36 Mon Sep 17 00:00:00 2001 From: Simon Date: Thu, 21 Sep 2023 13:30:57 +0200 Subject: [PATCH 13/13] Migrate jira plugin to Cats-effect (#4281) Co-authored-by: Simon Dumas --- .../nexus/delta/plugins/jira/JiraClient.scala | 98 +++++++++---------- .../delta/plugins/jira/JiraPluginModule.scala | 5 +- .../nexus/delta/plugins/jira/TokenStore.scala | 32 +++--- .../plugins/jira/model/JiraResponse.scala | 16 ++- .../plugins/jira/routes/JiraRoutes.scala | 26 +++-- .../delta/plugins/jira/TokenStoreSpec.scala | 46 --------- .../delta/plugins/jira/TokenStoreSuite.scala | 41 ++++++++ 7 files changed, 131 insertions(+), 133 deletions(-) delete mode 100644 delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSpec.scala create mode 100644 delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSuite.scala diff --git a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraClient.scala b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraClient.scala index d9e809be5b..291fbfa1ea 100644 --- a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraClient.scala +++ b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraClient.scala @@ -1,6 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.plugins.jira import akka.http.scaladsl.model.Uri +import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.plugins.jira.JiraError.{AccessTokenExpected, NoTokenError, RequestTokenExpected} import ch.epfl.bluebrain.nexus.delta.plugins.jira.OAuthToken.{AccessToken, RequestToken} import ch.epfl.bluebrain.nexus.delta.plugins.jira.config.JiraConfig @@ -13,7 +15,6 @@ import com.google.api.client.http.{ByteArrayContent, GenericUrl} import com.typesafe.scalalogging.Logger import io.circe.JsonObject import io.circe.syntax.EncoderOps -import monix.bio.{IO, Task} import org.apache.commons.codec.binary.Base64 import java.nio.charset.StandardCharsets @@ -29,47 +30,47 @@ trait JiraClient { /** * Creates an authorization request for the current user */ - def requestToken()(implicit caller: User): IO[JiraError, AuthenticationRequest] + def requestToken()(implicit caller: User): IO[AuthenticationRequest] /** * Generates an access token for the current user by providing the verifier code provided by the user */ - def accessToken(verifier: Verifier)(implicit caller: User): IO[JiraError, Unit] + def accessToken(verifier: Verifier)(implicit caller: User): IO[Unit] /** * Create an issue on behalf of the user in Jira * @param payload * the issue payload 
*/ - def createIssue(payload: JsonObject)(implicit caller: User): IO[JiraError, JiraResponse] + def createIssue(payload: JsonObject)(implicit caller: User): IO[JiraResponse] /** * Edits an issue on behalf of the user in Jira * @param payload * the issue payload */ - def editIssue(issueId: String, payload: JsonObject)(implicit caller: User): IO[JiraError, JiraResponse] + def editIssue(issueId: String, payload: JsonObject)(implicit caller: User): IO[JiraResponse] /** * Get the issue matching the provided identifier * @param issueId * the identifier */ - def getIssue(issueId: String)(implicit caller: User): IO[JiraError, JiraResponse] + def getIssue(issueId: String)(implicit caller: User): IO[JiraResponse] /** * List the projects the current user has access to * @param recent * when provided, return the n most recent projects the user was active in */ - def listProjects(recent: Option[Int])(implicit caller: User): IO[JiraError, JiraResponse] + def listProjects(recent: Option[Int])(implicit caller: User): IO[JiraResponse] /** * Search issues in Jira the user has access to according to the provided search payload * @param payload * the search payload */ - def search(payload: JsonObject)(implicit caller: User): IO[JiraError, JiraResponse] + def search(payload: JsonObject)(implicit caller: User): IO[JiraResponse] } @@ -101,34 +102,32 @@ object JiraClient { * @param jiraConfig * the jira configuration */ - def apply(store: TokenStore, jiraConfig: JiraConfig): Task[JiraClient] = { - Task - .delay { - // Create the RSA signer according to the PKCS8 key provided by the configuration - val privateBytes = Base64.decodeBase64(jiraConfig.privateKey.value) - val keySpec = new PKCS8EncodedKeySpec(privateBytes) - val kf = KeyFactory.getInstance("RSA") - val signer = new OAuthRsaSigner() - signer.privateKey = kf.generatePrivate(keySpec) - signer - } + def apply(store: TokenStore, jiraConfig: JiraConfig): IO[JiraClient] = { + IO { + // Create the RSA signer according to the PKCS8 key provided by the configuration + val privateBytes = Base64.decodeBase64(jiraConfig.privateKey.value) + val keySpec = new PKCS8EncodedKeySpec(privateBytes) + val kf = KeyFactory.getInstance("RSA") + val signer = new OAuthRsaSigner() + signer.privateKey = kf.generatePrivate(keySpec) + signer + } .map { signer => new JiraClient { private val netHttpTransport = new NetHttpTransport() - override def requestToken()(implicit caller: User): IO[JiraError, AuthenticationRequest] = - Task - .delay { - val tempToken = new JiraOAuthGetTemporaryToken(jiraConfig.base) - tempToken.consumerKey = jiraConfig.consumerKey - tempToken.signer = signer - tempToken.transport = netHttpTransport - tempToken.callback = "oob" - val response = tempToken.execute() - logger.debug(s"Request Token value: ${response.token}") - response.token - } + override def requestToken()(implicit caller: User): IO[AuthenticationRequest] = + IO { + val tempToken = new JiraOAuthGetTemporaryToken(jiraConfig.base) + tempToken.consumerKey = jiraConfig.consumerKey + tempToken.signer = signer + tempToken.transport = netHttpTransport + tempToken.callback = "oob" + val response = tempToken.execute() + logger.debug(s"Request Token value: ${response.token}") + response.token + } .flatMap { token => store.save(caller, RequestToken(token)).as { val authorizationURL = @@ -137,33 +136,32 @@ object JiraClient { AuthenticationRequest(Uri(authorizationURL.toString)) } } - .mapError { JiraError.from } + .adaptError { e => JiraError.from(e) } - override def accessToken(verifier: Verifier)(implicit 
caller: User): IO[JiraError, Unit] = + override def accessToken(verifier: Verifier)(implicit caller: User): IO[Unit] = store .get(caller) .flatMap { case None => IO.raiseError(NoTokenError) case Some(_: AccessToken) => IO.raiseError(RequestTokenExpected) case Some(RequestToken(value)) => - Task - .delay { - val accessToken = new JiraOAuthGetAccessToken(jiraConfig.base) - accessToken.consumerKey = jiraConfig.consumerKey - accessToken.signer = signer - accessToken.transport = netHttpTransport - accessToken.verifier = verifier.value - accessToken.temporaryToken = value - accessToken.execute().token - } + IO { + val accessToken = new JiraOAuthGetAccessToken(jiraConfig.base) + accessToken.consumerKey = jiraConfig.consumerKey + accessToken.signer = signer + accessToken.transport = netHttpTransport + accessToken.verifier = verifier.value + accessToken.temporaryToken = value + accessToken.execute().token + } .flatMap { token => logger.debug("Access Token:" + token) store.save(caller, AccessToken(token)) } } - .mapError { JiraError.from } + .adaptError { e => JiraError.from(e) } - override def createIssue(payload: JsonObject)(implicit caller: User): IO[JiraError, JiraResponse] = + override def createIssue(payload: JsonObject)(implicit caller: User): IO[JiraResponse] = requestFactory(caller).flatMap { factory => val url = jiraConfig.base / issueUrl JiraResponse( @@ -176,7 +174,7 @@ object JiraClient { override def editIssue(issueId: String, payload: JsonObject)(implicit caller: User - ): IO[JiraError, JiraResponse] = + ): IO[JiraResponse] = requestFactory(caller).flatMap { factory => val url = jiraConfig.base / issueUrl / issueId JiraResponse( @@ -187,7 +185,7 @@ object JiraClient { ) } - override def getIssue(issueId: String)(implicit caller: User): IO[JiraError, JiraResponse] = + override def getIssue(issueId: String)(implicit caller: User): IO[JiraResponse] = requestFactory(caller).flatMap { factory => val url = jiraConfig.base / issueUrl / issueId JiraResponse( @@ -197,7 +195,7 @@ object JiraClient { ) } - override def listProjects(recent: Option[Int])(implicit caller: User): IO[JiraError, JiraResponse] = + override def listProjects(recent: Option[Int])(implicit caller: User): IO[JiraResponse] = requestFactory(caller).flatMap { factory => val url = recent.fold(jiraConfig.base / projectUrl) { r => (jiraConfig.base / projectUrl).withQuery(Uri.Query("recent" -> r.toString)) @@ -209,7 +207,7 @@ object JiraClient { ) } - def search(payload: JsonObject)(implicit caller: User): IO[JiraError, JiraResponse] = + def search(payload: JsonObject)(implicit caller: User): IO[JiraResponse] = requestFactory(caller).flatMap { factory => JiraResponse( factory.buildPostRequest( @@ -219,7 +217,7 @@ object JiraClient { ) } - private def requestFactory(caller: User) = store.get(caller).hideErrors.flatMap { + private def requestFactory(caller: User) = store.get(caller).flatMap { case None => IO.raiseError(NoTokenError) case Some(_: RequestToken) => IO.raiseError(AccessTokenExpected) case Some(AccessToken(token)) => diff --git a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraPluginModule.scala b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraPluginModule.scala index 417f2ed4c4..b3f2894b6e 100644 --- a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraPluginModule.scala +++ b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/JiraPluginModule.scala @@ -1,6 +1,6 @@ package 
ch.epfl.bluebrain.nexus.delta.plugins.jira
-import cats.effect.Clock
+import cats.effect.{Clock, IO}
 import ch.epfl.bluebrain.nexus.delta.plugins.jira.config.JiraConfig
 import ch.epfl.bluebrain.nexus.delta.plugins.jira.routes.JiraRoutes
 import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution
@@ -11,7 +11,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities
 import ch.epfl.bluebrain.nexus.delta.sdk.model._
 import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors
 import izumi.distage.model.definition.{Id, ModuleDef}
-import monix.bio.UIO
 import monix.execution.Scheduler
 
 /**
@@ -21,7 +20,7 @@ class JiraPluginModule(priority: Int) extends ModuleDef {
 
   make[JiraConfig].from { JiraConfig.load(_) }
 
-  make[JiraClient].fromEffect { (xas: Transactors, jiraConfig: JiraConfig, clock: Clock[UIO]) =>
+  make[JiraClient].fromEffect { (xas: Transactors, jiraConfig: JiraConfig, clock: Clock[IO]) =>
     JiraClient(TokenStore(xas)(clock), jiraConfig)
   }
 
diff --git a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala
index 1dad495167..6b91f52d77 100644
--- a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala
+++ b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala
@@ -1,7 +1,8 @@
 package ch.epfl.bluebrain.nexus.delta.plugins.jira
 
-import cats.effect.Clock
-import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOUtils.instant
+import cats.effect.{Clock, IO}
+import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._
+import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant
 import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors
 import ch.epfl.bluebrain.nexus.delta.sourcing.implicits._
 import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity
@@ -10,7 +11,6 @@ import doobie.implicits._
 import doobie.postgres.implicits._
 import io.circe.Json
 import io.circe.syntax._
-import monix.bio.{Task, UIO}
 
 /**
   * Stores Jira tokens in the underlying databases
@@ -22,7 +22,7 @@
    * @param user
    *   the user
    */
-  def get(user: User): Task[Option[OAuthToken]]
+  def get(user: User): IO[Option[OAuthToken]]
 
   /**
    * Save the token for the given user
@@ -31,7 +31,7 @@
    * @param oauthToken
    *   the associated token
    */
-  def save(user: User, oauthToken: OAuthToken): Task[Unit]
+  def save(user: User, oauthToken: OAuthToken): IO[Unit]
 
 }
 
@@ -40,21 +40,23 @@ object TokenStore {
   /**
     * Create a token store
     */
-  def apply(xas: Transactors)(implicit clock: Clock[UIO]): TokenStore = {
+  def apply(xas: Transactors)(implicit clock: Clock[IO]): TokenStore = {
     new TokenStore {
-      override def get(user: Identity.User): Task[Option[OAuthToken]] =
-        sql"SELECT token_value FROM jira_tokens WHERE realm = ${user.realm.value} and subject = ${user.subject}"
-          .query[Json]
-          .option
-          .transact(xas.read)
+      override def get(user: Identity.User): IO[Option[OAuthToken]] =
+        toCatsIO(
+          sql"SELECT token_value FROM jira_tokens WHERE realm = ${user.realm.value} and subject = ${user.subject}"
+            .query[Json]
+            .option
+            .transact(xas.read)
+        )
           .flatMap {
             case Some(token) =>
-              Task.fromEither(token.as[OAuthToken]).map(Some(_))
-            case None => Task.none
+              IO.fromEither(token.as[OAuthToken]).map(Some(_))
+            case None => IO.none
           }
 
-      override def save(user: Identity.User, oauthToken: OAuthToken): Task[Unit] =
-        instant.flatMap { now =>
+      override def save(user: Identity.User, oauthToken: OAuthToken): IO[Unit] =
+        IOInstant.now.flatMap { now =>
           sql""" INSERT INTO jira_tokens(realm, subject, instant, token_value)
               | VALUES(${user.realm.value}, ${user.subject}, $now, ${oauthToken.asJson})
              | ON CONFLICT (realm, subject) DO UPDATE SET instant = EXCLUDED.instant, token_value = EXCLUDED.token_value
diff --git a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/model/JiraResponse.scala b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/model/JiraResponse.scala
index a6ad91f471..63f1576ec7 100644
--- a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/model/JiraResponse.scala
+++ b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/model/JiraResponse.scala
@@ -1,9 +1,10 @@
 package ch.epfl.bluebrain.nexus.delta.plugins.jira.model
 
+import cats.effect.IO
+import cats.syntax.all._
 import ch.epfl.bluebrain.nexus.delta.plugins.jira.JiraError
 import com.google.api.client.http.HttpRequest
 import io.circe.{parser, Json}
-import monix.bio.{IO, Task}
 
 /**
   * Jira response
@@ -12,19 +13,16 @@ final case class JiraResponse(content: Option[Json])
 
 object JiraResponse {
 
-  def apply(request: HttpRequest): IO[JiraError, JiraResponse] = {
-    Task
-      .delay(
-        request.execute()
-      )
+  def apply(request: HttpRequest): IO[JiraResponse] = {
+    IO(request.execute())
       .flatMap { response =>
         val content = response.parseAsString()
         if (content.nonEmpty) {
-          Task.fromEither(parser.parse(content)).map { r => JiraResponse(Some(r)) }
+          IO.fromEither(parser.parse(content)).map { r => JiraResponse(Some(r)) }
         } else {
-          Task.pure(JiraResponse(None))
+          IO.pure(JiraResponse(None))
         }
       }
-      .mapError { JiraError.from }
+      .adaptError { e => JiraError.from(e) }
   }
 }
diff --git a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/routes/JiraRoutes.scala b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/routes/JiraRoutes.scala
index 47fb023d4e..32e1079476 100644
--- a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/routes/JiraRoutes.scala
+++ b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/routes/JiraRoutes.scala
@@ -1,20 +1,23 @@
 package ch.epfl.bluebrain.nexus.delta.plugins.jira.routes
 
+import cats.syntax.all._
 import akka.http.scaladsl.model.StatusCodes
 import akka.http.scaladsl.server.Directives._
 import akka.http.scaladsl.server.{Directive1, Route}
-import ch.epfl.bluebrain.nexus.delta.plugins.jira.JiraClient
-import ch.epfl.bluebrain.nexus.delta.plugins.jira.model.Verifier
+import cats.effect.IO
+import ch.epfl.bluebrain.nexus.delta.plugins.jira.{JiraClient, JiraError}
+import ch.epfl.bluebrain.nexus.delta.plugins.jira.model.{JiraResponse, Verifier}
 import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution
 import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering
 import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck
 import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling
 import ch.epfl.bluebrain.nexus.delta.sdk.directives.AuthDirectives
-import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._
+import ch.epfl.bluebrain.nexus.delta.sdk.ce.DeltaDirectives._
 import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.AuthorizationFailed
 import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities
 import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfMarshalling
 import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri
+import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.RealmRejection
 import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User
 import io.circe.JsonObject
 import io.circe.syntax.EncoderOps
@@ -46,6 +49,9 @@
     }
   }
 
+  private def adaptResponse(io: IO[JiraResponse]) =
+    io.map(_.content).attemptNarrow[JiraError]
+
   def routes: Route =
     baseUriPrefix(baseUri.prefix) {
       pathPrefix("jira") {
@@ -53,12 +59,12 @@
         concat(
           // Request token
           (pathPrefix("request-token") & post & pathEndOrSingleSlash) {
-            emit(jiraClient.requestToken().map(_.asJson))
+            emit(jiraClient.requestToken().map(_.asJson).attemptNarrow[RealmRejection])
           },
           // Get the access token
           (pathPrefix("access-token") & post & pathEndOrSingleSlash) {
             entity(as[Verifier]) { verifier =>
-              emit(jiraClient.accessToken(verifier).map(_.asJson))
+              emit(jiraClient.accessToken(verifier).attemptNarrow[RealmRejection])
             }
           },
           // Issues
@@ -66,28 +72,28 @@
             concat(
               // Create an issue
               (post & entity(as[JsonObject])) { payload =>
-                emit(StatusCodes.Created, jiraClient.createIssue(payload).map(_.content))
+                emit(StatusCodes.Created, adaptResponse(jiraClient.createIssue(payload)))
              },
               // Edit an issue
               (put & pathPrefix(Segment)) { issueId =>
                 entity(as[JsonObject]) { payload =>
-                  emit(StatusCodes.NoContent, jiraClient.editIssue(issueId, payload).map(_.content))
+                  emit(StatusCodes.NoContent, adaptResponse(jiraClient.editIssue(issueId, payload)))
                 }
               },
               // Get an issue
               (get & pathPrefix(Segment)) { issueId =>
-                emit(jiraClient.getIssue(issueId).map(_.content))
+                emit(adaptResponse(jiraClient.getIssue(issueId)))
               }
             )
           },
           // List projects
           (get & pathPrefix("project") & get & parameter("recent".as[Int].?)) { recent =>
-            emit(jiraClient.listProjects(recent).map(_.content))
+            emit(adaptResponse(jiraClient.listProjects(recent)))
          },
           // Search issues
           (post & pathPrefix("search") & pathEndOrSingleSlash) {
             entity(as[JsonObject]) { payload =>
-              emit(jiraClient.search(payload).map(_.content))
+              emit(adaptResponse(jiraClient.search(payload)))
             }
           }
         )
diff --git a/delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSpec.scala b/delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSpec.scala
deleted file mode 100644
index e5b06d890f..0000000000
--- a/delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSpec.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package ch.epfl.bluebrain.nexus.delta.plugins.jira
-
-import ch.epfl.bluebrain.nexus.delta.plugins.jira.OAuthToken.{AccessToken, RequestToken}
-import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User
-import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label
-import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture
-import ch.epfl.bluebrain.nexus.testkit._
-import org.scalatest.OptionValues
-
-class TokenStoreSpec
-    extends DoobieScalaTestFixture
-    with IOFixedClock
-    with IOValues
-    with OptionValues
-    with TestHelpers
-    with ShouldMatchers {
-
-  private lazy val tokenStore: TokenStore = TokenStore(xas)
-
-  "A store" should {
-
-    val user = User("Alice", Label.unsafe("Wonderland"))
-
-    val request = RequestToken("request")
-    val access  = AccessToken("access")
-
-    "return none if no token exist for the user" in {
-      tokenStore.get(user).accepted shouldEqual None
-    }
-
-    "save a given token for the user" in {
-      tokenStore.save(user, request).accepted
-    }
-
-    "get a token for the user" in {
-      tokenStore.get(user).accepted.value shouldEqual request
-    }
-
-    "overwrite an existing token for the user" in {
-      tokenStore.save(user, access).accepted
-      tokenStore.get(user).accepted.value shouldEqual access
-    }
-
-  }
-
-}
diff --git a/delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSuite.scala b/delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSuite.scala
new file mode 100644
index 0000000000..2c6579ce0a
--- /dev/null
+++ b/delta/plugins/jira/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStoreSuite.scala
@@ -0,0 +1,41 @@
+package ch.epfl.bluebrain.nexus.delta.plugins.jira
+
+import ch.epfl.bluebrain.nexus.delta.plugins.jira.OAuthToken.{AccessToken, RequestToken}
+import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User
+import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label
+import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie
+import ch.epfl.bluebrain.nexus.testkit.ce.{CatsEffectSuite, IOFixedClock}
+import munit.AnyFixture
+
+class TokenStoreSuite extends CatsEffectSuite with Doobie.Fixture with IOFixedClock {
+
+  override def munitFixtures: Seq[AnyFixture[_]] = List(doobie)
+
+  private lazy val xas = doobie()
+
+  private lazy val tokenStore: TokenStore = TokenStore(xas)
+
+  private val user = User("Alice", Label.unsafe("Wonderland"))
+
+  private val request = RequestToken("request")
+  private val access  = AccessToken("access")
+
+  test("Return none if no token exist for the user") {
+    tokenStore.get(user).assertEquals(None)
+  }
+
+  test("Save a given token for the user and return it") {
+    for {
+      _ <- tokenStore.save(user, request)
+      _ <- tokenStore.get(user).assertEquals(Some(request))
+    } yield ()
+  }
+
+  test("Overwrite an existing token for the user") {
+    for {
+      _ <- tokenStore.save(user, access)
+      _ <- tokenStore.get(user).assertEquals(Some(access))
+    } yield ()
+  }
+
+}