From bcdeee280855a1788d19cfb8b93a61fe97ca83f4 Mon Sep 17 00:00:00 2001 From: Simon Date: Thu, 5 Oct 2023 13:46:10 +0200 Subject: [PATCH 01/13] Use Transactors read/write with Cats Effect in the Jira plugin (#4333) * Use Transactors read/write with Cats Effect --------- Co-authored-by: Simon Dumas --- .../nexus/delta/plugins/jira/TokenStore.scala | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala index 6b91f52d77..cd3df85c89 100644 --- a/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala +++ b/delta/plugins/jira/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/jira/TokenStore.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.jira import cats.effect.{Clock, IO} -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors import ch.epfl.bluebrain.nexus.delta.sourcing.implicits._ @@ -43,12 +42,10 @@ object TokenStore { def apply(xas: Transactors)(implicit clock: Clock[IO]): TokenStore = { new TokenStore { override def get(user: Identity.User): IO[Option[OAuthToken]] = - toCatsIO( - sql"SELECT token_value FROM jira_tokens WHERE realm = ${user.realm.value} and subject = ${user.subject}" - .query[Json] - .option - .transact(xas.read) - ) + sql"SELECT token_value FROM jira_tokens WHERE realm = ${user.realm.value} and subject = ${user.subject}" + .query[Json] + .option + .transact(xas.readCE) .flatMap { case Some(token) => IO.fromEither(token.as[OAuthToken]).map(Some(_)) @@ -60,7 +57,7 @@ object TokenStore { sql""" INSERT INTO jira_tokens(realm, subject, instant, token_value) | VALUES(${user.realm.value}, ${user.subject}, $now, ${oauthToken.asJson}) | ON CONFLICT (realm, subject) DO UPDATE SET instant = EXCLUDED.instant, token_value = EXCLUDED.token_value - """.stripMargin.update.run.transact(xas.write).void + """.stripMargin.update.run.transact(xas.writeCE).void } } } From c6a155c50301749738befb02de077fb929d43fc5 Mon Sep 17 00:00:00 2001 From: Simon Date: Thu, 5 Oct 2023 13:59:41 +0200 Subject: [PATCH 02/13] Remove static contexts from docs (#4334) Co-authored-by: Simon Dumas --- docs/src/main/paradox/contexts/admin.json | 30 -- docs/src/main/paradox/contexts/digest.json | 9 - docs/src/main/paradox/contexts/iam.json | 59 ---- docs/src/main/paradox/contexts/identity.json | 14 - docs/src/main/paradox/contexts/resolver.json | 32 --- docs/src/main/paradox/contexts/resource.json | 50 ---- .../main/paradox/contexts/shacl-20170720.json | 259 ------------------ docs/src/main/paradox/contexts/storage.json | 30 -- docs/src/main/paradox/contexts/tag.json | 9 - docs/src/main/paradox/contexts/view.json | 36 --- 10 files changed, 528 deletions(-) delete mode 100644 docs/src/main/paradox/contexts/admin.json delete mode 100644 docs/src/main/paradox/contexts/digest.json delete mode 100644 docs/src/main/paradox/contexts/iam.json delete mode 100644 docs/src/main/paradox/contexts/identity.json delete mode 100644 docs/src/main/paradox/contexts/resolver.json delete mode 100644 docs/src/main/paradox/contexts/resource.json delete mode 100644 docs/src/main/paradox/contexts/shacl-20170720.json delete mode 100644 docs/src/main/paradox/contexts/storage.json delete mode 100644 docs/src/main/paradox/contexts/tag.json delete mode 
100644 docs/src/main/paradox/contexts/view.json diff --git a/docs/src/main/paradox/contexts/admin.json b/docs/src/main/paradox/contexts/admin.json deleted file mode 100644 index 85d1663384..0000000000 --- a/docs/src/main/paradox/contexts/admin.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "description": "nxv:description", - "_label": "nxv:label", - "_uuid": "nxv:uuid", - "base": { - "@id": "nxv:base", - "@type": "@id" - }, - "vocab": { - "@id": "nxv:vocab", - "@type": "@id" - }, - "apiMappings": { - "@id": "nxv:apiMappings", - "@container": "@set" - }, - "prefix": "nxv:prefix", - "namespace": { - "@id": "nxv:namespace", - "@type": "@id" - }, - "_organizationUuid": "nxv:organizationUuid", - "_organizationLabel": "nxv:organizationLabel", - "Organization": "nxv:Organization", - "Project": "nxv:Project" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/admin.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/digest.json b/docs/src/main/paradox/contexts/digest.json deleted file mode 100644 index c4b8972f01..0000000000 --- a/docs/src/main/paradox/contexts/digest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "value": "nxv:value", - "algorithm": "nxv:algorithm", - "UpdateDigest": "nxv:UpdateDigest" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/digest.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/iam.json b/docs/src/main/paradox/contexts/iam.json deleted file mode 100644 index 5d080e3a77..0000000000 --- a/docs/src/main/paradox/contexts/iam.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "logo": { - "@type": "@id", - "@id": "nxv:logo" - }, - "openIdConfig": { - "@id": "nxv:openIdConfig", - "@type": "@id" - }, - "name": "nxv:name", - "_label": "nxv:label", - "_keys": "nxv:keys", - "_grantTypes": "nxv:grantTypes", - "_issuer": "nxv:issuer", - "_authorizationEndpoint": { - "@id": "nxv:authorizationEndpoint", - "@type": "@id" - }, - "_tokenEndpoint": { - "@id": "nxv:tokenEndpoint", - "@type": "@id" - }, - "_userInfoEndpoint": { - "@id": "nxv:userInfoEndpoint", - "@type": "@id" - }, - "_revocationEndpoint": { - "@id": "nxv:revocationEndpoint", - "@type": "@id" - }, - "_endSessionEndpoint": { - "@id": "nxv:endSessionEndpoint", - "@type": "@id" - }, - "permissions": { - "@id": "nxv:permissions", - "@container": "@set" - }, - "acl": { - "@id": "nxv:acl", - "@container": "@set" - }, - "_path": "nxv:path", - "identity": "nxv:identity", - "realm": "nxv:realm", - "subject": "nxv:subject", - "group": "nxv:group", - "Realm": "nxv:Realm", - "Permissions": "nxv:Permissions", - "AccessControlList": "nxv:AccessControlList", - "Authenticated": "nxv:Authenticated", - "Anonymous": "nxv:Anonymous", - "User": "nxv:User", - "Group": "nxv:Group" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/iam.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/identity.json b/docs/src/main/paradox/contexts/identity.json deleted file mode 100644 index f06350518f..0000000000 --- a/docs/src/main/paradox/contexts/identity.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "User": "nxv:User", - "Group": "nxv:Group", - "Authenticated": "nxv:Authenticated", - "Anonymous": "nxv:Anonymous", - "realm": "nxv:realm", - "subject": "nxv:subject", - "group": 
"nxv:group", - "identity": "nxv:identity" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/identity.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/resolver.json b/docs/src/main/paradox/contexts/resolver.json deleted file mode 100644 index 4581db1bca..0000000000 --- a/docs/src/main/paradox/contexts/resolver.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "Resolver": "nxv:Resolver", - "CrossProject": "nxv:CrossProject", - "InProject": "nxv:InProject", - "User": "nxv:User", - "Group": "nxv:Group", - "Authenticated": "nxv:Authenticated", - "Anonymous": "nxv:Anonymous", - "priority": "nxv:priority", - "resourceTypes": { - "@id": "nxv:resourceTypes", - "@type": "@id", - "@container": "@set" - }, - "projects": { - "@container": "@set", - "@id": "nxv:projects" - }, - "identities": { - "@id": "nxv:identities", - "@container": "@set" - }, - "realm": "nxv:realm", - "subject": "nxv:subject", - "group": "nxv:group" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/resolver.json", - "name": "http://schema.org/name", - "description": "http://schema.org/description" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/resource.json b/docs/src/main/paradox/contexts/resource.json deleted file mode 100644 index 3e81715e0c..0000000000 --- a/docs/src/main/paradox/contexts/resource.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "Schema": "nxv:Schema", - "File": "nxv:File", - "Resolver": "nxv:Resolver", - "InProject": "nxv:InProject", - "CrossProject": "nxv:CrossProject", - "View": "nxv:View", - "ElasticSearchView": "nxv:ElasticSearchView", - "Storage": "nxv:Storage", - "DiskStorage": "nxv:DiskStorage", - "RemoteDiskStorage": "nxv:RemoteDiskStorage", - "S3Storage": "nxv:S3Storage", - "AggregateElasticSearchView": "nxv:AggregateElasticSearchView", - "AggregateSparqlView": "nxv:AggregateSparqlView", - "SparqlView": "nxv:SparqlView", - "_deprecated": "nxv:deprecated", - "_createdAt": "nxv:createdAt", - "_updatedAt": "nxv:updatedAt", - "_project": { - "@id": "nxv:project", - "@type": "@id" - }, - "_self": "nxv:self", - "_incoming": "nxv:incoming", - "_outgoing": "nxv:outgoing", - "_createdBy": "nxv:createdBy", - "_updatedBy": "nxv:updatedBy", - "_constrainedBy": { - "@id": "nxv:constrainedBy", - "@type": "@id" - }, - "_rev": "nxv:rev", - "_digest": "nxv:digest", - "_algorithm": "nxv:algorithm", - "_value": "nxv:value", - "_mediaType": "nxv:mediaType", - "_location": "nxv:location", - "_filename": "nxv:filename", - "_bytes": "nxv:bytes", - "_instant": "nxv:instant", - "_storage": "nxv:storage", - "_subject": { - "@id": "nxv:subject", - "@type": "@id" - } - }, - "@id": "https://bluebrain.github.io/nexus/contexts/resource.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/shacl-20170720.json b/docs/src/main/paradox/contexts/shacl-20170720.json deleted file mode 100644 index 0792045f15..0000000000 --- a/docs/src/main/paradox/contexts/shacl-20170720.json +++ /dev/null @@ -1,259 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "nxs": "https://bluebrain.github.io/nexus/schemas/", - "nxc": "https://bluebrain.github.io/nexus/contexts/", - "xsd": "http://www.w3.org/2001/XMLSchema#", - "owl": "http://www.w3.org/2002/07/owl#", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - "rdfs": "http://www.w3.org/2000/01/rdf-schema#", - "skos": 
"http://www.w3.org/2004/02/skos/core#", - "prov": "http://www.w3.org/ns/prov#", - "sh": "http://www.w3.org/ns/shacl#", - "shsh": "http://www.w3.org/ns/shacl-shacl#", - "dcterms": "http://purl.org/dc/terms/", - "schema": "http://schema.org/", - "dcat": "http://www.w3.org/ns/dcat#", - "shext": "http://www.w3.org/ns/shacl/ext#", - "nxsh": "https://bluebrain.github.io/nexus/vocabulary/shacl/", - "distribution": "schema:distribution", - "class": { - "@id": "sh:class", - "@type": "@id" - }, - "rootClass": { - "@id": "shext:rootClass", - "@type": "@id" - }, - "path": { - "@id": "sh:path", - "@type": "@id" - }, - "qualifiedValueShape": { - "@id": "sh:qualifiedValueShape", - "@type": "@id" - }, - "qualifiedValueShapesDisjoint": { - "@id": "sh:qualifiedValueShapesDisjoint", - "@type": "xsd:boolean" - }, - "qualifiedMinCount": { - "@id": "sh:qualifiedMinCount", - "@type": "xsd:integer" - }, - "qualifiedMaxCount": { - "@id": "sh:qualifiedMaxCount", - "@type": "xsd:integer" - }, - "maxCount": { - "@id": "sh:maxCount", - "@type": "xsd:integer" - }, - "minCount": { - "@id": "sh:minCount", - "@type": "xsd:integer" - }, - "minInclusive": { - "@id": "sh:minInclusive" - }, - "maxInclusive": { - "@id": "sh:maxInclusive" - }, - "maxExclusive": { - "@id": "sh:maxExclusive" - }, - "minExclusive": { - "@id": "sh:minExclusive" - }, - "in": { - "@id": "sh:in", - "@container": "@list" - }, - "imports": { - "@id": "owl:imports", - "@type": "@id", - "@container": "@set" - }, - "datatype": { - "@id": "sh:datatype", - "@type": "@id" - }, - "description": { - "@id": "sh:description" - }, - "name": { - "@id": "sh:name" - }, - "severity": { - "@id": "sh:severity" - }, - "nodeKind": { - "@id": "sh:nodeKind", - "@type": "@id" - }, - "BlankNode": { - "@id": "sh:BlankNode" - }, - "BlankNodeOrIRI": { - "@id": "sh:BlankNodeOrIRI" - }, - "BlankNodeOrLiteral": { - "@id": "sh:BlankNodeOrLiteral" - }, - "IRI": { - "@id": "sh:IRI" - }, - "IRIOrLiteral": { - "@id": "sh:IRIOrLiteral" - }, - "Literal": { - "@id": "sh:Literal" - }, - "prefix": { - "@id": "sh:prefix", - "@type": "xsd:string" - }, - "declare": { - "@id": "sh:declare" - }, - "namespace": { - "@id": "sh:namespace", - "@type": "xsd:anyURI" - }, - "Violation": { - "@id": "sh:Violation" - }, - "suggestedShapesGraph": { - "@id": "sh:suggestedShapesGraph", - "@type": "@id" - }, - "shapesGraph": { - "@id": "sh:shapesGraph", - "@type": "@id" - }, - "node": { - "@id": "sh:node", - "@type": "@id" - }, - "property": { - "@id": "sh:property", - "@type": "@id", - "@container": "@set" - }, - "targetClass": { - "@id": "sh:targetClass", - "@type": "@id" - }, - "targetObjectsOf": { - "@id": "sh:targetObjectsOf", - "@type": "@id" - }, - "targetSubjectsOf": { - "@id": "sh:targetSubjectsOf", - "@type": "@id" - }, - "targetNode": { - "@id": "sh:targetNode", - "@type": "@id" - }, - "target": { - "@id": "sh:target", - "@type": "@id" - }, - "shapes": { - "@id": "nxv:shapes", - "@type": "@id", - "@container": "@set" - }, - "Shape": { - "@id": "sh:Shape" - }, - "NodeShape": { - "@id": "sh:NodeShape" - }, - "PropertyShape": { - "@id": "sh:PropertyShape" - }, - "or": { - "@id": "sh:or", - "@type": "@id", - "@container": "@list" - }, - "and": { - "@id": "sh:and", - "@type": "@id", - "@container": "@list" - }, - "xone": { - "@id": "sh:xone", - "@type": "@id", - "@container": "@list" - }, - "not": { - "@id": "sh:not", - "@type": "@id", - "@container": "@list" - }, - "lessThan": { - "@id": "sh:lessThan", - "@type": "@id" - }, - "hasValue": { - "@id": "sh:hasValue" - }, - "message": { - "@id": "sh:message" 
- }, - "deactivated": { - "@id": "sh:deactivated" - }, - "pattern": { - "@id": "sh:pattern" - }, - "label": { - "@id": "rdfs:label", - "@type": "xsd:string" - }, - "comment": { - "@id": "rdfs:comment", - "@type": "xsd:string" - }, - "editorialNote": { - "@id": "skos:editorialNote", - "@type": "xsd:string" - }, - "seeAlso": { - "@id": "rdfs:seeAlso", - "@type": "@id" - }, - "targetedNodes": "nxsh:targetedNodes", - "conforms": "sh:conforms", - "result": "sh:result", - "resultMessage": "sh:resultMessage", - "focusNode": { - "@id": "sh:focusNode", - "@type": "@id" - }, - "resultSeverity": { - "@id": "sh:resultSeverity", - "@type": "@id" - }, - "sourceConstraintComponent": { - "@id": "sh:sourceConstraintComponent", - "@type": "@id" - }, - "sourceShape": { - "@id": "sh:sourceShape", - "@type": "@id" - }, - "value": { - "@id": "sh:value", - "@type": "@id" - }, - "detail": { - "@id": "sh:detail", - "@container": "@set" - } - }, - "@id": "https://bluebrain.github.io/nexus/contexts/shacl-20170720.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/storage.json b/docs/src/main/paradox/contexts/storage.json deleted file mode 100644 index ce03f8a015..0000000000 --- a/docs/src/main/paradox/contexts/storage.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "Storage": "nxv:Storage", - "DiskStorage": "nxv:DiskStorage", - "RemoteDiskStorage": "nxv:RemoteDiskStorage", - "S3Storage": "nxv:S3Storage", - "default": "nxv:default", - "volume": "nxv:volume", - "readPermission": "nxv:readPermission", - "writePermission": "nxv:writePermission", - "maxFileSize": { - "@id": "nxv:maxFileSize", - "@type": "http://www.w3.org/2001/XMLSchema#long" - }, - "bucket": "nxv:bucket", - "credentials": "nxv:credentials", - "folder": "nxv:folder", - "endpoint": "nxv:endpoint", - "region": "nxv:region", - "accessKey": "nxv:accessKey", - "secretKey": "nxv:secretKey", - "path": "nxv:path", - "location": "nxv:location", - "filename": "nxv:filename", - "mediaType": "nxv:mediaType", - "_algorithm": "nxv:algorithm" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/storage.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/tag.json b/docs/src/main/paradox/contexts/tag.json deleted file mode 100644 index 6bf3def230..0000000000 --- a/docs/src/main/paradox/contexts/tag.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "tag": "nxv:tag", - "rev": "nxv:rev", - "tags": "nxv:tags" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/tag.json" -} \ No newline at end of file diff --git a/docs/src/main/paradox/contexts/view.json b/docs/src/main/paradox/contexts/view.json deleted file mode 100644 index 004db99ce5..0000000000 --- a/docs/src/main/paradox/contexts/view.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "@context": { - "nxv": "https://bluebrain.github.io/nexus/vocabulary/", - "View": "nxv:View", - "ElasticSearchView": "nxv:ElasticSearchView", - "SparqlView": "nxv:SparqlView", - "AggregateElasticSearchView": "nxv:AggregateElasticSearchView", - "AggregateSparqlView": "nxv:AggregateSparqlView", - "_uuid": "nxv:uuid", - "resourceSchemas": { - "@id": "nxv:resourceSchemas", - "@type": "@id", - "@container": "@set" - }, - "resourceTypes": { - "@id": "nxv:resourceTypes", - "@type": "@id", - "@container": "@set" - }, - "views": { - "@id": "nxv:views", - "@container": "@set" - }, - "project": "nxv:project", - "viewId": { - "@id": "nxv:viewId", - "@type": "@id" 
- }, - "resourceTag": "nxv:resourceTag", - "includeMetadata": "nxv:includeMetadata", - "includeDeprecated": "nxv:includeDeprecated", - "sourceAsText": "nxv:sourceAsText", - "mapping": "nxv:mapping" - }, - "@id": "https://bluebrain.github.io/nexus/contexts/view.json" -} \ No newline at end of file From b8ed2c5972afc713fa54f634a4ecbb48865fd208 Mon Sep 17 00:00:00 2001 From: Oliver <20188437+olivergrabinski@users.noreply.github.com> Date: Thu, 5 Oct 2023 14:23:31 +0200 Subject: [PATCH 03/13] Cache the active realm information in Identities (#4325) --- .../delta/sdk/identities/IdentitiesImpl.scala | 28 ++++--- .../sdk/identities/IdentitiesImplSuite.scala | 77 ++++++++++++++++--- 2 files changed, 87 insertions(+), 18 deletions(-) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala index 729ba15d3a..fe9200ecae 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala @@ -2,7 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.identities import akka.http.scaladsl.model.headers.{Authorization, OAuth2BearerToken} import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri} -import cats.data.NonEmptySet +import cats.data.{NonEmptySet, OptionT} import cats.effect.IO import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.Logger @@ -12,7 +12,7 @@ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpClientStatusError -import ch.epfl.bluebrain.nexus.delta.sdk.identities.IdentitiesImpl.{extractGroups, logger, GroupsCache} +import ch.epfl.bluebrain.nexus.delta.sdk.identities.IdentitiesImpl.{extractGroups, logger, GroupsCache, RealmCache} import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection.{GetGroupsFromOidcError, InvalidAccessToken, UnknownAccessTokenIssuer} import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceF @@ -31,6 +31,7 @@ import io.circe.{Decoder, HCursor, Json} import scala.util.Try class IdentitiesImpl private[identities] ( + realm: RealmCache, findActiveRealm: String => IO[Option[Realm]], getUserInfo: (Uri, OAuth2BearerToken) => IO[Json], groups: GroupsCache @@ -61,6 +62,11 @@ class IdentitiesImpl private[identities] ( ) } + def fetchRealm(parsedToken: ParsedToken): IO[Realm] = { + val getRealm = realm.getOrElseAttemptUpdate(parsedToken.rawToken, findActiveRealm(parsedToken.issuer)) + OptionT(getRealm).getOrRaise(UnknownAccessTokenIssuer) + } + def fetchGroups(parsedToken: ParsedToken, realm: Realm): IO[Set[Group]] = { parsedToken.groups .map { s => @@ -77,11 +83,10 @@ class IdentitiesImpl private[identities] ( } val result = for { - parsedToken <- IO.fromEither(ParsedToken.fromToken(token)) - activeRealmOption <- findActiveRealm(parsedToken.issuer) - activeRealm <- IO.fromOption(activeRealmOption)(UnknownAccessTokenIssuer) - _ <- validate(activeRealm.acceptedAudiences, parsedToken, realmKeyset(activeRealm)) - groups <- fetchGroups(parsedToken, activeRealm) + parsedToken <- IO.fromEither(ParsedToken.fromToken(token)) + activeRealm <- fetchRealm(parsedToken) + _ <- 
validate(activeRealm.acceptedAudiences, parsedToken, realmKeyset(activeRealm)) + groups <- fetchGroups(parsedToken, activeRealm) } yield { val user = User(parsedToken.subject, activeRealm.label) Caller(user, groups ++ Set(Anonymous, user, Authenticated(activeRealm.label))) @@ -95,6 +100,7 @@ class IdentitiesImpl private[identities] ( object IdentitiesImpl { type GroupsCache = LocalCache[String, Set[Group]] + type RealmCache = LocalCache[String, Realm] private val logger = Logger.cats[this.type] @@ -133,10 +139,14 @@ object IdentitiesImpl { * the cache configuration */ def apply(realms: Realms, hc: HttpClient, config: CacheConfig): IO[Identities] = { + val groupsCache = LocalCache[String, Set[Group]](config) + val realmCache = LocalCache[String, Realm](config) + val findActiveRealm: String => IO[Option[Realm]] = { (issuer: String) => val pagination = FromPagination(0, 1000) val params = RealmSearchParams(issuer = Some(issuer), deprecated = Some(false)) val sort = ResourceF.defaultSort[Realm] + realms.list(pagination, params, sort).map { _.results.map(entry => entry.source.value).headOption } @@ -145,8 +155,8 @@ object IdentitiesImpl { hc.toJson(HttpRequest(uri = uri, headers = List(Authorization(token)))) } - LocalCache[String, Set[Group]](config).map { groups => - new IdentitiesImpl(findActiveRealm, getUserInfo, groups) + (realmCache, groupsCache).mapN { (realm, groups) => + new IdentitiesImpl(realm, findActiveRealm, getUserInfo, groups) } } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala index 3071928685..7e000063db 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala @@ -4,10 +4,12 @@ import akka.http.scaladsl.model.headers.OAuth2BearerToken import akka.http.scaladsl.model.{HttpRequest, Uri} import cats.data.NonEmptySet import cats.effect.IO +import cats.effect.concurrent.Ref import cats.implicits._ import ch.epfl.bluebrain.nexus.delta.kernel.cache.LocalCache import ch.epfl.bluebrain.nexus.delta.sdk.generators.{RealmGen, WellKnownGen} import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpUnexpectedError +import ch.epfl.bluebrain.nexus.delta.sdk.identities.IdentitiesImpl.{GroupsCache, RealmCache} import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection.{AccessTokenDoesNotContainAnIssuer, AccessTokenDoesNotContainSubject, GetGroupsFromOidcError, InvalidAccessToken, InvalidAccessTokenFormat, UnknownAccessTokenIssuer} import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.Realm @@ -108,6 +110,8 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa keys = Set(parser.parse(rsaKey.toPublicJWK.toJSONString).rightValue) ) + type FindRealm = String => IO[Option[Realm]] + private val findActiveRealm: String => IO[Option[Realm]] = ioFromMap[String, Realm]( githubLabel.value -> github, githubLabel2.value -> github2, @@ -120,15 +124,21 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa (_: Uri) => HttpUnexpectedError(HttpRequest(), "Error while getting response") )(uri) - private val identities: Identities = LocalCache[String, Set[Group]]() - .map { cache => - new IdentitiesImpl( - findActiveRealm, - (uri: Uri, _: 
OAuth2BearerToken) => userInfo(uri), - cache - ) - } - .unsafeRunSync() + private val realmCache = LocalCache[String, Realm]() + private val groupsCache = LocalCache[String, Set[Group]]() + + private val identitiesFromCaches: (RealmCache, GroupsCache) => FindRealm => Identities = + (realmCache, groupsCache) => + findRealm => + new IdentitiesImpl( + realmCache, + findRealm, + (uri: Uri, _: OAuth2BearerToken) => userInfo(uri), + groupsCache + ) + + private val identities = + identitiesFromCaches(realmCache.unsafeRunSync(), groupsCache.unsafeRunSync())(findActiveRealm) private val auth = Authenticated(githubLabel) private val group1 = Group("group1", githubLabel) @@ -326,4 +336,53 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa identities.exchange(token).intercept(expectedError) } + test("Cache realm and groups") { + val token = generateToken( + subject = "Bobby", + issuer = githubLabel, + rsaKey = rsaKey, + expires = nowPlus1h, + groups = None, + useCommas = true + ) + + for { + parsedToken <- IO.fromEither(ParsedToken.fromToken(token)) + realm <- realmCache + groups <- groupsCache + _ <- realm.get(parsedToken.rawToken).assertNone + _ <- groups.get(parsedToken.rawToken).assertNone + _ <- identitiesFromCaches(realm, groups)(findActiveRealm).exchange(token) + _ <- realm.get(parsedToken.rawToken).assertSome(github) + _ <- groups.get(parsedToken.rawToken).assertSome(Set(group3, group4)) + } yield () + } + + test("Find active realm function should not run once value is cached") { + val token = generateToken( + subject = "Robert", + issuer = githubLabel, + rsaKey = rsaKey, + expires = nowPlus1h, + groups = Some(Set("group1", "group2")) + ) + + def findRealmOnce: Ref[IO, Boolean] => String => IO[Option[Realm]] = ref => + _ => + for { + flag <- ref.get + _ <- IO.raiseWhen(!flag)(new RuntimeException("Function executed more than once!")) + _ <- ref.set(false) + } yield Some(github) + + for { + sem <- Ref.of[IO, Boolean](true) + realm <- realmCache + groups <- groupsCache + identities = identitiesFromCaches(realm, groups)(findRealmOnce(sem)) + _ <- identities.exchange(token) + _ <- identities.exchange(token) + } yield () + } + } From c1d5d65391e2e655ce6c880dd3fbded518ce60c3 Mon Sep 17 00:00:00 2001 From: Simon Date: Thu, 5 Oct 2023 18:07:40 +0200 Subject: [PATCH 04/13] Migrate resolvers to CE (#4326) * Migrate resolvers to CE --------- Co-authored-by: Simon Dumas --- delta/app/src/main/resources/app.conf | 2 - .../nexus/delta/routes/ResolversRoutes.scala | 195 +++--- .../nexus/delta/wiring/DeltaModule.scala | 3 +- .../nexus/delta/wiring/ResolversModule.scala | 12 +- .../nexus/delta/wiring/SchemasModule.scala | 7 +- .../delta/routes/ResolversRoutesSpec.scala | 94 +-- .../delta/routes/ResourcesRoutesSpec.scala | 14 +- .../delta/routes/SchemasRoutesSpec.scala | 16 +- .../CompositeViewDecodingSpec.scala | 5 +- .../ElasticSearchViewDecodingSpec.scala | 5 +- .../StorageScopeInitializationSpec.scala | 3 +- .../plugins/storage/files/FilesSpec.scala | 3 +- .../files/routes/FilesRoutesSpec.scala | 3 +- .../storage/storages/StoragesSpec.scala | 3 +- .../storages/routes/StoragesRoutesSpec.scala | 3 +- .../bluebrain/nexus/delta/sdk/package.scala | 4 +- .../delta/sdk/resolvers/MultiResolution.scala | 15 +- .../resolvers/ResolverContextResolution.scala | 32 +- .../sdk/resolvers/ResolverResolution.scala | 61 +- .../ResolverScopeInitialization.scala | 37 +- .../nexus/delta/sdk/resolvers/Resolvers.scala | 79 ++- .../delta/sdk/resolvers/ResolversConfig.scala | 4 - 
.../delta/sdk/resolvers/ResolversImpl.scala | 31 +- .../sdk/resolvers/ResourceResolution.scala | 19 +- .../model/MultiResolutionResult.scala | 2 +- .../delta/sdk/resolvers/model/Resolver.scala | 3 +- .../model/ResolverResolutionRejection.scala | 3 +- .../sdk/resources/ValidateResource.scala | 11 +- .../delta/sdk/schemas/SchemaImports.scala | 42 +- .../nexus/delta/sdk/schemas/SchemasImpl.scala | 10 +- .../generators/ResolverResolutionGen.scala | 4 +- .../generators/ResourceResolutionGen.scala | 4 +- ...nSpec.scala => MultiResolutionSuite.scala} | 119 ++-- ...a => ResolverContextResolutionSuite.scala} | 44 +- .../resolvers/ResolverResolutionSpec.scala | 372 ------------ .../resolvers/ResolverResolutionSuite.scala | 360 +++++++++++ .../ResolverScopeInitializationSpec.scala | 85 --- .../ResolverScopeInitializationSuite.scala | 53 ++ .../ResolverStateMachineFixture.scala | 67 ++ .../resolvers/ResolversEvaluateSuite.scala | 317 ++++++++++ .../sdk/resolvers/ResolversImplSpec.scala | 67 +- .../sdk/resolvers/ResolversNextSuite.scala | 190 ++++++ .../delta/sdk/resolvers/ResolversSpec.scala | 572 ------------------ .../sdk/resources/ResourcesImplSpec.scala | 11 +- .../sdk/resources/ResourcesTrialSuite.scala | 9 +- .../delta/sdk/schemas/SchemaImportsSpec.scala | 129 ---- .../sdk/schemas/SchemaImportsSuite.scala | 128 ++++ .../delta/sdk/schemas/SchemasImplSuite.scala | 12 +- .../delta/sourcing/rejection/Rejection.scala | 2 + .../nexus/testkit/ce/CatsEffectSuite.scala | 6 +- .../nexus/testkit/ce/CatsIOValues.scala | 6 +- 51 files changed, 1550 insertions(+), 1728 deletions(-) rename delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/{MultiResolutionSpec.scala => MultiResolutionSuite.scala} (51%) rename delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/{ResolverContextResolutionSpec.scala => ResolverContextResolutionSuite.scala} (72%) delete mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSpec.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSuite.scala delete mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSpec.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSuite.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverStateMachineFixture.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversEvaluateSuite.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversNextSuite.scala delete mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversSpec.scala delete mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSpec.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSuite.scala diff --git a/delta/app/src/main/resources/app.conf b/delta/app/src/main/resources/app.conf index 4bbb20f559..1417769624 100644 --- a/delta/app/src/main/resources/app.conf +++ b/delta/app/src/main/resources/app.conf @@ -239,8 +239,6 @@ app { resolvers { # the resolvers event-log configuration event-log = ${app.defaults.event-log} - # the resolvers pagination config - pagination = ${app.defaults.pagination} defaults = { # the name of the default resolver diff --git 
a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala index 7fecca3bab..95826f8a29 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala @@ -1,17 +1,20 @@ package ch.epfl.bluebrain.nexus.delta.routes import akka.http.scaladsl.model.StatusCodes.Created +import akka.http.scaladsl.model.{StatusCode, StatusCodes} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ +import cats.effect.IO import cats.implicits._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, schemas} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck +import ch.epfl.bluebrain.nexus.delta.sdk.ce.DeltaDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling -import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.directives.{AuthDirectives, DeltaSchemeDirectives} import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities @@ -19,9 +22,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfMarshalling import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag -import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchParams.ResolverSearchParams -import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchResults.searchResultsJsonLdEncoder -import ch.epfl.bluebrain.nexus.delta.sdk.model.search.{PaginationConfig, SearchResults} import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment, IdSegmentRef, ResourceF} import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions.resolvers.{read => Read, write => Write} @@ -29,10 +29,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.Resol import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{MultiResolutionResult, Resolver, ResolverRejection} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.{MultiResolution, Resolvers} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef -import io.circe.Json -import kamon.instrumentation.akka.http.TracingDirectives.operationName -import monix.bio.IO -import monix.execution.Scheduler +import io.circe.{Json, Printer} /** * The resolver routes @@ -45,7 +42,7 @@ import monix.execution.Scheduler * the resolvers module * @param schemeDirectives * directives related to orgs and projects - * @param index + * @param indexAction * the indexing action on write operations */ final class ResolversRoutes( @@ -54,11 +51,9 @@ final class ResolversRoutes( resolvers: Resolvers, multiResolution: MultiResolution, schemeDirectives: DeltaSchemeDirectives, - index: IndexingAction.Execute[Resolver] + indexAction: IndexingAction.Execute[Resolver] )(implicit baseUri: BaseUri, - paginationConfig: PaginationConfig, - s: Scheduler, cr: RemoteContextResolution, ordering: 
JsonKeyOrdering, fusionConfig: FusionConfig @@ -66,138 +61,104 @@ final class ResolversRoutes( with CirceUnmarshalling with RdfMarshalling { - import baseUri.prefixSegment import schemeDirectives._ implicit private val resourceFUnitJsonLdEncoder: JsonLdEncoder[ResourceF[Unit]] = ResourceF.resourceFAJsonLdEncoder(ContextValue(contexts.resolversMetadata)) - private def resolverSearchParams(implicit projectRef: ProjectRef, caller: Caller): Directive1[ResolverSearchParams] = - (searchParams & types).tmap { case (deprecated, rev, createdBy, updatedBy, types) => - val fetchAllCached = aclCheck.fetchAll.memoizeOnSuccess - ResolverSearchParams( - Some(projectRef), - deprecated, - rev, - createdBy, - updatedBy, - types, - resolver => aclCheck.authorizeFor(resolver.project, Read, fetchAllCached) - ) - } + private def emitFetch(io: IO[ResolverResource]): Route = + emit(io.attemptNarrow[ResolverRejection].rejectOn[ResolverNotFound]) + private def emitMetadata(statusCode: StatusCode, io: IO[ResolverResource]): Route = + emit(statusCode, io.map(_.void).attemptNarrow[ResolverRejection]) + + private def emitMetadata(io: IO[ResolverResource]): Route = emitMetadata(StatusCodes.OK, io) + + private def emitMetadataOrReject(io: IO[ResolverResource]): Route = + emit(io.map(_.void).attemptNarrow[ResolverRejection].rejectOn[ResolverNotFound]) + + private def emitSource(io: IO[ResolverResource]): Route = { + implicit val source: Printer = sourcePrinter + emit(io.map(_.value.source).attemptNarrow[ResolverRejection].rejectOn[ResolverNotFound]) + } + + private def emitTags(io: IO[ResolverResource]): Route = + emit(io.map(_.value.tags).attemptNarrow[ResolverRejection].rejectOn[ResolverNotFound]) def routes: Route = (baseUriPrefix(baseUri.prefix) & replaceUri("resolvers", schemas.resolvers)) { pathPrefix("resolvers") { extractCaller { implicit caller => - resolveProjectRef.apply { implicit ref => - val projectAddress = ref - val authorizeRead = authorizeFor(projectAddress, Read) - val authorizeWrite = authorizeFor(projectAddress, Write) + (resolveProjectRef & indexingMode) { (ref, mode) => + def index(resolver: ResolverResource): IO[Unit] = indexAction(resolver.value.project, resolver, mode) + val authorizeRead = authorizeFor(ref, Read) + val authorizeWrite = authorizeFor(ref, Write) concat( - (pathEndOrSingleSlash & operationName(s"$prefixSegment/resolvers/{org}/{project}")) { + pathEndOrSingleSlash { // Create a resolver without an id segment - (post & noParameter("rev") & entity(as[Json]) & indexingMode) { (payload, mode) => + (post & noParameter("rev") & entity(as[Json])) { payload => authorizeWrite { - emit(Created, resolvers.create(ref, payload).tapEval(index(ref, _, mode)).map(_.void)) - } - } - }, - (pathPrefix("caches") & pathEndOrSingleSlash) { - operationName(s"$prefixSegment/resolvers/{org}/{project}/caches") { - // List resolvers in cache - (get & extractUri & fromPaginated & resolverSearchParams & sort[Resolver]) { - (uri, pagination, params, order) => - authorizeRead { - implicit val searchJsonLdEncoder: JsonLdEncoder[SearchResults[ResolverResource]] = - searchResultsJsonLdEncoder(Resolver.context, pagination, uri) - - emit(resolvers.list(pagination, params, order).widen[SearchResults[ResolverResource]]) - } + emitMetadata(Created, resolvers.create(ref, payload).flatTap(index)) } } }, - (idSegment & indexingMode) { (id, mode) => + idSegment { id => concat( pathEndOrSingleSlash { - operationName(s"$prefixSegment/resolvers/{org}/{project}/{id}") { - concat( - put { - authorizeWrite { - 
(parameter("rev".as[Int].?) & pathEndOrSingleSlash & entity(as[Json])) { - case (None, payload) => - // Create a resolver with an id segment - emit( - Created, - resolvers.create(id, ref, payload).tapEval(index(ref, _, mode)).map(_.void) - ) - case (Some(rev), payload) => - // Update a resolver - emit(resolvers.update(id, ref, rev, payload).tapEval(index(ref, _, mode)).map(_.void)) - } + concat( + put { + authorizeWrite { + (parameter("rev".as[Int].?) & pathEndOrSingleSlash & entity(as[Json])) { + case (None, payload) => + // Create a resolver with an id segment + emitMetadata(Created, resolvers.create(id, ref, payload).flatTap(index)) + case (Some(rev), payload) => + // Update a resolver + emitMetadata(resolvers.update(id, ref, rev, payload).flatTap(index)) } - }, - (delete & parameter("rev".as[Int])) { rev => - authorizeWrite { - // Deprecate a resolver - emit( - resolvers - .deprecate(id, ref, rev) - .tapEval(index(ref, _, mode)) - .map(_.void) - .rejectOn[ResolverNotFound] - ) - } - }, - // Fetches a resolver - (get & idSegmentRef(id)) { id => - emitOrFusionRedirect( - ref, - id, - authorizeRead { - emit(resolvers.fetch(id, ref).rejectOn[ResolverNotFound]) - } - ) } - ) - } + }, + (delete & parameter("rev".as[Int])) { rev => + authorizeWrite { + // Deprecate a resolver + emitMetadataOrReject(resolvers.deprecate(id, ref, rev).flatTap(index)) + } + }, + // Fetches a resolver + (get & idSegmentRef(id)) { id => + emitOrFusionRedirect( + ref, + id, + authorizeRead { + emitFetch(resolvers.fetch(id, ref)) + } + ) + } + ) }, // Fetches a resolver original source (pathPrefix("source") & get & pathEndOrSingleSlash & idSegmentRef(id) & authorizeRead) { id => - operationName(s"$prefixSegment/resolvers/{org}/{project}/{id}/source") { - emit(resolvers.fetch(id, ref).map(_.value.source).rejectOn[ResolverNotFound]) - } + emitSource(resolvers.fetch(id, ref)) }, // Tags (pathPrefix("tags") & pathEndOrSingleSlash) { - operationName(s"$prefixSegment/resolvers/{org}/{project}/{id}/tags") { - concat( - // Fetch a resolver tags - (get & idSegmentRef(id) & authorizeRead) { id => - emit(resolvers.fetch(id, ref).map(_.value.tags).rejectOn[ResolverNotFound]) - }, - // Tag a resolver - (post & parameter("rev".as[Int])) { rev => - authorizeWrite { - entity(as[Tag]) { case Tag(tagRev, tag) => - emit( - Created, - resolvers - .tag(id, ref, tag, tagRev, rev) - .tapEval(index(ref, _, mode)) - .map(_.void) - ) - } + concat( + // Fetch a resolver tags + (get & idSegmentRef(id) & authorizeRead) { id => + emitTags(resolvers.fetch(id, ref)) + }, + // Tag a resolver + (post & parameter("rev".as[Int])) { rev => + authorizeWrite { + entity(as[Tag]) { case Tag(tagRev, tag) => + emitMetadata(Created, resolvers.tag(id, ref, tag, tagRev, rev).flatTap(index)) } } - ) - } + } + ) }, // Fetch a resource using a resolver (idSegmentRef & pathEndOrSingleSlash) { resourceIdRef => - operationName(s"$prefixSegment/resolvers/{org}/{project}/{id}/{resourceId}") { - resolve(resourceIdRef, ref, underscoreToOption(id)) - } + resolve(resourceIdRef, ref, underscoreToOption(id)) } ) } @@ -212,11 +173,11 @@ final class ResolversRoutes( ): Route = authorizeFor(projectRef, Permissions.resources.read).apply { parameter("showReport".as[Boolean].withDefault(default = false)) { showReport => - def emitResult[R: JsonLdEncoder](io: IO[ResolverRejection, MultiResolutionResult[R]]) = + def emitResult[R: JsonLdEncoder](io: IO[MultiResolutionResult[R]]) = if (showReport) - emit(io.map(_.report)) + emit(io.map(_.report).attemptNarrow[ResolverRejection]) else - 
emit(io.map(_.value.jsonLdValue)) + emit(io.map(_.value.jsonLdValue).attemptNarrow[ResolverRejection]) resolverId.fold(emitResult(multiResolution(resourceSegment, projectRef))) { resolverId => emitResult(multiResolution(resourceSegment, projectRef, resolverId)) @@ -241,8 +202,6 @@ object ResolversRoutes { index: IndexingAction.Execute[Resolver] )(implicit baseUri: BaseUri, - s: Scheduler, - paginationConfig: PaginationConfig, cr: RemoteContextResolution, ordering: JsonKeyOrdering, fusionConfig: FusionConfig diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala index ce4b3414cb..a22575c37e 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala @@ -8,7 +8,7 @@ import akka.http.scaladsl.model.headers.Location import akka.http.scaladsl.server.{ExceptionHandler, RejectionHandler, Route} import akka.stream.{Materializer, SystemMaterializer} import cats.data.NonEmptyList -import cats.effect.{Clock, IO, Resource, Sync, Timer} +import cats.effect.{Clock, ContextShift, IO, Resource, Sync, Timer} import ch.epfl.bluebrain.nexus.delta.Main.pluginsMaxPriority import ch.epfl.bluebrain.nexus.delta.config.AppConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF @@ -105,6 +105,7 @@ class DeltaModule(appCfg: AppConfig, config: Config)(implicit classLoader: Class make[Clock[UIO]].from(Clock[UIO]) make[Clock[IO]].from(Clock.create[IO]) make[Timer[IO]].from(IO.timer(ExecutionContext.global)) + make[ContextShift[IO]].from(IO.contextShift(ExecutionContext.global)) make[UUIDF].from(UUIDF.random) make[Scheduler].from(scheduler) make[JsonKeyOrdering].from( diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala index c927a46523..a9f0cf6a18 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala @@ -1,6 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.wiring -import cats.effect.Clock +import cats.effect.{Clock, IO} import ch.epfl.bluebrain.nexus.delta.Main.pluginsMaxPriority import ch.epfl.bluebrain.nexus.delta.config.AppConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF @@ -26,8 +26,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Resolver, ResolverEven import ch.epfl.bluebrain.nexus.delta.sdk.sse.SseEncoder import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors import izumi.distage.model.definition.{Id, ModuleDef} -import monix.bio.UIO -import monix.execution.Scheduler /** * Resolvers wiring @@ -42,7 +40,7 @@ object ResolversModule extends ModuleDef { config: AppConfig, xas: Transactors, api: JsonLdApi, - clock: Clock[UIO], + clock: Clock[IO], uuidF: UUIDF ) => ResolversImpl( @@ -68,7 +66,6 @@ object ResolversModule extends ModuleDef { make[ResolversRoutes].from { ( - config: AppConfig, identities: Identities, aclCheck: AclCheck, resolvers: Resolvers, @@ -77,7 +74,6 @@ object ResolversModule extends ModuleDef { shift: Resolver.Shift, multiResolution: MultiResolution, baseUri: BaseUri, - s: Scheduler, cr: RemoteContextResolution @Id("aggregate"), ordering: JsonKeyOrdering, fusionConfig: FusionConfig @@ -91,8 +87,6 @@ object ResolversModule extends ModuleDef { indexingAction(_, _, 
_)(shift, cr) )( baseUri, - config.resolvers.pagination, - s, cr, ordering, fusionConfig @@ -104,7 +98,7 @@ object ResolversModule extends ModuleDef { many[ScopedEventMetricEncoder[_]].add { ResolverEvent.resolverEventMetricEncoder } make[ResolverScopeInitialization].from { (resolvers: Resolvers, serviceAccount: ServiceAccount, config: AppConfig) => - new ResolverScopeInitialization(resolvers, serviceAccount, config.resolvers.defaults) + ResolverScopeInitialization(resolvers, serviceAccount, config.resolvers.defaults) } many[ScopeInitialization].ref[ResolverScopeInitialization] diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala index a9fcf0a567..550cc08b2a 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala @@ -1,6 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.wiring -import cats.effect.{Clock, IO} +import cats.effect.{Clock, ContextShift, IO} import ch.epfl.bluebrain.nexus.delta.Main.pluginsMaxPriority import ch.epfl.bluebrain.nexus.delta.config.AppConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF @@ -59,9 +59,10 @@ object SchemasModule extends ModuleDef { aclCheck: AclCheck, resolvers: Resolvers, resources: Resources, - schemas: Schemas + schemas: Schemas, + contextShift: ContextShift[IO] ) => - SchemaImports(aclCheck, resolvers, schemas, resources) + SchemaImports(aclCheck, resolvers, schemas, resources)(contextShift) } make[SchemasRoutes].from { diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutesSpec.scala index aa0d40da91..3862673d7d 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutesSpec.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.MediaTypes.`text/html` import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken} import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Route +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.{UUIDF, UrlUtils} import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schema, schemas} @@ -22,7 +23,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers._ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.ProjectContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverType.{CrossProject, InProject} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{ResolverRejection, ResolverType, ResourceResolutionReport} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{ResolverRejection, ResolverType} import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.Schema import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec @@ -30,14 +31,14 @@ import ch.epfl.bluebrain.nexus.delta.sdk.{Defaults, IndexingAction} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.{Latest, Revision} import 
ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} +import ch.epfl.bluebrain.nexus.testkit.ce.IOFixedClock import io.circe.Json import io.circe.syntax._ -import monix.bio.{IO, UIO} import java.util.UUID import java.util.concurrent.atomic.AtomicInteger -class ResolversRoutesSpec extends BaseRouteSpec { +class ResolversRoutesSpec extends BaseRouteSpec with IOFixedClock { private val uuid = UUID.randomUUID() implicit private val uuidF: UUIDF = UUIDF.fixed(uuid) @@ -58,10 +59,7 @@ class ResolversRoutesSpec extends BaseRouteSpec { Caller(bob, Set(bob)) ) - val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( - rcr, - (_, _, _) => IO.raiseError(ResourceResolutionReport()) - ) + val resolverContextResolution: ResolverContextResolution = ResolverContextResolution(rcr) private val resourceId = nxv + "resource" private val resource = @@ -77,18 +75,18 @@ class ResolversRoutesSpec extends BaseRouteSpec { ) private val resourceFS = SchemaGen.resourceFor(schemaResource) - def fetchResource: (ResourceRef, ProjectRef) => UIO[Option[JsonLdContent[Resource, Nothing]]] = + def fetchResource: (ResourceRef, ProjectRef) => IO[Option[JsonLdContent[Resource, Nothing]]] = (ref: ResourceRef, _: ProjectRef) => ref match { - case Latest(`resourceId`) => UIO.some(JsonLdContent(resourceFR, resourceFR.value.source, None)) - case _ => UIO.none + case Latest(`resourceId`) => IO.pure(Some(JsonLdContent(resourceFR, resourceFR.value.source, None))) + case _ => IO.none } - def fetchSchema: (ResourceRef, ProjectRef) => UIO[Option[JsonLdContent[Schema, Nothing]]] = + def fetchSchema: (ResourceRef, ProjectRef) => IO[Option[JsonLdContent[Schema, Nothing]]] = (ref: ResourceRef, _: ProjectRef) => ref match { - case Revision(_, `schemaId`, 5) => UIO.some(JsonLdContent(resourceFS, resourceFS.value.source, None)) - case _ => UIO.none + case Revision(_, `schemaId`, 5) => IO.pure(Some(JsonLdContent(resourceFS, resourceFS.value.source, None))) + case _ => IO.none } private val defaults = Defaults("resolverName", "resolverDescription") @@ -96,7 +94,7 @@ class ResolversRoutesSpec extends BaseRouteSpec { private lazy val resolvers = ResolversImpl( fetchContext, resolverContextResolution, - ResolversConfig(eventLogConfig, pagination, defaults), + ResolversConfig(eventLogConfig, defaults), xas ) @@ -111,7 +109,7 @@ class ResolversRoutesSpec extends BaseRouteSpec { resolvers, (ref: ResourceRef, project: ProjectRef) => fetchResource(ref, project).flatMap { - case Some(c) => UIO.some(c) + case Some(c) => IO.pure(Some(c)) case None => fetchSchema(ref, project) } ) @@ -447,10 +445,10 @@ class ResolversRoutesSpec extends BaseRouteSpec { def inProject( id: Iri, priority: Int, - rev: Int = 1, - deprecated: Boolean = false, + rev: Int, + deprecated: Boolean, createdBy: Subject = bob, - updatedBy: Subject = bob + updatedBy: Subject ) = resolverMetadata( id, @@ -659,66 +657,6 @@ class ResolversRoutesSpec extends BaseRouteSpec { } } - "listing the resolvers" should { - - def expectedResults(results: Json*): Json = { - val ctx = json"""{"@context": ["${contexts.metadata}", "${contexts.search}", "${contexts.resolvers}"]}""" - Json.obj("_total" -> Json.fromInt(results.size), "_results" -> Json.arr(results: _*)) deepMerge ctx - } - - "return the deprecated resolvers the user has access to" in { - Get(s"/v1/resolvers/${project.ref}/caches?deprecated=true") ~> asBob ~> routes ~> check { - status shouldEqual StatusCodes.OK - response.asJson shouldEqual expectedResults(inProjectLast) - } - } - - 
"return the in project resolvers" in { - val encodedResolver = UrlUtils.encode(nxv.Resolver.toString) - val encodedInProjectResolver = UrlUtils.encode(nxv.InProject.toString) - Get( - s"/v1/resolvers/${project.ref}/caches?type=$encodedResolver&type=$encodedInProjectResolver" - ) ~> asBob ~> routes ~> check { - status shouldEqual StatusCodes.OK - response.asJson should equalIgnoreArrayOrder( - expectedResults( - inProjectLast, - inProject(nxv + "in-project-put2", 3), - inProject(nxv + "in-project-post", 1) - ) - ) - } - } - - "return the resolvers with revision 2" in { - Get(s"/v1/resolvers/${project2.ref}/caches?rev=2") ~> asAlice ~> routes ~> check { - status shouldEqual StatusCodes.OK - response.asJson should equalIgnoreArrayOrder( - expectedResults( - crossProjectUseCurrentLast, - crossProjectProvidedIdentitiesLast.replace( - Json.arr("nxv:Schema".asJson, "nxv:Custom".asJson), - Json.arr(nxv.Schema.asJson, (nxv + "Custom").asJson) - ) - ) - ) - } - } - - "fail to list resolvers if the user has not access resolvers/read on the project" in { - forAll( - List( - Get(s"/v1/resolvers/${project.ref}/caches?deprecated=true") ~> routes, - Get(s"/v1/resolvers/${project2.ref}/caches") ~> asBob ~> routes - ) - ) { request => - request ~> check { - status shouldEqual StatusCodes.Forbidden - } - } - } - } - val idResourceEncoded = UrlUtils.encode(resourceId.toString) val idSchemaEncoded = UrlUtils.encode(schemaId.toString) val unknownResourceEncoded = UrlUtils.encode((nxv + "xxx").toString) diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala index 3a4d0b3492..da88aaa92c 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.MediaTypes.`text/html` import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken} import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Route +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.{UUIDF, UrlUtils} import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schema, schemas} @@ -22,7 +23,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.FetchResource -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.resources.NexusSource.DecodingOption import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection.ProjectContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.resources.{Resources, ResourcesConfig, ResourcesImpl, ValidateResource} @@ -32,7 +32,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authent import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap import io.circe.{Json, Printer} -import monix.bio.{IO, UIO} import java.util.UUID @@ -84,19 +83,16 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { private val aclCheck = AclSimpleCheck().accepted private val fetchSchema: 
(ResourceRef, ProjectRef) => FetchResource[Schema] = { - case (ref, _) if ref.iri == schema2.id => UIO.some(SchemaGen.resourceFor(schema2, deprecated = true)) - case (ref, _) if ref.iri == schema1.id => UIO.some(SchemaGen.resourceFor(schema1)) - case _ => UIO.none + case (ref, _) if ref.iri == schema2.id => IO.pure(Some(SchemaGen.resourceFor(schema2, deprecated = true))) + case (ref, _) if ref.iri == schema1.id => IO.pure(Some(SchemaGen.resourceFor(schema1))) + case _ => IO.none } private val validator: ValidateResource = ValidateResource( ResourceResolutionGen.singleInProject(projectRef, fetchSchema) ) private val fetchContext = FetchContextDummy(List(project.value), ProjectContextRejection) - private val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( - rcr, - (_, _, _) => IO.raiseError(ResourceResolutionReport()) - ) + private val resolverContextResolution: ResolverContextResolution = ResolverContextResolution(rcr) private def routesWithDecodingOption(implicit decodingOption: DecodingOption) = { val resources = ResourcesImpl( diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala index e09ec8684e..6cc50ff1e5 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutesSpec.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.MediaTypes.`text/html` import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken} import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Route +import cats.effect.{ContextShift, IO} import ch.epfl.bluebrain.nexus.delta.kernel.utils.{UUIDF, UrlUtils} import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary @@ -22,7 +23,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions.{events, resour import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.SchemaRejection.ProjectContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.schemas.{SchemaImports, SchemasConfig, SchemasImpl} import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec @@ -31,15 +31,17 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import ch.epfl.bluebrain.nexus.testkit.bio.IOFromMap import ch.epfl.bluebrain.nexus.testkit.ce.IOFixedClock import io.circe.Json -import monix.bio.IO import java.util.UUID +import scala.concurrent.ExecutionContext class SchemasRoutesSpec extends BaseRouteSpec with IOFixedClock with IOFromMap { private val uuid = UUID.randomUUID() implicit private val uuidF: UUIDF = UUIDF.fixed(uuid) + implicit private val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) + private val caller = Caller(alice, Set(alice, Anonymous, Authenticated(realm), Group("group", realm))) private val identities = IdentitiesDummy(caller) @@ -61,15 +63,9 @@ class SchemasRoutesSpec extends BaseRouteSpec with IOFixedClock with IOFromMap { private val payloadNoId = payload.removeKeys(keywords.id) private val payloadUpdated = payloadNoId.replace("datatype" -> "xsd:integer", 
"xsd:double") - private val schemaImports = new SchemaImports( - (_, _, _) => IO.raiseError(ResourceResolutionReport()), - (_, _, _) => IO.raiseError(ResourceResolutionReport()) - ) + private val schemaImports = SchemaImports.alwaysFail - private val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( - rcr, - (_, _, _) => IO.raiseError(ResourceResolutionReport()) - ) + private val resolverContextResolution: ResolverContextResolution = ResolverContextResolution(rcr) private lazy val aclCheck = AclSimpleCheck().accepted diff --git a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala index ff2f89174b..16d3ce4f48 100644 --- a/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala +++ b/delta/plugins/composite-views/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/CompositeViewDecodingSpec.scala @@ -17,11 +17,9 @@ import ch.epfl.bluebrain.nexus.delta.sdk.generators.ProjectGen import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, ProjectContext} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Group, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, EitherValuable, TestHelpers} -import monix.bio.IO import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import org.scalatest.{Inspectors, OptionValues} @@ -52,8 +50,7 @@ class CompositeViewDecodingSpec val uuid = UUID.randomUUID() implicit private val uuidF: UUIDF = UUIDF.fixed(uuid) - val resolverContext: ResolverContextResolution = - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())) + val resolverContext: ResolverContextResolution = ResolverContextResolution(rcr) private val decoder = CompositeViewFieldsJsonLdSourceDecoder(uuidF, resolverContext, 1.minute) val query1 = diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViewDecodingSpec.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViewDecodingSpec.scala index 956bff2d40..9c3babdd08 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViewDecodingSpec.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchViewDecodingSpec.scala @@ -11,14 +11,12 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, ProjectContext} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.views.{PipeStep, ViewRef} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import 
ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.stream.pipes._ import ch.epfl.bluebrain.nexus.testkit.{IOValues, TestHelpers} import io.circe.literal._ -import monix.bio.IO import monix.execution.Scheduler.Implicits.global import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike @@ -44,8 +42,7 @@ class ElasticSearchViewDecodingSpec implicit private val uuidF: UUIDF = UUIDF.fixed(UUID.randomUUID()) - implicit private val resolverContext: ResolverContextResolution = - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())) + implicit private val resolverContext: ResolverContextResolution = ResolverContextResolution(rcr) implicit private val caller: Caller = Caller.Anonymous private val decoder = diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StorageScopeInitializationSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StorageScopeInitializationSpec.scala index 472b04d204..50a5069c9e 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StorageScopeInitializationSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StorageScopeInitializationSpec.scala @@ -11,7 +11,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.ServiceAccount import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.{ConfigFixtures, Defaults} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Subject, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label @@ -56,7 +55,7 @@ class StorageScopeInitializationSpec "A StorageScopeInitialization" should { lazy val storages = Storages( fetchContext, - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())), + ResolverContextResolution(rcr), IO.pure(allowedPerms.toSet), (_, _) => IO.unit, xas, diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala index 39174a9c6e..2acfa30993 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala @@ -31,7 +31,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} @@ -108,7 +107,7 @@ class FilesSpec(docker: RemoteStorageDocker) lazy val storages: Storages = Storages( 
fetchContext.mapRejection(StorageRejection.ProjectContextRejection), - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())), + ResolverContextResolution(rcr), IO.pure(allowedPerms), (_, _) => IO.unit, xas, diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala index 638e7ba20f..ea5e957d8f 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala @@ -34,7 +34,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions.events import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.utils.{BaseRouteSpec, RouteFixtures} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} @@ -101,7 +100,7 @@ class FilesRoutesSpec private val aclCheck = AclSimpleCheck().accepted lazy val storages: Storages = Storages( fetchContext.mapRejection(StorageRejection.ProjectContextRejection), - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())), + ResolverContextResolution(rcr), IO.pure(allowedPerms.toSet), (_, _) => IO.unit, xas, diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesSpec.scala index 7356901899..361686cfcf 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesSpec.scala @@ -13,7 +13,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegmentRef, Tags} import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Authenticated, Group, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} @@ -65,7 +64,7 @@ class StoragesSpec lazy val storages = Storages( fetchContext, - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())), + ResolverContextResolution(rcr), IO.pure(allowedPerms.toSet), (_, _) => IO.unit, xas, diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala index e8a30c5af5..35219a1f08 100644 --- 
a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutesSpec.scala @@ -28,7 +28,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, Subject, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} @@ -102,7 +101,7 @@ class StoragesRoutesSpec extends BaseRouteSpec with TryValues with StorageFixtur private lazy val storages = Storages( fetchContext, - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())), + ResolverContextResolution(rcr), IO.pure(perms), (_, _) => IO.unit, xas, diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/package.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/package.scala index 2a7f173817..6fe942e104 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/package.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/package.scala @@ -2,6 +2,7 @@ package ch.epfl.bluebrain.nexus.delta import akka.stream.scaladsl.Source import akka.util.ByteString +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.sdk.acls.model.Acl import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceF @@ -14,7 +15,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.Schema import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef -import monix.bio.IO package object sdk { @@ -61,7 +61,7 @@ package object sdk { /** * Type alias for resolver resolution */ - type Resolve[A] = (ResourceRef, ProjectRef, Caller) => IO[ResourceResolutionReport, A] + type Resolve[A] = (ResourceRef, ProjectRef, Caller) => IO[Either[ResourceResolutionReport, A]] type AkkaSource = Source[ByteString, Any] diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala index dd6dc01f13..5ddbd3f597 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolution.scala @@ -1,15 +1,16 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.{ExpandIri, JsonLdContent} import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegment, IdSegmentRef} +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ProjectContext import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{InvalidResolution, 
InvalidResolvedResourceId, InvalidResolverResolution} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport.ResolverReport import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{MultiResolutionResult, ResolverRejection, ResourceResolutionReport} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef -import monix.bio.IO /** * Allow to attempt resolutions for the different resource types available @@ -19,7 +20,7 @@ import monix.bio.IO * the resource resolution */ final class MultiResolution( - fetchProject: ProjectRef => IO[ResolverRejection, ProjectContext], + fetchProject: ProjectRef => IO[ProjectContext], resourceResolution: ResolverResolution[JsonLdContent[_, _]] ) { @@ -36,10 +37,10 @@ final class MultiResolution( def apply( resourceSegment: IdSegmentRef, projectRef: ProjectRef - )(implicit caller: Caller): IO[ResolverRejection, MultiResolutionResult[ResourceResolutionReport]] = + )(implicit caller: Caller): IO[MultiResolutionResult[ResourceResolutionReport]] = for { project <- fetchProject(projectRef) - resourceRef <- expandResourceIri(resourceSegment, project) + resourceRef <- toCatsIO(expandResourceIri(resourceSegment, project)) result <- resourceResolution.resolveReport(resourceRef, projectRef).flatMap { case (resourceReport, Some(resourceResult)) => IO.pure(MultiResolutionResult(resourceReport, resourceResult)) @@ -61,12 +62,12 @@ final class MultiResolution( resourceSegment: IdSegmentRef, projectRef: ProjectRef, resolverSegment: IdSegment - )(implicit caller: Caller): IO[ResolverRejection, MultiResolutionResult[ResolverReport]] = { + )(implicit caller: Caller): IO[MultiResolutionResult[ResolverReport]] = { for { project <- fetchProject(projectRef) - resourceRef <- expandResourceIri(resourceSegment, project) - resolverId <- Resolvers.expandIri(resolverSegment, project) + resourceRef <- toCatsIO(expandResourceIri(resourceSegment, project)) + resolverId <- toCatsIO(Resolvers.expandIri(resolverSegment, project)) result <- resourceResolution.resolveReport(resourceRef, projectRef, resolverId).flatMap { case (resourceReport, Some(resourceResult)) => IO.pure(MultiResolutionResult(resourceReport, resourceResult)) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolution.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolution.scala index b5e98c4ee2..068ae7d24d 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolution.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolution.scala @@ -1,10 +1,13 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers +import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.Logger +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution.Result import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolutionError.RemoteContextNotAccessible -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContext, RemoteContextResolution} +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContext, RemoteContextResolution, RemoteContextResolutionError} import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import 
ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller @@ -16,7 +19,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} import io.circe.syntax._ -import monix.bio.IO import scala.collection.concurrent @@ -40,32 +42,32 @@ final class ResolverContextResolution(val rcr: RemoteContextResolution, resolveR IO.pure(cache.get(iri)).flatMap { case Some(s) => IO.pure(s) case None => - rcr - .resolve(iri) - .onErrorFallbackTo( - resolveResource(ResourceRef(iri), projectRef, caller) - .bimap( - report => + toCatsIO(rcr.resolve(iri)) + .handleErrorWith(_ => + resolveResource(ResourceRef(iri), projectRef, caller).flatMap { + case Left(report) => + IO.raiseError( RemoteContextNotAccessible( iri, s"Resolution via static resolution and via resolvers failed in '$projectRef'", Some(report.asJson) - ), - ProjectRemoteContext.fromResource - ) + ) + ) + case Right(resource) => IO.pure(ProjectRemoteContext.fromResource(resource)) + } ) - .tapEval { context => + .flatTap { context => IO.pure(cache.put(iri, context)) *> logger.debug(s"Iri $iri has been resolved for project $projectRef and caller $caller.subject") } } - } + }.toBIO[RemoteContextResolutionError] } } object ResolverContextResolution { - private val logger: Logger = Logger[ResolverContextResolution] + private val logger = Logger.cats[ResolverContextResolution] /** * A remote context defined in Nexus as a resource @@ -89,7 +91,7 @@ object ResolverContextResolution { * a previously defined 'RemoteContextResolution' */ def apply(rcr: RemoteContextResolution): ResolverContextResolution = - new ResolverContextResolution(rcr, (_, _, _) => IO.raiseError(ResourceResolutionReport())) + new ResolverContextResolution(rcr, (_, _, _) => IO.pure(Left(ResourceResolutionReport()))) /** * Constructs a [[ResolverContextResolution]] diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolution.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolution.scala index b8997226d4..d35985f668 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolution.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolution.scala @@ -1,6 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers +import cats.effect.IO import cats.implicits._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck @@ -16,10 +18,10 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.{Pro import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.Resolver.{CrossProjectResolver, InProjectResolver} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverResolutionRejection.{ProjectAccessDenied, ResolutionFetchRejection, ResourceTypesDenied, WrappedResolverRejection} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport.{ResolverFailedReport, ResolverReport, ResolverSuccessReport} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Resolver, ResolverRejection, ResourceResolutionReport} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Resolver, ResolverRejection, ResolverResolutionRejection, ResourceResolutionReport} import 
ch.epfl.bluebrain.nexus.delta.sdk.{ResolverResource, ResourceShifts} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef, ResourceRef} -import monix.bio.{IO, UIO} +import monix.bio.{IO => BIO} import java.time.Instant import scala.collection.immutable.VectorMap @@ -36,9 +38,9 @@ import scala.collection.immutable.VectorMap * how we can get a resource from a [[ResourceRef]] */ final class ResolverResolution[R]( - checkAcls: (ProjectRef, Set[Identity]) => UIO[Boolean], - listResolvers: ProjectRef => UIO[List[Resolver]], - fetchResolver: (Iri, ProjectRef) => IO[ResolverRejection, Resolver], + checkAcls: (ProjectRef, Set[Identity]) => IO[Boolean], + listResolvers: ProjectRef => IO[List[Resolver]], + fetchResolver: (Iri, ProjectRef) => IO[Resolver], fetch: (ResourceRef, ProjectRef) => Fetch[R], extractTypes: R => Set[Iri] ) { @@ -52,9 +54,11 @@ final class ResolverResolution[R]( * @param projectRef * the project reference */ - def resolve(ref: ResourceRef, projectRef: ProjectRef)(implicit caller: Caller): IO[ResourceResolutionReport, R] = - resolveReport(ref, projectRef).flatMap { case (report, resource) => - IO.fromOption(resource, report) + def resolve(ref: ResourceRef, projectRef: ProjectRef)(implicit + caller: Caller + ): IO[Either[ResourceResolutionReport, R]] = + resolveReport(ref, projectRef).map { case (report, resource) => + resource.toRight(report) } /** @@ -68,7 +72,7 @@ final class ResolverResolution[R]( */ def resolveReport(ref: ResourceRef, projectRef: ProjectRef)(implicit caller: Caller - ): UIO[(ResourceResolutionReport, Option[R])] = { + ): IO[(ResourceResolutionReport, Option[R])] = { val initial: (ResourceResolutionReport, Option[R]) = ResourceResolutionReport() -> None @@ -101,11 +105,9 @@ final class ResolverResolution[R]( */ def resolve(ref: ResourceRef, projectRef: ProjectRef, resolverId: Iri)(implicit caller: Caller - ): IO[ResolverReport, R] = + ): IO[Either[ResolverReport, R]] = resolveReport(ref, projectRef, resolverId) - .flatMap { case (report, resource) => - IO.fromOption(resource, report) - } + .map { case (report, resource) => resource.toRight(report) } /** * Attempts to resolve the resource against the given resolver and return the resource if found and a report of how @@ -119,10 +121,10 @@ final class ResolverResolution[R]( */ def resolveReport(ref: ResourceRef, projectRef: ProjectRef, resolverId: Iri)(implicit caller: Caller - ): UIO[(ResolverReport, Option[R])] = + ): IO[(ResolverReport, Option[R])] = fetchResolver(resolverId, projectRef) .flatMap { r => resolveReport(ref, projectRef, r) } - .onErrorHandle { r => + .recover { case r: ResolverRejection => ResolverReport.failed(resolverId, projectRef -> WrappedResolverRejection(r)) -> None } @@ -130,7 +132,7 @@ final class ResolverResolution[R]( ref: ResourceRef, projectRef: ProjectRef, resolver: Resolver - )(implicit caller: Caller): UIO[ResolverResolutionResult[R]] = + )(implicit caller: Caller): IO[ResolverResolutionResult[R]] = resolver match { case i: InProjectResolver => inProjectResolve(ref, projectRef, i) case c: CrossProjectResolver => crossProjectResolve(ref, c) @@ -140,7 +142,7 @@ final class ResolverResolution[R]( ref: ResourceRef, projectRef: ProjectRef, resolver: InProjectResolver - ): UIO[ResolverResolutionResult[R]] = + ): IO[ResolverResolutionResult[R]] = fetch(ref, projectRef).map { case None => ResolverReport.failed(resolver.id, projectRef -> ResolutionFetchRejection(ref, projectRef)) -> None case s => ResolverReport.success(resolver.id, projectRef) -> s @@ -149,10 
+151,10 @@ final class ResolverResolution[R]( private def crossProjectResolve( ref: ResourceRef, resolver: CrossProjectResolver - )(implicit caller: Caller): UIO[ResolverResolutionResult[R]] = { + )(implicit caller: Caller): IO[ResolverResolutionResult[R]] = { import resolver.value._ - def validateIdentities(p: ProjectRef): IO[ProjectAccessDenied, Unit] = { + def validateIdentities(p: ProjectRef): IO[Unit] = { val identities = identityResolution match { case UseCurrentCaller => caller.identities case ProvidedIdentities(identities) => identities @@ -164,10 +166,8 @@ final class ResolverResolution[R]( } } - def validateResourceTypes(types: Set[Iri], p: ProjectRef): IO[ResourceTypesDenied, Unit] = - IO.unless(resourceTypes.isEmpty || resourceTypes.exists(types.contains))( - IO.raiseError(ResourceTypesDenied(p, types)) - ) + def validateResourceTypes(types: Set[Iri], p: ProjectRef): IO[Unit] = + IO.raiseUnless(resourceTypes.isEmpty || resourceTypes.exists(types.contains))(ResourceTypesDenied(p, types)) val initial: ResolverResolutionResult[R] = ResolverFailedReport(resolver.id, VectorMap.empty) -> None projects.foldLeftM(initial) { (previous, projectRef) => @@ -179,12 +179,13 @@ final class ResolverResolution[R]( val resolve = for { _ <- validateIdentities(projectRef) resource <- fetch(ref, projectRef).flatMap { res => - IO.fromOption(res, ResolutionFetchRejection(ref, projectRef)) + IO.fromOption(res)(ResolutionFetchRejection(ref, projectRef)) } _ <- validateResourceTypes(extractTypes(resource), projectRef) } yield ResolverSuccessReport(resolver.id, projectRef, f.rejections) -> Option(resource) - resolve.onErrorHandle { e => - f.copy(rejections = f.rejections + (projectRef -> e)) -> None + resolve.attemptNarrow[ResolverResolutionRejection].map { + case Left(r) => f.copy(rejections = f.rejections + (projectRef -> r)) -> None + case Right(s) => s } } } @@ -198,13 +199,13 @@ object ResolverResolution { */ type ResourceResolution[R] = ResolverResolution[ResourceF[R]] - type Fetch[R] = UIO[Option[R]] + type Fetch[R] = IO[Option[R]] - type FetchResource[R] = UIO[Option[ResourceF[R]]] + type FetchResource[R] = IO[Option[ResourceF[R]]] type ResolverResolutionResult[R] = (ResolverReport, Option[R]) - private val resolverSearchParams = ResolverSearchParams(deprecated = Some(false), filter = _ => UIO.pure(true)) + private val resolverSearchParams = ResolverSearchParams(deprecated = Some(false), filter = _ => BIO.pure(true)) private val resolverOrdering: Ordering[ResolverResource] = Ordering[Instant] on (r => r.createdAt) @@ -257,7 +258,7 @@ object ResolverResolution { def apply( aclCheck: AclCheck, resolvers: Resolvers, - fetch: (ResourceRef, ProjectRef) => UIO[Option[JsonLdContent[_, _]]] + fetch: (ResourceRef, ProjectRef) => IO[Option[JsonLdContent[_, _]]] ): ResolverResolution[JsonLdContent[_, _]] = apply(aclCheck, resolvers, fetch, _.resource.types, Permissions.resources.read) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitialization.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitialization.scala index 296a9fa7bf..8eceb187d0 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitialization.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitialization.scala @@ -3,13 +3,13 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers import cats.effect.IO import cats.implicits._ import 
ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.ScopeInitializationFailed import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{Caller, ServiceAccount} import ch.epfl.bluebrain.nexus.delta.sdk.organizations.model.Organization import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.Project +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverScopeInitialization.{logger, CreateResolver} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.Resolvers.entityType import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{ProjectContextRejection, ResourceAlreadyExists} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.InProjectValue @@ -17,6 +17,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Priority, ResolverValu import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sdk.{Defaults, ScopeInitialization} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef /** * The default creation of the InProject resolver as part of the project initialization. @@ -26,22 +27,14 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject * @param serviceAccount * the subject that will be recorded when performing the initialization */ -class ResolverScopeInitialization( - resolvers: Resolvers, - serviceAccount: ServiceAccount, - defaults: Defaults -) extends ScopeInitialization { +class ResolverScopeInitialization(createResolver: CreateResolver, defaults: Defaults) extends ScopeInitialization { - private val logger = Logger.cats[ResolverScopeInitialization] private val defaultInProjectResolverValue: ResolverValue = InProjectValue(Some(defaults.name), Some(defaults.description), Priority.unsafe(1)) - implicit private val caller: Caller = serviceAccount.caller implicit private val kamonComponent: KamonMetricComponent = KamonMetricComponent(entityType.value) override def onProjectCreation(project: Project, subject: Subject): IO[Unit] = - resolvers - .create(nxv.defaultResolver, project.ref, defaultInProjectResolverValue) - .void + createResolver(project.ref, defaultInProjectResolverValue) .handleErrorWith { case _: ResourceAlreadyExists => IO.unit // nothing to do, resolver already exits case _: ProjectContextRejection => IO.unit // project or org is likely deprecated @@ -52,8 +45,22 @@ class ResolverScopeInitialization( } .span("createDefaultResolver") - override def onOrganizationCreation( - organization: Organization, - subject: Subject - ): IO[Unit] = IO.unit + override def onOrganizationCreation(organization: Organization, subject: Subject): IO[Unit] = IO.unit +} + +object ResolverScopeInitialization { + + type CreateResolver = (ProjectRef, ResolverValue) => IO[Unit] + + private val logger = Logger.cats[ResolverScopeInitialization] + + def apply(resolvers: Resolvers, serviceAccount: ServiceAccount, defaults: Defaults) = { + implicit val caller: Caller = serviceAccount.caller + def createResolver: CreateResolver = resolvers.create(nxv.defaultResolver, _, _).void + new ResolverScopeInitialization( + createResolver, + defaults + ) + } + } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/Resolvers.scala 
b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/Resolvers.scala index 791b13f7ad..6b08e8a59a 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/Resolvers.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/Resolvers.scala @@ -1,6 +1,9 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers -import cats.effect.Clock +import cats.effect.{Clock, IO} +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schemas} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue @@ -8,7 +11,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.ResolverResource import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.instances._ import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.ExpandIri -import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchParams.ResolverSearchParams import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchResults.UnscoredSearchResults import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegment, IdSegmentRef, ResourceToSchemaMappings, Tags} @@ -17,7 +19,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.{ProvidedIdentities, UseCurrentCaller} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverCommand.{CreateResolver, DeprecateResolver, TagResolver, UpdateResolver} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverEvent.{ResolverCreated, ResolverDeprecated, ResolverTagAdded, ResolverUpdated} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{DifferentResolverType, IncorrectRev, InvalidIdentities, InvalidResolverId, NoIdentities, PriorityAlreadyExists, ResolverIsDeprecated, ResolverNotFound, ResourceAlreadyExists, RevisionNotFound} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{DifferentResolverType, IncorrectRev, InvalidIdentities, InvalidResolverId, NoIdentities, ResolverIsDeprecated, ResolverNotFound, ResourceAlreadyExists, RevisionNotFound} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.{CrossProjectValue, InProjectValue} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model._ import ch.epfl.bluebrain.nexus.delta.sourcing.ScopedEntityDefinition.Tagger @@ -27,7 +29,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{EntityType, Label, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.{ScopedEntityDefinition, StateMachine} import io.circe.Json -import monix.bio.{IO, UIO} /** * Operations for handling resolvers @@ -42,9 +43,7 @@ trait Resolvers { * @param source * the payload to create the resolver */ - def create(projectRef: ProjectRef, source: Json)(implicit - caller: Caller - ): IO[ResolverRejection, ResolverResource] + def create(projectRef: ProjectRef, source: Json)(implicit caller: Caller): IO[ResolverResource] /** * Create a new resolver with the provided id @@ -56,9 +55,7 @@ trait Resolvers { * @param source * the payload to create the resolver */ - def create(id: IdSegment, projectRef: ProjectRef, source: Json)(implicit - caller: Caller - ): 
IO[ResolverRejection, ResolverResource] + def create(id: IdSegment, projectRef: ProjectRef, source: Json)(implicit caller: Caller): IO[ResolverResource] /** * Create a new resolver with the provided id @@ -71,7 +68,7 @@ trait Resolvers { */ def create(id: IdSegment, projectRef: ProjectRef, resolverValue: ResolverValue)(implicit caller: Caller - ): IO[ResolverRejection, ResolverResource] + ): IO[ResolverResource] /** * Update an existing resolver @@ -86,7 +83,7 @@ trait Resolvers { */ def update(id: IdSegment, projectRef: ProjectRef, rev: Int, source: Json)(implicit caller: Caller - ): IO[ResolverRejection, ResolverResource] + ): IO[ResolverResource] /** * Update an existing resolver @@ -101,7 +98,7 @@ trait Resolvers { */ def update(id: IdSegment, projectRef: ProjectRef, rev: Int, resolverValue: ResolverValue)(implicit caller: Caller - ): IO[ResolverRejection, ResolverResource] + ): IO[ResolverResource] /** * Add a tag to an existing resolver @@ -119,7 +116,7 @@ trait Resolvers { */ def tag(id: IdSegment, projectRef: ProjectRef, tag: UserTag, tagRev: Int, rev: Int)(implicit subject: Subject - ): IO[ResolverRejection, ResolverResource] + ): IO[ResolverResource] /** * Deprecate an existing resolver @@ -130,9 +127,7 @@ trait Resolvers { * @param rev * the ResolverState revision of the resolver */ - def deprecate(id: IdSegment, projectRef: ProjectRef, rev: Int)(implicit - subject: Subject - ): IO[ResolverRejection, ResolverResource] + def deprecate(id: IdSegment, projectRef: ProjectRef, rev: Int)(implicit subject: Subject): IO[ResolverResource] /** * Fetch the resolver at the requested version @@ -141,7 +136,7 @@ trait Resolvers { * @param projectRef * the project where the resolver belongs */ - def fetch(id: IdSegmentRef, projectRef: ProjectRef): IO[ResolverRejection, ResolverResource] + def fetch(id: IdSegmentRef, projectRef: ProjectRef): IO[ResolverResource] /** * Fetches and validate the resolver, rejecting if the project does not exists or if it is deprecated @@ -150,7 +145,7 @@ trait Resolvers { * @param projectRef * the project reference */ - def fetchActiveResolver(id: Iri, projectRef: ProjectRef): IO[ResolverRejection, Resolver] = + def fetchActiveResolver(id: Iri, projectRef: ProjectRef): IO[Resolver] = fetch(id, projectRef).flatMap(res => IO.raiseWhen(res.deprecated)(ResolverIsDeprecated(id)).as(res.value)) /** @@ -169,7 +164,7 @@ trait Resolvers { pagination: FromPagination, params: ResolverSearchParams, ordering: Ordering[ResolverResource] - ): UIO[UnscoredSearchResults[ResolverResource]] + ): IO[UnscoredSearchResults[ResolverResource]] /** * List resolvers within a project @@ -188,13 +183,13 @@ trait Resolvers { pagination: FromPagination, params: ResolverSearchParams, ordering: Ordering[ResolverResource] - ): UIO[UnscoredSearchResults[ResolverResource]] = + ): IO[UnscoredSearchResults[ResolverResource]] = list(pagination, params.copy(project = Some(projectRef)), ordering) } object Resolvers { - type ValidatePriority = (ProjectRef, Iri, Priority) => IO[PriorityAlreadyExists, Unit] + type ValidatePriority = (ProjectRef, Iri, Priority) => IO[Unit] /** * The resolver entity type. 
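Note on the pattern in the surrounding hunks: the Resolvers API moves from Monix BIO's typed error channel (IO[ResolverRejection, A]) to a plain Cats Effect IO[A] in which rejections are raised as errors. The following minimal sketch uses hypothetical names (Rejection, NotFound, fetch), not the project's actual classes, to show the shape of that change and how a caller can narrow failures back into an Either, much as the resolution code in this patch does with attemptNarrow.

// A minimal sketch (illustrative names, not the project's API) of the error-channel change.
import cats.effect.IO
import cats.syntax.all._

object ErrorChannelSketch {

  // Hypothetical rejection hierarchy standing in for ResolverRejection.
  sealed abstract class Rejection(val reason: String) extends Exception(reason)
  final case class NotFound(id: String)               extends Rejection(s"'$id' not found")

  // Before (Monix BIO, conceptually): def fetch(id: String): BIO[Rejection, String]
  // After (Cats Effect): the rejection travels as a raised error on the single channel.
  def fetch(id: String): IO[String] =
    if (id == "known") IO.pure("resolved value")
    else IO.raiseError(NotFound(id))

  // Callers that previously matched on the typed channel now narrow it back to an Either.
  def fetchEither(id: String): IO[Either[Rejection, String]] =
    fetch(id).attemptNarrow[Rejection]
}

With Monix BIO the rejection type was visible in every signature; after the migration it only surfaces where the caller chooses to recover it, which is why this patch threads Either[ResourceResolutionReport, A] through the Resolve alias instead.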
@@ -217,7 +212,7 @@ object Resolvers { Label.unsafe("resolvers") -> schemas.resolvers ) - import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOUtils.instant + import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant.now private[delta] def next(state: Option[ResolverState], event: ResolverEvent): Option[ResolverState] = { @@ -267,15 +262,15 @@ object Resolvers { private[delta] def evaluate( validatePriority: ValidatePriority )(state: Option[ResolverState], command: ResolverCommand)(implicit - clock: Clock[UIO] - ): IO[ResolverRejection, ResolverEvent] = { + clock: Clock[IO] + ): IO[ResolverEvent] = { def validateResolverValue( project: ProjectRef, id: Iri, value: ResolverValue, caller: Caller - ): IO[ResolverRejection, Unit] = + ): IO[Unit] = (value match { case CrossProjectValue(_, _, _, _, _, identityResolution) => identityResolution match { @@ -283,18 +278,17 @@ object Resolvers { case ProvidedIdentities(value) if value.isEmpty => IO.raiseError(NoIdentities) case ProvidedIdentities(value) => val missing = value.diff(caller.identities) - IO.when(missing.nonEmpty)(IO.raiseError(InvalidIdentities(missing))) + IO.raiseWhen(missing.nonEmpty)(InvalidIdentities(missing)) } - - case _ => IO.unit + case _ => IO.unit }) >> validatePriority(project, id, value.priority) - def create(c: CreateResolver): IO[ResolverRejection, ResolverCreated] = state match { + def create(c: CreateResolver): IO[ResolverCreated] = state match { // Create a resolver case None => for { _ <- validateResolverValue(c.project, c.id, c.value, c.caller) - now <- instant + now <- now } yield ResolverCreated( id = c.id, project = c.project, @@ -309,7 +303,7 @@ object Resolvers { IO.raiseError(ResourceAlreadyExists(c.id, c.project)) } - def update(c: UpdateResolver): IO[ResolverRejection, ResolverUpdated] = state match { + def update(c: UpdateResolver): IO[ResolverUpdated] = state match { // Update a non existing resolver case None => IO.raiseError(ResolverNotFound(c.id, c.project)) @@ -323,9 +317,9 @@ object Resolvers { // Update a resolver case Some(s) => for { - _ <- IO.when(s.value.tpe != c.value.tpe)(IO.raiseError(DifferentResolverType(c.id, c.value.tpe, s.value.tpe))) + _ <- IO.raiseWhen(s.value.tpe != c.value.tpe)(DifferentResolverType(c.id, c.value.tpe, s.value.tpe)) _ <- validateResolverValue(c.project, c.id, c.value, c.caller) - now <- instant + now <- now } yield ResolverUpdated( id = c.id, project = c.project, @@ -337,7 +331,7 @@ object Resolvers { ) } - def addTag(c: TagResolver): IO[ResolverRejection, ResolverTagAdded] = state match { + def addTag(c: TagResolver): IO[ResolverTagAdded] = state match { // Resolver can't be found case None => IO.raiseError(ResolverNotFound(c.id, c.project)) @@ -348,7 +342,7 @@ object Resolvers { case Some(s) if c.targetRev <= 0 || c.targetRev > s.rev => IO.raiseError(RevisionNotFound(c.targetRev, s.rev)) case Some(s) => - instant.map { now => + now.map { now => ResolverTagAdded( id = c.id, project = c.project, @@ -362,7 +356,7 @@ object Resolvers { } } - def deprecate(c: DeprecateResolver): IO[ResolverRejection, ResolverDeprecated] = state match { + def deprecate(c: DeprecateResolver): IO[ResolverDeprecated] = state match { // Resolver can't be found case None => IO.raiseError(ResolverNotFound(c.id, c.project)) @@ -372,7 +366,7 @@ object Resolvers { case Some(s) if s.deprecated => IO.raiseError(ResolverIsDeprecated(s.id)) case Some(s) => - instant.map { now => + now.map { now => ResolverDeprecated( id = c.id, project = c.project, @@ -392,15 +386,16 @@ object Resolvers { } } + private 
type ResolverDefinition = + ScopedEntityDefinition[Iri, ResolverState, ResolverCommand, ResolverEvent, ResolverRejection] + /** * Entity definition for [[Resolvers]] */ - def definition(validatePriority: ValidatePriority)(implicit - clock: Clock[UIO] - ): ScopedEntityDefinition[Iri, ResolverState, ResolverCommand, ResolverEvent, ResolverRejection] = + def definition(validatePriority: ValidatePriority)(implicit clock: Clock[IO]): ResolverDefinition = ScopedEntityDefinition( entityType, - StateMachine(None, evaluate(validatePriority), next), + StateMachine(None, evaluate(validatePriority)(_, _).toBIO[ResolverRejection], next), ResolverEvent.serializer, ResolverState.serializer, Tagger[ResolverEvent]( diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversConfig.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversConfig.scala index a43870ede3..2883f51a90 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversConfig.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversConfig.scala @@ -1,7 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers import ch.epfl.bluebrain.nexus.delta.sdk.Defaults -import ch.epfl.bluebrain.nexus.delta.sdk.model.search.PaginationConfig import ch.epfl.bluebrain.nexus.delta.sourcing.config.EventLogConfig import pureconfig.ConfigReader import pureconfig.generic.semiauto.deriveReader @@ -11,12 +10,9 @@ import pureconfig.generic.semiauto.deriveReader * * @param eventLog * configuration of the event log - * @param pagination - * configuration for how pagination should behave in listing operations */ final case class ResolversConfig( eventLog: EventLogConfig, - pagination: PaginationConfig, defaults: Defaults ) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala index 71f4b51567..a34ce39e56 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImpl.scala @@ -1,6 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers -import cats.effect.Clock +import cats.effect.{Clock, IO} +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF @@ -27,7 +28,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef} import doobie.implicits._ import io.circe.Json -import monix.bio.{IO, UIO} final class ResolversImpl private ( log: ResolversLog, @@ -40,7 +40,7 @@ final class ResolversImpl private ( override def create( projectRef: ProjectRef, source: Json - )(implicit caller: Caller): IO[ResolverRejection, ResolverResource] = { + )(implicit caller: Caller): IO[ResolverResource] = { for { pc <- fetchContext.onCreate(projectRef) (iri, resolverValue) <- sourceDecoder(projectRef, pc, source) @@ -52,7 +52,7 @@ final class ResolversImpl private ( id: IdSegment, projectRef: ProjectRef, source: Json - )(implicit caller: Caller): IO[ResolverRejection, ResolverResource] = { + )(implicit caller: Caller): IO[ResolverResource] = { for { pc <- fetchContext.onCreate(projectRef) iri <- expandIri(id, 
pc) @@ -65,7 +65,7 @@ final class ResolversImpl private ( id: IdSegment, projectRef: ProjectRef, resolverValue: ResolverValue - )(implicit caller: Caller): IO[ResolverRejection, ResolverResource] = { + )(implicit caller: Caller): IO[ResolverResource] = { for { pc <- fetchContext.onCreate(projectRef) iri <- expandIri(id, pc) @@ -79,7 +79,7 @@ final class ResolversImpl private ( projectRef: ProjectRef, rev: Int, source: Json - )(implicit caller: Caller): IO[ResolverRejection, ResolverResource] = { + )(implicit caller: Caller): IO[ResolverResource] = { for { pc <- fetchContext.onModify(projectRef) iri <- expandIri(id, pc) @@ -95,7 +95,7 @@ final class ResolversImpl private ( resolverValue: ResolverValue )(implicit caller: Caller - ): IO[ResolverRejection, ResolverResource] = { + ): IO[ResolverResource] = { for { pc <- fetchContext.onModify(projectRef) iri <- expandIri(id, pc) @@ -112,7 +112,7 @@ final class ResolversImpl private ( rev: Int )(implicit subject: Identity.Subject - ): IO[ResolverRejection, ResolverResource] = { + ): IO[ResolverResource] = { for { pc <- fetchContext.onModify(projectRef) iri <- expandIri(id, pc) @@ -124,7 +124,7 @@ final class ResolversImpl private ( id: IdSegment, projectRef: ProjectRef, rev: Int - )(implicit subject: Identity.Subject): IO[ResolverRejection, ResolverResource] = { + )(implicit subject: Identity.Subject): IO[ResolverResource] = { for { pc <- fetchContext.onModify(projectRef) iri <- expandIri(id, pc) @@ -132,7 +132,7 @@ final class ResolversImpl private ( } yield res }.span("deprecateResolver") - override def fetch(id: IdSegmentRef, projectRef: ProjectRef): IO[ResolverRejection, ResolverResource] = { + override def fetch(id: IdSegmentRef, projectRef: ProjectRef): IO[ResolverResource] = { for { pc <- fetchContext.onRead(projectRef) iri <- expandIri(id.value, pc) @@ -151,7 +151,7 @@ final class ResolversImpl private ( pagination: FromPagination, params: ResolverSearchParams, ordering: Ordering[ResolverResource] - ): UIO[UnscoredSearchResults[ResolverResource]] = { + ): IO[UnscoredSearchResults[ResolverResource]] = { val scope = params.project.fold[Scope](Scope.Root)(ref => Scope.Project(ref)) SearchResults( log.currentStates(scope, _.toResource).evalFilter(params.matches), @@ -160,7 +160,7 @@ final class ResolversImpl private ( ).span("listResolvers") } - private def eval(cmd: ResolverCommand): IO[ResolverRejection, ResolverResource] = + private def eval(cmd: ResolverCommand) = log.evaluate(cmd.project, cmd.id, cmd).map(_._2.toResource) } @@ -176,13 +176,12 @@ object ResolversImpl { contextResolution: ResolverContextResolution, config: ResolversConfig, xas: Transactors - )(implicit api: JsonLdApi, clock: Clock[UIO], uuidF: UUIDF): Resolvers = { - def priorityAlreadyExists(ref: ProjectRef, self: Iri, priority: Priority): IO[PriorityAlreadyExists, Unit] = { + )(implicit api: JsonLdApi, clock: Clock[IO], uuidF: UUIDF): Resolvers = { + def priorityAlreadyExists(ref: ProjectRef, self: Iri, priority: Priority): IO[Unit] = { sql"SELECT id FROM scoped_states WHERE type = ${Resolvers.entityType} AND org = ${ref.organization} AND project = ${ref.project} AND id != $self AND (value->'value'->'priority')::int = ${priority.value} " .query[Iri] .option - .transact(xas.read) - .hideErrors + .transact(xas.readCE) .flatMap { case Some(other) => IO.raiseError(PriorityAlreadyExists(ref, other, priority)) case None => IO.unit diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResourceResolution.scala 
b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResourceResolution.scala index 72686a4dcb..24ddb3303c 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResourceResolution.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResourceResolution.scala @@ -1,19 +1,19 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceF import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.{FetchResource, ResourceResolution} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Resolver, ResolverRejection} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.Resolver import ch.epfl.bluebrain.nexus.delta.sdk.resources.Resources import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource import ch.epfl.bluebrain.nexus.delta.sdk.schemas.Schemas -import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.{Schema, SchemaRejection} +import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.Schema import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef, ResourceRef} -import monix.bio.{IO, UIO} -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ object ResourceResolution { @@ -29,9 +29,9 @@ object ResourceResolution { * how to fetch the resource */ def apply[R]( - checkAcls: (ProjectRef, Set[Identity]) => UIO[Boolean], - listResolvers: ProjectRef => UIO[List[Resolver]], - fetchResolver: (Iri, ProjectRef) => IO[ResolverRejection, Resolver], + checkAcls: (ProjectRef, Set[Identity]) => IO[Boolean], + listResolvers: ProjectRef => IO[List[Resolver]], + fetchResolver: (Iri, ProjectRef) => IO[Resolver], fetch: (ResourceRef, ProjectRef) => FetchResource[R] ): ResourceResolution[R] = new ResolverResolution(checkAcls, listResolvers, fetchResolver, fetch, (r: ResourceF[R]) => r.types) @@ -67,7 +67,7 @@ object ResourceResolution { apply( aclCheck, resolvers, - (ref: ResourceRef, project: ProjectRef) => resources.fetch(ref, project).redeem(_ => None, Some(_)), + (ref: ResourceRef, project: ProjectRef) => toCatsIO(resources.fetch(ref, project).redeem(_ => None, Some(_))), Permissions.resources.read ) @@ -84,8 +84,7 @@ object ResourceResolution { apply( aclCheck, resolvers, - (ref: ResourceRef, project: ProjectRef) => - schemas.fetch(ref, project).toBIO[SchemaRejection].redeem(_ => None, Some(_)), + (ref: ResourceRef, project: ProjectRef) => schemas.fetch(ref, project).redeem(_ => None, Some(_)), Permissions.schemas.read ) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/MultiResolutionResult.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/MultiResolutionResult.scala index ce5116a1c1..49fadb1496 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/MultiResolutionResult.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/MultiResolutionResult.scala @@ -5,4 +5,4 @@ import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.JsonLdContent /** * Result of a MultiResolution */ -final case class MultiResolutionResult[R](report: R, value: JsonLdContent[_, _]) +final case class 
MultiResolutionResult[+R](report: R, value: JsonLdContent[_, _]) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/Resolver.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/Resolver.scala index a8f39b1446..c8d76a4c96 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/Resolver.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/Resolver.scala @@ -1,5 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue @@ -104,7 +105,7 @@ object Resolver { def shift(resolvers: Resolvers)(implicit baseUri: BaseUri): Shift = ResourceShift.apply[ResolverState, Resolver]( Resolvers.entityType, - (ref, project) => resolvers.fetch(IdSegmentRef(ref), project), + (ref, project) => resolvers.fetch(IdSegmentRef(ref), project).toBIO[ResolverRejection], state => state.toResource, value => JsonLdContent(value, value.value.source, None) ) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/ResolverResolutionRejection.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/ResolverResolutionRejection.scala index 55391b7d7c..39c3c5b837 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/ResolverResolutionRejection.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/model/ResolverResolutionRejection.scala @@ -8,6 +8,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag +import ch.epfl.bluebrain.nexus.delta.sourcing.rejection.Rejection import io.circe.syntax._ import io.circe.{Encoder, JsonObject} @@ -17,7 +18,7 @@ import io.circe.{Encoder, JsonObject} * @param reason * a descriptive message as to why the rejection occurred */ -sealed abstract class ResolverResolutionRejection(val reason: String) extends Product with Serializable +sealed abstract class ResolverResolutionRejection(val reason: String) extends Rejection object ResolverResolutionRejection { diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ValidateResource.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ValidateResource.scala index 580614c781..5de2089d00 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ValidateResource.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ValidateResource.scala @@ -1,6 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resources import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, schemas} import ch.epfl.bluebrain.nexus.delta.rdf.graph.Graph import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.ExpandedJsonLd @@ -123,8 +125,11 @@ object ValidateResource { ) = { resourceResolution .resolve(schemaRef, projectRef)(caller) - .mapError(InvalidSchemaRejection(schemaRef, projectRef, _)) - .tapEval(schema => 
assertNotDeprecated(schema)) - } + .flatMap { result => + val invalidSchema = result.leftMap(InvalidSchemaRejection(schemaRef, projectRef, _)) + IO.fromEither(invalidSchema) + } + .flatTap(schema => assertNotDeprecated(schema)) + }.toBIO[ResourceRejection] } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImports.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImports.scala index bca5065df2..0be3b097d1 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImports.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImports.scala @@ -1,26 +1,28 @@ package ch.epfl.bluebrain.nexus.delta.sdk.schemas import cats.data.NonEmptyList +import cats.effect.{ContextShift, IO} import cats.implicits._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.owl import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.ExpandedJsonLd +import ch.epfl.bluebrain.nexus.delta.sdk.Resolve import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.{Resolvers, ResourceResolution} import ch.epfl.bluebrain.nexus.delta.sdk.resources.Resources import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource +import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.Schema import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.SchemaRejection.InvalidSchemaResolution -import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.{Schema, SchemaRejection} -import ch.epfl.bluebrain.nexus.delta.sdk.Resolve -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.{Resolvers, ResourceResolution} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} -import monix.bio.IO /** * Resolves the OWL imports from a Schema */ -final class SchemaImports(resolveSchema: Resolve[Schema], resolveResource: Resolve[Resource]) { self => +final class SchemaImports(resolveSchema: Resolve[Schema], resolveResource: Resolve[Resource])(implicit + contextShift: ContextShift[IO] +) { self => /** * Resolve the ''imports'' from the passed ''expanded'' document and recursively from the resolved documents. 
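Note on the hunk below: SchemaImports now receives resolvers of type Resolve[A], i.e. functions returning IO[Either[ResourceResolutionReport, A]], and needs a ContextShift[IO] for the parallel batch lookup. The sketch that follows is an illustrative, self-contained reduction of that batching pattern under Cats Effect 2 (Report, resolve and the String keys are placeholders, not the project's types): each reference is resolved in parallel and the Either results are partitioned into failure and success maps.

// A minimal sketch of the lookupInBatch-style pattern, with placeholder names.
import cats.effect.{ContextShift, IO}
import cats.syntax.all._

import scala.concurrent.ExecutionContext

object BatchResolveSketch {

  // CE2-style Parallel support for IO, as the patch also introduces in the specs.
  implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global)

  final case class Report(reason: String)

  // Hypothetical resolver standing in for Resolve[A].
  def resolve(ref: String): IO[Either[Report, String]] =
    IO.pure {
      if (ref.startsWith("known")) Right(s"document of $ref")
      else Left(Report(s"$ref not found"))
    }

  // Resolve all references in parallel, then split rejections from successes.
  def lookupInBatch(refs: Set[String]): IO[(Map[String, Report], Map[String, String])] =
    refs.toList
      .parTraverse(ref => resolve(ref).map(_.bimap(ref -> _, ref -> _)))
      .map(_.partitionMap(identity))
      .map { case (failures, successes) => failures.toMap -> successes.toMap }
}

Partitioning with partitionMap keeps both the rejections (needed to build InvalidSchemaResolution) and the successfully resolved documents in a single pass.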
@@ -36,7 +38,7 @@ final class SchemaImports(resolveSchema: Resolve[Schema], resolveResource: Resol */ def resolve(id: Iri, projectRef: ProjectRef, expanded: ExpandedJsonLd)(implicit caller: Caller - ): IO[SchemaRejection, NonEmptyList[ExpandedJsonLd]] = { + ): IO[NonEmptyList[ExpandedJsonLd]] = { def detectNonOntology(resourceSuccess: Map[ResourceRef, Resource]): Set[ResourceRef] = resourceSuccess.collect { @@ -47,14 +49,14 @@ final class SchemaImports(resolveSchema: Resolve[Schema], resolveResource: Resol schemaRejections: Map[ResourceRef, ResourceResolutionReport], resourceRejections: Map[ResourceRef, ResourceResolutionReport], nonOntologies: Set[ResourceRef] - ): IO[InvalidSchemaResolution, Unit] = - IO.when(resourceRejections.nonEmpty || nonOntologies.nonEmpty)( - IO.raiseError(InvalidSchemaResolution(id, schemaRejections, resourceRejections, nonOntologies)) + ): IO[Unit] = + IO.raiseWhen(resourceRejections.nonEmpty || nonOntologies.nonEmpty)( + InvalidSchemaResolution(id, schemaRejections, resourceRejections, nonOntologies) ) def lookupFromSchemasAndResources( toResolve: Set[ResourceRef] - ): IO[InvalidSchemaResolution, Iterable[ExpandedJsonLd]] = + ): IO[Iterable[ExpandedJsonLd]] = for { (schemaRejections, schemaSuccess) <- lookupInBatch(toResolve, resolveSchema(_, projectRef, caller)) resourcesToResolve = toResolve -- schemaSuccess.keySet @@ -74,15 +76,23 @@ final class SchemaImports(resolveSchema: Resolve[Schema], resolveResource: Resol } } - private def lookupInBatch[A](toResolve: Set[ResourceRef], fetch: ResourceRef => IO[ResourceResolutionReport, A]) = + private def lookupInBatch[A]( + toResolve: Set[ResourceRef], + fetch: ResourceRef => IO[Either[ResourceResolutionReport, A]] + ): IO[(Map[ResourceRef, ResourceResolutionReport], Map[ResourceRef, A])] = toResolve.toList - .parTraverse(ref => fetch(ref).bimap(ref -> _, ref -> _).attempt) + .parTraverse { ref => fetch(ref).map(_.bimap(ref -> _, ref -> _)) } .map(_.partitionMap(identity)) .map { case (rejections, successes) => rejections.toMap -> successes.toMap } } object SchemaImports { + final def alwaysFail(implicit contextShift: ContextShift[IO]) = new SchemaImports( + (_, _, _) => IO.pure(Left(ResourceResolutionReport())), + (_, _, _) => IO.pure(Left(ResourceResolutionReport())) + ) + /** * Construct a [[SchemaImports]]. 
*/ @@ -91,17 +101,17 @@ object SchemaImports { resolvers: Resolvers, schemas: Schemas, resources: Resources - ): SchemaImports = { + )(implicit contextShift: ContextShift[IO]): SchemaImports = { def resolveSchema(ref: ResourceRef, projectRef: ProjectRef, caller: Caller) = ResourceResolution .schemaResource(aclCheck, resolvers, schemas) .resolve(ref, projectRef)(caller) - .map(_.value) + .map(_.map(_.value)) def resolveResource(ref: ResourceRef, projectRef: ProjectRef, caller: Caller) = ResourceResolution .dataResource(aclCheck, resolvers, resources) .resolve(ref, projectRef)(caller) - .map(_.value) + .map(_.map(_.value)) new SchemaImports(resolveSchema, resolveResource) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala index daa7ba1bb6..bd3b2d8445 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImpl.scala @@ -7,6 +7,7 @@ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv} +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.ExpandedJsonLd import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller @@ -59,7 +60,7 @@ final class SchemasImpl private ( pc <- fetchContext.onCreate(projectRef) iri <- id.traverse(expandIri(_, pc)) jsonLd <- sourceParser(projectRef, pc, iri, source) - expandedResolved <- schemaImports.resolve(jsonLd.iri, projectRef, jsonLd.expanded.addType(nxv.Schema)) + expandedResolved <- resolveImports(jsonLd.iri, projectRef, jsonLd.expanded) } yield CreateSchema(jsonLd.iri, projectRef, source, jsonLd.compacted, expandedResolved, caller.subject) override def update( @@ -72,7 +73,7 @@ final class SchemasImpl private ( pc <- fetchContext.onModify(projectRef) iri <- expandIri(id, pc) (compacted, expanded) <- sourceParser(projectRef, pc, iri, source).map { j => (j.compacted, j.expanded) } - expandedResolved <- schemaImports.resolve(iri, projectRef, expanded.addType(nxv.Schema)) + expandedResolved <- resolveImports(iri, projectRef, expanded) res <- eval(UpdateSchema(iri, projectRef, source, compacted, expandedResolved, rev, caller.subject)) } yield res @@ -87,7 +88,7 @@ final class SchemasImpl private ( iri <- expandIri(id, pc) schema <- log.stateOr(projectRef, iri, SchemaNotFound(iri, projectRef)) (compacted, expanded) <- sourceParser(projectRef, pc, iri, schema.source).map { j => (j.compacted, j.expanded) } - expandedResolved <- schemaImports.resolve(iri, projectRef, expanded.addType(nxv.Schema)) + expandedResolved <- resolveImports(iri, projectRef, expanded) res <- eval(RefreshSchema(iri, projectRef, compacted, expandedResolved, schema.rev, caller.subject)) } yield res @@ -149,6 +150,9 @@ final class SchemasImpl private ( private def dryRun(cmd: SchemaCommand) = log.dryRun(cmd.project, cmd.id, cmd).map(_._2.toResource) + + private def resolveImports(id: Iri, projectRef: ProjectRef, expanded: ExpandedJsonLd)(implicit caller: Caller) = + schemaImports.resolve(id, projectRef, expanded.addType(nxv.Schema)).toBIO[SchemaRejection] } object SchemasImpl { diff --git 
a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResolverResolutionGen.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResolverResolutionGen.scala index 33bc5c69a1..e697818bb7 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResolverResolutionGen.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResolverResolutionGen.scala @@ -1,12 +1,12 @@ package ch.epfl.bluebrain.nexus.delta.sdk.generators +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.Fetch import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.ResolverNotFound import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef, ResourceRef} -import monix.bio.{IO, UIO} object ResolverResolutionGen { @@ -24,7 +24,7 @@ object ResolverResolutionGen { val resolver = ResolverGen.inProject(nxv + "in-project", projectRef) new ResolverResolution( - (_: ProjectRef, _: Set[Identity]) => UIO.pure(false), + (_: ProjectRef, _: Set[Identity]) => IO.pure(false), (_: ProjectRef) => IO.pure(List(resolver)), (resolverId: Iri, p: ProjectRef) => if (resolverId == resolver.id && p == resolver.project) diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResourceResolutionGen.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResourceResolutionGen.scala index a2c25e0fee..fa3c28c00b 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResourceResolutionGen.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/generators/ResourceResolutionGen.scala @@ -1,12 +1,12 @@ package ch.epfl.bluebrain.nexus.delta.sdk.generators +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sdk.resolvers import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.{FetchResource, ResourceResolution} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.ResolverNotFound import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, ProjectRef, ResourceRef} -import monix.bio.{IO, UIO} object ResourceResolutionGen { @@ -24,7 +24,7 @@ object ResourceResolutionGen { val resolver = ResolverGen.inProject(nxv + "in-project", projectRef) resolvers.ResourceResolution( - (_: ProjectRef, _: Set[Identity]) => UIO.pure(false), + (_: ProjectRef, _: Set[Identity]) => IO.pure(false), (_: ProjectRef) => IO.pure(List(resolver)), (resolverId: Iri, p: ProjectRef) => if (resolverId == resolver.id && p == resolver.project) diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala similarity index 51% rename from delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSpec.scala rename to delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala index f89d386675..3a345ae347 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/MultiResolutionSuite.scala 
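The renamed suites that follow all apply the same test recipe: drop AnyWordSpecLike, Matchers and IOValues with their accepted/rejected helpers, extend the testkit's CatsEffectSuite, and assert directly on the IO values with the assertEquals and intercept helpers seen throughout these diffs. A minimal illustration of the shape (LookupSuite and lookup are made-up names, not part of the codebase):

import cats.effect.IO
import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite

class LookupSuite extends CatsEffectSuite {

  private def lookup(key: String): IO[Int] =
    if (key == "known") IO.pure(42)
    else IO.raiseError(new NoSuchElementException(key))

  // Before: lookup("known").accepted shouldEqual 42
  test("Return the value for a known key") {
    lookup("known").assertEquals(42)
  }

  // Before: lookup("missing").rejectedWith[NoSuchElementException]
  test("Fail for an unknown key") {
    lookup("missing").intercept[NoSuchElementException]
  }
}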
@@ -1,38 +1,32 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.sdk.generators.{ResolverResolutionGen, ResourceGen, SchemaGen} import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.JsonLdContent import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegmentRef, ResourceF} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverResolutionRejection.ResourceNotFound import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.{ApiMappings, ProjectContext} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.Fetch -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{InvalidResolution, InvalidResolverId, InvalidResolverResolution, ProjectContextRejection} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverResolutionRejection.ResourceNotFound +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{MultiResolutionResult, ResourceResolutionReport} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{InvalidResolution, InvalidResolverResolution, ProjectContextRejection} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport.ResolverReport -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{MultiResolutionResult, ResolverRejection, ResourceResolutionReport} import ch.epfl.bluebrain.nexus.delta.sdk.utils.Fixtures import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.{Latest, Revision} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} -import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, IOValues, TestHelpers} +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsEffectSuite, IOFromMap} +import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} import io.circe.Json -import monix.bio.IO -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike - -class MultiResolutionSpec - extends AnyWordSpecLike - with Matchers - with TestHelpers - with IOValues - with CirceLiteral - with Fixtures { + +class MultiResolutionSuite extends CatsEffectSuite with TestHelpers with CirceLiteral with IOFromMap with Fixtures { private val alice = User("alice", Label.unsafe("wonderland")) - implicit val aliceCaller: Caller = Caller(User("alice", Label.unsafe("wonderland")), Set(alice)) + implicit val aliceCaller: Caller = Caller(alice, Set(alice)) private val projectRef = ProjectRef.unsafe("org", "project") @@ -61,12 +55,12 @@ class MultiResolutionSpec def fetch: (ResourceRef, ProjectRef) => Fetch[JsonLdContent[_, _]] = (ref: ResourceRef, _: ProjectRef) => ref match { - case Latest(`resourceId`) => IO.some(resourceValue) - case Revision(_, `schemaId`, _) => IO.some(schemaValue) + case Latest(`resourceId`) => IO.pure(Some(resourceValue)) + case Revision(_, `schemaId`, _) => IO.pure(Some(schemaValue)) case _ => IO.none } - def fetchProject: ProjectRef => IO[ResolverRejection, ProjectContext] = + def fetchProject: ProjectRef => IO[ProjectContext] = FetchContextDummy(Map(projectRef -> ProjectContext.unsafe(ApiMappings.empty, nxv.base, nxv.base))) .mapRejection(ProjectContextRejection) .onRead 
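A recurring micro-change in these test diffs is how stubs are written once the monix aliases are gone: UIO.pure and IO.some become plain cats-effect IO.pure and IO.none (IO.none being shorthand for IO.pure(None)). A small sketch, with an invented Fetch alias and Content type:

import cats.effect.IO

object FetchStub {
  type Fetch[A] = String => IO[Option[A]]

  final case class Content(value: String)

  // Returns the content only for the known id; anything else resolves to None.
  val fetchContent: Fetch[Content] = {
    case "known-id" => IO.pure(Some(Content("resource")))
    case _          => IO.none
  }
}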
@@ -77,53 +71,46 @@ class MultiResolutionSpec private val multiResolution = new MultiResolution(fetchProject, resourceResolution) - "A multi-resolution" should { - - "resolve the id as a resource" in { - multiResolution(resourceId, projectRef).accepted shouldEqual - MultiResolutionResult(ResourceResolutionReport(ResolverReport.success(resolverId, projectRef)), resourceValue) - } - - "resolve the id as a resource with a specific resolver" in { - multiResolution(resourceId, projectRef, resolverId).accepted shouldEqual - MultiResolutionResult(ResolverReport.success(resolverId, projectRef), resourceValue) - } - - "resolve the id as a schema" in { - multiResolution(IdSegmentRef(schemaId, 5), projectRef).accepted shouldEqual - MultiResolutionResult(ResourceResolutionReport(ResolverReport.success(resolverId, projectRef)), schemaValue) - } - - "resolve the id as a schema with a specific resolver" in { - multiResolution(IdSegmentRef(schemaId, 5), projectRef, resolverId).accepted shouldEqual - MultiResolutionResult(ResolverReport.success(resolverId, projectRef), schemaValue) - } - - "fail when it can't be resolved neither as a resource or a schema" in { - multiResolution(unknownResourceId, projectRef).rejected shouldEqual - InvalidResolution( - unknownResourceRef, - projectRef, - ResourceResolutionReport( - ResolverReport.failed(resolverId, projectRef -> ResourceNotFound(unknownResourceId, projectRef)) - ) - ) - } - - "fail with a specific resolver when it can't be resolved neither as a resource or a schema" in { - multiResolution(unknownResourceId, projectRef, resolverId).rejected shouldEqual - InvalidResolverResolution( - unknownResourceRef, - resolverId, - projectRef, - ResolverReport.failed(resolverId, projectRef -> ResourceNotFound(unknownResourceId, projectRef)) - ) - } - - "fail with an invalid resolver id" in { - val invalid = "qa$%" - multiResolution(resourceId, projectRef, invalid).rejected shouldEqual InvalidResolverId(invalid) - } + test("Resolve the id as a resource") { + val expected = + MultiResolutionResult(ResourceResolutionReport(ResolverReport.success(resolverId, projectRef)), resourceValue) + multiResolution(resourceId, projectRef).assertEquals(expected) + } + + test("Resolve the id as a resource with a specific resolver") { + val expected = MultiResolutionResult(ResolverReport.success(resolverId, projectRef), resourceValue) + multiResolution(resourceId, projectRef, resolverId).assertEquals(expected) + } + + test("Resolve the id as a schema") { + val expected = + MultiResolutionResult(ResourceResolutionReport(ResolverReport.success(resolverId, projectRef)), schemaValue) + multiResolution(IdSegmentRef(schemaId, 5), projectRef).assertEquals(expected) + } + + test("Resolve the id as a schema with a specific resolver") { + val expected = MultiResolutionResult(ResolverReport.success(resolverId, projectRef), schemaValue) + multiResolution(IdSegmentRef(schemaId, 5), projectRef, resolverId).assertEquals(expected) } + test("Fail when it can't be resolved neither as a resource or a schema") { + val expectedError = InvalidResolution( + unknownResourceRef, + projectRef, + ResourceResolutionReport( + ResolverReport.failed(resolverId, projectRef -> ResourceNotFound(unknownResourceId, projectRef)) + ) + ) + multiResolution(unknownResourceId, projectRef).intercept(expectedError) + } + + test("Fail with a specific resolver when it can't be resolved neither as a resource or a schema") { + val expectedError = InvalidResolverResolution( + unknownResourceRef, + resolverId, + projectRef, + 
ResolverReport.failed(resolverId, projectRef -> ResourceNotFound(unknownResourceId, projectRef)) + ) + multiResolution(unknownResourceId, projectRef, resolverId).intercept(expectedError) + } } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolutionSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolutionSuite.scala similarity index 72% rename from delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolutionSpec.scala rename to delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolutionSuite.scala index 837c265b17..ee99041de8 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolutionSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverContextResolutionSuite.scala @@ -1,32 +1,33 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers import akka.http.scaladsl.model.Uri +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schemas} +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.{CompactedJsonLd, ExpandedJsonLd} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContext.StaticContext import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolutionError.RemoteContextNotAccessible import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.{CompactedJsonLd, ExpandedJsonLd} import ch.epfl.bluebrain.nexus.delta.sdk.generators.ResourceResolutionGen import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller +import ch.epfl.bluebrain.nexus.testkit.TestHelpers import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ -import ch.epfl.bluebrain.nexus.delta.sdk.model._ +import ch.epfl.bluebrain.nexus.delta.sdk.model.{ResourceF, ResourceUris, Tags} import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution.ProjectRemoteContext import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.FetchResource import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} -import ch.epfl.bluebrain.nexus.testkit.{IOValues, TestHelpers} +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite import io.circe.Json import io.circe.syntax._ -import monix.bio.IO -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike import java.time.Instant -class ResolverContextResolutionSpec extends AnyWordSpecLike with IOValues with TestHelpers with Matchers { +class ResolverContextResolutionSuite extends CatsEffectSuite with TestHelpers { private val metadataContext = jsonContentOf("/contexts/metadata.json").topContextValueOrEmpty @@ -65,7 +66,7 @@ class ResolverContextResolutionSpec extends AnyWordSpecLike with IOValues with T def fetchResource: (ResourceRef, ProjectRef) => FetchResource[Resource] = { (r: ResourceRef, p: ProjectRef) => (r, p) match { - case (Latest(id), `project`) if resourceId == id => IO.some(resource) + case (Latest(id), 
`project`) if resourceId == id => IO.pure(Some(resource)) case _ => IO.none } } @@ -74,23 +75,20 @@ class ResolverContextResolutionSpec extends AnyWordSpecLike with IOValues with T private val resolverContextResolution = ResolverContextResolution(rcr, resourceResolution) - "Resolving contexts" should { - - "resolve correctly static contexts" in { - val expected = StaticContext(contexts.metadata, metadataContext) - resolverContextResolution(project).resolve(contexts.metadata).accepted shouldEqual expected - } + private def resolve(iri: Iri) = + toCatsIO(resolverContextResolution(project).resolve(iri)) - "resolve correctly a resource context" in { - val expected = ProjectRemoteContext(resourceId, project, 5, ContextValue(context)) - resolverContextResolution(project).resolve(resourceId).accepted shouldEqual expected - } + test("Resolve correctly static contexts") { + val expected = StaticContext(contexts.metadata, metadataContext) + resolve(contexts.metadata).assertEquals(expected) + } - "fail is applying for an unknown resource" in { - resolverContextResolution(project) - .resolve(nxv + "xxx") - .rejectedWith[RemoteContextNotAccessible] - } + test("Resolve correctly a resource context") { + val expected = ProjectRemoteContext(resourceId, project, 5, ContextValue(context)) + resolve(resourceId).assertEquals(expected) } + test("Fail is applying for an unknown resource") { + resolve(nxv + "xxx").intercept[RemoteContextNotAccessible] + } } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSpec.scala deleted file mode 100644 index 9192275c99..0000000000 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSpec.scala +++ /dev/null @@ -1,372 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.resolvers - -import akka.http.scaladsl.model.Uri -import cats.data.NonEmptyList -import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri -import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{nxv, schemas} -import ch.epfl.bluebrain.nexus.delta.sdk -import ch.epfl.bluebrain.nexus.delta.sdk.generators.ResolverGen -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller -import ch.epfl.bluebrain.nexus.delta.sdk.model._ -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.FetchResource -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolutionSpec.ResourceExample -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.{ProvidedIdentities, UseCurrentCaller} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.Resolver.CrossProjectResolver -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.ResolverNotFound -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverResolutionRejection.{ProjectAccessDenied, ResourceNotFound, ResourceTypesDenied, WrappedResolverRejection} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.CrossProjectValue -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport.ResolverReport -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{IdentityResolution, Priority, Resolver, ResolverRejection, ResourceResolutionReport} -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User -import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest -import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef, ResourceRef} -import 
ch.epfl.bluebrain.nexus.testkit.IOValues -import io.circe.Json -import monix.bio.{IO, UIO} -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike -import org.scalatest.{Inspectors, OptionValues} - -import java.time.Instant - -class ResolverResolutionSpec extends AnyWordSpecLike with Matchers with IOValues with OptionValues with Inspectors { - - private val alice = User("alice", Label.unsafe("wonderland")) - private val bob = User("bob", Label.unsafe("wonderland")) - - implicit val aliceCaller: Caller = Caller(alice, Set(alice)) - - private val project1 = ProjectRef.unsafe("org", "project1") - private val project2 = ProjectRef.unsafe("org", "project2") - private val project3 = ProjectRef.unsafe("org", "project3") - - val checkAcls: (ProjectRef, Set[Identity]) => UIO[Boolean] = - (p: ProjectRef, identities: Set[Identity]) => - p match { - case `project1` if identities == Set(alice) || identities == Set(bob) => UIO.pure(true) - case `project2` if identities == Set(bob) => UIO.pure(true) - case `project3` if identities == Set(alice) => UIO.pure(true) - case _ => UIO.pure(false) - } - - private val resource = ResourceF( - id = nxv + "example1", - uris = ResourceUris(Uri("/example1")), - rev = 5, - types = Set(nxv + "ResourceExample", nxv + "ResourceExample2"), - deprecated = false, - createdAt = Instant.now(), - createdBy = alice, - updatedAt = Instant.now(), - updatedBy = alice, - schema = Latest(schemas + "ResourceExample"), - value = ResourceExample("myResource") - ) - - private val inProjectResolver = ResolverGen.inProject(nxv + "in-project-proj-1", project1) - - def crossProjectResolver( - id: String, - priority: Int, - resourceTypes: Set[Iri] = Set.empty, - projects: NonEmptyList[ProjectRef] = NonEmptyList.of(project1, project2, project3), - identityResolution: IdentityResolution = UseCurrentCaller - ): CrossProjectResolver = - CrossProjectResolver( - nxv + id, - project1, - CrossProjectValue( - Priority.unsafe(priority), - resourceTypes, - projects, - identityResolution - ), - Json.obj(), - Tags.empty - ) - - def listResolvers(resolvers: List[Resolver]): ProjectRef => UIO[List[Resolver]] = (_: ProjectRef) => - IO.pure(resolvers) - private val emptyResolverListQuery = listResolvers(List.empty[Resolver]) - - val noResolverFetch: (Iri, ProjectRef) => IO[ResolverNotFound, Nothing] = - (_: Iri, projectRef: ProjectRef) => IO.raiseError(ResolverNotFound(nxv + "not-found", projectRef)) - def fetchResolver(resolver: Resolver): (Iri, ProjectRef) => IO[ResolverRejection, Resolver] = - (id: Iri, projectRef: ProjectRef) => - if (id == resolver.id) IO.pure(resolver) - else IO.raiseError(ResolverNotFound(id, projectRef)) - - def fetchResource( - projectRef: ProjectRef - ): (ResourceRef, ProjectRef) => FetchResource[ResourceExample] = - (_: ResourceRef, p: ProjectRef) => - p match { - case `projectRef` => UIO.some(resource) - case _ => UIO.none - } - - "The Resource resolution" when { - - def singleResolverResolution(resourceProject: ProjectRef, resolver: Resolver) = - ResourceResolution( - checkAcls, - emptyResolverListQuery, - fetchResolver(resolver), - fetchResource(resourceProject) - ) - - def multipleResolverResolution(resourceProject: ProjectRef, resolvers: Resolver*) = - sdk.resolvers.ResourceResolution( - checkAcls, - listResolvers(resolvers.toList), - noResolverFetch, - fetchResource(resourceProject) - ) - - "resolving with an in-project resolver" should { - val resourceResolution = singleResolverResolution(project1, inProjectResolver) - - "fail if the 
resolver can't be found" in { - val unknown = nxv + "xxx" - resourceResolution - .resolve(Latest(resource.id), project1, unknown) - .rejected shouldEqual ResolverReport.failed( - unknown, - project1 -> WrappedResolverRejection(ResolverNotFound(unknown, project1)) - ) - } - - "fail if the resource can't be found in the project" in { - val (report, result) = resourceResolution - .resolveReport( - Latest(resource.id), - project2, - inProjectResolver.id - ) - .accepted - - report shouldEqual ResolverReport.failed( - inProjectResolver.id, - project2 -> ResourceNotFound(resource.id, project2) - ) - result shouldEqual None - } - - "be successful if the resource can be fetched" in { - val (report, result) = - resourceResolution.resolveReport(Latest(resource.id), project1, inProjectResolver.id).accepted - - report shouldEqual ResolverReport.success(inProjectResolver.id, project1) - result.value shouldEqual resource - } - } - - "resolving with a cross-project resolver with using current caller resolution" should { - "succeed at 3rd project" in { - forAll( - List( - crossProjectResolver("use-current", 40, identityResolution = UseCurrentCaller), - crossProjectResolver( - "use-current", - 40, - resourceTypes = resource.types + nxv.Schema, - identityResolution = UseCurrentCaller - ) - ) - ) { resolver => - val (report, result) = singleResolverResolution(project3, resolver) - .resolveReport(Latest(resource.id), project1, resolver.id) - .accepted - - report shouldEqual ResolverReport.success( - resolver.id, - project3, - project1 -> ResourceNotFound(resource.id, project1), - project2 -> ProjectAccessDenied(project2, UseCurrentCaller) - ) - result.value shouldEqual resource - } - } - - "fail if the caller has no access to the resource project" in { - val resolver = crossProjectResolver( - "use-current", - 40, - identityResolution = UseCurrentCaller - ) - val (report, result) = singleResolverResolution(project2, resolver) - .resolveReport(Latest(resource.id), project1, resolver.id) - .accepted - - report shouldEqual ResolverReport.failed( - resolver.id, - project1 -> ResourceNotFound(resource.id, project1), - project2 -> ProjectAccessDenied(project2, UseCurrentCaller), - project3 -> ResourceNotFound(resource.id, project3) - ) - result shouldEqual None - } - - "fail if the resource type is not defined in the cross project resolver" in { - val resolver = crossProjectResolver( - "use-current", - 40, - resourceTypes = Set(nxv.Schema), - identityResolution = UseCurrentCaller - ) - - val resourceResolution = singleResolverResolution(project3, resolver) - - val (report, result) = resourceResolution - .resolveReport(Latest(resource.id), project1, resolver.id) - .accepted - - report shouldEqual ResolverReport.failed( - resolver.id, - project1 -> ResourceNotFound(resource.id, project1), - project2 -> ProjectAccessDenied(project2, UseCurrentCaller), - project3 -> ResourceTypesDenied(project3, resource.types) - ) - result shouldEqual None - } - - } - - "resolving with a cross-project resolver with using provided entities resolution" should { - "succeed at 2nd project" in { - forAll( - List( - crossProjectResolver("provided-entities", 40, identityResolution = ProvidedIdentities(Set(bob))), - crossProjectResolver( - "provided-entities", - 40, - resourceTypes = resource.types + nxv.Schema, - identityResolution = ProvidedIdentities(Set(bob)) - ) - ) - ) { resolver => - val (report, result) = singleResolverResolution(project2, resolver) - .resolveReport(Latest(resource.id), project1, resolver.id) - .accepted - - report 
shouldEqual ResolverReport.success( - resolver.id, - project2, - project1 -> ResourceNotFound(resource.id, project1) - ) - result.value shouldEqual resource - } - } - - "fail if the provided entity has no access to the resource project" in { - val resolver = crossProjectResolver( - "provided-entities", - 40, - identityResolution = ProvidedIdentities(Set(bob)) - ) - val (report, result) = singleResolverResolution(project3, resolver) - .resolveReport(Latest(resource.id), project1, resolver.id) - .accepted - - report shouldEqual ResolverReport.failed( - resolver.id, - project1 -> ResourceNotFound(resource.id, project1), - project2 -> ResourceNotFound(resource.id, project2), - project3 -> ProjectAccessDenied(project3, ProvidedIdentities(Set(bob))) - ) - result shouldEqual None - } - } - - "resolving with multiple resolvers" should { - - "be successful with the in-project resolver after failing a first time" in { - val resolution = multipleResolverResolution( - project1, - crossProjectResolver("cross-project-1", priority = 10, resourceTypes = Set(nxv.Schema)), - crossProjectResolver("cross-project-2", priority = 40), - inProjectResolver - ) - - val (report, result) = resolution.resolveReport(Latest(resource.id), project1).accepted - - report shouldEqual ResourceResolutionReport( - ResolverReport.failed( - nxv + "cross-project-1", - project1 -> ResourceTypesDenied(project1, resource.types), - project2 -> ProjectAccessDenied(project2, UseCurrentCaller), - project3 -> ResourceNotFound(resource.id, project3) - ), - ResolverReport.success(inProjectResolver.id, project1) - ) - - result.value shouldEqual resource - } - - "be successful with the last resolver" in { - val resolution = multipleResolverResolution( - project3, - crossProjectResolver("cross-project-1", priority = 10, resourceTypes = Set(nxv.Schema)), - crossProjectResolver("cross-project-2", priority = 40, projects = NonEmptyList.of(project3)), - inProjectResolver - ) - - val (report, result) = resolution.resolveReport(Latest(resource.id), project1).accepted - - report shouldEqual ResourceResolutionReport( - ResolverReport.failed( - nxv + "cross-project-1", - project1 -> ResourceNotFound(resource.id, project1), - project2 -> ProjectAccessDenied(project2, UseCurrentCaller), - project3 -> ResourceTypesDenied(project3, resource.types) - ), - ResolverReport.failed( - inProjectResolver.id, - project1 -> ResourceNotFound(resource.id, project1) - ), - ResolverReport.success(nxv + "cross-project-2", project3) - ) - - result.value shouldEqual resource - } - - "fail if no resolver matches" in { - val resolution = multipleResolverResolution( - project2, - crossProjectResolver("cross-project-1", priority = 10, resourceTypes = Set(nxv.Schema)), - crossProjectResolver("cross-project-2", priority = 40, projects = NonEmptyList.of(project3)), - inProjectResolver - ) - - val (report, result) = resolution.resolveReport(Latest(resource.id), project1).accepted - - report shouldEqual ResourceResolutionReport( - ResolverReport.failed( - nxv + "cross-project-1", - project1 -> ResourceNotFound(resource.id, project1), - project2 -> ProjectAccessDenied(project2, UseCurrentCaller), - project3 -> ResourceNotFound(resource.id, project3) - ), - ResolverReport.failed( - inProjectResolver.id, - project1 -> ResourceNotFound(resource.id, project1) - ), - ResolverReport.failed( - nxv + "cross-project-2", - project3 -> ResourceNotFound(resource.id, project3) - ) - ) - result shouldEqual None - } - - } - - } - -} - -object ResolverResolutionSpec { - - final case class 
ResourceExample(value: String) - -} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSuite.scala new file mode 100644 index 0000000000..9da87bc9e1 --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverResolutionSuite.scala @@ -0,0 +1,360 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.resolvers + +import akka.http.scaladsl.model.Uri +import cats.data.NonEmptyList +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{nxv, schemas} +import ch.epfl.bluebrain.nexus.delta.sdk.generators.ResolverGen +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller +import ch.epfl.bluebrain.nexus.delta.sdk.model.{ResourceF, ResourceUris, Tags} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolution.FetchResource +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverResolutionSuite.ResourceExample +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.{ProvidedIdentities, UseCurrentCaller} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.Resolver.CrossProjectResolver +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.ResolverNotFound +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverResolutionRejection.{ProjectAccessDenied, ResourceNotFound, ResourceTypesDenied, WrappedResolverRejection} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.CrossProjectValue +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport.ResolverReport +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{IdentityResolution, Priority, Resolver, ResourceResolutionReport} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest +import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef, ResourceRef} +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite +import io.circe.Json + +import java.time.Instant + +class ResolverResolutionSuite extends CatsEffectSuite { + + private val realm = Label.unsafe("wonderland") + private val alice = User("alice", realm) + private val bob = User("bob", realm) + + implicit val aliceCaller: Caller = Caller(alice, Set(alice)) + + private val project1 = ProjectRef.unsafe("org", "project1") + private val project2 = ProjectRef.unsafe("org", "project2") + private val project3 = ProjectRef.unsafe("org", "project3") + + private val checkAcls: (ProjectRef, Set[Identity]) => IO[Boolean] = + (p: ProjectRef, identities: Set[Identity]) => + p match { + case `project1` if identities == Set(alice) || identities == Set(bob) => IO.pure(true) + case `project2` if identities == Set(bob) => IO.pure(true) + case `project3` if identities == Set(alice) => IO.pure(true) + case _ => IO.pure(false) + } + + private val resource = ResourceF( + id = nxv + "example1", + uris = ResourceUris(Uri("/example1")), + rev = 5, + types = Set(nxv + "ResourceExample", nxv + "ResourceExample2"), + deprecated = false, + createdAt = Instant.now(), + createdBy = alice, + updatedAt = Instant.now(), + updatedBy = alice, + schema = Latest(schemas + "ResourceExample"), + value = ResourceExample("myResource") + ) + + private val inProjectResolver = ResolverGen.inProject(nxv + "in-project-proj-1", project1) + + private def 
crossProjectResolver( + id: String, + priority: Int, + resourceTypes: Set[Iri] = Set.empty, + projects: NonEmptyList[ProjectRef] = NonEmptyList.of(project1, project2, project3), + identityResolution: IdentityResolution = UseCurrentCaller + ): CrossProjectResolver = + CrossProjectResolver( + nxv + id, + project1, + CrossProjectValue( + Priority.unsafe(priority), + resourceTypes, + projects, + identityResolution + ), + Json.obj(), + Tags.empty + ) + + def listResolvers(resolvers: List[Resolver]): ProjectRef => IO[List[Resolver]] = (_: ProjectRef) => IO.pure(resolvers) + + private val emptyResolverListQuery = listResolvers(List.empty[Resolver]) + + val noResolverFetch: (Iri, ProjectRef) => IO[Nothing] = + (_: Iri, projectRef: ProjectRef) => IO.raiseError(ResolverNotFound(nxv + "not-found", projectRef)) + + def fetchResolver(resolver: Resolver): (Iri, ProjectRef) => IO[Resolver] = + (id: Iri, projectRef: ProjectRef) => + if (id == resolver.id) IO.pure(resolver) + else IO.raiseError(ResolverNotFound(id, projectRef)) + + def fetchResource( + projectRef: ProjectRef + ): (ResourceRef, ProjectRef) => FetchResource[ResourceExample] = + (_: ResourceRef, p: ProjectRef) => + p match { + case `projectRef` => IO.pure(Some(resource)) + case _ => IO.none + } + + private def singleResolverResolution(resourceProject: ProjectRef, resolver: Resolver) = + ResourceResolution( + checkAcls, + emptyResolverListQuery, + fetchResolver(resolver), + fetchResource(resourceProject) + ) + + private def multipleResolverResolution(resourceProject: ProjectRef, resolvers: Resolver*) = + ResourceResolution( + checkAcls, + listResolvers(resolvers.toList), + noResolverFetch, + fetchResource(resourceProject) + ) + + private val inProjectResolution = singleResolverResolution(project1, inProjectResolver) + + private val resource1NotFound = None + private val resource1Found = Some(resource) + + test("Using an in-project resolver fails if the resolver can't be found") { + + val unknown = nxv + "xxx" + val expectedError = ResolverReport.failed( + unknown, + project1 -> WrappedResolverRejection(ResolverNotFound(unknown, project1)) + ) + inProjectResolution + .resolve(Latest(resource.id), project1, unknown) + .assertEquals(Left(expectedError)) + } + + test("Using an in-project resolver fails if the resource can't be found in the project") { + val expectedReport = ResolverReport.failed( + inProjectResolver.id, + project2 -> ResourceNotFound(resource.id, project2) + ) + inProjectResolution + .resolveReport( + Latest(resource.id), + project2, + inProjectResolver.id + ) + .assertEquals((expectedReport, resource1NotFound)) + } + + test("Using an in-project resolver succeeds if the resource can be fetched") { + val expectedReport = ResolverReport.success(inProjectResolver.id, project1) + val expectedResult = Some(resource) + inProjectResolution + .resolveReport(Latest(resource.id), project1, inProjectResolver.id) + .assertEquals((expectedReport, expectedResult)) + } + + test("Using a cross-project resolver with current caller succeeds") { + val resolver = crossProjectResolver("use-current", 40, identityResolution = UseCurrentCaller) + val resolverResolution = singleResolverResolution(project3, resolver) + + val successAtProject3 = ResolverReport.success( + resolver.id, + project3, + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((successAtProject3, resource1Found)) + } + + 
test("Using a cross-project resolver with current caller and limiting on types succeeds") { + val acceptedTypes = resource.types + nxv.Schema + val resolver = + crossProjectResolver("use-current", 40, resourceTypes = acceptedTypes, identityResolution = UseCurrentCaller) + val resolverResolution = singleResolverResolution(project3, resolver) + + val successAtProject3 = ResolverReport.success( + resolver.id, + project3, + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((successAtProject3, resource1Found)) + } + + test("Using a cross-project resolver with current caller fails if the caller has no access to the project") { + val resolver = crossProjectResolver("use-current", 40, identityResolution = UseCurrentCaller) + val resolverResolution = singleResolverResolution(project2, resolver) + + val failedReport = ResolverReport.failed( + resolver.id, + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller), + project3 -> ResourceNotFound(resource.id, project3) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((failedReport, resource1NotFound)) + } + + test("Using a cross-project resolver with current caller fails if the resource type is not defined") { + val acceptedTypes = Set(nxv.Schema) + val resolver = + crossProjectResolver("use-current", 40, resourceTypes = acceptedTypes, identityResolution = UseCurrentCaller) + val resolverResolution = singleResolverResolution(project3, resolver) + + val failedReport = ResolverReport.failed( + resolver.id, + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller), + project3 -> ResourceTypesDenied(project3, resource.types) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((failedReport, resource1NotFound)) + } + + test("Using a cross-project resolver with provided identities succeeds") { + val resolver = crossProjectResolver("provided-identities", 40, identityResolution = ProvidedIdentities(Set(bob))) + val resolverResolution = singleResolverResolution(project2, resolver) + + val successAtProject2 = ResolverReport.success( + resolver.id, + project2, + project1 -> ResourceNotFound(resource.id, project1) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((successAtProject2, resource1Found)) + } + + test("Using a cross-project resolver with provided identities and limiting on types succeeds") { + val acceptedTypes = resource.types + nxv.Schema + val resolver = crossProjectResolver( + "provided-identities", + 40, + resourceTypes = acceptedTypes, + identityResolution = ProvidedIdentities(Set(bob)) + ) + val resolverResolution = singleResolverResolution(project2, resolver) + + val successAtProject2 = ResolverReport.success( + resolver.id, + project2, + project1 -> ResourceNotFound(resource.id, project1) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((successAtProject2, resource1Found)) + } + + test("Using a cross-project resolver with provided identities fail if the identity has no access") { + val resolver = crossProjectResolver("provided-identities", 40, identityResolution = ProvidedIdentities(Set(bob))) + val resolverResolution = 
singleResolverResolution(project3, resolver) + + val failedReport = ResolverReport.failed( + resolver.id, + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ResourceNotFound(resource.id, project2), + project3 -> ProjectAccessDenied(project3, ProvidedIdentities(Set(bob))) + ) + + resolverResolution + .resolveReport(Latest(resource.id), project1, resolver.id) + .assertEquals((failedReport, resource1NotFound)) + } + + test("Using multiple resolvers succeeds after a first failure") { + val resolution = multipleResolverResolution( + project1, + crossProjectResolver("cross-project-1", priority = 10, resourceTypes = Set(nxv.Schema)), + crossProjectResolver("cross-project-2", priority = 40), + inProjectResolver + ) + + val expectedReport = ResourceResolutionReport( + ResolverReport.failed( + nxv + "cross-project-1", + project1 -> ResourceTypesDenied(project1, resource.types), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller), + project3 -> ResourceNotFound(resource.id, project3) + ), + ResolverReport.success(inProjectResolver.id, project1) + ) + + resolution.resolveReport(Latest(resource.id), project1).assertEquals((expectedReport, resource1Found)) + } + + test("Using multiple resolvers succeeds with the last resolver") { + val resolution = multipleResolverResolution( + project3, + crossProjectResolver("cross-project-1", priority = 10, resourceTypes = Set(nxv.Schema)), + crossProjectResolver("cross-project-2", priority = 40, projects = NonEmptyList.of(project3)), + inProjectResolver + ) + + val expectedReport = ResourceResolutionReport( + ResolverReport.failed( + nxv + "cross-project-1", + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller), + project3 -> ResourceTypesDenied(project3, resource.types) + ), + ResolverReport.failed( + inProjectResolver.id, + project1 -> ResourceNotFound(resource.id, project1) + ), + ResolverReport.success(nxv + "cross-project-2", project3) + ) + + resolution.resolveReport(Latest(resource.id), project1).assertEquals((expectedReport, resource1Found)) + } + + test("Using multiple resolvers fails if no resolver matches") { + val resolution = multipleResolverResolution( + project2, + crossProjectResolver("cross-project-1", priority = 10, resourceTypes = Set(nxv.Schema)), + crossProjectResolver("cross-project-2", priority = 40, projects = NonEmptyList.of(project3)), + inProjectResolver + ) + + val expectedReport = ResourceResolutionReport( + ResolverReport.failed( + nxv + "cross-project-1", + project1 -> ResourceNotFound(resource.id, project1), + project2 -> ProjectAccessDenied(project2, UseCurrentCaller), + project3 -> ResourceNotFound(resource.id, project3) + ), + ResolverReport.failed( + inProjectResolver.id, + project1 -> ResourceNotFound(resource.id, project1) + ), + ResolverReport.failed( + nxv + "cross-project-2", + project3 -> ResourceNotFound(resource.id, project3) + ) + ) + + resolution.resolveReport(Latest(resource.id), project1).assertEquals((expectedReport, resource1NotFound)) + } + +} + +object ResolverResolutionSuite { + final case class ResourceExample(value: String) + +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSpec.scala deleted file mode 100644 index fb23ce6d28..0000000000 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSpec.scala +++ 
/dev/null @@ -1,85 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.resolvers - -import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF -import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schema} -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.{JsonLdApi, JsonLdJavaApi} -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.generators.ProjectGen -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.ServiceAccount -import ch.epfl.bluebrain.nexus.delta.sdk.model.IdSegment -import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy -import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{ProjectContextRejection, ResolverNotFound} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.InProjectValue -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Priority, ResourceResolutionReport} -import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ -import ch.epfl.bluebrain.nexus.delta.sdk.{ConfigFixtures, Defaults} -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Subject, User} -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label -import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture -import ch.epfl.bluebrain.nexus.testkit.ce.CatsIOValues -import ch.epfl.bluebrain.nexus.testkit.{IOFixedClock, TestHelpers} -import monix.bio.IO -import org.scalatest.matchers.should.Matchers - -import java.util.UUID - -class ResolverScopeInitializationSpec - extends DoobieScalaTestFixture - with Matchers - with CatsIOValues - with IOFixedClock - with TestHelpers - with ConfigFixtures { - - private val defaultInProjectResolverId: IdSegment = nxv.defaultResolver - - private val uuid = UUID.randomUUID() - implicit private val uuidF: UUIDF = UUIDF.fixed(uuid) - private val saRealm: Label = Label.unsafe("service-accounts") - private val usersRealm: Label = Label.unsafe("users") - implicit private val sa: ServiceAccount = ServiceAccount(User("nexus-sa", saRealm)) - implicit private val bob: Subject = User("bob", usersRealm) - - private val am = ApiMappings("nxv" -> nxv.base, "Person" -> schema.Person) - private val projBase = nxv.base - private val project = - ProjectGen.project("org", "project", uuid = uuid, orgUuid = uuid, base = projBase, mappings = am) - - private val defaults = Defaults("resolverName", "resolverDescription") - - private lazy val resolvers: Resolvers = { - implicit val api: JsonLdApi = JsonLdJavaApi.strict - val resolution = RemoteContextResolution.fixed( - contexts.resolvers -> jsonContentOf("/contexts/resolvers.json").topContextValueOrEmpty - ) - val rcr = new ResolverContextResolution(resolution, (_, _, _) => IO.raiseError(ResourceResolutionReport())) - ResolversImpl( - FetchContextDummy(List(project), ProjectContextRejection), - rcr, - ResolversConfig(eventLogConfig, pagination, defaults), - xas - ) - } - "A ResolverScopeInitialization" should { - lazy val init = new ResolverScopeInitialization(resolvers, sa, defaults) - - "create a default resolver on newly created project" in { - resolvers.fetch(defaultInProjectResolverId, project.ref).rejectedWith[ResolverNotFound] - init.onProjectCreation(project, bob).accepted - val resource = resolvers.fetch(defaultInProjectResolverId, project.ref).accepted - resource.value.value shouldEqual - InProjectValue(Some(defaults.name), Some(defaults.description), Priority.unsafe(1)) - resource.rev shouldEqual 1L - 
resource.createdBy shouldEqual sa.caller.subject - } - - "not create a new resolver if one already exists" in { - resolvers.fetch(defaultInProjectResolverId, project.ref).accepted.rev shouldEqual 1L - init.onProjectCreation(project, bob).accepted - val resource = resolvers.fetch(defaultInProjectResolverId, project.ref).accepted - resource.rev shouldEqual 1L - } - } - -} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSuite.scala new file mode 100644 index 0000000000..1a4904fc6c --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverScopeInitializationSuite.scala @@ -0,0 +1,53 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.resolvers + +import cats.effect.IO +import cats.effect.concurrent.Ref +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv +import ch.epfl.bluebrain.nexus.delta.sdk.Defaults +import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.ScopeInitializationFailed +import ch.epfl.bluebrain.nexus.delta.sdk.generators.ProjectGen +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.ResourceAlreadyExists +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.InProjectValue +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Priority, ResolverValue} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Subject, User} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite + +class ResolverScopeInitializationSuite extends CatsEffectSuite { + + private val defaults = Defaults("resolverName", "resolverDescription") + + private val project = ProjectGen.project("org", "project") + + private val usersRealm: Label = Label.unsafe("users") + private val bob: Subject = User("bob", usersRealm) + + test("Succeeds") { + for { + ref <- Ref.of[IO, Option[ResolverValue]](None) + scopeInit = new ResolverScopeInitialization( + (_, resolver) => ref.set(Some(resolver)), + defaults + ) + _ <- scopeInit.onProjectCreation(project, bob) + expected = InProjectValue(Some(defaults.name), Some(defaults.description), Priority.unsafe(1)) + _ <- ref.get.assertEquals(Some(expected)) + } yield () + } + + test("Recovers if the resolver already exists") { + val scopeInit = new ResolverScopeInitialization( + (project, _) => IO.raiseError(ResourceAlreadyExists(nxv.defaultResolver, project)), + defaults + ) + scopeInit.onProjectCreation(project, bob).assert + } + + test("Raises a failure otherwise") { + val scopeInit = new ResolverScopeInitialization( + (_, _) => IO.raiseError(new IllegalStateException("Something got wrong !")), + defaults + ) + scopeInit.onProjectCreation(project, bob).intercept[ScopeInitializationFailed] + } +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverStateMachineFixture.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverStateMachineFixture.scala new file mode 100644 index 0000000000..31ccb78703 --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolverStateMachineFixture.scala @@ -0,0 +1,67 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.resolvers + +import cats.data.NonEmptyList +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller +import ch.epfl.bluebrain.nexus.delta.sdk.model.Tags 
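Worth noting from ResolverScopeInitializationSuite above: instead of wiring real Resolvers backed by Doobie, the initialization is exercised against a plain function, and a cats-effect Ref records what it was asked to create. A reduced sketch of that capture pattern, where initialize, Payload and createResolver are illustrative stand-ins:

import cats.effect.IO
import cats.effect.concurrent.Ref

object RefCaptureSketch {

  final case class Payload(name: String)

  // The code under test only sees a function; the suite decides what it does.
  def initialize(createResolver: Payload => IO[Unit]): IO[Unit] =
    createResolver(Payload("default"))

  // The Ref captures the payload so the test can assert on it afterwards.
  val capturedPayload: IO[Option[Payload]] =
    for {
      ref  <- Ref.of[IO, Option[Payload]](None)
      _    <- initialize(payload => ref.set(Some(payload)))
      seen <- ref.get
    } yield seen
}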
+import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.ProvidedIdentities +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Priority, ResolverState} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.{CrossProjectValue, InProjectValue} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, User} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag +import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} +import io.circe.Json + +import java.time.Instant +trait ResolverStateMachineFixture { + + val epoch = Instant.EPOCH + val instant = Instant.ofEpochMilli(1000L) + val realm = Label.unsafe("myrealm") + val bob = Caller(User("Bob", realm), Set(User("Bob", realm), Group("mygroup", realm), Authenticated(realm))) + val alice = Caller(User("Alice", realm), Set(User("Alice", realm), Group("mygroup2", realm))) + + val project = ProjectRef.unsafe("org", "proj") + val priority = Priority.unsafe(42) + + val ipId = nxv + "in-project" + val cpId = nxv + "cross-project" + + val inProjectCurrent = ResolverState( + ipId, + project, + InProjectValue(priority), + Json.obj(), + Tags.empty, + 2, + deprecated = false, + epoch, + bob.subject, + instant, + Anonymous + ) + + val crossProjectCurrent = ResolverState( + cpId, + project, + CrossProjectValue( + priority, + Set.empty, + NonEmptyList.of( + ProjectRef.unsafe("org2", "proj") + ), + ProvidedIdentities(bob.identities) + ), + Json.obj(), + Tags(UserTag.unsafe("tag1") -> 5), + 2, + deprecated = false, + epoch, + alice.subject, + instant, + bob.subject + ) + + val bothStates = List(inProjectCurrent, crossProjectCurrent) + +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversEvaluateSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversEvaluateSuite.scala new file mode 100644 index 0000000000..e81733183b --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversEvaluateSuite.scala @@ -0,0 +1,317 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.resolvers + +import cats.data.NonEmptyList +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.Resolvers.{evaluate, ValidatePriority} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.{ProvidedIdentities, UseCurrentCaller} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.Priority +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverCommand.{CreateResolver, DeprecateResolver, TagResolver, UpdateResolver} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverEvent.{ResolverCreated, ResolverDeprecated, ResolverTagAdded, ResolverUpdated} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{IncorrectRev, _} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverType.{CrossProject, InProject} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.{CrossProjectValue, InProjectValue} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsEffectSuite, IOFixedClock} +import io.circe.Json + +class ResolversEvaluateSuite extends CatsEffectSuite with IOFixedClock with ResolverStateMachineFixture { + + private val validatePriority: ValidatePriority = (_, _, _) => IO.unit + + private def eval = 
evaluate(validatePriority)(_, _) + + private val createInProject = CreateResolver( + ipId, + project, + InProjectValue(priority), + Json.obj("inProject" -> Json.fromString("created")), + bob + ) + + test("Creation fails if the in-project resolver already exists") { + eval(Some(inProjectCurrent), createInProject).intercept( + ResourceAlreadyExists(createInProject.id, createInProject.project) + ) + } + + test("Creation fails if the priority already exists") { + val validatePriority: ValidatePriority = + (ref, _, priority) => IO.raiseError(PriorityAlreadyExists(ref, nxv + "same-prio", priority)) + evaluate(validatePriority)(None, createInProject).intercept[PriorityAlreadyExists] + } + + test("Creation succeeds for an in-project resolver") { + val expected = ResolverCreated( + ipId, + project, + createInProject.value, + createInProject.source, + 1, + epoch, + bob.subject + ) + eval(None, createInProject).assertEquals(expected) + } + + private val crossProjectValue = CrossProjectValue( + priority, + Set(nxv + "resource"), + NonEmptyList.of( + ProjectRef.unsafe("org2", "proj"), + ProjectRef.unsafe("org2", "proj2") + ), + ProvidedIdentities(bob.identities) + ) + + private val createCrossProject = CreateResolver( + cpId, + project, + crossProjectValue, + Json.obj("crossProject" -> Json.fromString("created")), + bob + ) + + test("Creation fails if the cross-project resolver already exists") { + eval(Some(crossProjectCurrent), createCrossProject).intercept( + ResourceAlreadyExists(createCrossProject.id, createCrossProject.project) + ) + } + + test("Creation fails if no identities are provided for a cross-project resolver") { + val invalidValue = crossProjectValue.copy(identityResolution = ProvidedIdentities(Set.empty)) + val invalidCommand = createCrossProject.copy(value = invalidValue) + eval(None, invalidCommand).intercept(NoIdentities) + } + + test("Creation fails if some provided identities don't belong to the caller for a cross-project resolver") { + val invalidValue = + crossProjectValue.copy(identityResolution = ProvidedIdentities(Set(bob.subject, alice.subject))) + val invalidCommand = createCrossProject.copy(value = invalidValue) + eval(None, invalidCommand).intercept(InvalidIdentities(Set(alice.subject))) + } + + test("Creation succeeds for a cross-project resolver with provided identities") { + val expected = ResolverCreated( + cpId, + project, + createCrossProject.value, + createCrossProject.source, + 1, + epoch, + bob.subject + ) + eval(None, createCrossProject).assertEquals(expected) + } + + test("Creation succeeds for a cross-project resolver with current caller") { + val useCaller = crossProjectValue.copy(identityResolution = UseCurrentCaller) + val command = createCrossProject.copy(value = useCaller) + val expected = ResolverCreated( + cpId, + project, + command.value, + command.source, + 1, + epoch, + bob.subject + ) + eval(None, command).assertEquals(expected) + } + + private val updateInProject = UpdateResolver( + ipId, + project, + InProjectValue(Priority.unsafe(99)), + Json.obj("inProject" -> Json.fromString("updated")), + 2, + alice + ) + + test("Update fails if the in-project resolver does not exist") { + eval(None, updateInProject).intercept(ResolverNotFound(updateInProject.id, updateInProject.project)) + } + + test("Update fails if the provided revision for the in-project resolver is incorrect") { + val invalidCommand = updateInProject.copy(rev = 4) + eval(Some(inProjectCurrent), invalidCommand).intercept(IncorrectRev(invalidCommand.rev, inProjectCurrent.rev)) + } + + test("Update fails if the 
in-project resolver is deprecated") { + val deprecated = inProjectCurrent.copy(deprecated = true) + eval(Some(deprecated), updateInProject).intercept(ResolverIsDeprecated(deprecated.id)) + } + + test("Update fails if we try to change from in-project to cross-project type") { + val expectedError = DifferentResolverType(updateCrossProject.id, CrossProject, InProject) + eval(Some(inProjectCurrent), updateCrossProject).intercept(expectedError) + } + + test("Update fails if the priority already exists") { + val validatePriority: ValidatePriority = + (ref, _, priority) => IO.raiseError(PriorityAlreadyExists(ref, nxv + "same-priority", priority)) + evaluate(validatePriority)(Some(inProjectCurrent), updateInProject).intercept[PriorityAlreadyExists] + } + + test("Update succeeds for an in-project resolver") { + val expected = ResolverUpdated( + ipId, + project, + updateInProject.value, + updateInProject.source, + 3, + epoch, + alice.subject + ) + eval(Some(inProjectCurrent), updateInProject).assertEquals(expected) + } + + private val updateCrossProject = UpdateResolver( + cpId, + project, + CrossProjectValue( + Priority.unsafe(99), + Set(nxv + "resource"), + NonEmptyList.of( + ProjectRef.unsafe("org2", "proj"), + ProjectRef.unsafe("org2", "proj2") + ), + ProvidedIdentities(alice.identities) + ), + Json.obj("crossProject" -> Json.fromString("updated")), + 2, + alice + ) + + test("Update fails if the cross-project resolver does not exist") { + eval(None, updateCrossProject).intercept(ResolverNotFound(updateCrossProject.id, updateCrossProject.project)) + } + + test("Update fails if the provided revision for the cross-project resolver is incorrect") { + val invalidCommand = updateCrossProject.copy(rev = 1) + eval(Some(crossProjectCurrent), invalidCommand).intercept(IncorrectRev(invalidCommand.rev, crossProjectCurrent.rev)) + } + + test("Update fails if the cross-project resolver is deprecated") { + val deprecated = crossProjectCurrent.copy(deprecated = true) + eval(Some(deprecated), updateCrossProject).intercept(ResolverIsDeprecated(deprecated.id)) + } + + test("Update fails if no identities are provided for a cross-project resolver") { + val invalidValue = crossProjectValue.copy(identityResolution = ProvidedIdentities(Set.empty)) + val invalidCommand = updateCrossProject.copy(value = invalidValue) + eval(Some(crossProjectCurrent), invalidCommand).intercept(NoIdentities) + } + + test("Update fails if some provided identities don't belong to the caller for a cross-project resolver") { + val invalidValue = crossProjectValue.copy(identityResolution = ProvidedIdentities(Set(bob.subject, alice.subject))) + val invalidCommand = updateCrossProject.copy(value = invalidValue) + eval(Some(crossProjectCurrent), invalidCommand).intercept(InvalidIdentities(Set(bob.subject))) + } + + test("Update fails if we try to change from cross-project to in-project type") { + val expectedError = DifferentResolverType(updateInProject.id, InProject, CrossProject) + eval(Some(crossProjectCurrent), updateInProject).intercept(expectedError) + } + + test("Update succeeds for a cross-project resolver with provided identities") { + val expected = ResolverUpdated( + cpId, + project, + updateCrossProject.value, + updateCrossProject.source, + 3, + epoch, + alice.subject + ) + eval(Some(crossProjectCurrent), updateCrossProject).assertEquals(expected) + } + + test("Update succeeds for a cross-project resolver with current caller") { + val userCallerResolution = crossProjectValue.copy(identityResolution = UseCurrentCaller) + val command = 
updateCrossProject.copy(value = userCallerResolution) + val expected = ResolverUpdated( + cpId, + project, + command.value, + command.source, + 3, + epoch, + alice.subject + ) + eval(Some(crossProjectCurrent), command).assertEquals(expected) + } + + private val tagCommand = TagResolver(ipId, project, 1, UserTag.unsafe("tag1"), 2, bob.subject) + + test("Tag fails if the resolver does not exist") { + val expectedError = ResolverNotFound(tagCommand.id, tagCommand.project) + eval(None, tagCommand).intercept(expectedError) + } + + bothStates.foreach { state => + test(s"Tag fails for an ${state.value.tpe} resolver if the provided revision is incorrect") { + val incorrectRev = tagCommand.copy(rev = 5) + val expectedError = IncorrectRev(incorrectRev.rev, state.rev) + eval(Some(state), incorrectRev).intercept(expectedError) + } + } + + bothStates.foreach { state => + test(s"Tag succeeds for the ${state.value.tpe} resolver") { + val expected = ResolverTagAdded( + tagCommand.id, + project, + state.value.tpe, + targetRev = tagCommand.targetRev, + tag = tagCommand.tag, + 3, + epoch, + bob.subject + ) + eval(Some(state), tagCommand).assertEquals(expected) + } + + test(s"Tag succeeds if the ${state.value.tpe} resolver is deprecated") { + val deprecated = state.copy(deprecated = true) + val expected = ResolverTagAdded( + tagCommand.id, + project, + state.value.tpe, + targetRev = tagCommand.targetRev, + tag = tagCommand.tag, + 3, + epoch, + bob.subject + ) + eval(Some(deprecated), tagCommand).assertEquals(expected) + } + } + + private val deprecateCommand = DeprecateResolver(ipId, project, 2, bob.subject) + + test("Deprecate fails if resolver does not exist") { + val expectedError = ResolverNotFound(deprecateCommand.id, deprecateCommand.project) + eval(None, deprecateCommand).intercept(expectedError) + } + + bothStates.foreach { state => + test(s"Deprecate fails for an ${state.value.tpe} resolver if the provided revision is incorrect") { + val incorrectRev = deprecateCommand.copy(rev = 5) + val expectedError = IncorrectRev(incorrectRev.rev, state.rev) + eval(Some(state), incorrectRev).intercept(expectedError) + } + + test(s"Deprecate fails for an ${state.value.tpe} resolver if it is already deprecated") { + val deprecated = state.copy(deprecated = true) + val expectedError = ResolverIsDeprecated(deprecated.id) + eval(Some(deprecated), deprecateCommand).intercept(expectedError) + } + + test(s"Deprecate succeeds for an ${state.value.tpe} resolver") { + val expected = ResolverDeprecated(deprecateCommand.id, project, state.value.tpe, 3, epoch, bob.subject) + eval(Some(state), deprecateCommand).assertEquals(expected) + } + } +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImplSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImplSpec.scala index 0a9c97caf0..412126edc0 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImplSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversImplSpec.scala @@ -2,6 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resolvers import cats.data.NonEmptyList import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schema} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.{JsonLdApi, JsonLdJavaApi} @@ -11,7 +12,6 @@ import 
ch.epfl.bluebrain.nexus.delta.sdk.generators.ResolverGen.{resolverResourc import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.model._ -import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchParams.ResolverSearchParams import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.projects.{FetchContextDummy, Projects} @@ -27,8 +27,9 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Authenticated, Gro import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture -import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, IOFixedClock, IOValues} -import monix.bio.{IO, UIO} +import ch.epfl.bluebrain.nexus.testkit.CirceLiteral +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsIOValues, IOFixedClock} +import monix.bio.UIO import org.scalatest.matchers.should.Matchers import org.scalatest.{CancelAfterFailure, Inspectors, OptionValues} @@ -36,8 +37,8 @@ import java.util.UUID class ResolversImplSpec extends DoobieScalaTestFixture + with CatsIOValues with Matchers - with IOValues with IOFixedClock with CancelAfterFailure with CirceLiteral @@ -63,10 +64,7 @@ class ResolversImplSpec contexts.resolversMetadata -> jsonContentOf("/contexts/resolvers-metadata.json").topContextValueOrEmpty ) - private val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( - res, - (_, _, _) => IO.raiseError(ResourceResolutionReport()) - ) + private val resolverContextResolution: ResolverContextResolution = ResolverContextResolution(res) private val org = Label.unsafe("org") private val apiMappings = ApiMappings("nxv" -> nxv.base, "Person" -> schema.Person) @@ -91,7 +89,7 @@ class ResolversImplSpec private lazy val resolvers: Resolvers = ResolversImpl( fetchContext, resolverContextResolution, - ResolversConfig(eventLogConfig, pagination, defaults), + ResolversConfig(eventLogConfig, defaults), xas ) @@ -210,9 +208,7 @@ class ResolversImplSpec ) { case (id, value) => val payloadId = nxv + "resolver-fail" val payload = sourceFrom(payloadId, value) - resolvers - .create(id, projectRef, payload) - .rejected shouldEqual UnexpectedResolverId(id, payloadId) + resolvers.create(id, projectRef, payload).rejected(UnexpectedResolverId(id, payloadId)) } } @@ -224,14 +220,14 @@ class ResolversImplSpec ) ) { case (id, value) => val payload = sourceWithoutId(value) - resolvers.create(id, projectRef, payload).rejected shouldEqual InvalidResolverId(id) + resolvers.create(id, projectRef, payload).rejected(InvalidResolverId(id)) } } "fail if priority already exists" in { resolvers .create(nxv + "in-project-other", projectRef, inProjectValue) - .rejected shouldEqual PriorityAlreadyExists(projectRef, nxv + "in-project", inProjectValue.priority) + .rejected(PriorityAlreadyExists(projectRef, nxv + "in-project", inProjectValue.priority)) } "fail if it already exists" in { @@ -245,15 +241,10 @@ class ResolversImplSpec val payload = sourceWithoutId(value) resolvers .create(id.toString, projectRef, payload) - .rejected shouldEqual ResourceAlreadyExists(id, projectRef) + .rejected(ResourceAlreadyExists(id, projectRef)) val payloadWithId = sourceFrom(id, value) - resolvers - .create(projectRef, payloadWithId) - .rejected 
shouldEqual ResourceAlreadyExists( - id, - projectRef - ) + resolvers.create(projectRef, payloadWithId).rejected(ResourceAlreadyExists(id, projectRef)) } } @@ -302,7 +293,7 @@ class ResolversImplSpec val payload = sourceWithoutId(invalidValue) resolvers .create(nxv + "cross-project-no-id", projectRef, payload) - .rejected shouldEqual NoIdentities + .rejected(NoIdentities) } "fail if some provided identities don't belong to the caller for a cross-project resolver" in { @@ -315,7 +306,7 @@ class ResolversImplSpec val payload = sourceWithoutId(invalidValue) resolvers .create(nxv + "cross-project-miss-id", projectRef, payload) - .rejected shouldEqual InvalidIdentities(Set(alice.subject)) + .rejected(InvalidIdentities(Set(alice.subject))) } "fail if mandatory values in source are missing" in { @@ -376,7 +367,7 @@ class ResolversImplSpec val payload = sourceWithoutId(value) resolvers .update(id, projectRef, 1, payload) - .rejected shouldEqual ResolverNotFound(id, projectRef) + .rejected(ResolverNotFound(id, projectRef)) } } @@ -390,7 +381,7 @@ class ResolversImplSpec val payload = sourceWithoutId(value) resolvers .update(id, projectRef, 5, payload) - .rejected shouldEqual IncorrectRev(5, 2) + .rejected(IncorrectRev(5, 2)) } } @@ -405,7 +396,7 @@ class ResolversImplSpec val payload = sourceFrom(payloadId, value) resolvers .update(id, projectRef, 2, payload) - .rejected shouldEqual UnexpectedResolverId(id = id, payloadId = payloadId) + .rejected(UnexpectedResolverId(id = id, payloadId = payloadId)) } } @@ -442,7 +433,7 @@ class ResolversImplSpec val payload = sourceWithoutId(invalidValue) resolvers .update(nxv + "cross-project", projectRef, 2, payload) - .rejected shouldEqual NoIdentities + .rejected(NoIdentities) } "fail if some provided identities don't belong to the caller for a cross-project resolver" in { @@ -454,7 +445,7 @@ class ResolversImplSpec val payload = sourceWithoutId(invalidValue) resolvers .update(nxv + "cross-project", projectRef, 2, payload) - .rejected shouldEqual InvalidIdentities(Set(alice.subject)) + .rejected(InvalidIdentities(Set(alice.subject))) } } @@ -488,7 +479,7 @@ class ResolversImplSpec nxv + "cross-project-xxx" ) ) { id => - resolvers.tag(id, projectRef, tag, 1, 2).rejected shouldEqual ResolverNotFound(id, projectRef) + resolvers.tag(id, projectRef, tag, 1, 2).rejected(ResolverNotFound(id, projectRef)) } } @@ -499,7 +490,7 @@ class ResolversImplSpec nxv + "cross-project" ) ) { id => - resolvers.tag(id, projectRef, tag, 1, 21).rejected shouldEqual IncorrectRev(21, 3) + resolvers.tag(id, projectRef, tag, 1, 21).rejected(IncorrectRev(21, 3)) } } @@ -510,7 +501,7 @@ class ResolversImplSpec nxv + "cross-project" ) ) { id => - resolvers.tag(id, projectRef, tag, 20, 3).rejected shouldEqual RevisionNotFound(20, 3) + resolvers.tag(id, projectRef, tag, 20, 3).rejected(RevisionNotFound(20, 3)) } } @@ -565,7 +556,7 @@ class ResolversImplSpec nxv + "cross-project-xxx" ) ) { id => - resolvers.deprecate(id, projectRef, 3).rejected shouldEqual ResolverNotFound(id, projectRef) + resolvers.deprecate(id, projectRef, 3).rejected(ResolverNotFound(id, projectRef)) } } @@ -576,7 +567,7 @@ class ResolversImplSpec nxv + "cross-project" ) ) { id => - resolvers.deprecate(id, projectRef, 3).rejected shouldEqual IncorrectRev(3, 4) + resolvers.deprecate(id, projectRef, 3).rejected(IncorrectRev(3, 4)) } } @@ -609,7 +600,7 @@ class ResolversImplSpec nxv + "cross-project" ) ) { id => - resolvers.deprecate(id, projectRef, 4).rejected shouldEqual ResolverIsDeprecated(id) + resolvers.deprecate(id, 
projectRef, 4).rejected(ResolverIsDeprecated(id)) } } @@ -622,7 +613,7 @@ class ResolversImplSpec ) { case (id, value) => resolvers .update(id, projectRef, 4, sourceWithoutId(value)) - .rejected shouldEqual ResolverIsDeprecated(id) + .rejected(ResolverIsDeprecated(id)) } } @@ -728,14 +719,12 @@ class ResolversImplSpec } "fail if revision does not exist" in { - resolvers.fetch(IdSegmentRef(nxv + "in-project", 30), projectRef).rejected shouldEqual - RevisionNotFound(30, 5) + resolvers.fetch(IdSegmentRef(nxv + "in-project", 30), projectRef).rejected(RevisionNotFound(30, 5)) } "fail if tag does not exist" in { val unknownTag = UserTag.unsafe("xxx") - resolvers.fetch(IdSegmentRef(nxv + "in-project", unknownTag), projectRef).rejected shouldEqual - TagNotFound(unknownTag) + resolvers.fetch(IdSegmentRef(nxv + "in-project", unknownTag), projectRef).rejected(TagNotFound(unknownTag)) } } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversNextSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversNextSuite.scala new file mode 100644 index 0000000000..480d0a2770 --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversNextSuite.scala @@ -0,0 +1,190 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.resolvers + +import cats.data.NonEmptyList +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv +import ch.epfl.bluebrain.nexus.delta.sdk.model.Tags +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.Resolvers.next +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.ProvidedIdentities +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverEvent.{ResolverCreated, ResolverDeprecated, ResolverTagAdded, ResolverUpdated} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.{CrossProjectValue, InProjectValue} +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Priority, ResolverState, ResolverType} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag +import ch.epfl.bluebrain.nexus.testkit.NexusSuite +import ch.epfl.bluebrain.nexus.testkit.bio.OptionAssertions +import io.circe.Json + +class ResolversNextSuite extends NexusSuite with ResolverStateMachineFixture with OptionAssertions { + + private val inProjectCreated = ResolverCreated( + ipId, + project, + InProjectValue(Priority.unsafe(22)), + Json.obj("inProject" -> Json.fromString("created")), + 1, + epoch, + bob.subject + ) + + private val crossProjectCreated = ResolverCreated( + cpId, + project, + CrossProjectValue( + Priority.unsafe(55), + Set(nxv + "resource"), + NonEmptyList.of( + ProjectRef.unsafe("org2", "proj"), + ProjectRef.unsafe("org2", "proj2") + ), + ProvidedIdentities(bob.identities) + ), + Json.obj("crossProject" -> Json.fromString("created")), + 1, + epoch, + bob.subject + ) + + test("A create event gives a new in-project state from None") { + val expected = ResolverState( + ipId, + project, + inProjectCreated.value, + inProjectCreated.source, + Tags.empty, + 1, + deprecated = false, + epoch, + bob.subject, + epoch, + bob.subject + ) + next(None, inProjectCreated).assertSome(expected) + } + + test("A create event gives a new cross-project resolver state from None") { + val expected = ResolverState( + cpId, + project, + crossProjectCreated.value, + crossProjectCreated.source, + Tags.empty, + 1, + deprecated = false, + epoch, + bob.subject, + epoch, + bob.subject + ) + next(None, 
crossProjectCreated).assertSome(expected) + } + + List( + inProjectCurrent -> inProjectCreated, + crossProjectCurrent -> crossProjectCreated + ).foreach { case (state, event) => + test(s"A create event returns None for an existing ${state.value.tpe} resolver") { + next(Some(state), event).assertNone() + } + } + + val inProjectUpdated = ResolverUpdated( + ipId, + project, + InProjectValue(Priority.unsafe(40)), + Json.obj("inProject" -> Json.fromString("updated")), + 3, + instant, + bob.subject + ) + + val crossProjectUpdated = ResolverUpdated( + cpId, + project, + CrossProjectValue( + Priority.unsafe(999), + Set(nxv + "r", nxv + "r2"), + NonEmptyList.of( + ProjectRef.unsafe("org2", "proj"), + ProjectRef.unsafe("org3", "proj2") + ), + ProvidedIdentities(alice.identities) + ), + Json.obj("crossProject" -> Json.fromString("updated")), + 3, + epoch, + bob.subject + ) + + test("An update event gives a new revision of an existing in-project resolver") { + val expected = inProjectCurrent.copy( + value = inProjectUpdated.value, + source = inProjectUpdated.source, + rev = inProjectUpdated.rev, + updatedAt = inProjectUpdated.instant, + updatedBy = inProjectUpdated.subject + ) + next(Some(inProjectCurrent), inProjectUpdated).assertSome(expected) + } + + test("An update event gives a new revision of an existing cross-project resolver") { + val expected = crossProjectCurrent.copy( + value = crossProjectUpdated.value, + source = crossProjectUpdated.source, + rev = crossProjectUpdated.rev, + updatedAt = crossProjectUpdated.instant, + updatedBy = crossProjectUpdated.subject + ) + next(Some(crossProjectCurrent), crossProjectUpdated).assertSome(expected) + } + + List(inProjectUpdated, crossProjectUpdated).foreach { event => + test(s"Return None when attempting to update a non-existing ${event.value.tpe} resolver") { + next(None, event).assertNone() + } + } + + List(inProjectCurrent -> crossProjectUpdated, crossProjectCurrent -> inProjectUpdated).foreach { + case (state, event) => + test(s"Return None when attempting to update an existing ${event.value.tpe} resolver with the other type") { + next(Some(state), event).assertNone() + } + } + + private val tagEvent = + ResolverTagAdded(ipId, project, ResolverType.InProject, 1, UserTag.unsafe("tag2"), 3, instant, alice.subject) + + bothStates.foreach { state => + test(s"Update the tag list for a ${state.value.tpe} resolver") { + val expected = state.copy( + tags = state.tags + (tagEvent.tag -> tagEvent.targetRev), + rev = tagEvent.rev, + updatedAt = tagEvent.instant, + updatedBy = tagEvent.subject + ) + next(Some(state), tagEvent).assertSome(expected) + } + } + + test(s"Return None when attempting to tag a non-existing resolver") { + next(None, tagEvent).assertNone() + } + + private val deprecatedEvent = ResolverDeprecated(ipId, project, ResolverType.InProject, 3, instant, alice.subject) + + bothStates.foreach { state => + test(s"Mark the current state as deprecated for a ${state.value.tpe} resolver") { + val expected = state.copy( + deprecated = true, + rev = deprecatedEvent.rev, + updatedAt = deprecatedEvent.instant, + updatedBy = deprecatedEvent.subject + ) + next(Some(state), deprecatedEvent).assertSome(expected) + } + } + + test(s"Return None when attempting to deprecate a non-existing resolver") { + next(None, deprecatedEvent).assertNone() + } + +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversSpec.scala deleted file mode 
100644 index 53a9556b72..0000000000 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resolvers/ResolversSpec.scala +++ /dev/null @@ -1,572 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.resolvers - -import cats.data.NonEmptyList -import cats.implicits._ -import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller -import ch.epfl.bluebrain.nexus.delta.sdk.model.Tags -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.Resolvers.{evaluate, next, ValidatePriority} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.IdentityResolution.{ProvidedIdentities, UseCurrentCaller} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverCommand.{CreateResolver, DeprecateResolver, TagResolver, UpdateResolver} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverEvent.{ResolverCreated, ResolverDeprecated, ResolverTagAdded, ResolverUpdated} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverRejection.{DifferentResolverType, IncorrectRev, InvalidIdentities, NoIdentities, PriorityAlreadyExists, ResolverIsDeprecated, ResolverNotFound, ResourceAlreadyExists, RevisionNotFound} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverType.{CrossProject, InProject} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResolverValue.{CrossProjectValue, InProjectValue} -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.{Priority, ResolverRejection, ResolverState, ResolverType} -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, User} -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag -import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import ch.epfl.bluebrain.nexus.testkit.{IOFixedClock, IOValues} -import io.circe.Json -import monix.bio.{IO, UIO} -import monix.execution.Scheduler -import org.scalatest.{Inspectors, OptionValues} -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpec - -import java.time.Instant - -class ResolversSpec - extends AnyWordSpec - with Matchers - with OptionValues - with IOValues - with IOFixedClock - with Inspectors { - - private val epoch = Instant.EPOCH - private val instant = Instant.ofEpochMilli(1000L) - private val realm = Label.unsafe("myrealm") - private val bob = Caller(User("Bob", realm), Set(User("Bob", realm), Group("mygroup", realm), Authenticated(realm))) - private val alice = Caller(User("Alice", realm), Set(User("Alice", realm), Group("mygroup2", realm))) - - private val project = ProjectRef.unsafe("org", "proj") - private val priority = Priority.unsafe(42) - - private val ipId = nxv + "in-project" - private val cpId = nxv + "cross-project" - - private val inProjectCurrent = ResolverState( - ipId, - project, - InProjectValue(priority), - Json.obj(), - Tags.empty, - 2, - deprecated = false, - epoch, - bob.subject, - instant, - Anonymous - ) - - private val crossProjectCurrent = ResolverState( - cpId, - project, - CrossProjectValue( - priority, - Set.empty, - NonEmptyList.of( - ProjectRef.unsafe("org2", "proj") - ), - ProvidedIdentities(bob.identities) - ), - Json.obj(), - Tags(UserTag.unsafe("tag1") -> 5), - 2, - deprecated = false, - epoch, - alice.subject, - instant, - bob.subject - ) - - val validatePriority: ValidatePriority = (_, _, _) => UIO.unit - - private def eval = evaluate(validatePriority)(_, _) - - "The Resolvers evaluation" when { - implicit val sc: Scheduler = Scheduler.global - - val createInProject = 
CreateResolver( - ipId, - project, - InProjectValue(priority), - Json.obj("inProject" -> Json.fromString("created")), - bob - ) - - val crossProjectValue = CrossProjectValue( - priority, - Set(nxv + "resource"), - NonEmptyList.of( - ProjectRef.unsafe("org2", "proj"), - ProjectRef.unsafe("org2", "proj2") - ), - ProvidedIdentities(bob.identities) - ) - - val createCrossProject = CreateResolver( - cpId, - project, - crossProjectValue, - Json.obj("crossProject" -> Json.fromString("created")), - bob - ) - - val updateInProject = UpdateResolver( - ipId, - project, - InProjectValue(Priority.unsafe(99)), - Json.obj("inProject" -> Json.fromString("updated")), - 2, - alice - ) - - val updateCrossProject = UpdateResolver( - cpId, - project, - CrossProjectValue( - Priority.unsafe(99), - Set(nxv + "resource"), - NonEmptyList.of( - ProjectRef.unsafe("org2", "proj"), - ProjectRef.unsafe("org2", "proj2") - ), - ProvidedIdentities(alice.identities) - ), - Json.obj("crossProject" -> Json.fromString("updated")), - 2, - alice - ) - - "evaluating a create command" should { - - "fail if the resolver already exists" in { - forAll( - (List(inProjectCurrent, crossProjectCurrent), List(createInProject, createCrossProject)).tupled - ) { case (state, command) => - eval(Some(state), command).rejected shouldEqual ResourceAlreadyExists(command.id, command.project) - } - } - - "create a in-project creation event" in { - eval(None, createInProject).accepted shouldEqual ResolverCreated( - ipId, - project, - createInProject.value, - createInProject.source, - 1, - epoch, - bob.subject - ) - } - - "fail if no identities are provided for a cross-project resolver" in { - val invalidValue = crossProjectValue.copy(identityResolution = ProvidedIdentities(Set.empty)) - eval(None, createCrossProject.copy(value = invalidValue)).rejected shouldEqual NoIdentities - } - - "fail if the priority already exists" in { - val validatePriority: ValidatePriority = - (ref, _, priority) => IO.raiseError(PriorityAlreadyExists(ref, nxv + "same-prio", priority)) - evaluate(validatePriority)(None, createInProject).rejectedWith[PriorityAlreadyExists] - } - - "fail if some provided identities don't belong to the caller for a cross-project resolver" in { - val invalidValue = - crossProjectValue.copy(identityResolution = ProvidedIdentities(Set(bob.subject, alice.subject))) - eval(None, createCrossProject.copy(value = invalidValue)).rejected shouldEqual InvalidIdentities( - Set(alice.subject) - ) - } - - "create a cross-project creation event" in { - val userCallerResolution = crossProjectValue.copy(identityResolution = UseCurrentCaller) - - forAll(List(createCrossProject, createCrossProject.copy(value = userCallerResolution))) { command => - eval(None, command).accepted shouldEqual ResolverCreated( - cpId, - project, - command.value, - command.source, - 1, - epoch, - bob.subject - ) - } - } - } - - "eval an update command" should { - - "fail if the resolver doesn't exist" in { - forAll(List(updateInProject, updateCrossProject)) { command => - eval(None, command).rejected shouldEqual ResolverNotFound(command.id, command.project) - } - } - - "fail if the provided revision is incorrect" in { - forAll( - ( - List(inProjectCurrent, crossProjectCurrent), - List(updateInProject.copy(rev = 4), updateCrossProject.copy(rev = 1)) - ).tupled - ) { case (state, command) => - eval(Some(state), command).rejected shouldEqual IncorrectRev(command.rev, state.rev) - } - } - - "fail if the current state is deprecated" in { - forAll( - ( - 
List(inProjectCurrent.copy(deprecated = true), crossProjectCurrent.copy(deprecated = true)), - List(updateInProject, updateCrossProject) - ).tupled - ) { case (state, command) => - eval(Some(state), command).rejected shouldEqual ResolverIsDeprecated(state.id) - } - } - - "fail if we try to change from in-project to cross-project type" in { - eval(Some(inProjectCurrent), updateCrossProject).rejected shouldEqual DifferentResolverType( - updateCrossProject.id, - CrossProject, - InProject - ) - } - - "create an in-project resolver update event" in { - eval(Some(inProjectCurrent), updateInProject).accepted shouldEqual ResolverUpdated( - ipId, - project, - updateInProject.value, - updateInProject.source, - 3, - epoch, - alice.subject - ) - } - - "fail if the priority already exists" in { - val validatePriority: ValidatePriority = - (ref, _, priority) => IO.raiseError(PriorityAlreadyExists(ref, nxv + "same-prio", priority)) - evaluate(validatePriority)(Some(inProjectCurrent), updateInProject).rejectedWith[PriorityAlreadyExists] - } - - "fail if no identities are provided for a cross-project resolver" in { - val invalidValue = crossProjectValue.copy(identityResolution = ProvidedIdentities(Set.empty)) - eval(Some(crossProjectCurrent), updateCrossProject.copy(value = invalidValue)).rejected shouldEqual NoIdentities - } - - "fail if some provided identities don't belong to the caller for a cross-project resolver" in { - val invalidValue = - crossProjectValue.copy(identityResolution = ProvidedIdentities(Set(bob.subject, alice.subject))) - eval( - Some(crossProjectCurrent), - updateCrossProject.copy(value = invalidValue) - ).rejected shouldEqual InvalidIdentities(Set(bob.subject)) - } - - "fail if we try to change from cross-project to in-project type" in { - eval(Some(crossProjectCurrent), updateInProject).rejected shouldEqual DifferentResolverType( - updateInProject.id, - InProject, - CrossProject - ) - } - - "create an cross-project update event" in { - val userCallerResolution = crossProjectValue.copy(identityResolution = UseCurrentCaller) - - forAll(List(updateCrossProject, updateCrossProject.copy(value = userCallerResolution))) { command => - eval(Some(crossProjectCurrent), command).accepted shouldEqual ResolverUpdated( - cpId, - project, - command.value, - command.source, - 3, - epoch, - alice.subject - ) - } - } - - } - - "eval a tag command" should { - - val tagResolver = TagResolver(ipId, project, 1, UserTag.unsafe("tag1"), 2, bob.subject) - - "fail if the resolver doesn't exist" in { - eval(None, tagResolver).rejected shouldEqual ResolverNotFound(tagResolver.id, tagResolver.project) - } - - "fail if the provided revision is incorrect" in { - val incorrectRev = tagResolver.copy(rev = 5) - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - eval(Some(state), incorrectRev) - .rejectedWith[ResolverRejection] shouldEqual IncorrectRev(incorrectRev.rev, state.rev) - } - } - - "succeed if the resolver is deprecated" in { - forAll(List(inProjectCurrent.copy(deprecated = true), crossProjectCurrent.copy(deprecated = true))) { state => - eval(Some(state), tagResolver).accepted shouldEqual ResolverTagAdded( - tagResolver.id, - project, - state.value.tpe, - targetRev = tagResolver.targetRev, - tag = tagResolver.tag, - 3, - epoch, - bob.subject - ) - } - } - - "fail if the version to tag is invalid" in { - val incorrectTagRev = tagResolver.copy(targetRev = 5) - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - eval(Some(state), incorrectTagRev).rejected shouldEqual 
RevisionNotFound(incorrectTagRev.targetRev, state.rev) - } - } - - "create a tag event" in { - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - eval(Some(state), tagResolver).accepted shouldEqual ResolverTagAdded( - tagResolver.id, - project, - state.value.tpe, - targetRev = tagResolver.targetRev, - tag = tagResolver.tag, - 3, - epoch, - bob.subject - ) - } - } - } - - "eval a deprecate command" should { - - val deprecateResolver = DeprecateResolver(ipId, project, 2, bob.subject) - - "fail if the resolver doesn't exist" in { - eval(None, deprecateResolver).rejected shouldEqual ResolverNotFound( - deprecateResolver.id, - deprecateResolver.project - ) - } - - "fail if the provided revision is incorrect" in { - val incorrectRev = deprecateResolver.copy(rev = 5) - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - eval(Some(state), incorrectRev).rejected shouldEqual IncorrectRev(incorrectRev.rev, state.rev) - } - } - - "fail if the resolver is already deprecated" in { - forAll(List(inProjectCurrent.copy(deprecated = true), crossProjectCurrent.copy(deprecated = true))) { state => - eval(Some(state), deprecateResolver).rejected shouldEqual ResolverIsDeprecated(state.id) - } - } - - "deprecate the resolver" in { - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - eval(Some(state), deprecateResolver).accepted shouldEqual ResolverDeprecated( - deprecateResolver.id, - project, - state.value.tpe, - 3, - epoch, - bob.subject - ) - } - } - } - } - - "The Resolvers next state" when { - - "applying a create event" should { - - val inProjectCreated = ResolverCreated( - ipId, - project, - InProjectValue(Priority.unsafe(22)), - Json.obj("inProject" -> Json.fromString("created")), - 1, - epoch, - bob.subject - ) - - val crossProjectCreated = ResolverCreated( - cpId, - project, - CrossProjectValue( - Priority.unsafe(55), - Set(nxv + "resource"), - NonEmptyList.of( - ProjectRef.unsafe("org2", "proj"), - ProjectRef.unsafe("org2", "proj2") - ), - ProvidedIdentities(bob.identities) - ), - Json.obj("crossProject" -> Json.fromString("created")), - 1, - epoch, - bob.subject - ) - - "give a new in-project resolver state from None" in { - next(None, inProjectCreated).value shouldEqual ResolverState( - ipId, - project, - inProjectCreated.value, - inProjectCreated.source, - Tags.empty, - 1, - deprecated = false, - epoch, - bob.subject, - epoch, - bob.subject - ) - } - - "give a new cross-project resolver state from None" in { - next(None, crossProjectCreated).value shouldEqual ResolverState( - cpId, - project, - crossProjectCreated.value, - crossProjectCreated.source, - Tags.empty, - 1, - deprecated = false, - epoch, - bob.subject, - epoch, - bob.subject - ) - } - - "return None for an existing entity" in { - forAll( - ( - List(inProjectCurrent, crossProjectCurrent), - List(inProjectCreated, crossProjectCreated) - ).tupled - ) { case (state, event) => - next(Some(state), event) shouldEqual None - } - } - } - - "applying an update event" should { - val inProjectUpdated = ResolverUpdated( - ipId, - project, - InProjectValue(Priority.unsafe(40)), - Json.obj("inProject" -> Json.fromString("updated")), - 3, - instant, - bob.subject - ) - - val crossCrojectUpdated = ResolverUpdated( - cpId, - project, - CrossProjectValue( - Priority.unsafe(999), - Set(nxv + "r", nxv + "r2"), - NonEmptyList.of( - ProjectRef.unsafe("org2", "proj"), - ProjectRef.unsafe("org3", "proj2") - ), - ProvidedIdentities(alice.identities) - ), - Json.obj("crossProject" -> Json.fromString("updated")), - 3, - 
epoch, - bob.subject - ) - - "give a new revision of the in-project resolver state from an existing in-project state" in { - next(Some(inProjectCurrent), inProjectUpdated).value shouldEqual inProjectCurrent.copy( - value = inProjectUpdated.value, - source = inProjectUpdated.source, - rev = inProjectUpdated.rev, - updatedAt = inProjectUpdated.instant, - updatedBy = inProjectUpdated.subject - ) - } - - "give a new revision of the cross-project resolver state from an existing cross-project state" in { - next(Some(crossProjectCurrent), crossCrojectUpdated).value shouldEqual crossProjectCurrent.copy( - value = crossCrojectUpdated.value, - source = crossCrojectUpdated.source, - rev = crossCrojectUpdated.rev, - updatedAt = crossCrojectUpdated.instant, - updatedBy = crossCrojectUpdated.subject - ) - } - - "return None for other combinations" in { - forAll( - List( - None -> inProjectUpdated, - None -> crossCrojectUpdated, - Some(inProjectCurrent) -> crossCrojectUpdated, - Some(crossProjectCurrent) -> inProjectUpdated - ) - ) { case (state, event) => - next(state, event) shouldEqual None - } - } - } - - "applying a tag event" should { - val resolverTagAdded = - ResolverTagAdded(ipId, project, ResolverType.InProject, 1, UserTag.unsafe("tag2"), 3, instant, alice.subject) - - "update the tag list" in { - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - next(Some(state), resolverTagAdded).value shouldEqual state.copy( - tags = state.tags + (resolverTagAdded.tag -> resolverTagAdded.targetRev), - rev = resolverTagAdded.rev, - updatedAt = resolverTagAdded.instant, - updatedBy = resolverTagAdded.subject - ) - } - } - - "doesn't result in any change on an initial state" in { - next(None, resolverTagAdded) shouldEqual None - } - - } - - "applying a deprecate event" should { - - val deprecated = ResolverDeprecated(ipId, project, ResolverType.InProject, 3, instant, alice.subject) - - "mark the current state as deprecated for a resolver" in { - forAll(List(inProjectCurrent, crossProjectCurrent)) { state => - next(Some(state), deprecated).value shouldEqual state.copy( - deprecated = true, - rev = deprecated.rev, - updatedAt = deprecated.instant, - updatedBy = deprecated.subject - ) - } - } - - "doesn't result in any change on an initial state" in { - next(None, deprecated) shouldEqual None - } - - } - } - -} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImplSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImplSpec.scala index 8b288d58cb..0851d880d6 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImplSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesImplSpec.scala @@ -1,6 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.sdk.resources +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv, schema, schemas} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.{JsonLdApi, JsonLdJavaApi} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords @@ -25,7 +27,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef, ResourceRef} import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, IOFixedClock, 
IOValues} -import monix.bio.UIO import org.scalatest.matchers.should.Matchers import org.scalatest.{CancelAfterFailure, Inspectors, OptionValues} @@ -70,9 +71,9 @@ class ResourcesImplSpec private val schema2 = SchemaGen.schema(schema.Person, project.ref, schemaSource.removeKeys(keywords.id)) private val fetchSchema: (ResourceRef, ProjectRef) => FetchResource[Schema] = { - case (ref, _) if ref.iri == schema2.id => UIO.some(SchemaGen.resourceFor(schema2, deprecated = true)) - case (ref, _) if ref.iri == schema1.id => UIO.some(SchemaGen.resourceFor(schema1)) - case _ => UIO.none + case (ref, _) if ref.iri == schema2.id => IO.pure(Some(SchemaGen.resourceFor(schema2, deprecated = true))) + case (ref, _) if ref.iri == schema1.id => IO.pure(Some(SchemaGen.resourceFor(schema1))) + case _ => IO.none } private val resourceResolution: ResourceResolution[Schema] = ResourceResolutionGen.singleInProject(projectRef, fetchSchema) @@ -89,7 +90,7 @@ class ResourcesImplSpec private val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( res, - (r, p, _) => resources.fetch(r, p).bimap(_ => ResourceResolutionReport(), identity) + (r, p, _) => resources.fetch(r, p).bimap(_ => ResourceResolutionReport(), identity).attempt ) private lazy val resources: Resources = ResourcesImpl( diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrialSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrialSuite.scala index bfd737d63f..67c3fad44d 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrialSuite.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/resources/ResourcesTrialSuite.scala @@ -10,11 +10,11 @@ import ch.epfl.bluebrain.nexus.delta.sdk.generators.{ProjectGen, ResourceGen, Sc import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings -import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.resources.ValidationResult._ -import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.{Resource, ResourceGenerationResult, ResourceRejection} import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection.{InvalidResource, ProjectContextRejection, ReservedResourceId} +import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.{Resource, ResourceGenerationResult, ResourceRejection} +import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Revision import ch.epfl.bluebrain.nexus.testkit.bio.BioSuite import ch.epfl.bluebrain.nexus.testkit.{IOFixedClock, TestHelpers} @@ -41,10 +41,7 @@ class ResourcesTrialSuite extends BioSuite with ValidateResourceFixture with Tes private val fetchResourceFail = IO.terminate(new IllegalStateException("Should not be attempt to fetch a resource")) - private val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( - res, - (_, _, _) => fetchResourceFail - ) + private val resolverContextResolution: ResolverContextResolution = ResolverContextResolution(res) private val am = ApiMappings(Map("nxv" -> nxv.base, "Person" -> schema.Person)) private val allApiMappings = am + Resources.mappings diff --git 
a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSpec.scala deleted file mode 100644 index 2ad510a4a1..0000000000 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSpec.scala +++ /dev/null @@ -1,129 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.schemas - -import cats.data.NonEmptyList -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.ExpandedJsonLd -import ch.epfl.bluebrain.nexus.delta.sdk.Resolve -import ch.epfl.bluebrain.nexus.delta.sdk.generators.{ProjectGen, ResourceGen} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller -import ch.epfl.bluebrain.nexus.delta.sdk.model.Tags -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport -import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource -import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.Schema -import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.SchemaRejection.InvalidSchemaResolution -import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ -import ch.epfl.bluebrain.nexus.delta.sdk.utils.Fixtures -import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User -import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ResourceRef} -import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, IOValues, TestHelpers} -import monix.bio.IO -import org.scalatest.OptionValues -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike - -import scala.collection.immutable.VectorMap - -class SchemaImportsSpec - extends AnyWordSpecLike - with Matchers - with TestHelpers - with IOValues - with OptionValues - with CirceLiteral - with Fixtures { - - private val alice = User("alice", Label.unsafe("wonderland")) - implicit val aliceCaller: Caller = Caller(alice, Set(alice)) - - "A SchemaImports" should { - val neuroshapes = "https://neuroshapes.org" - val parcellationlabel = iri"$neuroshapes/dash/parcellationlabel" - val json = jsonContentOf("schemas/parcellationlabel.json") - val projectRef = ProjectGen.project("org", "proj").ref - - val entitySource = jsonContentOf("schemas/entity.json") - - val entityExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/entity-expanded.json")).accepted - val identifierExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/identifier-expanded.json")).accepted - val licenseExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/license-expanded.json")).accepted - val propertyValueExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/property-value-expanded.json")).accepted - - val expandedSchemaMap = Map( - iri"$neuroshapes/commons/entity" -> - Schema( - iri"$neuroshapes/commons/entity", - projectRef, - Tags.empty, - entitySource, - entityExpandedSchema.toCompacted(entitySource.topContextValueOrEmpty).accepted, - NonEmptyList.of( - entityExpandedSchema, - identifierExpandedSchema, - licenseExpandedSchema, - propertyValueExpandedSchema - ) - ) - ) - - // format: off - val resourceMap = VectorMap( - iri"$neuroshapes/commons/vocabulary" -> jsonContentOf("schemas/vocabulary.json"), - iri"$neuroshapes/wrong/vocabulary" -> jsonContentOf("schemas/vocabulary.json").replace("owl:Ontology", "owl:Other") - ).map { case (iri, json) => iri -> ResourceGen.resource(iri, projectRef, json) } - // format: on - - val errorReport = ResourceResolutionReport() - - val fetchSchema: Resolve[Schema] = { - case (ref, `projectRef`, _) => IO.fromOption(expandedSchemaMap.get(ref.iri), errorReport) - 
case (_, _, _) => IO.raiseError(errorReport) - } - val fetchResource: Resolve[Resource] = { - case (ref, `projectRef`, _) => IO.fromOption(resourceMap.get(ref.iri), errorReport) - case (_, _, _) => IO.raiseError(errorReport) - } - - val imports = new SchemaImports(fetchSchema, fetchResource) - - "resolve all the imports" in { - val expanded = ExpandedJsonLd(json).accepted - val result = imports.resolve(parcellationlabel, projectRef, expanded).accepted - - result.toList.toSet shouldEqual - (resourceMap.take(1).values.map(_.expanded).toSet ++ Set( - entityExpandedSchema, - identifierExpandedSchema, - licenseExpandedSchema, - propertyValueExpandedSchema - ) + expanded) - } - - "fail to resolve an import if it is not found" in { - val other = iri"$neuroshapes/other" - val other2 = iri"$neuroshapes/other2" - val parcellation = json deepMerge json"""{"imports": ["$neuroshapes/commons/entity", "$other", "$other2"]}""" - val expanded = ExpandedJsonLd(parcellation).accepted - - imports.resolve(parcellationlabel, projectRef, expanded).rejected shouldEqual - InvalidSchemaResolution( - parcellationlabel, - schemaImports = Map(ResourceRef(other) -> errorReport, ResourceRef(other2) -> errorReport), - resourceImports = Map(ResourceRef(other) -> errorReport, ResourceRef(other2) -> errorReport), - nonOntologyResources = Set.empty - ) - } - - "fail to resolve an import if it is a resource without owl:Ontology type" in { - val wrong = iri"$neuroshapes/wrong/vocabulary" - val parcellation = json deepMerge json"""{"imports": ["$neuroshapes/commons/entity", "$wrong"]}""" - val expanded = ExpandedJsonLd(parcellation).accepted - - imports.resolve(parcellationlabel, projectRef, expanded).rejected shouldEqual - InvalidSchemaResolution( - parcellationlabel, - schemaImports = Map(ResourceRef(wrong) -> errorReport), - resourceImports = Map.empty, - nonOntologyResources = Set(ResourceRef(wrong)) - ) - } - } -} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSuite.scala new file mode 100644 index 0000000000..674f1d36d5 --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemaImportsSuite.scala @@ -0,0 +1,128 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.schemas + +import cats.data.NonEmptyList +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.ExpandedJsonLd +import ch.epfl.bluebrain.nexus.delta.sdk.Resolve +import ch.epfl.bluebrain.nexus.delta.sdk.generators.ResourceGen +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller +import ch.epfl.bluebrain.nexus.delta.sdk.model.Tags +import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport +import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.Resource +import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.Schema +import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.SchemaRejection.InvalidSchemaResolution +import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ +import ch.epfl.bluebrain.nexus.delta.sdk.utils.Fixtures +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User +import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite +import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} +import io.circe.Json + +import scala.collection.immutable.VectorMap + +class 
SchemaImportsSuite extends CatsEffectSuite with TestHelpers with CirceLiteral with Fixtures { + + private val alice = User("alice", Label.unsafe("wonderland")) + implicit val aliceCaller: Caller = Caller(alice, Set(alice)) + + private val neuroshapes = "https://neuroshapes.org" + private val parcellationlabel = iri"$neuroshapes/dash/parcellationlabel" + private val json = jsonContentOf("schemas/parcellationlabel.json") + val projectRef = ProjectRef.unsafe("org", "proj") + + val entitySource = jsonContentOf("schemas/entity.json") + + val entityExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/entity-expanded.json")).accepted + val identifierExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/identifier-expanded.json")).accepted + val licenseExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/license-expanded.json")).accepted + val propertyValueExpandedSchema = ExpandedJsonLd(jsonContentOf("schemas/property-value-expanded.json")).accepted + + val expandedSchemaMap = Map( + iri"$neuroshapes/commons/entity" -> + Schema( + iri"$neuroshapes/commons/entity", + projectRef, + Tags.empty, + entitySource, + entityExpandedSchema.toCompacted(entitySource.topContextValueOrEmpty).accepted, + NonEmptyList.of( + entityExpandedSchema, + identifierExpandedSchema, + licenseExpandedSchema, + propertyValueExpandedSchema + ) + ) + ) + + // format: off + val resourceMap = VectorMap( + iri"$neuroshapes/commons/vocabulary" -> jsonContentOf("schemas/vocabulary.json"), + iri"$neuroshapes/wrong/vocabulary" -> jsonContentOf("schemas/vocabulary.json").replace("owl:Ontology", "owl:Other") + ).map { case (iri, json) => iri -> ResourceGen.resource(iri, projectRef, json) } + // format: on + + val errorReport = ResourceResolutionReport() + + val fetchSchema: Resolve[Schema] = { + case (ref, `projectRef`, _) => IO.pure(expandedSchemaMap.get(ref.iri).toRight(errorReport)) + case (_, _, _) => IO.pure(Left(errorReport)) + } + val fetchResource: Resolve[Resource] = { + case (ref, `projectRef`, _) => IO.pure(resourceMap.get(ref.iri).toRight(errorReport)) + case (_, _, _) => IO.pure(Left(errorReport)) + } + + private def toExpanded(json: Json) = toCatsIO(ExpandedJsonLd(json)) + + val imports = new SchemaImports(fetchSchema, fetchResource) + + test("Resolve all the imports") { + for { + expanded <- toExpanded(json) + result <- imports.resolve(parcellationlabel, projectRef, expanded) + } yield { + val expected = (resourceMap.take(1).values.map(_.expanded).toSet ++ Set( + entityExpandedSchema, + identifierExpandedSchema, + licenseExpandedSchema, + propertyValueExpandedSchema + ) + expanded) + assertEquals(result.toList.toSet, expected) + } + } + + test("Fail to resolve an import if it is not found") { + val other = iri"$neuroshapes/other" + val other2 = iri"$neuroshapes/other2" + val parcellation = json deepMerge json"""{"imports": ["$neuroshapes/commons/entity", "$other", "$other2"]}""" + + val expectedError = InvalidSchemaResolution( + parcellationlabel, + schemaImports = Map(ResourceRef(other) -> errorReport, ResourceRef(other2) -> errorReport), + resourceImports = Map(ResourceRef(other) -> errorReport, ResourceRef(other2) -> errorReport), + nonOntologyResources = Set.empty + ) + + toExpanded(parcellation).flatMap { expanded => + imports.resolve(parcellationlabel, projectRef, expanded).intercept(expectedError) + } + } + + test("Fail to resolve an import if it is a resource without owl:Ontology type") { + val wrong = iri"$neuroshapes/wrong/vocabulary" + val parcellation = json deepMerge json"""{"imports": 
["$neuroshapes/commons/entity", "$wrong"]}""" + + val expectedError = InvalidSchemaResolution( + parcellationlabel, + schemaImports = Map(ResourceRef(wrong) -> errorReport), + resourceImports = Map.empty, + nonOntologyResources = Set(ResourceRef(wrong)) + ) + + toExpanded(parcellation).flatMap { expanded => + imports.resolve(parcellationlabel, projectRef, expanded).intercept(expectedError) + } + } +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImplSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImplSuite.scala index 4eb0090979..8f48436b6d 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImplSuite.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/schemas/SchemasImplSuite.scala @@ -13,7 +13,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.{IdSegmentRef, Tags} import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.ResolverContextResolution -import ch.epfl.bluebrain.nexus.delta.sdk.resolvers.model.ResourceResolutionReport import ch.epfl.bluebrain.nexus.delta.sdk.schemas.model.SchemaRejection._ import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject @@ -22,7 +21,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie import ch.epfl.bluebrain.nexus.testkit.ce.{CatsEffectSuite, IOFixedClock} import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} -import monix.bio.{IO => BIO} import munit.AnyFixture import java.util.UUID @@ -53,15 +51,9 @@ class SchemasImplSuite contexts.schemasMetadata -> ContextValue.fromFile("contexts/schemas-metadata.json") ) - private val schemaImports: SchemaImports = new SchemaImports( - (_, _, _) => BIO.raiseError(ResourceResolutionReport()), - (_, _, _) => BIO.raiseError(ResourceResolutionReport()) - ) + private val schemaImports: SchemaImports = SchemaImports.alwaysFail - private val resolverContextResolution: ResolverContextResolution = new ResolverContextResolution( - res, - (_, _, _) => BIO.raiseError(ResourceResolutionReport()) - ) + private val resolverContextResolution: ResolverContextResolution = ResolverContextResolution(res) private val org = Label.unsafe("myorg") private val am = ApiMappings("nxv" -> nxv.base) diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/rejection/Rejection.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/rejection/Rejection.scala index d5ab0a6f94..5ec5d0b48b 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/rejection/Rejection.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/rejection/Rejection.scala @@ -7,6 +7,8 @@ abstract class Rejection extends Exception with Product with Serializable { self override def fillInStackTrace(): Throwable = self + override def getMessage: String = reason + def reason: String } diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala index 5c61d78599..5a618b9925 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala +++ 
b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala @@ -3,11 +3,11 @@ package ch.epfl.bluebrain.nexus.testkit.ce import cats.effect.{ContextShift, IO, Timer} import ch.epfl.bluebrain.nexus.testkit.NexusSuite import ch.epfl.bluebrain.nexus.testkit.bio.{CollectionAssertions, EitherAssertions, StreamAssertions} +import monix.bio.{IO => BIO} +import monix.execution.Scheduler import scala.concurrent.ExecutionContext import scala.concurrent.duration.{DurationInt, FiniteDuration} -import monix.bio.{IO => BIO} -import monix.execution.Scheduler /** * Adapted from: @@ -21,6 +21,8 @@ abstract class CatsEffectSuite with EitherAssertions { protected val ioTimeout: FiniteDuration = 45.seconds + implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) + override def munitValueTransforms: List[ValueTransform] = super.munitValueTransforms ++ List(munitIOTransform, munitBIOTransform) diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala index ea21b5afe9..b28bbbdbfb 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala @@ -2,7 +2,8 @@ package ch.epfl.bluebrain.nexus.testkit.ce import cats.effect.IO import org.scalactic.source -import org.scalatest.Assertions.fail +import org.scalatest.Assertion +import org.scalatest.Assertions._ import scala.reflect.ClassTag @@ -11,6 +12,9 @@ trait CatsIOValues { implicit final class CatsIOValuesOps[A](private val io: IO[A]) { def accepted: A = io.unsafeRunSync() + def rejected[E](expected: E)(implicit pos: source.Position, EE: ClassTag[E]): Assertion = + assertResult(expected)(rejectedWith[E]) + def rejectedWith[E](implicit pos: source.Position, EE: ClassTag[E]): E = { io.attempt.unsafeRunSync() match { case Left(EE(value)) => value From d24ff4022abc85d54a497ccd8ea35ded5b987c84 Mon Sep 17 00:00:00 2001 From: Oliver <20188437+olivergrabinski@users.noreply.github.com> Date: Fri, 6 Oct 2023 14:29:52 +0200 Subject: [PATCH 05/13] Add `Vary` header for resource/files fetch operations (#4337) --- .../nexus/delta/routes/ResourcesRoutes.scala | 4 ++-- .../delta/routes/ResourcesRoutesSpec.scala | 10 +++++++- .../storage/files/routes/FilesRoutes.scala | 2 +- .../files/routes/FilesRoutesSpec.scala | 11 ++++++++- .../nexus/delta/sdk/ce/DeltaDirectives.scala | 17 ++++++++++++- .../sdk/directives/DeltaDirectives.scala | 17 ++++++++++++- .../nexus/tests/kg/ResourcesSpec.scala | 24 +++++++++++++++---- 7 files changed, 74 insertions(+), 11 deletions(-) diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala index d535808d8e..f4c17f9785 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala @@ -144,7 +144,7 @@ final class ResourcesRoutes( } }, // Fetch a resource - (get & idSegmentRef(id)) { id => + (get & idSegmentRef(id) & varyAcceptHeaders) { id => emitOrFusionRedirect( ref, id, @@ -173,7 +173,7 @@ final class ResourcesRoutes( } }, // Fetch a resource original source - (pathPrefix("source") & get & pathEndOrSingleSlash & idSegmentRef(id)) { id => + (pathPrefix("source") & get & pathEndOrSingleSlash & idSegmentRef(id) 
& varyAcceptHeaders) { id => authorizeFor(ref, Read).apply { parameter("annotate".as[Boolean].withDefault(false)) { annotate => implicit val source: Printer = sourcePrinter diff --git a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala index da88aaa92c..b1ef9d1f86 100644 --- a/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala +++ b/delta/app/src/test/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutesSpec.scala @@ -1,7 +1,7 @@ package ch.epfl.bluebrain.nexus.delta.routes import akka.http.scaladsl.model.MediaTypes.`text/html` -import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken} +import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken, RawHeader} import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Route import cats.effect.IO @@ -119,6 +119,8 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { private val payloadUpdatedWithMetdata = payloadWithMetadata deepMerge json"""{"name": "Alice", "address": null}""" + private val varyHeader = RawHeader("Vary", "Accept,Accept-Encoding") + "A resource route" should { "fail to create a resource without resources/write permission" in { @@ -359,6 +361,7 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { status shouldEqual StatusCodes.OK val meta = resourceMetadata(projectRef, myId, schemas.resources, "Custom", deprecated = true, rev = 10) response.asJson shouldEqual payloadUpdated.dropNullValues.deepMerge(meta).deepMerge(resourceCtx) + response.headers should contain(varyHeader) } } @@ -376,6 +379,7 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { Get(endpoint) ~> routes ~> check { status shouldEqual StatusCodes.OK response.asJson shouldEqual payload.deepMerge(meta).deepMerge(resourceCtx) + response.headers should contain(varyHeader) } } } @@ -440,6 +444,7 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { "id" -> "https://bluebrain.github.io/nexus/vocabulary/wrongid", "proj" -> "myorg/myproject" ) + response.headers should not contain varyHeader } } } @@ -448,6 +453,7 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { Get("/v1/resources/myorg/myproject/_/myid/source?annotate=true") ~> routes ~> check { status shouldEqual StatusCodes.OK response.asJson shouldEqual payloadUpdatedWithMetdata + response.headers should contain(varyHeader) } } @@ -468,6 +474,7 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { Get(endpoint) ~> routes ~> check { status shouldEqual StatusCodes.OK response.asJson shouldEqual payload.deepMerge(meta) + response.headers should contain(varyHeader) } } } @@ -485,6 +492,7 @@ class ResourcesRoutesSpec extends BaseRouteSpec with IOFromMap { Get(endpoint) ~> routes ~> check { status shouldEqual StatusCodes.OK response.asJson shouldEqual payload + response.headers should contain(varyHeader) } } } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala index e154de1d20..d41f74e3cb 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala +++ 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala @@ -184,7 +184,7 @@ final class FilesRoutes( } def fetch(id: IdSegmentRef, ref: ProjectRef)(implicit caller: Caller): Route = - headerValueByType(Accept) { + (headerValueByType(Accept) & varyAcceptHeaders) { case accept if accept.mediaRanges.exists(metadataMediaRanges.contains) => emit(fetchMetadata(id, ref).rejectOn[FileNotFound]) case _ => diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala index ea5e957d8f..2d8cb6a800 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala @@ -4,7 +4,7 @@ import akka.actor.typed import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` import akka.http.scaladsl.model.MediaRanges._ import akka.http.scaladsl.model.MediaTypes.`text/html` -import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken} +import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken, RawHeader} import akka.http.scaladsl.model.{StatusCodes, Uri} import akka.http.scaladsl.server.Route import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig @@ -126,6 +126,8 @@ class FilesRoutesSpec private val diskIdRev = ResourceRef.Revision(dId, 1) private val s3IdRev = ResourceRef.Revision(s3Id, 2) + private val varyHeader = RawHeader("Vary", "Accept,Accept-Encoding") + "File routes" should { "create storages for files" in { @@ -310,6 +312,7 @@ class FilesRoutesSpec Get(s"/v1/files/org/proj/file1$suffix") ~> Accept(`*/*`) ~> routes ~> check { response.status shouldEqual StatusCodes.Forbidden response.asJson shouldEqual jsonContentOf("errors/authorization-failed.json") + response.headers should not contain varyHeader } } } @@ -320,6 +323,7 @@ class FilesRoutesSpec Get(s"/v1/files/org/proj/file1$suffix") ~> Accept(`video/*`) ~> routes ~> check { response.status shouldEqual StatusCodes.NotAcceptable response.asJson shouldEqual jsonContentOf("errors/content-type.json", "expected" -> "text/plain") + response.headers should not contain varyHeader } } } @@ -336,6 +340,7 @@ class FilesRoutesSpec header("Content-Disposition").value.value() shouldEqual s"""attachment; filename="=?UTF-8?B?$filename64?="""" response.asString shouldEqual content + response.headers should contain(varyHeader) } } } @@ -362,6 +367,7 @@ class FilesRoutesSpec header("Content-Disposition").value.value() shouldEqual s"""attachment; filename="=?UTF-8?B?$filename64?="""" response.asString shouldEqual content + response.headers should contain(varyHeader) } } } @@ -375,6 +381,7 @@ class FilesRoutesSpec Get(s"$endpoint$suffix") ~> Accept(`application/ld+json`) ~> routes ~> check { response.status shouldEqual StatusCodes.Forbidden response.asJson shouldEqual jsonContentOf("errors/authorization-failed.json") + response.headers should not contain varyHeader } } } @@ -386,6 +393,7 @@ class FilesRoutesSpec status shouldEqual StatusCodes.OK val attr = attributes("file-idx-1.txt") response.asJson shouldEqual fileMetadata(projectRef, file1, attr, diskIdRev, rev = 4, createdBy = alice) + response.headers should contain(varyHeader) } } @@ -406,6 +414,7 @@ class FilesRoutesSpec status 
shouldEqual StatusCodes.OK response.asJson shouldEqual fileMetadata(projectRef, file1, attr, s3IdRev, createdBy = alice, updatedBy = alice) + response.headers should contain(varyHeader) } } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/DeltaDirectives.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/DeltaDirectives.scala index ad57159de3..743850583b 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/DeltaDirectives.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/DeltaDirectives.scala @@ -3,7 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.ce import akka.http.scaladsl.model.MediaTypes.{`application/json`, `text/html`} import akka.http.scaladsl.model.StatusCodes.{Redirection, SeeOther} import akka.http.scaladsl.model._ -import akka.http.scaladsl.model.headers.{`Last-Event-ID`, Accept} +import akka.http.scaladsl.model.headers.{`Accept-Encoding`, `Last-Event-ID`, Accept, RawHeader} import akka.http.scaladsl.server.ContentNegotiator.Alternative import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ @@ -164,4 +164,19 @@ trait DeltaDirectives extends UriDirectives { } case None => provide(Offset.Start) } + + /** Injects a `Vary: Accept,Accept-Encoding` into the response */ + def varyAcceptHeaders: Directive0 = + vary(Set(Accept.name, `Accept-Encoding`.name)) + + private def vary(headers: Set[String]): Directive0 = + respondWithHeader(RawHeader("Vary", headers.mkString(","))) + + private def respondWithHeader(responseHeader: HttpHeader): Directive0 = + mapSuccessResponse(r => r.withHeaders(r.headers :+ responseHeader)) + + private def mapSuccessResponse(f: HttpResponse => HttpResponse): Directive0 = + mapRouteResultPF { + case RouteResult.Complete(response) if response.status.isSuccess => RouteResult.Complete(f(response)) + } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala index 01db29f332..231fc1a0b6 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala @@ -3,7 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.directives import akka.http.scaladsl.model.MediaTypes.{`application/json`, `text/html`} import akka.http.scaladsl.model.StatusCodes.{Redirection, SeeOther} import akka.http.scaladsl.model._ -import akka.http.scaladsl.model.headers.{`Last-Event-ID`, Accept} +import akka.http.scaladsl.model.headers.{`Accept-Encoding`, `Last-Event-ID`, Accept, RawHeader} import akka.http.scaladsl.server.ContentNegotiator.Alternative import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ @@ -179,4 +179,19 @@ trait DeltaDirectives extends UriDirectives { /** The URI of fusion's main login page */ def fusionLoginUri(implicit config: FusionConfig): UIO[Uri] = UIO.pure { config.base / "login" } + + /** Injects a `Vary: Accept,Accept-Encoding` into the response */ + def varyAcceptHeaders: Directive0 = + vary(Set(Accept.name, `Accept-Encoding`.name)) + + private def vary(headers: Set[String]): Directive0 = + respondWithHeader(RawHeader("Vary", headers.mkString(","))) + + private def respondWithHeader(responseHeader: HttpHeader): Directive0 = + mapSuccessResponse(r => r.withHeaders(r.headers :+ responseHeader)) + + private def mapSuccessResponse(f: HttpResponse => 
HttpResponse): Directive0 = + mapRouteResultPF { + case RouteResult.Complete(response) if response.status.isSuccess => RouteResult.Complete(f(response)) + } } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala index bb8af47eb3..4c014a503a 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala @@ -1,7 +1,7 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.MediaTypes.`text/html` -import akka.http.scaladsl.model.headers.{Accept, Location} +import akka.http.scaladsl.model.headers.{Accept, Location, RawHeader} import akka.http.scaladsl.model.{MediaRange, StatusCodes} import akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers import cats.implicits._ @@ -35,6 +35,8 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { private val IdLens: Optional[Json, String] = root.`@id`.string private val TypeLens: Optional[Json, String] = root.`@type`.string + private val varyHeader = RawHeader("Vary", "Accept,Accept-Encoding") + private val resource1Id = "https://dev.nexus.test.com/simplified-resource/1" private def resource1Response(rev: Int, priority: Int) = SimpleResource.fetchResponse(Rick, id1, resource1Id, rev, priority) @@ -130,18 +132,24 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { } "fail to fetch the resource when the user does not have access" in { - deltaClient.get[Json](s"/resources/$id1/test-schema/test-resource:1", Anonymous) { expectForbidden } + deltaClient.get[Json](s"/resources/$id1/test-schema/test-resource:1", Anonymous) { (_, response) => + expectForbidden + response.headers should not contain varyHeader + } } "fail to fetch the original payload when the user does not have access" in { - deltaClient.get[Json](s"/resources/$id1/test-schema/test-resource:1/source", Anonymous) { + deltaClient.get[Json](s"/resources/$id1/test-schema/test-resource:1/source", Anonymous) { (_, response) => expectForbidden + response.headers should not contain varyHeader } } "fail to fetch the annotated original payload when the user does not have access" in { deltaClient.get[Json](s"/resources/$id1/test-schema/test-resource:1/source?annotate=true", Anonymous) { - expectForbidden + (_, response) => + expectForbidden + response.headers should not contain varyHeader } } @@ -150,6 +158,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { val expected = resource1Response(1, 5) response.status shouldEqual StatusCodes.OK filterMetadataKeys(json) should equalIgnoreArrayOrder(expected) + response.headers should contain(varyHeader) } } @@ -158,6 +167,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { val expected = SimpleResource.sourcePayload(resource1Id, 5) response.status shouldEqual StatusCodes.OK json should equalIgnoreArrayOrder(expected) + response.headers should contain(varyHeader) } } @@ -167,6 +177,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { response.status shouldEqual StatusCodes.OK val expected = resource1AnnotatedSource(1, 5) filterMetadataKeys(json) should equalIgnoreArrayOrder(expected) + response.headers should contain(varyHeader) } } @@ -179,6 +190,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { } _ <- deltaClient.get[Json](s"/resources/$id1/_/42/source?annotate=true", Morty) { 
(json, response) => response.status shouldEqual StatusCodes.OK + response.headers should contain(varyHeader) json should have(`@id`(s"42")) } } yield succeed @@ -198,6 +210,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { _ <- deltaClient.get[Json](s"/resources/$id1/_/${UrlUtils.encode(generatedId)}/source?annotate=true", Morty) { (json, response) => response.status shouldEqual StatusCodes.OK + response.headers should contain(varyHeader) json should have(`@id`(generatedId)) } } yield succeed @@ -207,6 +220,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { deltaClient.get[Json](s"/resources/$id1/test-schema/does-not-exist-resource:1/source?annotate=true", Morty) { (_, response) => response.status shouldEqual StatusCodes.NotFound + response.headers should not contain varyHeader } } @@ -215,6 +229,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { deltaClient.put[Json](s"/resources/$id2/test-schema/test-resource:1", payload, Rick) { (_, response) => response.status shouldEqual StatusCodes.NotFound + response.headers should not contain varyHeader } } @@ -225,6 +240,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { deltaClient.put[Json](s"/resources/$id2/_/test-resource:1", payload, Rick) { (_, response) => response.status shouldEqual StatusCodes.BadRequest + response.headers should not contain varyHeader } } } From 3f06fd275e748a71df1d603e35085299c738c13b Mon Sep 17 00:00:00 2001 From: Simon Date: Mon, 9 Oct 2023 12:34:22 +0200 Subject: [PATCH 06/13] Migrate integration tests to use Cats Effect (#4341) Co-authored-by: Simon Dumas --- .../nexus/testkit/ce/CatsIOValues.scala | 15 +- .../epfl/bluebrain/nexus/tests/BaseSpec.scala | 97 +++++------- .../bluebrain/nexus/tests/BlazegraphDsl.scala | 20 +-- .../nexus/tests/ElasticsearchDsl.scala | 66 ++++---- .../bluebrain/nexus/tests/HttpClient.scala | 144 +++++++----------- .../bluebrain/nexus/tests/KeycloakDsl.scala | 122 ++++++++------- .../nexus/tests/admin/AdminDsl.scala | 62 ++++---- .../nexus/tests/admin/OrgsSpec.scala | 5 +- .../nexus/tests/admin/ProjectsSpec.scala | 5 +- .../bluebrain/nexus/tests/iam/AclDsl.scala | 88 +++++------ .../bluebrain/nexus/tests/iam/AclsSpec.scala | 1 - .../nexus/tests/iam/PermissionDsl.scala | 7 +- .../nexus/tests/iam/PermissionsSpec.scala | 14 +- .../nexus/tests/iam/RealmsSpec.scala | 3 +- .../nexus/tests/kg/AggregationsSpec.scala | 1 + .../tests/kg/AutoProjectDeletionSpec.scala | 3 +- .../kg/CompositeViewsLifeCycleSpec.scala | 17 ++- .../nexus/tests/kg/CompositeViewsSpec.scala | 51 +++---- .../nexus/tests/kg/DiskStorageSpec.scala | 4 +- .../tests/kg/ElasticSearchViewsDsl.scala | 5 +- .../tests/kg/ElasticSearchViewsSpec.scala | 9 +- .../bluebrain/nexus/tests/kg/ErrorsSpec.scala | 1 - .../bluebrain/nexus/tests/kg/EventsSpec.scala | 75 +++++---- .../nexus/tests/kg/IdResolutionSpec.scala | 1 + .../nexus/tests/kg/MultiFetchSpec.scala | 1 + .../nexus/tests/kg/RemoteStorageSpec.scala | 6 +- .../nexus/tests/kg/ResourcesSpec.scala | 3 +- .../nexus/tests/kg/S3StorageSpec.scala | 4 +- .../nexus/tests/kg/SchemasSpec.scala | 1 - .../nexus/tests/kg/SearchConfigSpec.scala | 8 +- .../nexus/tests/kg/SparqlViewsSpec.scala | 1 - .../nexus/tests/kg/StorageSpec.scala | 9 +- .../nexus/tests/kg/SupervisionSpec.scala | 4 +- .../nexus/tests/kg/VersionSpec.scala | 3 +- 34 files changed, 395 insertions(+), 461 deletions(-) diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala 
b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala index b28bbbdbfb..f1c5cdcc63 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala @@ -5,9 +5,17 @@ import org.scalactic.source import org.scalatest.Assertion import org.scalatest.Assertions._ +import scala.concurrent.{ExecutionContext, Future} import scala.reflect.ClassTag -trait CatsIOValues { +trait CatsIOValues extends CatsIOValuesLowPrio { + + implicit def ioToFutureAssertion(io: IO[Assertion]): Future[Assertion] = io.unsafeToFuture() + + implicit def futureListToFutureAssertion(future: Future[List[Assertion]])(implicit + ec: ExecutionContext + ): Future[Assertion] = + future.map(_ => succeed) implicit final class CatsIOValuesOps[A](private val io: IO[A]) { def accepted: A = io.unsafeRunSync() @@ -31,3 +39,8 @@ trait CatsIOValues { } } + +trait CatsIOValuesLowPrio { + implicit def ioListToFutureAssertion(io: IO[List[Assertion]])(implicit ec: ExecutionContext): Future[Assertion] = + io.unsafeToFuture().map(_ => succeed) +} diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala index 324ce31b35..be13ddc138 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala @@ -6,8 +6,12 @@ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers._ import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.util.ByteString -import cats.implicits._ +import cats.effect.concurrent.Ref +import cats.effect.{ContextShift, IO} +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.testkit._ +import ch.epfl.bluebrain.nexus.testkit.ce.CatsIOValues import ch.epfl.bluebrain.nexus.tests.BaseSpec._ import ch.epfl.bluebrain.nexus.tests.HttpClient._ import ch.epfl.bluebrain.nexus.tests.Identity.{allUsers, testClient, testRealm, _} @@ -19,16 +23,14 @@ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.Organizations import ch.epfl.bluebrain.nexus.tests.iam.{AclDsl, PermissionDsl} import ch.epfl.bluebrain.nexus.tests.kg.{ElasticSearchViewsDsl, KgDsl} import com.typesafe.config.ConfigFactory -import com.typesafe.scalalogging.Logger import io.circe.Json -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalactic.source.Position import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AsyncWordSpecLike import org.scalatest.{Assertion, BeforeAndAfterAll, OptionValues} +import scala.concurrent.ExecutionContext import scala.concurrent.duration._ trait BaseSpec @@ -42,12 +44,14 @@ trait BaseSpec with TestHelpers with ScalatestRouteTest with Eventually - with IOValues + with CatsIOValues with OptionValues with ScalaFutures with Matchers { - private val logger = Logger[this.type] + implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) + + private val logger = Logger.cats[this.type] implicit val config: TestsConfig = load[TestsConfig](ConfigFactory.load(), "tests") @@ -67,15 +71,10 @@ trait BaseSpec implicit override def patienceConfig: PatienceConfig = PatienceConfig(config.patience, 300.millis) - def eventually(t: Task[Assertion])(implicit pos: Position): Assertion = - eventually { - t.runSyncUnsafe() - } 
+ def eventually(io: IO[Assertion])(implicit pos: Position): Assertion = + eventually { io.unsafeRunSync() } - def runTask[A](t: Task[A]): Assertion = - t.map { _ => - succeed - }.runSyncUnsafe() + def runIO[A](io: IO[A]): Assertion = io.map { _ => succeed }.unsafeRunSync() override def beforeAll(): Unit = { super.beforeAll() @@ -102,32 +101,28 @@ trait BaseSpec val allTasks = for { isSetupCompleted <- setupCompleted.get - _ <- Task.unless(isSetupCompleted)(setup) + _ <- IO.unlessA(isSetupCompleted)(setup) _ <- setupCompleted.set(true) _ <- aclDsl.cleanAclsAnonymous } yield () - allTasks.runSyncUnsafe() - + allTasks.unsafeRunSync() } override def afterAll(): Unit = - Task.when(config.cleanUp)(elasticsearchDsl.deleteAllIndices().void).runSyncUnsafe() + IO.whenA(config.cleanUp)(elasticsearchDsl.deleteAllIndices().void).unsafeRunSync() protected def toAuthorizationHeader(token: String) = - Authorization( - HttpCredentials.createOAuth2BearerToken(token) - ) - - private[tests] def authenticateUser(user: UserCredentials, client: ClientCredentials): Task[Unit] = { - keycloakDsl.userToken(user, client).map { token => - logger.info(s"Token for user ${user.name} is: $token") - tokensMap.put(user, toAuthorizationHeader(token)) - () - } - } + Authorization(HttpCredentials.createOAuth2BearerToken(token)) - private[tests] def authenticateClient(client: ClientCredentials): Task[Unit] = { + private[tests] def authenticateUser(user: UserCredentials, client: ClientCredentials): IO[Unit] = + for { + token <- keycloakDsl.userToken(user, client) + _ <- logger.info(s"Token for user ${user.name} is: $token") + _ <- IO(tokensMap.put(user, toAuthorizationHeader(token))) + } yield () + + private[tests] def authenticateClient(client: ClientCredentials): IO[Unit] = { keycloakDsl.serviceAccountToken(client).map { token => tokensMap.put(client, toAuthorizationHeader(token)) () @@ -152,35 +147,29 @@ trait BaseSpec identity: Identity, client: ClientCredentials, users: List[UserCredentials] - ): Task[Unit] = { - def createRealmInDelta: Task[Assertion] = + ): IO[Unit] = { + def createRealmInDelta: IO[Assertion] = deltaClient.get[Json](s"/realms/${realm.name}", identity) { (json, response) => - runTask { + runIO { response.status match { case StatusCodes.NotFound => - logger.info(s"Realm ${realm.name} is absent, we create it") val body = jsonContentOf( "/iam/realms/create.json", "realm" -> s"${config.realmSuffix(realm)}" ) for { - _ <- deltaClient.put[Json](s"/realms/${realm.name}", body, identity) { (_, response) => - response.status shouldEqual StatusCodes.Created - } - _ <- deltaClient.get[Json](s"/realms/${realm.name}", Identity.ServiceAccount) { (_, response) => - response.status shouldEqual StatusCodes.OK - } + _ <- logger.info(s"Realm ${realm.name} is absent, we create it") + _ <- deltaClient.put[Json](s"/realms/${realm.name}", body, identity) { expectCreated } + _ <- deltaClient.get[Json](s"/realms/${realm.name}", Identity.ServiceAccount) { expectOk } } yield () case StatusCodes.Forbidden | StatusCodes.OK => - logger.info(s"Realm ${realm.name} has already been created, we got status ${response.status}") - deltaClient.get[Json](s"/realms/${realm.name}", Identity.ServiceAccount) { (_, response) => - response.status shouldEqual StatusCodes.OK - } + for { + _ <- logger.info(s"Realm ${realm.name} has already been created, we got status ${response.status}") + _ <- deltaClient.get[Json](s"/realms/${realm.name}", Identity.ServiceAccount) { expectOk } + } yield () case s => - Task( - fail(s"$s wasn't expected here and we got 
this response: $json") - ) + IO(fail(s"$s wasn't expected here and we got this response: $json")) } } } @@ -189,12 +178,10 @@ trait BaseSpec // Create the realm in Keycloak _ <- keycloakDsl.importRealm(realm, client, users) // Get the tokens and cache them in the map - _ <- users.parTraverse { user => - authenticateUser(user, client) - } + _ <- users.parTraverse { user => authenticateUser(user, client) } _ <- authenticateClient(client) // Creating the realm in delta - _ <- Task { logger.info(s"Creating realm ${realm.name} in the delta instance") } + _ <- logger.info(s"Creating realm ${realm.name} in the delta instance") _ <- createRealmInDelta } yield () } @@ -202,11 +189,11 @@ trait BaseSpec /** * Create projects and the parent organization for the provided user */ - def createProjects(user: Authenticated, org: String, projects: String*): Task[Unit] = + def createProjects(user: Authenticated, org: String, projects: String*): IO[Unit] = for { _ <- aclDsl.addPermission("/", user, Organizations.Create) _ <- adminDsl.createOrganization(org, org, user, ignoreConflict = true) - _ <- projects.traverse { project => + _ <- projects.toList.traverse { project => val projectRef = s"$org/$project" adminDsl.createProject(org, project, kgDsl.projectJson(name = projectRef), user) } @@ -230,7 +217,7 @@ trait BaseSpec response.header[`Content-Encoding`].value.encodings private[tests] def decodeGzip(input: ByteString): String = - Coders.Gzip.decode(input).map(_.utf8String)(global).futureValue + Coders.Gzip.decode(input).map(_.utf8String).futureValue private[tests] def genId(length: Int = 15): String = genString(length = length, Vector.range('a', 'z') ++ Vector.range('0', '9')) @@ -251,6 +238,6 @@ trait BaseSpec object BaseSpec { - val setupCompleted: IORef[Boolean] = IORef.unsafe(false) + val setupCompleted: Ref[IO, Boolean] = Ref.unsafe(false) } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BlazegraphDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BlazegraphDsl.scala index bd8b5235cf..d5786eebf6 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BlazegraphDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BlazegraphDsl.scala @@ -6,14 +6,19 @@ import akka.http.scaladsl.model.HttpMethods.GET import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.model.{HttpRequest, MediaRange, MediaType} import akka.stream.Materializer +import cats.effect.{ContextShift, IO} import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} import io.circe.optics.JsonPath.root -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.matchers.should.Matchers -class BlazegraphDsl(implicit as: ActorSystem, materializer: Materializer) - extends TestHelpers +import scala.concurrent.ExecutionContext + +class BlazegraphDsl(implicit + as: ActorSystem, + materializer: Materializer, + contextShift: ContextShift[IO], + ec: ExecutionContext +) extends TestHelpers with CirceLiteral with CirceUnmarshalling with Matchers { @@ -39,17 +44,14 @@ class BlazegraphDsl(implicit as: ActorSystem, materializer: Materializer) all should not contain allElementsOf(namespaces) } - def allNamespaces: Task[List[String]] = { + def allNamespaces: IO[List[String]] = { blazegraphClient( HttpRequest( method = GET, uri = s"$blazegraphUrl/blazegraph/namespace?describe-each-named-graph=false" ).addHeader(Accept(sparqlJsonRange)) ).flatMap { res => - Task - .deferFuture { - jsonUnmarshaller(res.entity)(global, materializer) - } + 
IO.fromFuture(IO(jsonUnmarshaller(res.entity))) .map { json => root.results.bindings.each.filter(filterNamespaces).`object`.value.string.getAll(json) } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/ElasticsearchDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/ElasticsearchDsl.scala index 3759d04883..90ff6a354e 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/ElasticsearchDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/ElasticsearchDsl.scala @@ -5,49 +5,41 @@ import akka.http.scaladsl.model.HttpMethods.{DELETE, GET, PUT} import akka.http.scaladsl.model.headers.BasicHttpCredentials import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpRequest, StatusCode} import akka.stream.Materializer +import cats.effect.{ContextShift, IO} +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} -import com.typesafe.scalalogging.Logger -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.matchers.should.Matchers -import scala.concurrent.duration._ +import scala.concurrent.ExecutionContext -class ElasticsearchDsl(implicit as: ActorSystem, materializer: Materializer) - extends TestHelpers +class ElasticsearchDsl(implicit + as: ActorSystem, + materializer: Materializer, + contextShift: ContextShift[IO], + ec: ExecutionContext +) extends TestHelpers with CirceLiteral with CirceUnmarshalling with Matchers { - private val logger = Logger[this.type] + private val logger = Logger.cats[this.type] private val elasticUrl = s"http://${sys.props.getOrElse("elasticsearch-url", "localhost:9200")}" private val elasticClient = HttpClient(elasticUrl) private val credentials = BasicHttpCredentials("elastic", "password") - def createTemplate(): Task[StatusCode] = { - logger.info("Creating template for Elasticsearch indices") - + def createTemplate(): IO[StatusCode] = { val json = jsonContentOf("/elasticsearch/template.json") - elasticClient( - HttpRequest( - method = PUT, - uri = s"$elasticUrl/_index_template/test_template", - entity = HttpEntity(ContentTypes.`application/json`, json.noSpaces) - ).addCredentials(credentials) - ).onErrorRestartLoop((10, 10.seconds)) { (err, state, retry) => - val (maxRetries, delay) = state - if (maxRetries > 0) - retry((maxRetries - 1, delay)).delayExecution(delay) - else - Task.raiseError(err) - }.tapError { t => - Task { logger.error(s"Error while importing elasticsearch template", t) } - }.map { res => - logger.info(s"Importing the elasticsearch template returned ${res.status}") - res.status - } + logger.info("Creating template for Elasticsearch indices") >> + elasticClient( + HttpRequest( + method = PUT, + uri = s"$elasticUrl/_index_template/test_template", + entity = HttpEntity(ContentTypes.`application/json`, json.noSpaces) + ).addCredentials(credentials) + ).map(_.status) } def includes(indices: String*) = @@ -60,32 +52,28 @@ class ElasticsearchDsl(implicit as: ActorSystem, materializer: Materializer) all should not contain allElementsOf(indices) } - def allIndices: Task[List[String]] = { + def allIndices: IO[List[String]] = { elasticClient( HttpRequest( method = GET, uri = s"$elasticUrl/_aliases" ).addCredentials(credentials) ).flatMap { res => - Task - .deferFuture { - jsonUnmarshaller(res.entity)(global, materializer) - } + IO.fromFuture(IO(jsonUnmarshaller(res.entity))) .map(_.asObject.fold(List.empty[String])(_.keys.toList)) } } - def deleteAllIndices(): Task[StatusCode] = + def 
deleteAllIndices(): IO[StatusCode] = elasticClient( HttpRequest( method = DELETE, uri = s"$elasticUrl/delta_*" ).addCredentials(credentials) - ).tapError { t => - Task { logger.error(s"Error while deleting elasticsearch indices", t) } - }.map { res => - logger.info(s"Deleting elasticsearch indices returned ${res.status}") - res.status + ).onError { t => + logger.error(t)(s"Error while deleting elasticsearch indices") + }.flatMap { res => + logger.info(s"Deleting elasticsearch indices returned ${res.status}").as(res.status) } } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala index 1b3d009d8e..5b1dbf48ad 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala @@ -5,55 +5,59 @@ import akka.http.scaladsl.model.HttpCharsets._ import akka.http.scaladsl.model.HttpMethods._ import akka.http.scaladsl.model.Multipart.FormData import akka.http.scaladsl.model.Multipart.FormData.BodyPart -import akka.http.scaladsl.model.headers.{`Accept-Encoding`, Accept, Authorization, HttpEncodings} import akka.http.scaladsl.model._ +import akka.http.scaladsl.model.headers.{`Accept-Encoding`, Accept, Authorization, HttpEncodings} import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller import akka.http.scaladsl.{Http, HttpExt} import akka.stream.Materializer import akka.stream.alpakka.sse.scaladsl.EventSource import akka.stream.scaladsl.Sink -import ch.epfl.bluebrain.nexus.tests.HttpClient.{jsonHeaders, logger, rdfApplicationSqlQuery, tokensMap} +import cats.effect.{ContextShift, IO} +import ch.epfl.bluebrain.nexus.tests.HttpClient.{jsonHeaders, rdfApplicationSqlQuery, tokensMap} import ch.epfl.bluebrain.nexus.tests.Identity.Anonymous -import com.typesafe.scalalogging.Logger import io.circe.Json import io.circe.parser._ import fs2._ -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.matchers.should.Matchers import org.scalatest.{AppendedClues, Assertion} import java.nio.file.{Files, Path} import java.util.concurrent.ConcurrentHashMap import scala.collection.immutable.Seq -import scala.concurrent.Future import scala.concurrent.duration._ - -class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSystem, materializer: Materializer) - extends Matchers +import scala.concurrent.{ExecutionContext, Future} + +class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit + as: ActorSystem, + materializer: Materializer, + contextShift: ContextShift[IO], + ec: ExecutionContext +) extends Matchers with AppendedClues { - def apply(req: HttpRequest): Task[HttpResponse] = - Task.deferFuture(httpExt.singleRequest(req)) + private def fromFuture[A](future: => Future[A]) = IO.fromFuture { IO(future) } - def head(url: Uri, identity: Identity)(assertResponse: HttpResponse => Assertion): Task[Assertion] = { + def apply(req: HttpRequest): IO[HttpResponse] = + fromFuture(httpExt.singleRequest(req)) + + def head(url: Uri, identity: Identity)(assertResponse: HttpResponse => Assertion): IO[Assertion] = { val req = HttpRequest(HEAD, s"$baseUrl$url", headers = identityHeader(identity).toList) - Task.deferFuture(httpExt.singleRequest(req)).map(assertResponse) + fromFuture(httpExt.singleRequest(req)).map(assertResponse) } - def run[A](req: HttpRequest)(implicit um: FromEntityUnmarshaller[A]): Task[(A, HttpResponse)] = - Task.deferFuture(httpExt.singleRequest(req)).flatMap { 
res => - Task.deferFuture(um.apply(res.entity)).map(a => (a, res)) + def run[A](req: HttpRequest)(implicit um: FromEntityUnmarshaller[A]): IO[(A, HttpResponse)] = + fromFuture(httpExt.singleRequest(req)).flatMap { res => + fromFuture(um.apply(res.entity)).map(a => (a, res)) } def post[A](url: String, body: Json, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(POST, url, Some(body), identity, extraHeaders)(assertResponse) def put[A](url: String, body: Json, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(PUT, url, Some(body), identity, extraHeaders)(assertResponse) def putAttachmentFromPath[A]( @@ -63,9 +67,7 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst fileName: String, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders - )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = { - def onFail(e: Throwable) = - fail(s"Something went wrong while processing the response for $url with identity $identity", e) + )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { request( PUT, url, @@ -76,7 +78,6 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst FormData(BodyPart.Strict("file", entity, Map("filename" -> fileName))).toEntity() }, assertResponse, - onFail, extraHeaders ) } @@ -88,7 +89,7 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst fileName: String, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders - )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = { + )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { def buildClue(a: A, response: HttpResponse) = s""" |Endpoint: PUT $url @@ -100,8 +101,6 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst |$a |""".stripMargin - def onFail(e: Throwable) = - fail(s"Something went wrong while processing the response for $url with identity $identity", e) request( PUT, url, @@ -112,38 +111,32 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst FormData(BodyPart.Strict("file", entity, Map("filename" -> fileName))).toEntity() }, (a: A, response: HttpResponse) => assertResponse(a, response) withClue buildClue(a, response), - onFail, extraHeaders ) } def patch[A](url: String, body: Json, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(PATCH, url, Some(body), identity, extraHeaders)(assertResponse) def getWithBody[A](url: String, body: Json, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(GET, url, Some(body), identity, extraHeaders)(assertResponse) def 
get[A](url: String, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(GET, url, None, identity, extraHeaders)(assertResponse) - def getJson[A](url: String, identity: Identity)(implicit um: FromEntityUnmarshaller[A]): Task[A] = { - def onFail(e: Throwable) = - throw new IllegalStateException( - s"Something went wrong while processing the response for url: $url with identity $identity", - e - ) - requestJson(GET, url, None, identity, (a: A, _: HttpResponse) => a, onFail, jsonHeaders) + def getJson[A](url: String, identity: Identity)(implicit um: FromEntityUnmarshaller[A]): IO[A] = { + requestJson(GET, url, None, identity, (a: A, _: HttpResponse) => a, jsonHeaders) } def delete[A](url: String, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = requestAssert(DELETE, url, None, identity, extraHeaders)(assertResponse) def requestAssert[A]( @@ -152,7 +145,7 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst body: Option[Json], identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders - )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = { + )(assertResponse: (A, HttpResponse) => Assertion)(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { def buildClue(a: A, response: HttpResponse) = s""" |Endpoint: ${method.value} $url @@ -164,27 +157,19 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst |$a |""".stripMargin - def onFail(e: Throwable) = - fail( - s"Something went wrong while processing the response for url: ${method.value} $url with identity $identity", - e - ) requestJson( method, url, body, identity, (a: A, response: HttpResponse) => assertResponse(a, response) withClue buildClue(a, response), - onFail, extraHeaders ) } def sparqlQuery[A](url: String, query: String, identity: Identity, extraHeaders: Seq[HttpHeader] = Nil)( assertResponse: (A, HttpResponse) => Assertion - )(implicit um: FromEntityUnmarshaller[A]): Task[Assertion] = { - def onFail(e: Throwable): Assertion = - fail(s"Something went wrong while processing the response for url: $url with identity $identity", e) + )(implicit um: FromEntityUnmarshaller[A]): IO[Assertion] = { request( POST, url, @@ -192,7 +177,6 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst identity, (s: String) => HttpEntity(rdfApplicationSqlQuery, s), assertResponse, - onFail, extraHeaders ) } @@ -203,9 +187,8 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst body: Option[Json], identity: Identity, f: (A, HttpResponse) => R, - handleError: Throwable => R, extraHeaders: Seq[HttpHeader] - )(implicit um: FromEntityUnmarshaller[A]): Task[R] = + )(implicit um: FromEntityUnmarshaller[A]): IO[R] = request( method, url, @@ -213,7 +196,6 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst identity, (j: Json) => HttpEntity(ContentTypes.`application/json`, j.noSpaces), f, - handleError, extraHeaders ) @@ -238,9 +220,8 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst identity: Identity, toEntity: B => HttpEntity.Strict, f: (A, 
HttpResponse) => R, - handleError: Throwable => R, extraHeaders: Seq[HttpHeader] - )(implicit um: FromEntityUnmarshaller[A]): Task[R] = + )(implicit um: FromEntityUnmarshaller[A]): IO[R] = apply( HttpRequest( method = method, @@ -249,22 +230,8 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst entity = body.fold(HttpEntity.Empty)(toEntity) ) ).flatMap { res => - Task - .deferFuture { - um(res.entity)(global, materializer) - } - .map { - f(_, res) - } - .onErrorHandleWith { e => - for { - _ <- Task { - logger.error(s"Status ${res.status} for url $baseUrl$url", e) - } - } yield { - handleError(e) - } - } + fromFuture { um(res.entity) } + .map { f(_, res) } } def stream[A, B]( @@ -273,20 +240,14 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst lens: A => B, identity: Identity, extraHeaders: Seq[HttpHeader] = jsonHeaders - )(implicit um: FromEntityUnmarshaller[A]): Stream[Task, B] = { - def onFail(e: Throwable) = - throw new IllegalStateException( - s"Something went wrong while processing the response for url: $baseUrl$url with identity $identity", - e - ) - Stream.unfoldLoopEval[Task, String, B](url) { currentUrl => + )(implicit um: FromEntityUnmarshaller[A]): Stream[IO, B] = { + Stream.unfoldLoopEval[IO, String, B](url) { currentUrl => requestJson[A, A]( GET, currentUrl, None, identity, (a: A, _: HttpResponse) => a, - onFail, extraHeaders ).map { a => (lens(a), nextUrl(a)) @@ -300,17 +261,16 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst initialLastEventId: Option[String], take: Long = 100L, takeWithin: FiniteDuration = 5.seconds - )(assertResponse: Seq[(Option[String], Option[Json])] => Assertion): Task[Assertion] = { + )(assertResponse: Seq[(Option[String], Option[Json])] => Assertion): IO[Assertion] = { def send(request: HttpRequest): Future[HttpResponse] = - apply(request.addHeader(tokensMap.get(identity))).runToFuture - Task - .deferFuture { - EventSource(s"$baseUrl$url", send, initialLastEventId = initialLastEventId) - //drop resolver, views and storage events - .take(take) - .takeWithin(takeWithin) - .runWith(Sink.seq) - } + apply(request.addHeader(tokensMap.get(identity))).unsafeToFuture() + fromFuture { + EventSource(s"$baseUrl$url", send, initialLastEventId = initialLastEventId) + //drop resolver, views and storage events + .take(take) + .takeWithin(takeWithin) + .runWith(Sink.seq) + } .map { seq => assertResponse( seq.map { s => @@ -324,8 +284,6 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit as: ActorSyst object HttpClient { - private val logger = Logger[this.type] - val tokensMap: ConcurrentHashMap[Identity, Authorization] = new ConcurrentHashMap[Identity, Authorization] val acceptAll: Seq[Accept] = Seq(Accept(MediaRanges.`*/*`)) @@ -340,6 +298,10 @@ object HttpClient { val gzipHeaders: Seq[HttpHeader] = Seq(Accept(MediaRanges.`*/*`), `Accept-Encoding`(HttpEncodings.gzip)) - def apply(baseUrl: Uri)(implicit as: ActorSystem, materializer: Materializer) = - new HttpClient(baseUrl, Http()) + def apply(baseUrl: Uri)(implicit + as: ActorSystem, + materializer: Materializer, + contextShift: ContextShift[IO], + ec: ExecutionContext + ) = new HttpClient(baseUrl, Http()) } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/KeycloakDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/KeycloakDsl.scala index a0af601df5..54335117f7 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/KeycloakDsl.scala +++ 
b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/KeycloakDsl.scala @@ -7,20 +7,26 @@ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.Authorization import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller import akka.stream.Materializer +import cats.syntax.all._ +import cats.effect.{ContextShift, IO} +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.testkit.TestHelpers import ch.epfl.bluebrain.nexus.tests.Identity.{ClientCredentials, UserCredentials} import ch.epfl.bluebrain.nexus.tests.Optics._ -import com.typesafe.scalalogging.Logger import io.circe.Json -import monix.bio.Task -import scala.jdk.CollectionConverters._ -class KeycloakDsl(implicit as: ActorSystem, materializer: Materializer, um: FromEntityUnmarshaller[Json]) - extends TestHelpers { +import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ - import monix.execution.Scheduler.Implicits.global +class KeycloakDsl(implicit + as: ActorSystem, + materializer: Materializer, + um: FromEntityUnmarshaller[Json], + contextShift: ContextShift[IO], + executionContext: ExecutionContext +) extends TestHelpers { - private val logger = Logger[this.type] + private val logger = Logger.cats[this.type] private val keycloakUrl = Uri(s"http://${sys.props.getOrElse("keycloak-url", "localhost:9090")}") private val keycloakClient = HttpClient(keycloakUrl) @@ -33,8 +39,7 @@ class KeycloakDsl(implicit as: ActorSystem, materializer: Materializer, um: From realm: Realm, clientCredentials: ClientCredentials, userCredentials: List[UserCredentials] - ): Task[StatusCode] = { - logger.info(s"Creating realm $realm in Keycloak...") + ): IO[StatusCode] = { val users = userCredentials.map { u => Map( s"username" -> u.name, @@ -51,28 +56,26 @@ class KeycloakDsl(implicit as: ActorSystem, materializer: Materializer, um: From ) for { + _ <- logger.info(s"Creating realm $realm in Keycloak...") adminToken <- userToken(keycloakAdmin, adminClient) - status <- keycloakClient( + response <- keycloakClient( HttpRequest( method = POST, uri = s"$keycloakUrl/admin/realms/", headers = Authorization(HttpCredentials.createOAuth2BearerToken(adminToken)) :: Nil, entity = HttpEntity(ContentTypes.`application/json`, json.noSpaces) ) - ).tapError { t => - Task { logger.error(s"Error while importing realm: ${realm.name}", t) } - }.map { res => - logger.info(s"${realm.name} has been imported with code: ${res.status}") - res.status + ).onError { t => + logger.error(t)(s"Error while importing realm: ${realm.name}") } - } yield status + _ <- logger.info(s"${realm.name} has been imported with code: ${response.status}") + } yield response.status } private def realmEndpoint(realm: Realm) = Uri(s"$keycloakUrl/realms/${realm.name}/protocol/openid-connect/token") - def userToken(user: UserCredentials, client: ClientCredentials): Task[String] = { - logger.info(s"Getting token for user ${user.name} for ${user.realm.name}") + def userToken(user: UserCredentials, client: ClientCredentials): IO[String] = { val clientFields = if (client.secret == "") { Map("scope" -> "openid", "client_id" -> client.id) } else { @@ -97,53 +100,54 @@ class KeycloakDsl(implicit as: ActorSystem, materializer: Materializer, um: From .toEntity ) - keycloakClient(request) - .flatMap { res => - Task.deferFuture { um(res.entity) } - } - .tapError { t => - Task { logger.error(s"Error while getting user token for realm: ${user.realm.name} and user:$user", t) } - } - .map { response => + logger.info(s"Getting token for user ${user.name} 
for ${user.realm.name}") >> + keycloakClient(request) + .flatMap { res => + IO.fromFuture { IO(um(res.entity)) } + } + .onError { t => + logger.error(t)(s"Error while getting user token for realm: ${user.realm.name} and user:$user") + } + .map { response => + keycloak.access_token + .getOption(response) + .getOrElse( + throw new IllegalArgumentException( + s"Couldn't get a token for user ${user.name}, we got response: $response" + ) + ) + } + + } + + def serviceAccountToken(client: ClientCredentials): IO[String] = { + logger.info(s"Getting token for client ${client.name} for ${client.realm}") >> + keycloakClient( + HttpRequest( + method = POST, + uri = realmEndpoint(client.realm), + headers = Authorization(HttpCredentials.createBasicHttpCredentials(client.id, client.secret)) :: Nil, + entity = akka.http.scaladsl.model + .FormData( + Map( + "scope" -> "openid", + "grant_type" -> "client_credentials" + ) + ) + .toEntity + ) + ).flatMap { res => + IO.fromFuture { IO(um(res.entity)) } + }.onError { t => + logger.error(t)(s"Error while getting user token for realm: ${client.realm} and client: $client") + }.map { response => keycloak.access_token .getOption(response) .getOrElse( throw new IllegalArgumentException( - s"Couldn't get a token for user ${user.name}, we got response: $response" + s"Couldn't get a token for client ${client.id} for realm ${client.realm.name}, we got response: $response" ) ) } - - } - - def serviceAccountToken(client: ClientCredentials): Task[String] = { - logger.info(s"Getting token for client ${client.name} for ${client.realm}") - keycloakClient( - HttpRequest( - method = POST, - uri = realmEndpoint(client.realm), - headers = Authorization(HttpCredentials.createBasicHttpCredentials(client.id, client.secret)) :: Nil, - entity = akka.http.scaladsl.model - .FormData( - Map( - "scope" -> "openid", - "grant_type" -> "client_credentials" - ) - ) - .toEntity - ) - ).flatMap { res => - Task.deferFuture { um(res.entity) } - }.tapError { t => - Task { logger.error(s"Error while getting user token for realm: ${client.realm} and client: $client", t) } - }.map { response => - keycloak.access_token - .getOption(response) - .getOrElse( - throw new IllegalArgumentException( - s"Couldn't get a token for client ${client.id} for realm ${client.realm.name}, we got response: $response" - ) - ) - } } } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/AdminDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/AdminDsl.scala index 630309ba34..27be4cefc9 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/AdminDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/AdminDsl.scala @@ -1,21 +1,21 @@ package ch.epfl.bluebrain.nexus.tests.admin import akka.http.scaladsl.model.StatusCodes +import cats.effect.IO +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.testkit.TestHelpers import ch.epfl.bluebrain.nexus.tests.Identity.Authenticated import ch.epfl.bluebrain.nexus.tests.Optics.{filterMetadataKeys, _} import ch.epfl.bluebrain.nexus.tests.config.TestsConfig import ch.epfl.bluebrain.nexus.tests.{CirceUnmarshalling, ExpectedResponse, HttpClient, Identity} -import com.typesafe.scalalogging.Logger import io.circe.Json -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with CirceUnmarshalling with 
Matchers { - private val logger = Logger[this.type] + private val logger = Logger.cats[this.type] def orgPayload(description: String = genString()): Json = jsonContentOf("/admin/orgs/payload.json", "description" -> description) @@ -84,7 +84,7 @@ class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with Cir authenticated: Authenticated, expectedResponse: Option[ExpectedResponse] = None, ignoreConflict: Boolean = false - ): Task[Assertion] = + ): IO[Assertion] = updateOrganization(id, description, authenticated, 0, expectedResponse, ignoreConflict) def updateOrganization( @@ -94,7 +94,7 @@ class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with Cir rev: Int, expectedResponse: Option[ExpectedResponse] = None, ignoreConflict: Boolean = false - ): Task[Assertion] = { + ): IO[Assertion] = { cl.put[Json](s"/orgs/$id${queryParams(rev)}", orgPayload(description), authenticated) { (json, response) => expectedResponse match { case Some(e) => @@ -122,7 +122,7 @@ class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with Cir } } - def deprecateOrganization(id: String, authenticated: Authenticated): Task[Assertion] = + def deprecateOrganization(id: String, authenticated: Authenticated): IO[Assertion] = cl.get[Json](s"/orgs/$id", authenticated) { (json, response) => response.status shouldEqual StatusCodes.OK val rev = admin._rev.getOption(json).value @@ -137,7 +137,7 @@ class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with Cir "organizations", deprecated = true ) - }.runSyncUnsafe() + }.unsafeRunSync() } private[tests] val startPool = Vector.range('a', 'z') @@ -168,7 +168,7 @@ class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with Cir json: Json, authenticated: Authenticated, expectedResponse: Option[ExpectedResponse] = None - ): Task[Assertion] = + ): IO[Assertion] = updateProject(orgId, projectId, json, authenticated, 0, expectedResponse) def updateProject( @@ -178,30 +178,30 @@ class AdminDsl(cl: HttpClient, config: TestsConfig) extends TestHelpers with Cir authenticated: Authenticated, rev: Int, expectedResponse: Option[ExpectedResponse] = None - ): Task[Assertion] = - cl.put[Json](s"/projects/$orgId/$projectId${queryParams(rev)}", payload, authenticated) { (json, response) => - logger.info(s"Creating/updating project $orgId/$projectId at revision $rev") - expectedResponse match { - case Some(e) => - response.status shouldEqual e.statusCode - json shouldEqual e.json - case None => - if (rev == 0) - response.status shouldEqual StatusCodes.Created - else - response.status shouldEqual StatusCodes.OK - filterProjectMetadataKeys(json) shouldEqual createProjectRespJson( - projectId, - orgId, - rev + 1, - authenticated = authenticated, - schema = "projects" - ) - } + ): IO[Assertion] = + logger.info(s"Creating/updating project $orgId/$projectId at revision $rev") >> + cl.put[Json](s"/projects/$orgId/$projectId${queryParams(rev)}", payload, authenticated) { (json, response) => + expectedResponse match { + case Some(e) => + response.status shouldEqual e.statusCode + json shouldEqual e.json + case None => + if (rev == 0) + response.status shouldEqual StatusCodes.Created + else + response.status shouldEqual StatusCodes.OK + filterProjectMetadataKeys(json) shouldEqual createProjectRespJson( + projectId, + orgId, + rev + 1, + authenticated = authenticated, + schema = "projects" + ) + } - } + } - def getUuids(orgId: String, projectId: String, identity: Identity): Task[(String, String)] = + def getUuids(orgId: 
String, projectId: String, identity: Identity): IO[(String, String)] = for { orgUuid <- cl.getJson[Json](s"/orgs/$orgId", identity) projectUuid <- cl.getJson[Json](s"/projects/$orgId/$projectId", identity) diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/OrgsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/OrgsSpec.scala index fc3d7c3cc3..806f6529b5 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/OrgsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/OrgsSpec.scala @@ -6,7 +6,6 @@ import ch.epfl.bluebrain.nexus.tests.Identity.orgs.{Fry, Leela} import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.{BaseSpec, ExpectedResponse} import io.circe.Json -import monix.execution.Scheduler.Implicits.global class OrgsSpec extends BaseSpec with EitherValuable { @@ -110,7 +109,7 @@ class OrgsSpec extends BaseSpec with EitherValuable { "fetch organization by UUID" in { deltaClient.get[Json](s"/orgs/$id", Leela) { (jsonById, _) => - runTask { + runIO { val orgUuid = _uuid.getOption(jsonById).value deltaClient.get[Json](s"/orgs/$orgUuid", Leela) { (jsonByUuid, response) => @@ -223,7 +222,7 @@ class OrgsSpec extends BaseSpec with EitherValuable { 2 ) _ <- deltaClient.get[Json](s"/orgs/$id", Leela) { (lastVersion, response) => - runTask { + runIO { response.status shouldEqual StatusCodes.OK admin.validate(lastVersion, "Organization", "orgs", id, updatedName2, 3, id) deltaClient.get[Json](s"/orgs/$id?rev=3", Leela) { (thirdVersion, response) => diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/ProjectsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/ProjectsSpec.scala index b60dbf2a85..3dc991adad 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/ProjectsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/admin/ProjectsSpec.scala @@ -11,7 +11,6 @@ import ch.epfl.bluebrain.nexus.tests.Identity.resources.Rick import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.{BaseSpec, ExpectedResponse, Identity} import io.circe.Json -import monix.execution.Scheduler.Implicits.global class ProjectsSpec extends BaseSpec { @@ -143,10 +142,10 @@ class ProjectsSpec extends BaseSpec { "fetch project by UUID" in { deltaClient.get[Json](s"/orgs/$orgId", Identity.ServiceAccount) { (orgJson, _) => - runTask { + runIO { val orgUuid = _uuid.getOption(orgJson).value deltaClient.get[Json](s"/projects/$id", Bojack) { (projectJson, _) => - runTask { + runIO { val projectUuid = _uuid.getOption(projectJson).value deltaClient.get[Json](s"/projects/$orgUuid/$projectUuid", Bojack) { (json, response) => response.status shouldEqual StatusCodes.OK diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclDsl.scala index 3b6bcc0c9f..f5ed8c0e0c 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclDsl.scala @@ -1,27 +1,27 @@ package ch.epfl.bluebrain.nexus.tests.iam import akka.http.scaladsl.model.{HttpResponse, StatusCodes} +import cats.effect.{ContextShift, IO} import cats.implicits._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.testkit.TestHelpers import ch.epfl.bluebrain.nexus.tests.Identity.Authenticated import ch.epfl.bluebrain.nexus.tests.Optics.error import ch.epfl.bluebrain.nexus.tests.iam.types.{AclEntry, 
AclListing, Anonymous, Permission, User} import ch.epfl.bluebrain.nexus.tests.{CirceUnmarshalling, HttpClient, Identity} -import com.typesafe.scalalogging.Logger import io.circe.Json -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.matchers.should.Matchers import org.scalatest.{Assertion, OptionValues} + import scala.jdk.CollectionConverters._ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with OptionValues with Matchers { - private val logger = Logger[this.type] + private val logger = Logger.cats[this.type] def fetch(path: String, identity: Identity, self: Boolean = true, ancestors: Boolean = false)( assertAcls: AclListing => Assertion - ): Task[Assertion] = { + ): IO[Assertion] = { path should not startWith "/acls" cl.get[AclListing](s"/acls$path?ancestors=$ancestors&self=$self", identity) { (acls, response) => response.status shouldEqual StatusCodes.OK @@ -29,10 +29,10 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op } } - def addPermission(path: String, target: Authenticated, permission: Permission): Task[Assertion] = + def addPermission(path: String, target: Authenticated, permission: Permission): IO[Assertion] = addPermissions(path, target, Set(permission)) - def addPermissions(path: String, target: Authenticated, permissions: Set[Permission]): Task[Assertion] = { + def addPermissions(path: String, target: Authenticated, permissions: Set[Permission]): IO[Assertion] = { val json = jsonContentOf( "/iam/add.json", "realm" -> target.realm.name, @@ -43,10 +43,10 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op addPermissions(path, json, target.name) } - def addPermissionAnonymous(path: String, permission: Permission): Task[Assertion] = + def addPermissionAnonymous(path: String, permission: Permission): IO[Assertion] = addPermissionsAnonymous(path, Set(permission)) - def addPermissionsAnonymous(path: String, permissions: Set[Permission]): Task[Assertion] = { + def addPermissionsAnonymous(path: String, permissions: Set[Permission]): IO[Assertion] = { val json = jsonContentOf( "/iam/add_annon.json", "perms" -> permissions.asJava @@ -55,41 +55,37 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op addPermissions(path, json, "Anonymous") } - def addPermissions(path: String, payload: Json, targetName: String): Task[Assertion] = { - logger.info(s"Addings permissions to $path for $targetName") + def addPermissions(path: String, payload: Json, targetName: String): IO[Assertion] = { def assertResponse(json: Json, response: HttpResponse) = response.status match { case StatusCodes.Created | StatusCodes.OK => - logger.info(s"Permissions has been successfully added for $targetName on $path") succeed case StatusCodes.BadRequest => val errorType = error.`@type`.getOption(json) - logger.warn( - s"We got a bad request when adding permissions for $targetName on $path with error type $errorType" - ) errorType.value shouldBe "NothingToBeUpdated" case s => fail(s"We were not expecting $s when setting acls on $path for $targetName") } - fetch(path, Identity.ServiceAccount) { acls => - { - val rev = acls._results.headOption - rev match { - case Some(r) => - cl.patch[Json](s"/acls$path?rev=${r._rev}", payload, Identity.ServiceAccount) { - assertResponse - } - case None => - cl.patch[Json](s"/acls$path", payload, Identity.ServiceAccount) { - assertResponse - } - } - }.runSyncUnsafe() - } + logger.info(s"Addings permissions to $path for 
$targetName") >> + fetch(path, Identity.ServiceAccount) { acls => + { + val rev = acls._results.headOption + rev match { + case Some(r) => + cl.patch[Json](s"/acls$path?rev=${r._rev}", payload, Identity.ServiceAccount) { + assertResponse + } + case None => + cl.patch[Json](s"/acls$path", payload, Identity.ServiceAccount) { + assertResponse + } + } + }.unsafeRunSync() + } } - def cleanAcls(target: Authenticated): Task[Assertion] = + def cleanAcls(target: Authenticated)(implicit contextShift: ContextShift[IO]): IO[Assertion] = fetch(s"/*/*", Identity.ServiceAccount, ancestors = true, self = false) { acls => val permissions = acls._results .map { acls => @@ -114,10 +110,10 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op } } .map(_ => succeed) - .runSyncUnsafe() + .unsafeRunSync() } - def cleanAclsAnonymous: Task[Assertion] = + def cleanAclsAnonymous(implicit contextShift: ContextShift[IO]): IO[Assertion] = fetch(s"/*/*", Identity.ServiceAccount, ancestors = true, self = false) { acls => val permissions = acls._results .map { acls => @@ -140,23 +136,23 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op } } .map(_ => succeed) - .runSyncUnsafe() + .unsafeRunSync() } - def deletePermission(path: String, target: Authenticated, permission: Permission): Task[Assertion] = + def deletePermission(path: String, target: Authenticated, permission: Permission): IO[Assertion] = deletePermissions(path, target, Set(permission)) - def deletePermissions(path: String, target: Authenticated, permissions: Set[Permission]): Task[Assertion] = + def deletePermissions(path: String, target: Authenticated, permissions: Set[Permission]): IO[Assertion] = fetch(path, Identity.ServiceAccount) { acls => deletePermissions( path, target, acls._results.head._rev, permissions - ).runSyncUnsafe() + ).unsafeRunSync() } - def deletePermission(path: String, target: Authenticated, rev: Int, permission: Permission): Task[Assertion] = { + def deletePermission(path: String, target: Authenticated, rev: Int, permission: Permission): IO[Assertion] = { deletePermissions(path, target, rev, Set(permission)) } @@ -165,7 +161,7 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op target: Authenticated, rev: Int, permissions: Set[Permission] - ): Task[Assertion] = { + ): IO[Assertion] = { val body = jsonContentOf( "/iam/subtract-permissions.json", "realm" -> target.realm.name, @@ -177,13 +173,13 @@ class AclDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling with Op } } - def checkAdminAcls(path: String, authenticated: Authenticated): Task[Assertion] = { - logger.info(s"Gettings acls for $path using ${authenticated.name}") - fetch(path, authenticated) { acls => - val acl = acls._results.headOption.value - val entry = acl.acl.headOption.value - entry.permissions shouldEqual Permission.adminPermissions - } + def checkAdminAcls(path: String, authenticated: Authenticated): IO[Assertion] = { + logger.info(s"Gettings acls for $path using ${authenticated.name}") >> + fetch(path, authenticated) { acls => + val acl = acls._results.headOption.value + val entry = acl.acl.headOption.value + entry.permissions shouldEqual Permission.adminPermissions + } } } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclsSpec.scala index cf449e7010..e3bd124aeb 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclsSpec.scala +++ 
b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/AclsSpec.scala @@ -5,7 +5,6 @@ import ch.epfl.bluebrain.nexus.tests.Identity.acls.Marge import ch.epfl.bluebrain.nexus.tests.Identity.testRealm import ch.epfl.bluebrain.nexus.tests.iam.types.{AclEntry, AclListing, Permission, User} import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Identity} -import monix.execution.Scheduler.Implicits.global class AclsSpec extends BaseSpec { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionDsl.scala index 5c78ca3320..4baeb7807d 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionDsl.scala @@ -1,12 +1,11 @@ package ch.epfl.bluebrain.nexus.tests.iam import akka.http.scaladsl.model.StatusCodes +import cats.effect.IO import ch.epfl.bluebrain.nexus.testkit.TestHelpers import ch.epfl.bluebrain.nexus.tests.iam.types.{Permission, Permissions} import ch.epfl.bluebrain.nexus.tests.{CirceUnmarshalling, HttpClient, Identity} import io.circe.Json -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers @@ -15,7 +14,7 @@ class PermissionDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling def permissionsRepl(permissions: Iterable[Permission]) = "perms" -> permissions.map { _.value }.mkString("\",\"") - def addPermissions(list: Permission*): Task[Assertion] = + def addPermissions(list: Permission*): IO[Assertion] = cl.get[Permissions]("/permissions", Identity.ServiceAccount) { (permissions, response) => response.status shouldEqual StatusCodes.OK val body = jsonContentOf( @@ -25,7 +24,7 @@ class PermissionDsl(cl: HttpClient) extends TestHelpers with CirceUnmarshalling if (!list.toSet.subsetOf(permissions.permissions)) { cl.patch[Json](s"/permissions?rev=${permissions._rev}", body, Identity.ServiceAccount) { (_, response) => response.status shouldEqual StatusCodes.OK - }.runSyncUnsafe() + }.unsafeRunSync() } else { succeed } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionsSpec.scala index 600176aa59..e5f059ddec 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/PermissionsSpec.scala @@ -1,10 +1,10 @@ package ch.epfl.bluebrain.nexus.tests.iam import akka.http.scaladsl.model.StatusCodes +import cats.effect.IO import ch.epfl.bluebrain.nexus.tests.iam.types.{Permission, Permissions} import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Identity} import io.circe.Json -import monix.bio.Task class PermissionsSpec extends BaseSpec { @@ -14,10 +14,10 @@ class PermissionsSpec extends BaseSpec { "clear permissions" in { deltaClient.get[Permissions]("/permissions", Identity.ServiceAccount) { (permissions, response) => - runTask { + runIO { response.status shouldEqual StatusCodes.OK if (permissions.permissions == Permission.minimalPermissions) - Task(succeed) + IO.pure(succeed) else deltaClient.delete[Json](s"/permissions?rev=${permissions._rev}", Identity.ServiceAccount) { (_, response) => @@ -43,7 +43,7 @@ class PermissionsSpec extends BaseSpec { "subtract permissions" in { deltaClient.get[Permissions]("/permissions", Identity.ServiceAccount) { (permissions, response) => - runTask { + runIO { response.status 
shouldEqual StatusCodes.OK val body = jsonContentOf( "/iam/permissions/subtract.json", @@ -66,7 +66,7 @@ class PermissionsSpec extends BaseSpec { "replace permissions" in { deltaClient.get[Permissions]("/permissions", Identity.ServiceAccount) { (permissions, response) => - runTask { + runIO { response.status shouldEqual StatusCodes.OK val body = jsonContentOf( @@ -92,7 +92,7 @@ class PermissionsSpec extends BaseSpec { "reject subtracting minimal permission" in { deltaClient.get[Permissions]("/permissions", Identity.ServiceAccount) { (permissions, response) => - runTask { + runIO { response.status shouldEqual StatusCodes.OK val body = jsonContentOf( "/iam/permissions/subtract.json", @@ -110,7 +110,7 @@ class PermissionsSpec extends BaseSpec { "reject replacing minimal permission" in { deltaClient.get[Permissions]("/permissions", Identity.ServiceAccount) { (permissions, response) => - runTask { + runIO { response.status shouldEqual StatusCodes.OK val body = jsonContentOf( "/iam/permissions/replace.json", diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/RealmsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/RealmsSpec.scala index 6ee062e076..2cb8b3d484 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/RealmsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/RealmsSpec.scala @@ -4,7 +4,6 @@ import akka.http.scaladsl.model.StatusCodes import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Identity, Realm} import io.circe.Json -import monix.execution.Scheduler.Implicits.global class RealmsSpec extends BaseSpec { @@ -25,7 +24,7 @@ class RealmsSpec extends BaseSpec { _ <- authenticateClient(testClient) } yield () - setup.runSyncUnsafe() + setup.unsafeRunSync() } "manage realms" should { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala index da6942532c..bb35a19b34 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala @@ -1,6 +1,7 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.StatusCodes +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.testkit.{CirceEq, EitherValuable} import ch.epfl.bluebrain.nexus.tests.{BaseSpec, SchemaPayload} import ch.epfl.bluebrain.nexus.tests.Identity.Anonymous diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AutoProjectDeletionSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AutoProjectDeletionSpec.scala index 21928414d7..c80046e8bb 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AutoProjectDeletionSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AutoProjectDeletionSpec.scala @@ -4,7 +4,6 @@ import akka.http.scaladsl.model.StatusCodes import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.Identity.projects.Bojack import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Events, Organizations, Projects, Resources} -import monix.execution.Scheduler.Implicits.global import io.circe.Json import scala.concurrent.duration._ @@ -40,7 +39,7 @@ class AutoProjectDeletionSpec extends BaseSpec { _ <- deltaClient.get[Json](s"/projects/$ref1", Bojack)(expect(StatusCodes.OK)) } yield succeed - setup.void.runSyncUnsafe() + setup.void.unsafeRunSync() } "eventually return a not found when attempting to 
fetch the project" in eventually { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsLifeCycleSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsLifeCycleSpec.scala index ca261dde9a..ce15eef591 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsLifeCycleSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsLifeCycleSpec.scala @@ -1,6 +1,7 @@ package ch.epfl.bluebrain.nexus.tests.kg import cats.data.NonEmptyMap +import cats.effect.IO import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.Identity.compositeviews.Jerry import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Events, Organizations, Views} @@ -12,8 +13,6 @@ import org.scalactic.source.Position final class CompositeViewsLifeCycleSpec extends BaseSpec { - implicit private val classLoader: ClassLoader = getClass.getClassLoader - private val orgId = genId() private val projId = genId() private val proj2Id = genId() @@ -40,12 +39,14 @@ final class CompositeViewsLifeCycleSpec extends BaseSpec { "proj" -> proj2Id, "query" -> query ) ++ includeCrossProjectOpt ++ includeSparqlProjectionOpt - ioJsonContentOf( - "/kg/views/composite/composite-view-lifecycle.json", - replacements( - Jerry, - values: _* - ): _* + IO( + jsonContentOf( + "/kg/views/composite/composite-view-lifecycle.json", + replacements( + Jerry, + values: _* + ): _* + ) ) } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala index f40a8305c4..231128c2d2 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/CompositeViewsSpec.scala @@ -2,21 +2,19 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.StatusCodes import cats.implicits._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.HttpClient._ import ch.epfl.bluebrain.nexus.tests.Identity.compositeviews.Jerry import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Events, Organizations, Views} import ch.epfl.bluebrain.nexus.tests.kg.CompositeViewsSpec.{albumQuery, bandQuery} -import com.typesafe.scalalogging.Logger import io.circe.Json import io.circe.optics.JsonPath._ -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global class CompositeViewsSpec extends BaseSpec { - private val logger = Logger[this.type] + private val logger = Logger.cats[this.type] case class Stats(totalEvents: Long, remainingEvents: Long) @@ -45,13 +43,11 @@ class CompositeViewsSpec extends BaseSpec { val projectPayload = jsonContentOf("/kg/views/composite/project.json") for { _ <- adminDsl.createOrganization(orgId, orgId, Jerry) - _ <- Task.sequence( - List( - adminDsl.createProject(orgId, bandsProject, projectPayload, Jerry), - adminDsl.createProject(orgId, albumsProject, projectPayload, Jerry), - adminDsl.createProject(orgId, songsProject, projectPayload, Jerry) - ) - ) + _ <- List( + adminDsl.createProject(orgId, bandsProject, projectPayload, Jerry), + adminDsl.createProject(orgId, albumsProject, projectPayload, Jerry), + adminDsl.createProject(orgId, songsProject, projectPayload, Jerry) + ).sequence } yield succeed } @@ -314,32 +310,29 @@ class CompositeViewsSpec extends BaseSpec { private def 
waitForView(viewId: String = "composite") = { eventually { - deltaClient.get[Json](s"/views/$orgId/bands/$viewId/projections/_/statistics", Jerry) { (json, response) => - val stats = root._results.each.as[Stats].getAll(json) - logger.debug(s"Response: ${response.status} with ${stats.size} stats") - stats.foreach { stat => - logger.debug(s"totalEvents: ${stat.totalEvents}, remainingEvents: ${stat.remainingEvents}") - stat.totalEvents should be > 0L - stat.remainingEvents shouldEqual 0 + logger.info("Waiting for view to be indexed") >> + deltaClient.get[Json](s"/views/$orgId/bands/$viewId/projections/_/statistics", Jerry) { (json, response) => + val stats = root._results.each.as[Stats].getAll(json) + stats.foreach { stat => + stat.totalEvents should be > 0L + stat.remainingEvents shouldEqual 0 + } + response.status shouldEqual StatusCodes.OK } - response.status shouldEqual StatusCodes.OK - } } succeed } - private def resetView(viewId: String) = - deltaClient.delete[Json](s"/views/$orgId/bands/$viewId/projections/_/offset", Jerry) { (_, response) => - logger.info(s"Resetting view responded with ${response.status}") - response.status shouldEqual StatusCodes.OK - } + private def resetView(viewId: String) = { + logger.info("Resetting offsets") >> + deltaClient.delete[Json](s"/views/$orgId/bands/$viewId/projections/_/offset", Jerry) { (_, response) => + response.status shouldEqual StatusCodes.OK + } + } private def resetAndWait(viewId: String = "composite") = { - logger.info("Waiting for view to be indexed") waitForView(viewId) - logger.info("Resetting offsets") - resetView(viewId).runSyncUnsafe() - logger.info("Waiting for view to be indexed again") + resetView(viewId).unsafeRunSync() waitForView(viewId) } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala index 4f8365b39a..bdc4007d7a 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/DiskStorageSpec.scala @@ -1,11 +1,11 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.StatusCodes +import cats.effect.IO import ch.epfl.bluebrain.nexus.tests.Identity.storages.Coyote import ch.epfl.bluebrain.nexus.tests.Optics.filterMetadataKeys import ch.epfl.bluebrain.nexus.tests.iam.types.Permission import io.circe.Json -import monix.bio.Task import org.scalatest.Assertion class DiskStorageSpec extends StorageSpec { @@ -32,7 +32,7 @@ class DiskStorageSpec extends StorageSpec { ): _* ) - override def createStorages: Task[Assertion] = { + override def createStorages: IO[Assertion] = { val payload = jsonContentOf("/kg/storages/disk.json") val payload2 = jsonContentOf("/kg/storages/disk-perms.json") diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsDsl.scala index cca7ed916b..1e0a1e5a0b 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsDsl.scala @@ -1,12 +1,13 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.StatusCodes +import cats.effect.IO import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} import ch.epfl.bluebrain.nexus.tests.{CirceUnmarshalling, HttpClient, Identity} import io.circe.Json -import monix.bio.Task import org.scalatest.Assertion 
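The DSLs and specs above repeat the same handful of substitutions when moving from Monix BIO to Cats Effect: Task.deferFuture becomes IO.fromFuture, Task.sequence becomes .sequence, side-effecting log calls become effects chained with >>, and runSyncUnsafe() becomes unsafeRunSync() at the test boundary. A minimal, self-contained sketch of those equivalences follows, assuming cats-effect 2.x as used in this series; the object name, the println-backed logger and the method names are illustrative only, not part of the patch.

import cats.effect.{ContextShift, IO}
import cats.syntax.all._

import scala.concurrent.{ExecutionContext, Future}

object MigrationPatterns {

  implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)

  // Stand-in for the effectful logger obtained via Logger.cats[this.type]
  private def info(message: String): IO[Unit] = IO(println(message))

  // Task.deferFuture(fut)        becomes  IO.fromFuture(IO(fut))
  def fromAkkaFuture[A](fut: => Future[A]): IO[A] = IO.fromFuture(IO(fut))

  // Task.sequence(List(a, b, c)) becomes  List(a, b, c).sequence
  def runAll[A](ios: List[IO[A]]): IO[List[A]] = ios.sequence

  // Logging is no longer a side effect: it is chained in front of the action with >>
  def logged[A](action: IO[A]): IO[A] = info("about to run the action") >> action

  // task.runSyncUnsafe()         becomes  io.unsafeRunSync(), kept at the test boundary only
  def atTheTestBoundary[A](io: IO[A]): A = io.unsafeRunSync()
}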
import org.scalatest.matchers.should.Matchers + import scala.jdk.CollectionConverters._ final class ElasticSearchViewsDsl(deltaClient: HttpClient) @@ -18,7 +19,7 @@ final class ElasticSearchViewsDsl(deltaClient: HttpClient) /** * Create an aggregate view and expects it to succeed */ - def aggregate(id: String, projectRef: String, identity: Identity, views: (String, String)*): Task[Assertion] = { + def aggregate(id: String, projectRef: String, identity: Identity, views: (String, String)*): IO[Assertion] = { val payload = jsonContentOf( "/kg/views/elasticsearch/aggregate.json", "views" -> views.map { case ((project, view)) => diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsSpec.scala index ace536c78e..58a3fdc4b4 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ElasticSearchViewsSpec.scala @@ -9,7 +9,6 @@ import ch.epfl.bluebrain.nexus.tests.Identity.views.ScoobyDoo import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Organizations, Views} import io.circe.{ACursor, Json} -import monix.execution.Scheduler.Implicits.global class ElasticSearchViewsSpec extends BaseSpec with EitherValuable with CirceEq { @@ -217,7 +216,7 @@ class ElasticSearchViewsSpec extends BaseSpec with EitherValuable with CirceEq { .post[Json](s"/views/$fullId/test-resource:cell-view/_search", matchAll, ScoobyDoo) { (json2, _) => filterKey("took")(json2) shouldEqual filterKey("took")(json) } - .runSyncUnsafe() + .unsafeRunSync() } } @@ -241,7 +240,7 @@ class ElasticSearchViewsSpec extends BaseSpec with EitherValuable with CirceEq { .post[Json](s"/views/$fullId2/test-resource:cell-view/_search", matchAll, ScoobyDoo) { (json2, _) => filterKey("took")(json2) shouldEqual filterKey("took")(json) } - .runSyncUnsafe() + .unsafeRunSync() } } @@ -385,7 +384,7 @@ class ElasticSearchViewsSpec extends BaseSpec with EitherValuable with CirceEq { .post[Json](s"/views/$fullId/test-resource:cell-view/_search", matchAll, ScoobyDoo) { (json2, _) => filterKey("took")(json2) shouldEqual filterKey("took")(json) } - .runSyncUnsafe() + .unsafeRunSync() } } @@ -410,7 +409,7 @@ class ElasticSearchViewsSpec extends BaseSpec with EitherValuable with CirceEq { .post[Json](s"/views/$fullId/test-resource:cell-view/_search", matchAll, ScoobyDoo) { (json2, _) => filterKey("took")(json2) shouldEqual filterKey("took")(json) } - .runSyncUnsafe() + .unsafeRunSync() } } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ErrorsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ErrorsSpec.scala index 415bc7e192..4075956f56 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ErrorsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ErrorsSpec.scala @@ -4,7 +4,6 @@ import akka.http.scaladsl.model.StatusCodes import ch.epfl.bluebrain.nexus.testkit.EitherValuable import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Identity} import io.circe.Json -import monix.execution.Scheduler.Implicits.global class ErrorsSpec extends BaseSpec with EitherValuable { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/EventsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/EventsSpec.scala index 53c235e41c..2abe0556e7 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/EventsSpec.scala +++ 
b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/EventsSpec.scala @@ -1,14 +1,13 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.{ContentTypes, StatusCodes} +import cats.effect.IO import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.Identity.events.BugsBunny import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Events, Organizations, Resources} import ch.epfl.bluebrain.nexus.tests.resources.SimpleResource import io.circe.Json -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.scalatest.Inspectors class EventsSpec extends BaseSpec with Inspectors { @@ -246,43 +245,41 @@ class EventsSpec extends BaseSpec with Inspectors { "fetch global events" in { // TODO: find a way to get the current event sequence in postgres - Task - .when(initialEventId.isDefined) { - for { - uuids <- adminDsl.getUuids(orgId, projId, BugsBunny) - uuids2 <- adminDsl.getUuids(orgId2, projId, BugsBunny) - _ <- deltaClient.sseEvents(s"/resources/events", BugsBunny, initialEventId, take = 21) { seq => - val projectEvents = seq.drop(14) - projectEvents.size shouldEqual 7 - projectEvents.flatMap(_._1) should contain theSameElementsInOrderAs List( - "ResourceCreated", - "ResourceCreated", - "ResourceUpdated", - "ResourceTagAdded", - "ResourceDeprecated", - "FileCreated", - "FileUpdated" - ) - val json = Json.arr(projectEvents.flatMap(_._2.map(events.filterFields)): _*) - json shouldEqual jsonContentOf( - "/kg/events/events-multi-project.json", - replacements( - BugsBunny, - "resources" -> s"${config.deltaUri}/resources/$id", - "organizationUuid" -> uuids._1, - "projectUuid" -> uuids._2, - "organization2Uuid" -> uuids2._1, - "project2Uuid" -> uuids2._2, - "project" -> s"${config.deltaUri}/projects/$orgId/$projId", - "project2" -> s"${config.deltaUri}/projects/$orgId2/$projId", - "schemaProject" -> s"${config.deltaUri}/projects/$orgId/$projId", - "schemaProject2" -> s"${config.deltaUri}/projects/$orgId2/$projId" - ): _* - ) - } - } yield () - } - .as(succeed) + IO.whenA(initialEventId.isDefined) { + for { + uuids <- adminDsl.getUuids(orgId, projId, BugsBunny) + uuids2 <- adminDsl.getUuids(orgId2, projId, BugsBunny) + _ <- deltaClient.sseEvents(s"/resources/events", BugsBunny, initialEventId, take = 21) { seq => + val projectEvents = seq.drop(14) + projectEvents.size shouldEqual 7 + projectEvents.flatMap(_._1) should contain theSameElementsInOrderAs List( + "ResourceCreated", + "ResourceCreated", + "ResourceUpdated", + "ResourceTagAdded", + "ResourceDeprecated", + "FileCreated", + "FileUpdated" + ) + val json = Json.arr(projectEvents.flatMap(_._2.map(events.filterFields)): _*) + json shouldEqual jsonContentOf( + "/kg/events/events-multi-project.json", + replacements( + BugsBunny, + "resources" -> s"${config.deltaUri}/resources/$id", + "organizationUuid" -> uuids._1, + "projectUuid" -> uuids._2, + "organization2Uuid" -> uuids2._1, + "project2Uuid" -> uuids2._2, + "project" -> s"${config.deltaUri}/projects/$orgId/$projId", + "project2" -> s"${config.deltaUri}/projects/$orgId2/$projId", + "schemaProject" -> s"${config.deltaUri}/projects/$orgId/$projId", + "schemaProject2" -> s"${config.deltaUri}/projects/$orgId2/$projId" + ): _* + ) + } + } yield () + }.as(succeed) } } } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/IdResolutionSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/IdResolutionSpec.scala index 9a6028e19b..193d44bec9 100644 --- 
a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/IdResolutionSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/IdResolutionSpec.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.MediaTypes.`text/html` import akka.http.scaladsl.model.headers.{Accept, Location} import akka.http.scaladsl.model.{HttpResponse, MediaRange, StatusCodes} import akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.Identity.listings.{Alice, Bob} diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/MultiFetchSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/MultiFetchSpec.scala index c6e2e8d134..30993219b8 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/MultiFetchSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/MultiFetchSpec.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.{ContentTypes, HttpResponse, StatusCodes} import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Identity} import ch.epfl.bluebrain.nexus.tests.Identity.listings.{Alice, Bob} +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.Resources import ch.epfl.bluebrain.nexus.tests.resources.SimpleResource diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala index cef8374967..4bf6b3ab60 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/RemoteStorageSpec.scala @@ -2,6 +2,7 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.{ContentTypes, HttpCharsets, MediaTypes, StatusCodes} import akka.util.ByteString +import cats.effect.IO import ch.epfl.bluebrain.nexus.tests.HttpClient._ import ch.epfl.bluebrain.nexus.tests.Identity.storages.Coyote import ch.epfl.bluebrain.nexus.tests.Optics.{filterKey, filterMetadataKeys, projections} @@ -10,7 +11,6 @@ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.Supervision import io.circe.generic.semiauto.deriveDecoder import io.circe.syntax.KeyOps import io.circe.{Decoder, Json} -import monix.bio.Task import org.scalactic.source.Position import org.scalatest.Assertion @@ -60,7 +60,7 @@ class RemoteStorageSpec extends StorageSpec { ): _* ) - override def createStorages: Task[Assertion] = { + override def createStorages: IO[Assertion] = { val payload = jsonContentOf( "/kg/storages/remote-disk.json", "endpoint" -> externalEndpoint, @@ -191,7 +191,7 @@ class RemoteStorageSpec extends StorageSpec { } } - def createFile(filename: String) = Task.delay { + def createFile(filename: String) = IO.delay { val createFile = s"echo 'file content' > /tmp/$remoteFolder/$filename" s"docker exec nexus-storage-service bash -c \"$createFile\"".! 
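    // IO.delay suspends the sys.process call above, so nothing is executed against docker
    // until the IO is actually run (e.g. with unsafeRunSync() at the test boundary);
    // this is why Task.delay maps one-to-one onto IO.delay in this migration.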
} diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala index 4c014a503a..444af35c2c 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/ResourcesSpec.scala @@ -16,7 +16,6 @@ import ch.epfl.bluebrain.nexus.tests.resources.SimpleResource import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Optics, SchemaPayload} import io.circe.Json import io.circe.optics.JsonPath.root -import monix.execution.Scheduler.Implicits.global import monocle.Optional import org.scalatest.matchers.{HavePropertyMatchResult, HavePropertyMatcher} @@ -303,7 +302,7 @@ class ResourcesSpec extends BaseSpec with EitherValuable with CirceEq { for { _ <- deltaClient.get[Json](s"/schemas/$id1/test-schema", Rick) { (json, response1) => response1.status shouldEqual StatusCodes.OK - runTask { + runIO { for { _ <- deltaClient.get[Json](s"/resolvers/$id2/_/test-schema", Rick) { (jsonResolved, response2) => response2.status shouldEqual StatusCodes.OK diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala index e71cc1d352..cd864ea7f6 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/S3StorageSpec.scala @@ -1,12 +1,12 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.StatusCodes +import cats.effect.IO import ch.epfl.bluebrain.nexus.tests.Identity.storages.Coyote import ch.epfl.bluebrain.nexus.tests.Optics.filterMetadataKeys import ch.epfl.bluebrain.nexus.tests.config.S3Config import ch.epfl.bluebrain.nexus.tests.iam.types.Permission import io.circe.Json -import monix.bio.Task import org.scalatest.Assertion import software.amazon.awssdk.auth.credentials.{AnonymousCredentialsProvider, AwsBasicCredentials, StaticCredentialsProvider} import software.amazon.awssdk.regions.Region @@ -82,7 +82,7 @@ class S3StorageSpec extends StorageSpec { ): _* ) - override def createStorages: Task[Assertion] = { + override def createStorages: IO[Assertion] = { val payload = jsonContentOf( "/kg/storages/s3.json", "storageId" -> s"https://bluebrain.github.io/nexus/vocabulary/$storageId", diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SchemasSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SchemasSpec.scala index 571ccde29b..0f2d7edd34 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SchemasSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SchemasSpec.scala @@ -8,7 +8,6 @@ import ch.epfl.bluebrain.nexus.tests.Identity.resources.Rick import ch.epfl.bluebrain.nexus.tests.builders.SchemaPayloads._ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.Organizations import io.circe.Json -import monix.execution.Scheduler.Implicits.global import org.scalatest.BeforeAndAfterAll import org.scalatest.LoneElement._ diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SearchConfigSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SearchConfigSpec.scala index 89ddd7bfba..831bb5a61d 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SearchConfigSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SearchConfigSpec.scala @@ -1,12 +1,12 @@ package ch.epfl.bluebrain.nexus.tests.kg import 
akka.http.scaladsl.model.StatusCodes +import cats.effect.IO import cats.implicits._ import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.Identity.resources.Rick import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Organizations, Resources} import io.circe.Json -import monix.bio.Task import org.scalatest.Assertion import java.time.Instant @@ -794,7 +794,7 @@ class SearchConfigSpec extends BaseSpec { jsonContentOf("/kg/search/id-query.json", "id" -> id, "field" -> field) /** Post a resource across all defined projects in the suite */ - private def postResource(resourcePath: String): Task[List[Assertion]] = { + private def postResource(resourcePath: String): IO[List[Assertion]] = { val json = jsonContentOf(resourcePath) projects.parTraverse { project => for { @@ -809,7 +809,7 @@ class SearchConfigSpec extends BaseSpec { * Queries ES using the provided query. Asserts that there is only on result in _source. Runs the provided assertion * on the _source. */ - private def assertOneSource(query: Json)(assertion: Json => Assertion): Task[Assertion] = + private def assertOneSource(query: Json)(assertion: Json => Assertion): IO[Assertion] = eventually { deltaClient.post[Json]("/search/query", query, Rick) { (body, response) => response.status shouldEqual StatusCodes.OK @@ -822,7 +822,7 @@ class SearchConfigSpec extends BaseSpec { } } - private def assertEmpty(query: Json): Task[Assertion] = + private def assertEmpty(query: Json): IO[Assertion] = assertOneSource(query)(j => assert(j == json"""{ }""")) /** Check that a given field in the json can be parsed as [[Instant]] */ diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SparqlViewsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SparqlViewsSpec.scala index 7136cc572b..9585bceaa5 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SparqlViewsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SparqlViewsSpec.scala @@ -9,7 +9,6 @@ import ch.epfl.bluebrain.nexus.tests.Identity.views.ScoobyDoo import ch.epfl.bluebrain.nexus.tests.Optics._ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Organizations, Views} import io.circe.Json -import monix.execution.Scheduler.Implicits.global class SparqlViewsSpec extends BaseSpec with EitherValuable with CirceEq { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala index 656ebdb761..5d72bb42e8 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/StorageSpec.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.headers.{ContentDispositionTypes, HttpEncodings} import akka.http.scaladsl.model._ import akka.util.ByteString +import cats.effect.IO import ch.epfl.bluebrain.nexus.testkit.CirceEq import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.HttpClient._ @@ -14,8 +15,6 @@ import ch.epfl.bluebrain.nexus.tests.iam.types.Permission import com.typesafe.config.ConfigFactory import io.circe.Json import io.circe.optics.JsonPath.root -import monix.bio.Task -import monix.execution.Scheduler.Implicits.global import org.apache.commons.codec.Charsets import org.scalatest.Assertion @@ -41,7 +40,7 @@ abstract class StorageSpec extends BaseSpec with CirceEq { def locationPrefix: Option[String] - def createStorages: Task[Assertion] + def createStorages: 
IO[Assertion] protected def fileSelf(project: String, id: String): String = { val uri = Uri(s"${config.deltaUri}/files/$project") @@ -226,7 +225,7 @@ abstract class StorageSpec extends BaseSpec with CirceEq { val textFileContent = "text file" - def uploadStorageWithCustomPermissions: ((Json, HttpResponse) => Assertion) => Task[Assertion] = + def uploadStorageWithCustomPermissions: ((Json, HttpResponse) => Assertion) => IO[Assertion] = deltaClient.uploadFile[Json]( s"/files/$projectRef/attachment3?storage=nxv:${storageId}2", textFileContent, @@ -348,7 +347,7 @@ abstract class StorageSpec extends BaseSpec with CirceEq { "Upload files with the .custom extension" should { val fileContent = "file content" - def uploadCustomFile(id: String, contentType: ContentType): ((Json, HttpResponse) => Assertion) => Task[Assertion] = + def uploadCustomFile(id: String, contentType: ContentType): ((Json, HttpResponse) => Assertion) => IO[Assertion] = deltaClient.uploadFile[Json]( s"/files/$projectRef/$id?storage=nxv:$storageId", fileContent, diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SupervisionSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SupervisionSpec.scala index de7773b5ea..af45c5b7f3 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SupervisionSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/SupervisionSpec.scala @@ -1,14 +1,14 @@ package ch.epfl.bluebrain.nexus.tests.kg import akka.http.scaladsl.model.StatusCodes -import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, EitherValuable, IOValues} +import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, EitherValuable} import ch.epfl.bluebrain.nexus.tests.BaseSpec import ch.epfl.bluebrain.nexus.tests.Identity.supervision.Mickey import ch.epfl.bluebrain.nexus.tests.Optics.{filterKeys, projections} import ch.epfl.bluebrain.nexus.tests.iam.types.Permission.{Events, Organizations, Supervision} import io.circe._ -class SupervisionSpec extends BaseSpec with EitherValuable with CirceLiteral with IOValues { +class SupervisionSpec extends BaseSpec with EitherValuable with CirceLiteral { "The supervision endpoint" should { s"reject calls without ${Supervision.Read.value} permission" in { diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/VersionSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/VersionSpec.scala index bf62fe9b04..98ec2c081e 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/VersionSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/VersionSpec.scala @@ -7,7 +7,6 @@ import ch.epfl.bluebrain.nexus.tests.kg.VersionSpec.VersionBundle import ch.epfl.bluebrain.nexus.tests.{BaseSpec, Identity} import io.circe.generic.semiauto.deriveDecoder import io.circe.{Decoder, Json} -import monix.execution.Scheduler.Implicits.global class VersionSpec extends BaseSpec with EitherValuable { @@ -19,7 +18,7 @@ class VersionSpec extends BaseSpec with EitherValuable { } "return the dependencies and plugin versions" in { - aclDsl.addPermissionAnonymous("/", Permission.Version.Read).runSyncUnsafe() + aclDsl.addPermissionAnonymous("/", Permission.Version.Read).unsafeRunSync() deltaClient.get[Json]("/version", Identity.Anonymous) { (json, response) => response.status shouldEqual StatusCodes.OK From f480c088d8c84606a7861f0f94152214a0cf57d5 Mon Sep 17 00:00:00 2001 From: Simon Date: Mon, 9 Oct 2023 15:24:37 +0200 Subject: [PATCH 07/13] Remove callback to Delta to check the token, validate it locally (#4340) * Remove callback to Delta 
to check the token, validate it locally --------- Co-authored-by: Simon Dumas --- build.sbt | 8 +- .../nexus/delta/kernel/jwt}/AuthToken.scala | 2 +- .../nexus/delta/kernel/jwt}/ParsedToken.scala | 42 ++++- .../delta/kernel/jwt/TokenRejection.scala | 55 ++++++ .../kernel}/syntax/NonEmptySetSyntax.scala | 2 +- .../nexus/delta/kernel/syntax/package.scala | 2 +- .../delta/sdk/auth/AuthTokenProvider.scala | 3 +- .../delta/sdk/auth/OpenIdAuthService.scala | 3 +- .../delta/sdk/directives/AuthDirectives.scala | 3 +- .../nexus/delta/sdk/error/IdentityError.scala | 32 +++- .../delta/sdk/identities/Identities.scala | 3 +- .../delta/sdk/identities/IdentitiesImpl.scala | 29 +-- .../sdk/identities/model/TokenRejection.scala | 80 -------- .../sdk/marshalling/HttpResponseFields.scala | 9 +- .../delta/sdk/syntax/HttpRequestSyntax.scala | 2 +- .../nexus/delta/sdk/syntax/package.scala | 1 - .../sdk/directives/AuthDirectivesSpec.scala | 5 +- .../sdk/identities/IdentitiesDummy.scala | 5 +- .../sdk/identities/IdentitiesImplSuite.scala | 60 +++--- .../identities/model/TokenRejectionSpec.scala | 5 +- .../nexus/testkit/jwt/TokenGenerator.scala | 58 ++++++ .../docs/releases/v1.9-release-notes.md | 12 ++ storage/src/main/resources/app.conf | 27 +-- .../nexus/storage/DeltaIdentitiesClient.scala | 175 ------------------ .../storage/DeltaIdentitiesClientError.scala | 57 ------ .../epfl/bluebrain/nexus/storage/Main.scala | 20 +- .../storage/auth/AuthorizationError.scala | 20 ++ .../storage/auth/AuthorizationMethod.scala | 68 +++++++ .../nexus/storage/config/AppConfig.scala | 49 +---- .../storage/config/DeltaClientConfig.scala | 24 --- .../nexus/storage/routes/AuthDirectives.scala | 42 ++--- .../nexus/storage/routes/Routes.scala | 13 +- storage/src/test/resources/app.conf | 7 +- .../auth/AuthorizationMethodSuite.scala | 137 ++++++++++++++ .../storage/routes/AppInfoRoutesSpec.scala | 13 +- .../storage/routes/AuthDirectivesSpec.scala | 127 +++++++------ .../storage/routes/StorageRoutesSpec.scala | 15 +- tests/docker/config/storage.conf | 11 +- .../test/resources/iam/identities/errors.json | 5 - .../nexus/tests/iam/IdentitiesSpec.scala | 2 +- 40 files changed, 607 insertions(+), 626 deletions(-) rename delta/{sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model => kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt}/AuthToken.scala (94%) rename delta/{sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities => kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt}/ParsedToken.scala (60%) create mode 100644 delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/TokenRejection.scala rename delta/{sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk => kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel}/syntax/NonEmptySetSyntax.scala (92%) delete mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala create mode 100644 delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/jwt/TokenGenerator.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClient.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClientError.scala create mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala create mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala delete mode 100644 
storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/DeltaClientConfig.scala create mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala delete mode 100644 tests/src/test/resources/iam/identities/errors.json diff --git a/build.sbt b/build.sbt index 76a4570a4c..222fa0e602 100755 --- a/build.sbt +++ b/build.sbt @@ -118,6 +118,7 @@ lazy val monixEval = "io.monix" %% "monix-eval" lazy val munit = "org.scalameta" %% "munit" % munitVersion lazy val nimbusJoseJwt = "com.nimbusds" % "nimbus-jose-jwt" % nimbusJoseJwtVersion lazy val pureconfig = "com.github.pureconfig" %% "pureconfig" % pureconfigVersion +lazy val pureconfigCats = "com.github.pureconfig" %% "pureconfig-cats" % pureconfigVersion lazy val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion lazy val scalaTest = "org.scalatest" %% "scalatest" % scalaTestVersion lazy val scalaXml = "org.scala-lang.modules" %% "scala-xml" % scalaXmlVersion @@ -207,14 +208,17 @@ lazy val kernel = project akkaActorTyped, // Needed to create content type akkaHttpCore, caffeine, + catsCore, catsRetry, circeCore, circeParser, handleBars, monixBio, + nimbusJoseJwt, kamonCore, log4cats, pureconfig, + pureconfigCats, scalaLogging, munit % Test, scalaTest % Test @@ -257,7 +261,6 @@ lazy val sourcingPsql = project .settings(shared, compilation, assertJavaVersion, coverage, release) .settings( libraryDependencies ++= Seq( - catsCore, circeCore, circeGenericExtras, circeParser, @@ -324,7 +327,6 @@ lazy val sdk = project distageCore, fs2, monixBio, - nimbusJoseJwt, akkaTestKitTyped % Test, akkaHttpTestKit % Test, munit % Test, @@ -735,7 +737,7 @@ lazy val storage = project servicePackaging, coverageMinimumStmtTotal := 75 ) - .dependsOn(kernel) + .dependsOn(kernel, testkit % "test->compile") .settings(cargo := { import scala.sys.process._ diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/AuthToken.scala b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/AuthToken.scala similarity index 94% rename from delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/AuthToken.scala rename to delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/AuthToken.scala index c76f574504..129bf325b5 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/AuthToken.scala +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/AuthToken.scala @@ -1,4 +1,4 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.identities.model +package ch.epfl.bluebrain.nexus.delta.kernel.jwt import akka.http.scaladsl.model.headers.OAuth2BearerToken import io.circe.{Decoder, Encoder} diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/ParsedToken.scala similarity index 60% rename from delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala rename to delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/ParsedToken.scala index 13e13e00b5..a9b01b95c5 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/ParsedToken.scala +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/ParsedToken.scala @@ -1,11 +1,18 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.identities +package ch.epfl.bluebrain.nexus.delta.kernel.jwt -import cats.implicits._ -import 
ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, TokenRejection} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection._ +import cats.data.NonEmptySet +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection._ +import com.nimbusds.jose.JWSAlgorithm +import com.nimbusds.jose.jwk.JWKSet +import ch.epfl.bluebrain.nexus.delta.kernel.syntax._ +import com.nimbusds.jose.jwk.source.ImmutableJWKSet +import com.nimbusds.jose.proc.{JWSVerificationKeySelector, SecurityContext} +import com.nimbusds.jwt.proc.{DefaultJWTClaimsVerifier, DefaultJWTProcessor} import com.nimbusds.jwt.{JWTClaimsSet, SignedJWT} import java.time.Instant +import scala.jdk.CollectionConverters._ import scala.util.Try /** @@ -18,7 +25,24 @@ final case class ParsedToken private ( expirationTime: Instant, groups: Option[Set[String]], jwtToken: SignedJWT -) +) { + + def validate(audiences: Option[NonEmptySet[String]], keySet: JWKSet): Either[InvalidAccessToken, Unit] = { + val proc = new DefaultJWTProcessor[SecurityContext] + val keySelector = new JWSVerificationKeySelector(JWSAlgorithm.RS256, new ImmutableJWKSet[SecurityContext](keySet)) + proc.setJWSKeySelector(keySelector) + audiences.foreach { aud => + proc.setJWTClaimsSetVerifier(new DefaultJWTClaimsVerifier(aud.toSet.asJava, null, null, null)) + } + Either + .catchNonFatal(proc.process(jwtToken, null)) + .bimap( + err => InvalidAccessToken(subject, issuer, err.getMessage), + _ => () + ) + } + +} object ParsedToken { @@ -33,13 +57,13 @@ object ParsedToken { def parseJwt: Either[TokenRejection, SignedJWT] = Either .catchNonFatal(SignedJWT.parse(token.value)) - .leftMap(_ => InvalidAccessTokenFormat) + .leftMap { e => InvalidAccessTokenFormat(e.getMessage) } def claims(jwt: SignedJWT): Either[TokenRejection, JWTClaimsSet] = Either - .catchNonFatal(jwt.getJWTClaimsSet) - .filterOrElse(_ != null, InvalidAccessTokenFormat) - .leftMap(_ => InvalidAccessTokenFormat) + .catchNonFatal(Option(jwt.getJWTClaimsSet)) + .leftMap { e => InvalidAccessTokenFormat(e.getMessage) } + .flatMap { _.toRight(InvalidAccessTokenFormat("No claim is defined.")) } def subject(claimsSet: JWTClaimsSet) = { val preferredUsername = Try(claimsSet.getStringClaim("preferred_username")) diff --git a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/TokenRejection.scala b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/TokenRejection.scala new file mode 100644 index 0000000000..29e0d51ca4 --- /dev/null +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/jwt/TokenRejection.scala @@ -0,0 +1,55 @@ +package ch.epfl.bluebrain.nexus.delta.kernel.jwt + +/** + * Enumeration of token rejections. + * + * @param reason + * a descriptive message for reasons why a token is rejected by the system + */ +// $COVERAGE-OFF$ +sealed abstract class TokenRejection(reason: String) extends Exception with Product with Serializable { + override def fillInStackTrace(): Throwable = this + override def getMessage: String = reason +} + +object TokenRejection { + + /** + * Rejection for cases where the AccessToken is not a properly formatted signed JWT. + */ + final case class InvalidAccessTokenFormat(details: String) + extends TokenRejection( + s"Access token is invalid; possible causes are: JWT not signed, encoded parts are not properly encoded or each part is not a valid json, details: '$details'" + ) + + /** + * Rejection for cases where the access token does not contain a subject in the claim set. 
+ */ + final case object AccessTokenDoesNotContainSubject extends TokenRejection("The token doesn't contain a subject.") + + /** + * Rejection for cases where the access token does not contain an issuer in the claim set. + */ + final case object AccessTokenDoesNotContainAnIssuer extends TokenRejection("The token doesn't contain an issuer.") + + /** + * Rejection for cases where the issuer specified in the access token claim set is unknown; also applies to issuers + * of deprecated realms. + */ + final case object UnknownAccessTokenIssuer extends TokenRejection("The issuer referenced in the token was not found.") + + /** + * Rejection for cases where the access token is invalid according to JWTClaimsVerifier + */ + final case class InvalidAccessToken(subject: String, issuer: String, details: String) + extends TokenRejection(s"The provided token is invalid for user '$subject/$issuer' .") + + /** + * Rejection for cases where we couldn't fetch the groups from the OIDC provider + */ + final case class GetGroupsFromOidcError(subject: String, issuer: String) + extends TokenRejection( + "The token is invalid; possible causes are: the OIDC provider is unreachable." + ) +} +// $COVERAGE-ON$ diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/NonEmptySetSyntax.scala b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/NonEmptySetSyntax.scala similarity index 92% rename from delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/NonEmptySetSyntax.scala rename to delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/NonEmptySetSyntax.scala index 99d4d9a9a9..aa71ec8b52 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/NonEmptySetSyntax.scala +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/NonEmptySetSyntax.scala @@ -1,4 +1,4 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.syntax +package ch.epfl.bluebrain.nexus.delta.kernel.syntax import cats.data.NonEmptySet diff --git a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/package.scala b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/package.scala index 5874c3e5ce..1661242189 100644 --- a/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/package.scala +++ b/delta/kernel/src/main/scala/ch/epfl/bluebrain/nexus/delta/kernel/syntax/package.scala @@ -1,3 +1,3 @@ package ch.epfl.bluebrain.nexus.delta.kernel -package object syntax extends KamonSyntax with ClassTagSyntax with IOSyntax with InstantSyntax +package object syntax extends KamonSyntax with ClassTagSyntax with IOSyntax with InstantSyntax with NonEmptySetSyntax diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala index 31f9ed3691..2e84f0b0f6 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/AuthTokenProvider.scala @@ -4,10 +4,9 @@ import cats.effect.{Clock, IO} import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.delta.kernel.cache.LocalCache import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.MigrateEffectSyntax +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken} import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant import 
ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials.ClientCredentials -import ch.epfl.bluebrain.nexus.delta.sdk.identities.ParsedToken -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken import monix.bio import java.time.{Duration, Instant} diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala index 38685f73e2..410312e4cf 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/auth/OpenIdAuthService.scala @@ -7,11 +7,10 @@ import akka.http.scaladsl.model.{HttpRequest, Uri} import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.Secret import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration.MigrateEffectSyntax +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken} import ch.epfl.bluebrain.nexus.delta.sdk.auth.Credentials.ClientCredentials import ch.epfl.bluebrain.nexus.delta.sdk.error.AuthTokenError.{AuthTokenHttpError, AuthTokenNotFoundInResponse, RealmIsDeprecated} import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient -import ch.epfl.bluebrain.nexus.delta.sdk.identities.ParsedToken -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken import ch.epfl.bluebrain.nexus.delta.sdk.realms.Realms import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.Realm import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectives.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectives.scala index a79cd85448..292255eb60 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectives.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectives.scala @@ -12,10 +12,11 @@ import ch.epfl.bluebrain.nexus.delta.sdk.acls.model.AclAddress import ch.epfl.bluebrain.nexus.delta.sdk.error.IdentityError.{AuthenticationFailed, InvalidToken} import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.AuthorizationFailed import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller, ServiceAccount, TokenRejection} +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{Caller, ServiceAccount} import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, TokenRejection} import scala.concurrent.Future diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala index 598139952b..b5c06de8ca 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/IdentityError.scala @@ -1,11 +1,17 @@ package ch.epfl.bluebrain.nexus.delta.sdk.error +import akka.http.scaladsl.model.StatusCodes +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection.InvalidAccessToken +import ch.epfl.bluebrain.nexus.delta.kernel.utils.ClassUtils +import 
ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.BNode import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection -import io.circe.syntax._ +import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields +import ch.epfl.bluebrain.nexus.delta.sdk.syntax.httpResponseFieldsSyntax +import io.circe.syntax.EncoderOps import io.circe.{Encoder, JsonObject} /** @@ -34,6 +40,19 @@ object IdentityError { */ final case class InvalidToken(rejection: TokenRejection) extends IdentityError(rejection.getMessage) + implicit val tokenRejectionEncoder: Encoder.AsObject[TokenRejection] = + Encoder.AsObject.instance { r => + val tpe = ClassUtils.simpleName(r) + val json = JsonObject.empty.add(keywords.tpe, tpe.asJson).add("reason", r.getMessage.asJson) + r match { + case InvalidAccessToken(_, _, error) => json.add("details", error.asJson) + case _ => json + } + } + + implicit final val tokenRejectionJsonLdEncoder: JsonLdEncoder[TokenRejection] = + JsonLdEncoder.computeFromCirce(id = BNode.random, ctx = ContextValue(contexts.error)) + implicit val identityErrorEncoder: Encoder.AsObject[IdentityError] = Encoder.AsObject.instance[IdentityError] { case InvalidToken(r) => @@ -44,4 +63,13 @@ object IdentityError { implicit val identityErrorJsonLdEncoder: JsonLdEncoder[IdentityError] = JsonLdEncoder.computeFromCirce(ContextValue(contexts.error)) + + implicit val responseFieldsTokenRejection: HttpResponseFields[TokenRejection] = + HttpResponseFields(_ => StatusCodes.Unauthorized) + + implicit val responseFieldsIdentities: HttpResponseFields[IdentityError] = + HttpResponseFields { + case IdentityError.AuthenticationFailed => StatusCodes.Unauthorized + case IdentityError.InvalidToken(rejection) => rejection.status + } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/Identities.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/Identities.scala index c8133ce177..297148e29f 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/Identities.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/Identities.scala @@ -1,7 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.sdk.identities import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller /** * Operations pertaining to authentication, token validation and identities. 
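Note on the relocated ParsedToken: it now bundles parsing with signature and audience validation, which is what IdentitiesImpl below and the storage AuthorizationMethod both rely on. A minimal usage sketch in Scala follows; the helper verifyBearer and its parameters are illustrative only (not defined in this patch), and it assumes the caller already holds the accepted audiences and JWK set of the realm matching the token issuer.

import cats.data.NonEmptySet
import cats.effect.IO
import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken}
import com.nimbusds.jose.jwk.JWKSet

// Sketch only: parse the raw bearer token, then verify its signature (and audiences,
// when provided) against the realm's JWK set, as IdentitiesImpl does after this change.
def verifyBearer(raw: String, audiences: Option[NonEmptySet[String]], realmKeys: JWKSet): IO[ParsedToken] =
  for {
    parsed <- IO.fromEither(ParsedToken.fromToken(AuthToken(raw)))  // fails with InvalidAccessTokenFormat, missing subject/issuer, ...
    _      <- IO.fromEither(parsed.validate(audiences, realmKeys))  // fails with InvalidAccessToken when the JWT processor rejects it
  } yield parsed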
diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala index fe9200ecae..a6d1490edb 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImpl.scala @@ -2,30 +2,27 @@ package ch.epfl.bluebrain.nexus.delta.sdk.identities import akka.http.scaladsl.model.headers.{Authorization, OAuth2BearerToken} import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri} -import cats.data.{NonEmptySet, OptionT} +import cats.data.OptionT import cats.effect.IO import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.delta.kernel.cache.{CacheConfig, LocalCache} import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection.{GetGroupsFromOidcError, InvalidAccessToken, UnknownAccessTokenIssuer} +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken} import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.search.Pagination.FromPagination import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClient import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpClientStatusError import ch.epfl.bluebrain.nexus.delta.sdk.identities.IdentitiesImpl.{extractGroups, logger, GroupsCache, RealmCache} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection.{GetGroupsFromOidcError, InvalidAccessToken, UnknownAccessTokenIssuer} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceF import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchParams.RealmSearchParams import ch.epfl.bluebrain.nexus.delta.sdk.realms.Realms import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.Realm import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, User} -import com.nimbusds.jose.JWSAlgorithm -import com.nimbusds.jose.jwk.source.ImmutableJWKSet import com.nimbusds.jose.jwk.{JWK, JWKSet} -import com.nimbusds.jose.proc.{JWSVerificationKeySelector, SecurityContext} -import com.nimbusds.jwt.proc.{DefaultJWTClaimsVerifier, DefaultJWTProcessor} import io.circe.{Decoder, HCursor, Json} import scala.util.Try @@ -48,22 +45,8 @@ class IdentitiesImpl private[identities] ( new JWKSet(jwks.toList.asJava) } - def validate(audiences: Option[NonEmptySet[String]], token: ParsedToken, keySet: JWKSet) = { - val proc = new DefaultJWTProcessor[SecurityContext] - val keySelector = new JWSVerificationKeySelector(JWSAlgorithm.RS256, new ImmutableJWKSet[SecurityContext](keySet)) - proc.setJWSKeySelector(keySelector) - audiences.foreach { aud => - proc.setJWTClaimsSetVerifier(new DefaultJWTClaimsVerifier(aud.toSet.asJava, null, null, null)) - } - IO.fromEither( - Either - .catchNonFatal(proc.process(token.jwtToken, null)) - .leftMap(err => InvalidAccessToken(token.subject, token.issuer, err.getMessage)) - ) - } - def fetchRealm(parsedToken: ParsedToken): IO[Realm] = { - val getRealm = realm.getOrElseAttemptUpdate(parsedToken.rawToken, findActiveRealm(parsedToken.issuer)) + val getRealm = realm.getOrElseAttemptUpdate(parsedToken.issuer, 
findActiveRealm(parsedToken.issuer)) OptionT(getRealm).getOrRaise(UnknownAccessTokenIssuer) } @@ -85,7 +68,7 @@ class IdentitiesImpl private[identities] ( val result = for { parsedToken <- IO.fromEither(ParsedToken.fromToken(token)) activeRealm <- fetchRealm(parsedToken) - _ <- validate(activeRealm.acceptedAudiences, parsedToken, realmKeyset(activeRealm)) + _ <- IO.fromEither(parsedToken.validate(activeRealm.acceptedAudiences, realmKeyset(activeRealm))) groups <- fetchGroups(parsedToken, activeRealm) } yield { val user = User(parsedToken.subject, activeRealm.label) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala deleted file mode 100644 index e4855bafcb..0000000000 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejection.scala +++ /dev/null @@ -1,80 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.sdk.identities.model - -import akka.http.scaladsl.model.StatusCodes -import ch.epfl.bluebrain.nexus.delta.kernel.utils.ClassUtils -import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.BNode -import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder -import ch.epfl.bluebrain.nexus.delta.sdk.error.SDKError -import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields -import io.circe.syntax._ -import io.circe.{Encoder, JsonObject} - -/** - * Enumeration of token rejections. - * - * @param reason - * a descriptive message for reasons why a token is rejected by the system - */ -sealed abstract class TokenRejection(reason: String) extends SDKError with Product with Serializable { - override def getMessage: String = reason -} - -object TokenRejection { - - /** - * Rejection for cases where the AccessToken is not a properly formatted signed JWT. - */ - final case object InvalidAccessTokenFormat - extends TokenRejection( - "Access token is invalid; possible causes are: JWT not signed, encoded parts are not properly encoded or each part is not a valid json." - ) - - /** - * Rejection for cases where the access token does not contain a subject in the claim set. - */ - final case object AccessTokenDoesNotContainSubject extends TokenRejection("The token doesn't contain a subject.") - - /** - * Rejection for cases where the access token does not contain an issuer in the claim set. - */ - final case object AccessTokenDoesNotContainAnIssuer extends TokenRejection("The token doesn't contain an issuer.") - - /** - * Rejection for cases where the issuer specified in the access token claim set is unknown; also applies to issuers - * of deprecated realms. 
- */ - final case object UnknownAccessTokenIssuer extends TokenRejection("The issuer referenced in the token was not found.") - - /** - * Rejection for cases where the access token is invalid according to JWTClaimsVerifier - */ - final case class InvalidAccessToken(subject: String, issuer: String, details: String) - extends TokenRejection(s"The provided token is invalid for user '$subject/$issuer' .") - - /** - * Rejection for cases where we couldn't fetch the groups from the OIDC provider - */ - final case class GetGroupsFromOidcError(subject: String, issuer: String) - extends TokenRejection( - "The token is invalid; possible causes are: the OIDC provider is unreachable." - ) - - implicit val tokenRejectionEncoder: Encoder.AsObject[TokenRejection] = - Encoder.AsObject.instance { r => - val tpe = ClassUtils.simpleName(r) - val json = JsonObject.empty.add(keywords.tpe, tpe.asJson).add("reason", r.getMessage.asJson) - r match { - case InvalidAccessToken(_, _, error) => json.add("details", error.asJson) - case _ => json - } - } - - implicit final val tokenRejectionJsonLdEncoder: JsonLdEncoder[TokenRejection] = - JsonLdEncoder.computeFromCirce(id = BNode.random, ctx = ContextValue(contexts.error)) - - implicit val responseFieldsTokenRejection: HttpResponseFields[TokenRejection] = - HttpResponseFields(_ => StatusCodes.Unauthorized) -} diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/HttpResponseFields.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/HttpResponseFields.scala index 0f3f8f0ee1..c87c448621 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/HttpResponseFields.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/HttpResponseFields.scala @@ -1,9 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.sdk.marshalling import akka.http.scaladsl.model.{HttpHeader, StatusCode, StatusCodes} +import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.{AuthorizationFailed, FetchContextFailed, IndexingFailed, ScopeInitializationFailed, UnknownSseLabel} -import ch.epfl.bluebrain.nexus.delta.sdk.error.{IdentityError, ServiceError} -import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ /** * Typeclass definition for ''A''s from which the HttpHeaders and StatusCode can be obtained. 
@@ -61,12 +60,6 @@ object HttpResponseFields { override def headersFrom(value: A): Seq[HttpHeader] = f(value)._2 } - implicit val responseFieldsIdentities: HttpResponseFields[IdentityError] = - HttpResponseFields { - case IdentityError.AuthenticationFailed => StatusCodes.Unauthorized - case IdentityError.InvalidToken(rejection) => rejection.status - } - implicit val responseFieldsServiceError: HttpResponseFields[ServiceError] = HttpResponseFields { case AuthorizationFailed => StatusCodes.Forbidden diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/HttpRequestSyntax.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/HttpRequestSyntax.scala index b07fbe7043..39be8ea7f7 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/HttpRequestSyntax.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/HttpRequestSyntax.scala @@ -2,7 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.sdk.syntax import akka.http.scaladsl.model.HttpRequest import akka.http.scaladsl.model.headers.{HttpCredentials, OAuth2BearerToken} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.AuthToken +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken trait HttpRequestSyntax { implicit final def httpRequestSyntax(req: HttpRequest): HttpRequestOpts = new HttpRequestOpts(req) diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/package.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/package.scala index f9ceadbbb5..c89d84aed5 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/package.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/syntax/package.scala @@ -21,5 +21,4 @@ package object syntax with ClassTagSyntax with IOSyntax with InstantSyntax - with NonEmptySetSyntax with ProjectionErrorsSyntax diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectivesSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectivesSpec.scala index 0971f4bde1..f3169950b8 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectivesSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/AuthDirectivesSpec.scala @@ -5,6 +5,7 @@ import akka.http.scaladsl.model.headers.{BasicHttpCredentials, OAuth2BearerToken import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{ExceptionHandler, Route} import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering @@ -13,8 +14,8 @@ import ch.epfl.bluebrain.nexus.delta.sdk.acls.model.AclAddress import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.AuthorizationFailed import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller.Anonymous -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection.InvalidAccessToken -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection.InvalidAccessToken +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfExceptionHandler import 
ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesDummy.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesDummy.scala index 6d62b2e221..093625dc05 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesDummy.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesDummy.scala @@ -1,8 +1,9 @@ package ch.epfl.bluebrain.nexus.delta.sdk.identities import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection.InvalidAccessToken -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection.InvalidAccessToken +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User /** diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala index 7e000063db..aebfcaf182 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/IdentitiesImplSuite.scala @@ -7,26 +7,26 @@ import cats.effect.IO import cats.effect.concurrent.Ref import cats.implicits._ import ch.epfl.bluebrain.nexus.delta.kernel.cache.LocalCache +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken} +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection._ import ch.epfl.bluebrain.nexus.delta.sdk.generators.{RealmGen, WellKnownGen} import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError.HttpUnexpectedError import ch.epfl.bluebrain.nexus.delta.sdk.identities.IdentitiesImpl.{GroupsCache, RealmCache} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection.{AccessTokenDoesNotContainAnIssuer, AccessTokenDoesNotContainSubject, GetGroupsFromOidcError, InvalidAccessToken, InvalidAccessTokenFormat, UnknownAccessTokenIssuer} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.{AuthToken, Caller} +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.realms.model.Realm import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Authenticated, Group, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label import ch.epfl.bluebrain.nexus.testkit.ce.{CatsEffectSuite, IOFromMap} +import ch.epfl.bluebrain.nexus.testkit.jwt.TokenGenerator import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, TestHelpers} import com.nimbusds.jose.crypto.RSASSASigner import com.nimbusds.jose.jwk.RSAKey import com.nimbusds.jose.jwk.gen.RSAKeyGenerator -import com.nimbusds.jose.{JWSAlgorithm, JWSHeader} -import com.nimbusds.jwt.{JWTClaimsSet, PlainJWT, SignedJWT} +import com.nimbusds.jwt.{JWTClaimsSet, PlainJWT} import io.circe.{parser, Json} import java.time.Instant import java.util.Date -import scala.jdk.CollectionConverters._ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMap with CirceLiteral { @@ -58,35 +58,17 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa groups: Option[Set[String]] = None, useCommas: 
Boolean = false, preferredUsername: Option[String] = None - ): AuthToken = { - val csb = new JWTClaimsSet.Builder() - .issuer(issuer.value) - .subject(subject) - .expirationTime(Date.from(expires)) - .notBeforeTime(Date.from(notBefore)) - - groups.foreach { set => - if (useCommas) csb.claim("groups", set.mkString(",")) - else csb.claim("groups", set.toArray) - } - - aud.foreach(audiences => csb.audience(audiences.toList.asJava)) - - preferredUsername.foreach(pu => csb.claim("preferred_username", pu)) - - toSignedJwt(csb, rsaKey) - } - - private def toSignedJwt(builder: JWTClaimsSet.Builder, rsaKey: RSAKey = rsaKey): AuthToken = { - val jwt = new SignedJWT( - new JWSHeader.Builder(JWSAlgorithm.RS256) - .keyID(rsaKey.getKeyID) - .build(), - builder.build() - ) - jwt.sign(signer) - AuthToken(jwt.serialize()) - } + ): AuthToken = TokenGenerator.generateToken( + subject, + issuer.value, + rsaKey, + expires, + notBefore, + aud, + groups, + useCommas, + preferredUsername + ) private val githubLabel = Label.unsafe("github") private val githubLabel2 = Label.unsafe("github2") @@ -236,7 +218,7 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa } test("Fail when the token is invalid") { - identities.exchange(AuthToken(genString())).intercept(InvalidAccessTokenFormat) + identities.exchange(AuthToken(genString())).intercept[InvalidAccessTokenFormat] } test("Fail when the token is not signed") { @@ -245,7 +227,7 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa .expirationTime(Date.from(nowPlus1h)) val token = AuthToken(new PlainJWT(csb.build()).serialize()) - identities.exchange(token).intercept(InvalidAccessTokenFormat) + identities.exchange(token).intercept[InvalidAccessTokenFormat] } test("Fail when the token doesn't contain an issuer") { @@ -253,7 +235,7 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa .subject("subject") .expirationTime(Date.from(nowPlus1h)) - val token = toSignedJwt(csb) + val token = TokenGenerator.toSignedJwt(csb, rsaKey, signer) identities.exchange(token).intercept(AccessTokenDoesNotContainAnIssuer) } @@ -262,7 +244,7 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa .issuer(githubLabel.value) .expirationTime(Date.from(nowPlus1h)) - val token = toSignedJwt(csb) + val token = TokenGenerator.toSignedJwt(csb, rsaKey, signer) identities.exchange(token).intercept(AccessTokenDoesNotContainSubject) } @@ -353,7 +335,7 @@ class IdentitiesImplSuite extends CatsEffectSuite with TestHelpers with IOFromMa _ <- realm.get(parsedToken.rawToken).assertNone _ <- groups.get(parsedToken.rawToken).assertNone _ <- identitiesFromCaches(realm, groups)(findActiveRealm).exchange(token) - _ <- realm.get(parsedToken.rawToken).assertSome(github) + _ <- realm.get(parsedToken.issuer).assertSome(github) _ <- groups.get(parsedToken.rawToken).assertSome(Set(group3, group4)) } yield () } diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala index b6dc1cf571..5c36371daf 100644 --- a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/identities/model/TokenRejectionSpec.scala @@ -1,13 +1,14 @@ package ch.epfl.bluebrain.nexus.delta.sdk.identities.model import 
ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, nxv} -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.TokenRejection._ +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection._ import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sdk.utils.Fixtures import ch.epfl.bluebrain.nexus.testkit.{CirceLiteral, IOValues, TestHelpers} import org.scalatest.Inspectors import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike +import ch.epfl.bluebrain.nexus.delta.sdk.error.IdentityError._ class TokenRejectionSpec extends AnyWordSpecLike @@ -20,7 +21,7 @@ class TokenRejectionSpec "A TokenRejection" should { - val invalidFormat = InvalidAccessTokenFormat + val invalidFormat = InvalidAccessTokenFormat("Details") val noIssuer = AccessTokenDoesNotContainSubject "be converted to compacted JSON-LD" in { diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/jwt/TokenGenerator.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/jwt/TokenGenerator.scala new file mode 100644 index 0000000000..a5c81c2943 --- /dev/null +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/jwt/TokenGenerator.scala @@ -0,0 +1,58 @@ +package ch.epfl.bluebrain.nexus.testkit.jwt + +import cats.data.NonEmptySet +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken +import com.nimbusds.jose.crypto.RSASSASigner +import com.nimbusds.jose.jwk.RSAKey +import com.nimbusds.jose.{JWSAlgorithm, JWSHeader} +import com.nimbusds.jwt.{JWTClaimsSet, SignedJWT} + +import java.time.Instant +import java.util.Date + +object TokenGenerator { + + import scala.jdk.CollectionConverters._ + + def generateToken( + subject: String, + issuer: String, + rsaKey: RSAKey, + expires: Instant, + notBefore: Instant, + aud: Option[NonEmptySet[String]] = None, + groups: Option[Set[String]] = None, + useCommas: Boolean = false, + preferredUsername: Option[String] = None + ): AuthToken = { + val csb = new JWTClaimsSet.Builder() + .issuer(issuer) + .subject(subject) + .expirationTime(Date.from(expires)) + .notBeforeTime(Date.from(notBefore)) + + groups.foreach { set => + if (useCommas) csb.claim("groups", set.mkString(",")) + else csb.claim("groups", set.toArray) + } + + aud.foreach(audiences => csb.audience(audiences.toList.asJava)) + + preferredUsername.foreach(pu => csb.claim("preferred_username", pu)) + + toSignedJwt(csb, rsaKey, new RSASSASigner(rsaKey.toPrivateKey)) + } + + def toSignedJwt(builder: JWTClaimsSet.Builder, rsaKey: RSAKey, signer: RSASSASigner): AuthToken = { + val jwt = new SignedJWT( + new JWSHeader.Builder(JWSAlgorithm.RS256) + .keyID(rsaKey.getKeyID) + .build(), + builder.build() + ) + jwt.sign(signer) + AuthToken(jwt.serialize()) + } + +} diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index 5b2c7d0efe..b34cb08310 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -194,6 +194,18 @@ It is now possible to query whether the current logged in user has a specific pe TODO +## Nexus Storage + +### Automatic file detection + +The Nexus storage app also benefits from the changes about automatic file detection. 
+ +@ref:[More information](../getting-started/running-nexus/configuration/index.md#file-configuration) + +### Removing Delta callback to validate the caller identity + +This callback has been replaced by a local validation of the token. + ## Nexus forge TODO diff --git a/storage/src/main/resources/app.conf b/storage/src/main/resources/app.conf index 27ff92a2db..c98e2f043b 100644 --- a/storage/src/main/resources/app.conf +++ b/storage/src/main/resources/app.conf @@ -64,27 +64,20 @@ app { } # Allowed subject to perform calls - subject { - # flag to decide whether or not the allowed subject is Anonymous or a User - anonymous = false + authorization { + # flag to decide whether a token is expected or not to accept the incoming requests + # valid values: "anonymous" or "verify-token" + type = anonymous # the user realm. It must be present when anonymous = false and it must be removed when anonymous = true - //realm = "realm" + # issuer = "realm" # the user name. It must be present when anonymous = false and it must be removed when anonymous = true - //name = "username" + # subject = "username" + # the optional set of audiences of the realm + # audiences = [ ] + # Public JWK keys to validate the incoming token + # keys = [ "key" ] } - # Delta client configuration - delta { - # The public iri to the Delta service - public-iri = "http://localhost:8080" - # The internal iri to the Delta service - internal-iri = "http://localhost:8080" - # The version prefix - prefix = "v1" - - # The delay for retrying after completion on SSE - sse-retry-delay = 1 second - } # monitoring config monitoring { # tracing settings diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClient.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClient.scala deleted file mode 100644 index ec961b39ed..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClient.scala +++ /dev/null @@ -1,175 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.actor.ActorSystem -import akka.http.scaladsl.Http -import akka.http.scaladsl.client.RequestBuilding.Get -import akka.http.scaladsl.model.{HttpRequest, Uri} -import akka.http.scaladsl.model.headers.OAuth2BearerToken -import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller -import akka.util.ByteString -import cats.effect.{ContextShift, Effect, IO} -import cats.implicits._ -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.Identity._ -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient._ -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClientError.IdentitiesSerializationError -import ch.epfl.bluebrain.nexus.storage.config.DeltaClientConfig -import de.heikoseeberger.akkahttpcirce.ErrorAccumulatingCirceSupport.{DecodingFailures => AccDecodingFailures} -import io.circe.Decoder.Result -import io.circe.{Decoder, DecodingFailure, HCursor} - -import scala.concurrent.ExecutionContext - -class DeltaIdentitiesClient[F[_]](config: DeltaClientConfig)(implicit F: Effect[F], as: ActorSystem) - extends JsonLdCirceSupport { - - private val um: FromEntityUnmarshaller[Caller] = unmarshaller[Caller] - implicit private val ec: ExecutionContext = as.dispatcher - implicit private val contextShift: ContextShift[IO] = IO.contextShift(ec) - - def apply()(implicit credentials: Option[AccessToken]): F[Caller] = - credentials match { - case Some(token) => - execute(Get(Uri(config.identitiesIri.toString)).addCredentials(OAuth2BearerToken(token.value))) - case None => - 
F.pure(Caller.anonymous) - } - - private def execute(req: HttpRequest): F[Caller] = { - IO.fromFuture(IO(Http().singleRequest(req))).to[F].flatMap { resp => - if (resp.status.isSuccess()) - IO.fromFuture(IO(um(resp.entity))).to[F].recoverWith { - case err: AccDecodingFailures => F.raiseError(IdentitiesSerializationError(err.getMessage)) - case err: Error => F.raiseError(IdentitiesSerializationError(err.getMessage)) - } - else - IO.fromFuture(IO(resp.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String))) - .to[F] - .flatMap { err => F.raiseError(DeltaIdentitiesClientError.unsafe(resp.status, err)) } - } - } - -} - -object DeltaIdentitiesClient { - - /** - * The client caller. It contains the subject and the list of identities (which contains the subject again) - * - * @param subject - * the identity that performed the call - * @param identities - * the set of other identities associated to the ''subject''. E.g.: groups, anonymous, authenticated - */ - final case class Caller(subject: Subject, identities: Set[Identity]) - - object Caller { - - /** - * An anonymous caller - */ - val anonymous: Caller = Caller(Anonymous: Subject, Set[Identity](Anonymous)) - - implicit final val callerDecoder: Decoder[Caller] = - Decoder.instance { cursor => - cursor - .get[Set[Identity]]("identities") - .flatMap { identities => - identities.collectFirst { case u: User => u } orElse identities.collectFirst { case Anonymous => - Anonymous - } match { - case Some(subject: Subject) => Right(Caller(subject, identities)) - case _ => - val pos = cursor.downField("identities").history - Left(DecodingFailure("Unable to find a subject in the collection of identities", pos)) - } - } - } - } - - /** - * A data structure which represents an access token - * - * @param value - * the token value - */ - final case class AccessToken(value: String) - - /** - * Base enumeration type for identity classes. - */ - sealed trait Identity extends Product with Serializable - - object Identity { - - /** - * Base enumeration type for subject classes. 
- */ - sealed trait Subject extends Identity - - sealed trait Anonymous extends Subject - - /** - * The Anonymous subject - */ - final case object Anonymous extends Anonymous - - /** - * The User subject - * - * @param subject - * unique user name - * @param realm - * user realm - */ - final case class User(subject: String, realm: String) extends Subject - - /** - * The Group identity - * - * @param group - * the group - * @param realm - * group realm - */ - final case class Group(group: String, realm: String) extends Identity - - /** - * The Authenticated identity - * - * @param realm - * the realm - */ - final case class Authenticated(realm: String) extends Identity - - private def decodeAnonymous(hc: HCursor): Result[Subject] = - hc.get[String]("@type").flatMap { - case "Anonymous" => Right(Anonymous) - case _ => Left(DecodingFailure("Cannot decode Anonymous Identity", hc.history)) - } - - private def decodeUser(hc: HCursor): Result[Subject] = - (hc.get[String]("subject"), hc.get[String]("realm")).mapN { case (subject, realm) => - User(subject, realm) - } - - private def decodeGroup(hc: HCursor): Result[Identity] = - (hc.get[String]("group"), hc.get[String]("realm")).mapN { case (group, realm) => - Group(group, realm) - } - - private def decodeAuthenticated(hc: HCursor): Result[Identity] = - hc.get[String]("realm").map(Authenticated) - - private val attempts = - List[HCursor => Result[Identity]](decodeAnonymous, decodeUser, decodeGroup, decodeAuthenticated) - - implicit val identityDecoder: Decoder[Identity] = - Decoder.instance { hc => - attempts.foldLeft(Left(DecodingFailure("Unexpected", hc.history)): Result[Identity]) { - case (acc @ Right(_), _) => acc - case (_, f) => f(hc) - } - } - } - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClientError.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClientError.scala deleted file mode 100644 index ee2640a604..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/DeltaIdentitiesClientError.scala +++ /dev/null @@ -1,57 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.{StatusCode, StatusCodes} - -/** - * Enumeration of possible Delta Client errors. - */ - -sealed abstract class DeltaIdentitiesClientError(val msg: String) extends Exception with Product with Serializable { - override def fillInStackTrace(): DeltaIdentitiesClientError = this - override def getMessage: String = msg -} - -object DeltaIdentitiesClientError { - - final def unsafe(status: StatusCode, body: String): DeltaIdentitiesClientError = - status match { - case _ if status.isSuccess() => - throw new IllegalArgumentException(s"Successful status code '$status' found, error expected.") - case code: StatusCodes.ClientError => IdentitiesClientStatusError(code, body) - case code: StatusCodes.ServerError => IdentitiesServerStatusError(code, body) - case _ => IdentitiesUnexpectedStatusError(status, body) - } - - /** - * A serialization error when attempting to cast response. - */ - final case class IdentitiesSerializationError(message: String) - extends DeltaIdentitiesClientError( - s"a Delta request to the identities endpoint could not be converted to 'Caller' type. Details '$message'" - ) - - /** - * A Client status error (HTTP status codes 4xx). 
- */ - final case class IdentitiesClientStatusError(code: StatusCodes.ClientError, message: String) - extends DeltaIdentitiesClientError( - s"a Delta request to the identities endpoint that should have been successful, returned the HTTP status code '$code'. Details '$message'" - ) - - /** - * A server status error (HTTP status codes 5xx). - */ - final case class IdentitiesServerStatusError(code: StatusCodes.ServerError, message: String) - extends DeltaIdentitiesClientError( - s"a Delta request to the identities endpoint that should have been successful, returned the HTTP status code '$code'. Details '$message'" - ) - - /** - * Some other response error which is not 4xx nor 5xx - */ - final case class IdentitiesUnexpectedStatusError(code: StatusCode, message: String) - extends DeltaIdentitiesClientError( - s"a Delta request to the identities endpoint that should have been successful, returned the HTTP status code '$code'. Details '$message'" - ) - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala index 8257a919cc..cb2e35821e 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala @@ -10,6 +10,7 @@ import akka.util.Timeout import cats.effect.Effect import ch.epfl.bluebrain.nexus.storage.Storages.DiskStorage import ch.epfl.bluebrain.nexus.storage.attributes.{AttributesCache, ContentTypeDetector} +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ import ch.epfl.bluebrain.nexus.storage.routes.Routes @@ -53,13 +54,13 @@ object Main { implicit val appConfig: AppConfig = Settings(config).appConfig - implicit val as: ActorSystem = ActorSystem(appConfig.description.fullName, config) - implicit val ec: ExecutionContext = as.dispatcher - implicit val eff: Effect[Task] = Task.catsEffect(Scheduler.global) - implicit val deltaIdentities: DeltaIdentitiesClient[Task] = new DeltaIdentitiesClient[Task](appConfig.delta) - implicit val timeout = Timeout(1.minute) - implicit val clock = Clock.systemUTC - implicit val contentTypeDetector = new ContentTypeDetector(appConfig.mediaTypeDetector) + implicit val as: ActorSystem = ActorSystem(appConfig.description.fullName, config) + implicit val ec: ExecutionContext = as.dispatcher + implicit val eff: Effect[Task] = Task.catsEffect(Scheduler.global) + implicit val authorizationMethod: AuthorizationMethod = appConfig.authorization + implicit val timeout = Timeout(1.minute) + implicit val clock = Clock.systemUTC + implicit val contentTypeDetector = new ContentTypeDetector(appConfig.mediaTypeDetector) val storages: Storages[Task, AkkaSource] = new DiskStorage(appConfig.storage, contentTypeDetector, appConfig.digest, AttributesCache[Task, AkkaSource]) @@ -67,6 +68,11 @@ object Main { val logger: LoggingAdapter = Logging(as, getClass) logger.info("==== Cluster is Live ====") + + if (authorizationMethod == AuthorizationMethod.Anonymous) { + logger.warning("The application has been configured with anonymous, the caller will not be verified !") + } + val routes: Route = Routes(storages) val httpBinding: Future[Http.ServerBinding] = { diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala new file mode 100644 index 
0000000000..da68c9696a --- /dev/null +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala @@ -0,0 +1,20 @@ +package ch.epfl.bluebrain.nexus.storage.auth + +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection + +sealed abstract class AuthorizationError(message: String) extends Exception(message) with Product with Serializable { + override def fillInStackTrace(): AuthorizationError = this +} + +object AuthorizationError { + + final case object NoToken extends AuthorizationError("No token has been provided.") + final case class InvalidToken(tokenRejection: TokenRejection) extends AuthorizationError(tokenRejection.getMessage) + final case class UnauthorizedUser(issuer: String, subject: String) + extends AuthorizationError( + s"User '$subject' from realm '$issuer' wrongfully attempted to perform a call to this service." + ) + final case class TokenNotVerified(tokenRejection: TokenRejection) + extends AuthorizationError(tokenRejection.getMessage) + +} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala new file mode 100644 index 0000000000..c65506e2bf --- /dev/null +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala @@ -0,0 +1,68 @@ +package ch.epfl.bluebrain.nexus.storage.auth + +import cats.data.{NonEmptyList, NonEmptySet} +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken} +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationError._ +import com.nimbusds.jose.jwk.{JWK, JWKSet} +import pureconfig.ConfigReader +import pureconfig.generic.semiauto.deriveReader +import pureconfig.module.cats._ + +import scala.annotation.nowarn +import scala.jdk.CollectionConverters._ +import scala.util.Try + +/** + * Authorization config + */ +sealed trait AuthorizationMethod { + + /** + * Validates the incoming token + */ + def validate(token: Option[AuthToken]): Either[AuthorizationError, Unit] +} + +object AuthorizationMethod { + + /** + * No token/authorization is needed when performing calls + */ + final case object Anonymous extends AuthorizationMethod { + override def validate(token: Option[AuthToken]): Either[AuthorizationError, Unit] = Right(()) + } + + /** + * A token matching this realm and username is required and can be validated to the provided audiences and set of + * JSON Web Keys + */ + final case class VerifyToken(issuer: String, subject: String, audiences: Option[NonEmptySet[String]], keys: JWKSet) + extends AuthorizationMethod { + override def validate(token: Option[AuthToken]): Either[AuthorizationError, Unit] = { + for { + token <- token.toRight(NoToken) + parsedToken <- ParsedToken.fromToken(token).leftMap(InvalidToken) + _ <- Either.cond( + issuer == parsedToken.issuer && subject == parsedToken.subject, + (), + UnauthorizedUser(parsedToken.issuer, parsedToken.subject) + ) + _ <- parsedToken.validate(audiences, keys).leftMap(TokenNotVerified) + } yield () + } + } + + @nowarn("cat=unused") + implicit val authorizationMethodConfigReader: ConfigReader[AuthorizationMethod] = { + implicit val jwkReader: ConfigReader[JWK] = ConfigReader.fromStringTry { s => Try(JWK.parse(s)) } + implicit val jwkSetReader: ConfigReader[JWKSet] = ConfigReader[NonEmptyList[JWK]].map { l => + new JWKSet(l.toList.asJava) + } + implicit val anonymousReader = deriveReader[Anonymous.type] + implicit val verifyToken: ConfigReader[VerifyToken] = 
deriveReader[VerifyToken] + + deriveReader[AuthorizationMethod] + } + +} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala index 4c20aa4abe..130c3d402f 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala @@ -1,12 +1,12 @@ package ch.epfl.bluebrain.nexus.storage.config -import java.nio.file.Path import akka.http.scaladsl.model.Uri import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.Identity.{Anonymous, Subject, User} import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.OrderedKeys +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ +import java.nio.file.Path import scala.concurrent.duration.FiniteDuration /** @@ -18,10 +18,10 @@ import scala.concurrent.duration.FiniteDuration * http interface configuration * @param storage * storages configuration - * @param subject - * allowed subject to perform calls to this service - * @param delta - * delta client configuration + * @param authorization + * authorization configuration + * @param mediaTypeDetector + * media type configuration * @param digest * the digest configuration */ @@ -29,8 +29,7 @@ final case class AppConfig( description: Description, http: HttpConfig, storage: StorageConfig, - subject: SubjectConfig, - delta: DeltaClientConfig, + authorization: AuthorizationMethod, mediaTypeDetector: MediaTypeDetectorConfig, digest: DigestConfig ) @@ -93,33 +92,6 @@ object AppConfig { fixerCommand: Vector[String] ) - /** - * Allowed subject to perform calls to this service - * - * @param anonymous - * flag to decide whether or not the allowed subject is Anonymous or a User - * @param realm - * the user realm. It must be present when anonymous = false and it must be removed when anonymous = true - * @param name - * the user name. It must be present when anonymous = false and it must be removed when anonymous = true - */ - final case class SubjectConfig(anonymous: Boolean, realm: Option[String], name: Option[String]) { - // $COVERAGE-OFF$ - val subjectValue: Subject = (anonymous, realm, name) match { - case (false, Some(r), Some(s)) => User(s, r) - case (false, _, _) => - throw new IllegalArgumentException( - "subject configuration is wrong. When anonymous is set to false, a realm and a subject must be provided" - ) - case (true, None, None) => Anonymous - case _ => - throw new IllegalArgumentException( - "subject configuration is wrong. When anonymous is set to true, a realm and a subject should not be present" - ) - } - // $COVERAGE-ON$ - } - /** * The digest configuration. 
* @@ -142,10 +114,9 @@ object AppConfig { retriggerAfter: FiniteDuration ) - implicit def toStorage(implicit config: AppConfig): StorageConfig = config.storage - implicit def toHttp(implicit config: AppConfig): HttpConfig = config.http - implicit def toDelta(implicit config: AppConfig): DeltaClientConfig = config.delta - implicit def toDigest(implicit config: AppConfig): DigestConfig = config.digest + implicit def toStorage(implicit config: AppConfig): StorageConfig = config.storage + implicit def toHttp(implicit config: AppConfig): HttpConfig = config.http + implicit def toDigest(implicit config: AppConfig): DigestConfig = config.digest val orderedKeys: OrderedKeys = OrderedKeys( List( diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/DeltaClientConfig.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/DeltaClientConfig.scala deleted file mode 100644 index ea98fe5d61..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/DeltaClientConfig.scala +++ /dev/null @@ -1,24 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.config - -import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.storage.UriUtils.addPath - -/** - * Configuration for DeltaClient identities endpoint. - * - * @param publicIri - * base URL for all the identity IDs, excluding prefix. - * @param internalIri - * base URL for all the HTTP calls, excluding prefix. - * @param prefix - * the prefix - */ -final case class DeltaClientConfig( - publicIri: Uri, - internalIri: Uri, - prefix: String -) { - lazy val baseInternalIri: Uri = addPath(internalIri, prefix) - lazy val basePublicIri: Uri = addPath(publicIri, prefix) - lazy val identitiesIri: Uri = addPath(baseInternalIri, "identities") -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala index f67b6d034c..48e3bd3434 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala @@ -1,19 +1,12 @@ package ch.epfl.bluebrain.nexus.storage.routes -import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.model.headers.OAuth2BearerToken -import akka.http.scaladsl.server.Directive1 +import akka.http.scaladsl.server.Directive0 import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.directives.FutureDirectives.onComplete -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.{AccessToken, Caller} -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClientError.IdentitiesClientStatusError +import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken import ch.epfl.bluebrain.nexus.storage.StorageError._ +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import com.typesafe.scalalogging.Logger -import monix.eval.Task -import monix.execution.Scheduler.Implicits.global - -import scala.util.{Failure, Success} object AuthDirectives { @@ -22,24 +15,19 @@ object AuthDirectives { /** * Extracts the credentials from the HTTP Authorization Header and builds the [[AccessToken]] */ - def extractToken: Directive1[Option[AccessToken]] = + def validUser(implicit authorizationMethod: AuthorizationMethod): Directive0 = { + def validate(token: Option[AuthToken]): Directive0 = + authorizationMethod.validate(token) match { + case Left(error) => + logger.error("The 
user could not be validated.", error) + failWith(AuthenticationFailed) + case Right(_) => pass + } + extractCredentials.flatMap { - case Some(OAuth2BearerToken(value)) => provide(Some(AccessToken(value))) + case Some(OAuth2BearerToken(value)) => validate(Some(AuthToken(value))) case Some(_) => failWith(AuthenticationFailed) - case _ => provide(None) - } - - /** - * Authenticates the requested with the provided ''token'' and returns the ''caller'' - */ - def extractCaller(implicit identities: DeltaIdentitiesClient[Task], token: Option[AccessToken]): Directive1[Caller] = - onComplete(identities().runToFuture).flatMap { - case Success(caller) => provide(caller) - case Failure(IdentitiesClientStatusError(StatusCodes.Unauthorized, _)) => failWith(AuthenticationFailed) - case Failure(IdentitiesClientStatusError(StatusCodes.Forbidden, _)) => failWith(AuthorizationFailed) - case Failure(err) => - val message = "Error when trying to extract the subject" - logger.error(message, err) - failWith(InternalError(message)) + case _ => validate(None) } + } } diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala index 54ec884323..360cc42749 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala @@ -3,14 +3,14 @@ package ch.epfl.bluebrain.nexus.storage.routes import akka.http.scaladsl.model.headers.{`WWW-Authenticate`, HttpChallenges} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{ExceptionHandler, RejectionHandler, Route} -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.Caller import ch.epfl.bluebrain.nexus.storage.StorageError._ +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.AppConfig import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ import ch.epfl.bluebrain.nexus.storage.routes.AuthDirectives._ import ch.epfl.bluebrain.nexus.storage.routes.PrefixDirectives._ import ch.epfl.bluebrain.nexus.storage.routes.instances._ -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, DeltaIdentitiesClient, Rejection, StorageError, Storages} +import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Rejection, StorageError, Storages} import com.typesafe.scalalogging.Logger import monix.eval.Task @@ -85,16 +85,13 @@ object Routes { */ def apply( storages: Storages[Task, AkkaSource] - )(implicit config: AppConfig, identities: DeltaIdentitiesClient[Task]): Route = + )(implicit config: AppConfig, authorizationMethod: AuthorizationMethod): Route = //TODO: Fetch Bearer token and verify identity wrap { concat( AppInfoRoutes(config.description).routes, - (pathPrefix(config.http.prefix) & extractToken) { implicit token => - extractCaller.apply { - case Caller(config.subject.subjectValue, _) => StorageRoutes(storages).routes - case _ => failWith(AuthenticationFailed) - } + (pathPrefix(config.http.prefix) & validUser) { + StorageRoutes(storages).routes } ) } diff --git a/storage/src/test/resources/app.conf b/storage/src/test/resources/app.conf index a9df4fd6fd..3899e80d82 100644 --- a/storage/src/test/resources/app.conf +++ b/storage/src/test/resources/app.conf @@ -1,8 +1,7 @@ # All application specific configuration should reside here app { - # Allowed subject to perform calls - subject { - # flag to decide whether or not the allowed subject is Anonymous or a User - anonymous = true + # Authorization 
method + authorization { + method = anonymous } } \ No newline at end of file diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala new file mode 100644 index 0000000000..31be7bdc4a --- /dev/null +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala @@ -0,0 +1,137 @@ +package ch.epfl.bluebrain.nexus.storage.auth + +import ch.epfl.bluebrain.nexus.storage.utils.Randomness.genString +import cats.data.NonEmptySet +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod._ +import com.nimbusds.jose.jwk.gen.RSAKeyGenerator +import com.nimbusds.jose.jwk.{JWK, JWKSet, RSAKey} +import munit.FunSuite +import pureconfig.ConfigSource + +import scala.jdk.CollectionConverters._ + +class AuthorizationMethodSuite extends FunSuite { + + private def generateKey: RSAKey = new RSAKeyGenerator(2048).keyID(genString()).generate() + + private def parseConfig(value: String) = + ConfigSource.string(value).at("authorization").load[AuthorizationMethod] + + test("Parse successfully for the anonymous method") { + val config = parseConfig( + """ + |authorization { + | type = anonymous + |} + |""".stripMargin + ) + assertEquals(config, Right(Anonymous)) + } + + test("Parse successfully for the verify token method") { + val key1: JWK = generateKey.toPublicJWK + val key2: JWK = generateKey.toPublicJWK + + val config = parseConfig( + s""" + |authorization { + | type = verify-token + | issuer = bbp + | subject = admin + | audiences = [dev, staging] + | keys = [ "${key1.toJSONString}", "${key2.toJSONString}"] + |} + |""".stripMargin + ) + + val expectedAudiences = Some(NonEmptySet.of("dev", "staging")) + val expectedKeySet = new JWKSet(List(key1, key2).asJava) + val expected = VerifyToken("bbp", "admin", expectedAudiences, expectedKeySet) + + assertEquals(config, Right(expected)) + } + + test("Parse successfully without audiences") { + val key1: JWK = generateKey.toPublicJWK + + val config = parseConfig( + s""" + |authorization { + | type = verify-token + | issuer = bbp + | subject = admin + | keys = [ "${key1.toJSONString}" ] + |} + |""".stripMargin + ) + + val expectedAudiences = None + val expectedKeySet = new JWKSet(key1) + val expected = VerifyToken("bbp", "admin", expectedAudiences, expectedKeySet) + + assertEquals(config, Right(expected)) + } + + test("Fail to parse the config if the issuer is missing") { + val key1: JWK = generateKey.toPublicJWK + + val config = parseConfig( + s""" + |authorization { + | type = verify-token + | subject = admin + | keys = [ "${key1.toJSONString}" ] + |} + |""".stripMargin + ) + + assert(config.isLeft, "Parsing must fail with an missing issuer") + } + + test("Fail to parse the config if the subject is missing") { + val key1: JWK = generateKey.toPublicJWK + + val config = parseConfig( + s""" + |authorization { + | type = verify-token + | issuer = bbp + | keys = [ "${key1.toJSONString}" ] + |} + |""".stripMargin + ) + + assert(config.isLeft, "Parsing must fail with an missing subject") + } + + test("Fail to parse the config if the key is invalid") { + val config = parseConfig( + s""" + |authorization { + | type = verify-token + | issuer = bbp + | subject = admin + | keys = [ "xxx" ] + |} + |""".stripMargin + ) + + assert(config.isLeft, "Parsing must fail with an invalid key") + } + + test("Fail to parse the config without a key") { + val config = parseConfig( + s""" + |authorization { + | type = 
verify-token + | issuer = bbp + | subject = admin + | keys = [ ] + |} + |""".stripMargin + ) + + assert(config.isLeft, "Parsing must fail without a key") + } + +} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala index 97157d5608..828fdbb5d9 100644 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala @@ -1,20 +1,21 @@ package ch.epfl.bluebrain.nexus.storage.routes -import java.util.regex.Pattern.quote - import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.server.Route import akka.http.scaladsl.testkit.ScalatestRouteTest +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} import ch.epfl.bluebrain.nexus.storage.routes.instances._ import ch.epfl.bluebrain.nexus.storage.utils.Resources -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, DeltaIdentitiesClient, Storages} +import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} import io.circe.Json import monix.eval.Task import org.mockito.IdiomaticMockito import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike +import java.util.regex.Pattern.quote + class AppInfoRoutesSpec extends AnyWordSpecLike with Matchers @@ -24,9 +25,9 @@ class AppInfoRoutesSpec "the app info routes" should { - implicit val config: AppConfig = Settings(system).appConfig - implicit val deltaIdentities: DeltaIdentitiesClient[Task] = mock[DeltaIdentitiesClient[Task]] - val route: Route = Routes(mock[Storages[Task, AkkaSource]]) + implicit val config: AppConfig = Settings(system).appConfig + implicit val authorizationMethod: AuthorizationMethod = AuthorizationMethod.Anonymous + val route: Route = Routes(mock[Storages[Task, AkkaSource]]) "return application information" in { Get("/") ~> route ~> check { diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala index 294e721e91..0fc0d30eaf 100644 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala @@ -4,98 +4,111 @@ import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.model.headers.OAuth2BearerToken import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.testkit.ScalatestRouteTest -import ch.epfl.bluebrain.nexus.storage.{DeltaIdentitiesClient, DeltaIdentitiesClientError} -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.Identity.Anonymous -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.{AccessToken, Caller} +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod.VerifyToken import ch.epfl.bluebrain.nexus.storage.config.AppConfig.HttpConfig import ch.epfl.bluebrain.nexus.storage.config.Settings import ch.epfl.bluebrain.nexus.storage.routes.AuthDirectives._ import ch.epfl.bluebrain.nexus.storage.utils.EitherValues -import monix.eval.Task -import org.mockito.matchers.MacroBasedMatchers -import org.mockito.{IdiomaticMockito, Mockito} +import ch.epfl.bluebrain.nexus.storage.utils.Randomness.genString +import 
ch.epfl.bluebrain.nexus.testkit.jwt.TokenGenerator +import com.nimbusds.jose.jwk.gen.RSAKeyGenerator +import com.nimbusds.jose.jwk.{JWKSet, RSAKey} import org.scalatest.BeforeAndAfter import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike +import java.time.Instant + //noinspection NameBooleanParameters class AuthDirectivesSpec extends AnyWordSpecLike with Matchers with EitherValues - with MacroBasedMatchers - with IdiomaticMockito with BeforeAndAfter with ScalatestRouteTest { implicit private val hc: HttpConfig = Settings(system).appConfig.http - implicit private val deltaIdentities: DeltaIdentitiesClient[Task] = mock[DeltaIdentitiesClient[Task]] - - before { - Mockito.reset(deltaIdentities) - } + def validateRoute(implicit authorizationMethod: AuthorizationMethod) = Routes.wrap(validUser.apply { + complete("") + }) - "The AuthDirectives" should { + "Validating with the anonymous method" should { - "extract the token" in { + implicit val anonymousMethod: AuthorizationMethod = AuthorizationMethod.Anonymous + "validate any token" in { val expected = "token" - val route = extractToken { - case Some(AccessToken(`expected`)) => complete("") - case Some(_) => fail("Token was not extracted correctly.") - case None => fail("Token was not extracted.") - } - Get("/").addCredentials(OAuth2BearerToken(expected)) ~> route ~> check { + Get("/").addCredentials(OAuth2BearerToken(expected)) ~> validateRoute ~> check { status shouldEqual StatusCodes.OK } } - "extract no token" in { - val route = extractToken { - case None => complete("") - case t @ Some(_) => fail(s"Extracted unknown token '$t'.") - } - Get("/") ~> route ~> check { + "validate if no token is provided" in { + Get("/") ~> validateRoute ~> check { status shouldEqual StatusCodes.OK } } + } + + "Validating with the verify token method" should { + + def generateKey: RSAKey = new RSAKeyGenerator(2048).keyID(genString()).generate() + + val rsaKey = generateKey + val validIssuer = "bbp" + val validSubject = "admin" - "extract the caller" in { - implicit val token: Option[AccessToken] = None - deltaIdentities()(any[Option[AccessToken]]) shouldReturn Task(Caller(Anonymous, Set.empty)) - val route = Routes.wrap(extractCaller.apply(_ => complete(""))) - Get("/") ~> route ~> check { + def generateToken(subject: String, issuer: String, rsaKey: RSAKey) = + TokenGenerator + .generateToken( + subject, + issuer, + rsaKey, + Instant.now().plusSeconds(100L), + Instant.now().minusSeconds(100L), + None, + None, + false, + Some(subject) + ) + .value + + implicit val verifyTokenMethod: AuthorizationMethod = + VerifyToken(validIssuer, validSubject, None, new JWKSet(rsaKey.toPublicJWK)) + + "Succeed with a valid token" in { + val token = generateToken(validSubject, validIssuer, rsaKey) + Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { status shouldEqual StatusCodes.OK } } - "fail the route" when { + "Fail with an invalid issuer" in { + val token = generateToken(validSubject, "xxx", rsaKey) + Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { + status shouldEqual StatusCodes.Unauthorized + } + } - "the client throws an error for caller" in { - implicit val token: Option[AccessToken] = None - deltaIdentities()(any[Option[AccessToken]]) shouldReturn - Task.raiseError(DeltaIdentitiesClientError.IdentitiesServerStatusError(StatusCodes.InternalServerError, "")) - val route = Routes.wrap(extractCaller.apply(_ => complete(""))) - Get("/") ~> route ~> check { - status shouldEqual 
StatusCodes.InternalServerError - } + "Fail with an invalid subject" in { + val token = generateToken("bob", validIssuer, rsaKey) + Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { + status shouldEqual StatusCodes.Unauthorized } - "the client returns Unauthorized for caller" in { - implicit val token: Option[AccessToken] = None - deltaIdentities()(any[Option[AccessToken]]) shouldReturn - Task.raiseError(DeltaIdentitiesClientError.IdentitiesClientStatusError(StatusCodes.Unauthorized, "")) - val route = Routes.wrap(extractCaller.apply(_ => complete(""))) - Get("/") ~> route ~> check { - status shouldEqual StatusCodes.Unauthorized - } + } + + "Fail with a token signed with another key" in { + val anotherKey: RSAKey = generateKey + val token = generateToken(validSubject, validIssuer, anotherKey) + Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { + status shouldEqual StatusCodes.Unauthorized } - "the client returns Forbidden for caller" in { - implicit val token: Option[AccessToken] = None - deltaIdentities()(any[Option[AccessToken]]) shouldReturn - Task.raiseError(DeltaIdentitiesClientError.IdentitiesClientStatusError(StatusCodes.Forbidden, "")) - val route = Routes.wrap(extractCaller.apply(_ => complete(""))) - Get("/") ~> route ~> check { - status shouldEqual StatusCodes.Forbidden - } + } + + "Fail with an invalid token" in { + val token = "token" + Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { + status shouldEqual StatusCodes.Unauthorized } } } diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala index 3271f4289b..749133e89b 100644 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala @@ -12,18 +12,17 @@ import akka.http.scaladsl.server.Route import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.stream.scaladsl.Source import akka.util.ByteString -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.Caller -import ch.epfl.bluebrain.nexus.storage.DeltaIdentitiesClient.Identity.Anonymous import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} import ch.epfl.bluebrain.nexus.storage.Rejection.PathNotFound import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence.{BucketDoesNotExist, BucketExists} import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence.{PathDoesNotExist, PathExists} +import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} import ch.epfl.bluebrain.nexus.storage.jsonld.JsonLdContext.addContext import ch.epfl.bluebrain.nexus.storage.routes.instances._ import ch.epfl.bluebrain.nexus.storage.utils.{Randomness, Resources} -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, DeltaIdentitiesClient, Storages} +import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} import io.circe.Json import monix.eval.Task import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} @@ -49,12 +48,10 @@ class StorageRoutesSpec implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.second, 15.milliseconds) - implicit val appConfig: AppConfig = Settings(system).appConfig - implicit val deltaIdentities: DeltaIdentitiesClient[Task] = 
mock[DeltaIdentitiesClient[Task]] - val storages: Storages[Task, AkkaSource] = mock[Storages[Task, AkkaSource]] - val route: Route = Routes(storages) - - deltaIdentities()(None) shouldReturn Task(Caller(Anonymous, Set.empty)) + implicit val appConfig: AppConfig = Settings(system).appConfig + implicit val authorizationMethod: AuthorizationMethod = AuthorizationMethod.Anonymous + val storages: Storages[Task, AkkaSource] = mock[Storages[Task, AkkaSource]] + val route: Route = Routes(storages) trait Ctx { val name = genString() diff --git a/tests/docker/config/storage.conf b/tests/docker/config/storage.conf index a4300e6137..67f187162d 100644 --- a/tests/docker/config/storage.conf +++ b/tests/docker/config/storage.conf @@ -8,21 +8,14 @@ app { interface = "0.0.0.0" } - subject { - anonymous = false - realm = "internal" - name = "service-account-delta" - } - storage { root-volume = "/tmp" protected-directory = "protected" fixer-enabled = false } - delta { - public-iri = "https://test.nexus.bbp.epfl.ch" - internal-iri = "http://delta:8080" + authorization { + method = "anonymous" } media-type-detector { diff --git a/tests/src/test/resources/iam/identities/errors.json b/tests/src/test/resources/iam/identities/errors.json deleted file mode 100644 index 9279b0e866..0000000000 --- a/tests/src/test/resources/iam/identities/errors.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "@context": "https://bluebrain.github.io/nexus/contexts/error.json", - "@type": "InvalidAccessTokenFormat", - "reason": "Access token is invalid; possible causes are: JWT not signed, encoded parts are not properly encoded or each part is not a valid json." -} \ No newline at end of file diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/IdentitiesSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/IdentitiesSpec.scala index de35070b33..7dda0d43d6 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/IdentitiesSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/iam/IdentitiesSpec.scala @@ -23,7 +23,7 @@ class IdentitiesSpec extends BaseSpec { deltaClient.get[Json]("/identities", Identity.InvalidTokenUser) { (json, response) => response.status shouldEqual StatusCodes.Unauthorized - json shouldEqual jsonContentOf("/iam/identities/errors.json") + json.asObject.flatMap(_("reason")) should not be empty } } } From cd9396abfceaf26b7add9c049dff5c8669b4a758 Mon Sep 17 00:00:00 2001 From: Oliver <20188437+olivergrabinski@users.noreply.github.com> Date: Mon, 9 Oct 2023 17:51:43 +0200 Subject: [PATCH 08/13] Reset `createdAt` when restarting a view (#4345) --- .../store/CompositeProgressStore.scala | 1 + .../sourcing/projections/Projections.scala | 10 +++++++ .../sourcing/stream/ProjectionStore.scala | 23 ++++++++++++++++ .../delta/sourcing/stream/Supervisor.scala | 2 +- .../stream/ProjectionStoreSuite.scala | 27 +++++++++++++++++++ 5 files changed, 62 insertions(+), 1 deletion(-) diff --git a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/store/CompositeProgressStore.scala b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/store/CompositeProgressStore.scala index c6116c5aab..5431c49566 100644 --- a/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/store/CompositeProgressStore.scala +++ b/delta/plugins/composite-views/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/compositeviews/store/CompositeProgressStore.scala @@ -90,6 +90,7 @@ final 
class CompositeProgressStore(xas: Transactors)(implicit clock: Clock[UIO]) | processed = ${reset.processed}, | discarded = ${reset.discarded}, | failed = ${reset.failed}, + | created_at = $instant, | updated_at = $instant |$where |""".stripMargin.update.run diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/projections/Projections.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/projections/Projections.scala index d7209366c2..d41ac9f9c5 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/projections/Projections.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/projections/Projections.scala @@ -42,6 +42,14 @@ trait Projections { */ def save(metadata: ProjectionMetadata, progress: ProjectionProgress): UIO[Unit] + /** + * Resets the progress of a projection to 0, and the instants (createdAt, updatedAt) to the time of the reset + * + * @param name + * the name of the projection to reset + */ + def reset(name: String): UIO[Unit] + /** * Deletes a projection offset if found. * @@ -104,6 +112,8 @@ object Projections { override def save(metadata: ProjectionMetadata, progress: ProjectionProgress): UIO[Unit] = projectionStore.save(metadata, progress) + override def reset(name: String): UIO[Unit] = projectionStore.reset(name) + override def delete(name: String): UIO[Unit] = projectionStore.delete(name) override def scheduleRestart(projectionName: String)(implicit subject: Subject): UIO[Unit] = { diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStore.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStore.scala index 68d5075dbb..635fe82071 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStore.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStore.scala @@ -32,6 +32,13 @@ trait ProjectionStore { */ def save(metadata: ProjectionMetadata, progress: ProjectionProgress): UIO[Unit] + /** + * Resets the progress of a projection to 0, and the instants (createdAt, updatedAt) to the time of the reset + * @param name + * the name of the projection to reset + */ + def reset(name: String): UIO[Unit] + /** * Retrieves a projection offset if found. 
* @@ -83,6 +90,22 @@ object ProjectionStore { .hideErrors } + override def reset(name: String): UIO[Unit] = + IOUtils.instant.flatMap { instant => + sql"""UPDATE projection_offsets + SET ordering = 0, + processed = 0, + discarded = 0, + failed = 0, + created_at = $instant, + updated_at = $instant + WHERE name = $name + """.stripMargin.update.run + .transact(xas.write) + .void + .hideErrors + } + override def offset(name: String): UIO[Option[ProjectionProgress]] = sql"""SELECT * FROM projection_offsets |WHERE name = $name; diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/Supervisor.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/Supervisor.scala index f79c364408..89cab9786b 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/Supervisor.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/Supervisor.scala @@ -341,7 +341,7 @@ object Supervisor { _ <- log.info(s"Restarting '${metadata.module}/${metadata.name}'...") _ <- stopProjection(s) _ <- Task.when(s.executionStrategy == PersistentSingleNode)( - projections.save(metadata, ProjectionProgress.NoProgress) + projections.reset(metadata.name) ) _ <- Supervisor.restartProjection(s, mapRef) status <- s.control.status diff --git a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStoreSuite.scala b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStoreSuite.scala index d6c99c4024..c78c0073a9 100644 --- a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStoreSuite.scala +++ b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/stream/ProjectionStoreSuite.scala @@ -27,6 +27,7 @@ class ProjectionStoreSuite extends BioSuite with IOFixedClock with Doobie.Fixtur private val metadata = ProjectionMetadata("test", name, Some(project), Some(resource)) private val progress = ProjectionProgress(Offset.At(42L), Instant.EPOCH, 5, 2, 1) private val newProgress = progress.copy(offset = Offset.At(100L), processed = 100L) + private val noProgress = ProjectionProgress.NoProgress test("Return an empty offset when not found") { store.offset("not found").assertNone @@ -71,4 +72,30 @@ class ProjectionStoreSuite extends BioSuite with IOFixedClock with Doobie.Fixtur _ <- store.offset(name).assertNone } yield () } + + test("Reset an offset") { + val later = Instant.EPOCH.plusSeconds(1000) + val storeLater = ProjectionStore(xas, QueryConfig(10, RefreshStrategy.Stop))(ioClock(later)) + + for { + _ <- store.save(metadata, progress) + _ <- assertProgressAndInstants(metadata.name, progress, Instant.EPOCH, Instant.EPOCH)(store) + _ <- storeLater.reset(metadata.name) + _ <- assertProgressAndInstants(metadata.name, noProgress.copy(instant = later), later, later)(store) + } yield () + } + + private def assertProgressAndInstants( + name: String, + progress: ProjectionProgress, + createdAt: Instant, + updatedAt: Instant + )( + store: ProjectionStore + ) = + for { + entries <- store.entries.compile.toList + r = entries.assertOneElem + _ = assertEquals((r.name, r.progress, r.createdAt, r.updatedAt), (name, progress, createdAt, updatedAt)) + } yield () } From 2ae9369da997e9910c9bf4a8acb5f3db463868b3 Mon Sep 17 00:00:00 2001 From: Daniel Bell Date: Tue, 10 Oct 2023 13:57:14 +0100 Subject: [PATCH 09/13] Ensure file errors are written correctly (#4346) * Ensure file errors are 
written correctly * deal with scalafmt * Add unit test for ResponseToJsonLd * Remove unnecessary test changes * tidy up test * add CatsResponseToJsonLdSpec, fix in that too --- .../nexus/delta/sdk/JsonLdValue.scala | 19 ++++ .../delta/sdk/ce/CatsResponseToJsonLd.scala | 9 +- .../sdk/directives/ResponseToJsonLd.scala | 9 +- .../delta/sdk/error/AuthTokenError.scala | 8 -- .../test/resources/directives/blank-id.json | 5 + .../sdk/ce/CatsResponseToJsonLdSpec.scala | 97 +++++++++++++++++++ .../sdk/directives/ResponseToJsonLdSpec.scala | 93 ++++++++++++++++++ 7 files changed, 220 insertions(+), 20 deletions(-) create mode 100644 delta/sdk/src/test/resources/directives/blank-id.json create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLdSpec.scala create mode 100644 delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLdSpec.scala diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/JsonLdValue.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/JsonLdValue.scala index d1277fa9d3..aacffff40b 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/JsonLdValue.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/JsonLdValue.scala @@ -1,6 +1,11 @@ package ch.epfl.bluebrain.nexus.delta.sdk +import ch.epfl.bluebrain.nexus.delta.rdf.RdfError +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.{JsonLdApi, JsonLdOptions} +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.{CompactedJsonLd, ExpandedJsonLd} +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder +import monix.bio.IO /** * A definition of a value that can be converted to JSONLD @@ -24,4 +29,18 @@ object JsonLdValue { override val value: A = v override val encoder: JsonLdEncoder[A] = implicitly[JsonLdEncoder[A]] } + + implicit val jsonLdEncoder: JsonLdEncoder[JsonLdValue] = { + new JsonLdEncoder[JsonLdValue] { + override def context(value: JsonLdValue): ContextValue = value.encoder.context(value.value) + override def expand( + value: JsonLdValue + )(implicit opts: JsonLdOptions, api: JsonLdApi, rcr: RemoteContextResolution): IO[RdfError, ExpandedJsonLd] = + value.encoder.expand(value.value) + override def compact( + value: JsonLdValue + )(implicit opts: JsonLdOptions, api: JsonLdApi, rcr: RemoteContextResolution): IO[RdfError, CompactedJsonLd] = + value.encoder.compact(value.value) + } + } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLd.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLd.scala index fcbbde44b2..de2f33fd3d 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLd.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLd.scala @@ -116,12 +116,9 @@ object CatsResponseToJsonLd extends FileBytesInstances { } onSuccess(flattened.unsafeToFuture()) { - case Left(complete: Complete[E]) => emit(complete) - case Left(reject: Reject[E]) => emit(reject) - case Right(Left(c)) => - implicit val valueEncoder = c.value.encoder - emit(c.value.value) - + case Left(complete: Complete[E]) => emit(complete) + case Left(reject: Reject[E]) => emit(reject) + case Right(Left(c)) => emit(c) case Right(Right((metadata, content))) => headerValueByType(Accept) { accept => if (accept.mediaRanges.exists(_.matches(metadata.contentType.mediaType))) 
{ diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLd.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLd.scala index 6dfde427e9..1b28a3a957 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLd.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLd.scala @@ -106,12 +106,9 @@ object ResponseToJsonLd extends FileBytesInstances { override def apply(statusOverride: Option[StatusCode]): Route = { val flattened = io.flatMap { fr => fr.content.attempt.map(_.map { s => fr.metadata -> s }) }.attempt onSuccess(flattened.runToFuture) { - case Left(complete: Complete[E]) => emit(complete) - case Left(reject: Reject[E]) => emit(reject) - case Right(Left(c)) => - implicit val valueEncoder = c.value.encoder - emit(c.value.value) - + case Left(complete: Complete[E]) => emit(complete) + case Left(reject: Reject[E]) => emit(reject) + case Right(Left(c)) => emit(c) case Right(Right((metadata, content))) => headerValueByType(Accept) { accept => if (accept.mediaRanges.exists(_.matches(metadata.contentType.mediaType))) { diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala index 2f1bd3e3f8..6ef19a4c2e 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/error/AuthTokenError.scala @@ -27,12 +27,6 @@ object AuthTokenError { final case class AuthTokenNotFoundInResponse(failure: DecodingFailure) extends AuthTokenError(s"Auth token not found in auth response: ${failure.reason}") - /** - * Signals that the expiry was missing from the authentication response - */ - final case class ExpiryNotFoundInResponse(failure: DecodingFailure) - extends AuthTokenError(s"Expiry not found in auth response: ${failure.reason}") - /** * Signals that the realm specified for authentication is deprecated */ @@ -45,8 +39,6 @@ object AuthTokenError { JsonObject(keywords.tpe := "AuthTokenHttpError", "reason" := r.reason) case AuthTokenNotFoundInResponse(r) => JsonObject(keywords.tpe -> "AuthTokenNotFoundInResponse".asJson, "reason" := r.message) - case ExpiryNotFoundInResponse(r) => - JsonObject(keywords.tpe -> "ExpiryNotFoundInResponse".asJson, "reason" := r.message) case r: RealmIsDeprecated => JsonObject(keywords.tpe := "RealmIsDeprecated", "reason" := r.getMessage) } diff --git a/delta/sdk/src/test/resources/directives/blank-id.json b/delta/sdk/src/test/resources/directives/blank-id.json new file mode 100644 index 0000000000..66a095e9d3 --- /dev/null +++ b/delta/sdk/src/test/resources/directives/blank-id.json @@ -0,0 +1,5 @@ +{ + "@context" : "https://bluebrain.github.io/nexus/contexts/error.json", + "@type" : "BlankResourceId", + "reason" : "Resource identifier cannot be blank." 
+} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLdSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLdSpec.scala new file mode 100644 index 0000000000..6d2758a72e --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/ce/CatsResponseToJsonLdSpec.scala @@ -0,0 +1,97 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.ce + +import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` +import akka.http.scaladsl.model.MediaRanges.`*/*` +import akka.http.scaladsl.model.headers.Accept +import akka.http.scaladsl.model.{ContentType, StatusCodes} +import akka.http.scaladsl.server.RouteConcatenation +import akka.stream.scaladsl.Source +import akka.util.ByteString +import ch.epfl.bluebrain.nexus.delta.rdf.RdfMediaTypes.`application/ld+json` +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder +import ch.epfl.bluebrain.nexus.delta.rdf.syntax.JsonSyntax +import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering +import ch.epfl.bluebrain.nexus.delta.sdk.ce.DeltaDirectives._ +import ch.epfl.bluebrain.nexus.delta.sdk.directives.FileResponse +import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields +import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection +import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection.BlankResourceId +import ch.epfl.bluebrain.nexus.delta.sdk.utils.RouteHelpers +import ch.epfl.bluebrain.nexus.delta.sdk.{AkkaSource, SimpleRejection, SimpleResource} +import ch.epfl.bluebrain.nexus.testkit.ShouldMatchers.convertToAnyShouldWrapper +import ch.epfl.bluebrain.nexus.testkit.TestHelpers.jsonContentOf +import monix.bio.IO +import cats.effect.{IO => CatsIO} +import monix.execution.Scheduler + +class CatsResponseToJsonLdSpec extends RouteHelpers with JsonSyntax with RouteConcatenation { + + implicit val s: Scheduler = Scheduler.global + implicit val rcr: RemoteContextResolution = + RemoteContextResolution.fixed( + SimpleResource.contextIri -> SimpleResource.context, + SimpleRejection.contextIri -> SimpleRejection.context, + contexts.error -> jsonContentOf("/contexts/error.json").topContextValueOrEmpty + ) + implicit val jo: JsonKeyOrdering = JsonKeyOrdering.default() + + private def responseWithSourceError[E: JsonLdEncoder: HttpResponseFields](error: E) = { + responseWith( + `text/plain(UTF-8)`, + IO.raiseError(error) + ) + } + + private val expectedBlankIdErrorResponse = jsonContentOf( + "/directives/blank-id.json" + ) + + private val FileContents = "hello" + + private def fileSourceOfString(value: String) = { + IO.pure(Source.single(ByteString(value))) + } + + private def responseWith[E: JsonLdEncoder: HttpResponseFields]( + contentType: ContentType, + contents: IO[E, AkkaSource] + ) = { + CatsIO.pure( + Right( + FileResponse( + "file.name", + contentType, + 1024, + contents + ) + ) + ) + } + + private def request = { + Get() ~> Accept(`*/*`) + } + + "ResponseToJsonLd file handling" should { + + "Return the contents of a file" in { + request ~> emit( + responseWith(`text/plain(UTF-8)`, fileSourceOfString(FileContents)) + ) ~> check { + status shouldEqual StatusCodes.OK + contentType shouldEqual `text/plain(UTF-8)` + response.asString shouldEqual FileContents + } + } + + "Return an error from a file content IO" in { + request ~> 
emit(responseWithSourceError[ResourceRejection](BlankResourceId)) ~> check { + status shouldEqual StatusCodes.BadRequest // BlankResourceId is supposed to result in BadRequest + contentType.mediaType shouldEqual `application/ld+json` + response.asJson shouldEqual expectedBlankIdErrorResponse + } + } + } +} diff --git a/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLdSpec.scala b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLdSpec.scala new file mode 100644 index 0000000000..05c8b40b1a --- /dev/null +++ b/delta/sdk/src/test/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToJsonLdSpec.scala @@ -0,0 +1,93 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.directives + +import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` +import akka.http.scaladsl.model.MediaRanges.`*/*` +import akka.http.scaladsl.model.headers.Accept +import akka.http.scaladsl.model.{ContentType, StatusCodes} +import akka.http.scaladsl.server.RouteConcatenation +import akka.stream.scaladsl.Source +import akka.util.ByteString +import ch.epfl.bluebrain.nexus.delta.rdf.RdfMediaTypes.`application/ld+json` +import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution +import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder +import ch.epfl.bluebrain.nexus.delta.rdf.syntax.JsonSyntax +import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering +import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ +import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields +import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection +import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection.BlankResourceId +import ch.epfl.bluebrain.nexus.delta.sdk.utils.RouteHelpers +import ch.epfl.bluebrain.nexus.delta.sdk.{AkkaSource, SimpleRejection, SimpleResource} +import ch.epfl.bluebrain.nexus.testkit.ShouldMatchers.convertToAnyShouldWrapper +import ch.epfl.bluebrain.nexus.testkit.TestHelpers.jsonContentOf +import monix.bio.IO +import monix.execution.Scheduler + +class ResponseToJsonLdSpec extends RouteHelpers with JsonSyntax with RouteConcatenation { + + implicit val s: Scheduler = Scheduler.global + implicit val rcr: RemoteContextResolution = + RemoteContextResolution.fixed( + SimpleResource.contextIri -> SimpleResource.context, + SimpleRejection.contextIri -> SimpleRejection.context, + contexts.error -> jsonContentOf("/contexts/error.json").topContextValueOrEmpty + ) + implicit val jo: JsonKeyOrdering = JsonKeyOrdering.default() + + private def responseWithSourceError[E: JsonLdEncoder: HttpResponseFields](error: E) = { + responseWith( + `text/plain(UTF-8)`, + IO.raiseError(error) + ) + } + + private val expectedBlankIdErrorResponse = jsonContentOf( + "/directives/blank-id.json" + ) + + private val FileContents = "hello" + + private def fileSourceOfString(value: String) = { + IO.pure(Source.single(ByteString(value))) + } + + private def responseWith[E: JsonLdEncoder: HttpResponseFields]( + contentType: ContentType, + contents: IO[E, AkkaSource] + ) = { + IO.pure( + FileResponse( + "file.name", + contentType, + 1024, + contents + ) + ) + } + + private def request = { + Get() ~> Accept(`*/*`) + } + + "ResponseToJsonLd file handling" should { + + "Return the contents of a file" in { + request ~> emit( + responseWith(`text/plain(UTF-8)`, fileSourceOfString(FileContents)) + ) ~> check { + status shouldEqual 
StatusCodes.OK + contentType shouldEqual `text/plain(UTF-8)` + response.asString shouldEqual FileContents + } + } + + "Return an error from a file content IO" in { + request ~> emit(responseWithSourceError[ResourceRejection](BlankResourceId)) ~> check { + status shouldEqual StatusCodes.BadRequest // BlankResourceId is supposed to result in BadRequest + contentType.mediaType shouldEqual `application/ld+json` + response.asJson shouldEqual expectedBlankIdErrorResponse + } + } + } +} From 38adf11914f8499a8849d320a79fcfadafbdc60b Mon Sep 17 00:00:00 2001 From: Oliver <20188437+olivergrabinski@users.noreply.github.com> Date: Wed, 11 Oct 2023 10:25:12 +0200 Subject: [PATCH 10/13] Migrate storage app to cats effect (#4351) --- build.sbt | 2 - storage/src/main/resources/kamon.conf | 9 ---- .../epfl/bluebrain/nexus/storage/Main.scala | 10 ++-- .../nexus/storage/routes/Routes.scala | 4 +- .../nexus/storage/routes/StorageRoutes.scala | 11 ++--- .../nexus/storage/routes/instances.scala | 7 ++- .../attributes/AttributesCacheSpec.scala | 48 +++++++++---------- .../storage/routes/AppInfoRoutesSpec.scala | 4 +- .../storage/routes/StorageRoutesSpec.scala | 16 +++---- 9 files changed, 48 insertions(+), 63 deletions(-) diff --git a/build.sbt b/build.sbt index 222fa0e602..8e0efcefdc 100755 --- a/build.sbt +++ b/build.sbt @@ -114,7 +114,6 @@ lazy val logback = "ch.qos.logback" % "logback-classic lazy val magnolia = "com.softwaremill.magnolia1_2" %% "magnolia" % magnoliaVersion lazy val mockito = "org.mockito" %% "mockito-scala" % mockitoVersion lazy val monixBio = "io.monix" %% "monix-bio" % monixBioVersion -lazy val monixEval = "io.monix" %% "monix-eval" % monixVersion lazy val munit = "org.scalameta" %% "munit" % munitVersion lazy val nimbusJoseJwt = "com.nimbusds" % "nimbus-jose-jwt" % nimbusJoseJwtVersion lazy val pureconfig = "com.github.pureconfig" %% "pureconfig" % pureconfigVersion @@ -769,7 +768,6 @@ lazy val storage = project circeCore, circeGenericExtras, logback, - monixEval, pureconfig, scalaLogging, akkaHttpTestKit % Test, diff --git a/storage/src/main/resources/kamon.conf b/storage/src/main/resources/kamon.conf index 68d9c1da63..dde64e4dd9 100644 --- a/storage/src/main/resources/kamon.conf +++ b/storage/src/main/resources/kamon.conf @@ -1,12 +1,3 @@ -// TODO: remove when the upstream kamon-monix support is merged: https://github.com/kamon-io/Kamon/pull/879 -kanela.modules { - executor-service { - within += "monix.eval..*" - within += "monix.execution..*" - within += "monix.bio..*" - } -} - kamon { environment { service = ${app.description.name} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala index cb2e35821e..7db387364f 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala @@ -7,7 +7,7 @@ import akka.event.{Logging, LoggingAdapter} import akka.http.scaladsl.Http import akka.http.scaladsl.server.Route import akka.util.Timeout -import cats.effect.Effect +import cats.effect.{Effect, IO} import ch.epfl.bluebrain.nexus.storage.Storages.DiskStorage import ch.epfl.bluebrain.nexus.storage.attributes.{AttributesCache, ContentTypeDetector} import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod @@ -16,8 +16,6 @@ import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ import ch.epfl.bluebrain.nexus.storage.routes.Routes import com.typesafe.config.{Config, ConfigFactory} import kamon.Kamon -import 
monix.eval.Task -import monix.execution.Scheduler import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future} @@ -56,14 +54,14 @@ object Main { implicit val as: ActorSystem = ActorSystem(appConfig.description.fullName, config) implicit val ec: ExecutionContext = as.dispatcher - implicit val eff: Effect[Task] = Task.catsEffect(Scheduler.global) + implicit val eff: Effect[IO] = IO.ioEffect implicit val authorizationMethod: AuthorizationMethod = appConfig.authorization implicit val timeout = Timeout(1.minute) implicit val clock = Clock.systemUTC implicit val contentTypeDetector = new ContentTypeDetector(appConfig.mediaTypeDetector) - val storages: Storages[Task, AkkaSource] = - new DiskStorage(appConfig.storage, contentTypeDetector, appConfig.digest, AttributesCache[Task, AkkaSource]) + val storages: Storages[IO, AkkaSource] = + new DiskStorage(appConfig.storage, contentTypeDetector, appConfig.digest, AttributesCache[IO, AkkaSource]) val logger: LoggingAdapter = Logging(as, getClass) diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala index 360cc42749..495bbb0fa6 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.storage.routes import akka.http.scaladsl.model.headers.{`WWW-Authenticate`, HttpChallenges} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{ExceptionHandler, RejectionHandler, Route} +import cats.effect.IO import ch.epfl.bluebrain.nexus.storage.StorageError._ import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.AppConfig @@ -12,7 +13,6 @@ import ch.epfl.bluebrain.nexus.storage.routes.PrefixDirectives._ import ch.epfl.bluebrain.nexus.storage.routes.instances._ import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Rejection, StorageError, Storages} import com.typesafe.scalalogging.Logger -import monix.eval.Task import scala.util.control.NonFatal @@ -84,7 +84,7 @@ object Routes { * the storages operations */ def apply( - storages: Storages[Task, AkkaSource] + storages: Storages[IO, AkkaSource] )(implicit config: AppConfig, authorizationMethod: AuthorizationMethod): Route = //TODO: Fetch Bearer token and verify identity wrap { diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala index 2341facfd7..2436139b4e 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala @@ -5,6 +5,7 @@ import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.model.{HttpEntity, StatusCode, Uri} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route +import cats.effect.IO import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} import ch.epfl.bluebrain.nexus.storage.config.AppConfig import ch.epfl.bluebrain.nexus.storage.config.AppConfig.HttpConfig @@ -16,10 +17,8 @@ import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} import io.circe.generic.semiauto._ import io.circe.{Decoder, Encoder} import kamon.instrumentation.akka.http.TracingDirectives.operationName -import monix.eval.Task -import 
monix.execution.Scheduler.Implicits.global -class StorageRoutes()(implicit storages: Storages[Task, AkkaSource], hc: HttpConfig) { +class StorageRoutes()(implicit storages: Storages[IO, AkkaSource], hc: HttpConfig) { def routes: Route = // Consume buckets/{name}/ @@ -42,7 +41,7 @@ class StorageRoutes()(implicit storages: Storages[Task, AkkaSource], hc: HttpCon pathNotExists(name, path).apply { implicit pathNotExistEvidence => // Upload file fileUpload("file") { case (_, source) => - complete(Created -> storages.createFile(name, path, source).runToFuture) + complete(Created -> storages.createFile(name, path, source).unsafeToFuture()) } } }, @@ -79,7 +78,7 @@ class StorageRoutes()(implicit storages: Storages[Task, AkkaSource], hc: HttpCon case attr @ FileAttributes(_, _, Digest.empty, _) => Accepted -> attr case attr => OK -> attr } - complete(result.runToFuture) + complete(result.unsafeToFuture()) } } } @@ -105,7 +104,7 @@ object StorageRoutes { implicit val linkFileEnc: Encoder[LinkFile] = deriveEncoder[LinkFile] } - final def apply(storages: Storages[Task, AkkaSource])(implicit cfg: AppConfig): StorageRoutes = { + final def apply(storages: Storages[IO, AkkaSource])(implicit cfg: AppConfig): StorageRoutes = { implicit val s = storages new StorageRoutes() } diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala index d8a34f2966..3a3f76d66b 100644 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala +++ b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.marshalling.GenericMarshallers.eitherMarshaller import akka.http.scaladsl.marshalling._ import akka.http.scaladsl.model.MediaTypes._ import akka.http.scaladsl.model._ +import cats.effect.IO import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.sortKeys import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.OrderedKeys import ch.epfl.bluebrain.nexus.storage.Rejection @@ -11,8 +12,6 @@ import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport import io.circe._ import io.circe.syntax._ -import monix.eval.Task -import monix.execution.Scheduler import ch.epfl.bluebrain.nexus.storage.MediaTypes.`application/ld+json` import scala.collection.immutable.Seq @@ -48,9 +47,9 @@ object instances extends LowPriority { statusFrom(value) -> value.asJson } - implicit final class EitherFSyntax[A](f: Task[Either[Rejection, A]])(implicit scheduler: Scheduler) { + implicit final class EitherFSyntax[A](f: IO[Either[Rejection, A]]) { def runWithStatus(code: StatusCode): Future[Either[Rejection, (StatusCode, A)]] = - f.map(_.map(code -> _)).runToFuture + f.map(_.map(code -> _)).unsafeToFuture() } } diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala index f84fe1fe9b..7b57e639da 100644 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala @@ -3,22 +3,20 @@ package ch.epfl.bluebrain.nexus.storage.attributes import java.nio.file.{Path, Paths} import java.time.{Clock, Instant, ZoneId} import java.util.concurrent.atomic.AtomicInteger - import akka.actor.ActorSystem import akka.testkit.TestKit import 
akka.util.Timeout import ch.epfl.bluebrain.nexus.storage._ import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} import ch.epfl.bluebrain.nexus.storage.config.AppConfig.DigestConfig -import monix.eval.Task import org.mockito.{IdiomaticMockito, Mockito} import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.{BeforeAndAfter, Inspectors} -import scala.concurrent.Future +import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ -import monix.execution.Scheduler.Implicits.global import akka.http.scaladsl.model.MediaTypes.{`application/octet-stream`, `image/jpeg`} +import cats.effect.{ContextShift, IO} import ch.epfl.bluebrain.nexus.storage.utils.Randomness import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike @@ -36,10 +34,12 @@ class AttributesCacheSpec implicit override def patienceConfig: PatienceConfig = PatienceConfig(20.second, 100.milliseconds) - implicit val config: DigestConfig = + implicit val config: DigestConfig = DigestConfig("SHA-256", maxInMemory = 10, concurrentComputations = 3, 20, 5.seconds) - implicit val computation: AttributesComputation[Task, String] = mock[AttributesComputation[Task, String]] - implicit val timeout: Timeout = Timeout(1.minute) + implicit val computation: AttributesComputation[IO, String] = mock[AttributesComputation[IO, String]] + implicit val timeout: Timeout = Timeout(1.minute) + implicit val executionContext: ExecutionContext = ExecutionContext.global + implicit val contextShift: ContextShift[IO] = IO.contextShift(executionContext) before { Mockito.reset(computation) @@ -58,9 +58,9 @@ class AttributesCacheSpec // For every attribute computation done, it passes one second override def instant(): Instant = Instant.ofEpochSecond(counter.get + 1L) } - val attributesCache = AttributesCache[Task, String] + val attributesCache = AttributesCache[IO, String] computation(path, config.algorithm) shouldReturn - Task { counter.incrementAndGet(); attributes } + IO { counter.incrementAndGet(); attributes } } "An AttributesCache" should { @@ -69,15 +69,15 @@ class AttributesCacheSpec attributesCache.asyncComputePut(path, config.algorithm) eventually(counter.get shouldEqual 1) computation(path, config.algorithm) wasCalled once - attributesCache.get(path).runToFuture.futureValue shouldEqual attributes + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributes computation(path, config.algorithm) wasCalled once } "get file that triggers attributes computation" in new Ctx { - attributesCache.get(path).runToFuture.futureValue shouldEqual attributesEmpty() + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributesEmpty() eventually(counter.get shouldEqual 1) computation(path, config.algorithm) wasCalled once - attributesCache.get(path).runToFuture.futureValue shouldEqual attributes + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributes computation(path, config.algorithm) wasCalled once } @@ -92,12 +92,12 @@ class AttributesCacheSpec forAll(list) { case (path, attr) => computation(path, config.algorithm) shouldReturn - Task.deferFuture(Future { + IO.fromFuture(IO.pure(Future { Thread.sleep(1000) counter.incrementAndGet() attr - }) - attributesCache.get(path).runToFuture.futureValue shouldEqual attributesEmpty(path) + })) + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributesEmpty(path) } eventually(counter.get() shouldEqual 10) @@ -111,7 +111,7 @@ class AttributesCacheSpec diff 
should be < 6500L forAll(list) { case (path, attr) => - attributesCache.get(path).runToFuture.futureValue shouldEqual attr + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attr } } @@ -125,18 +125,18 @@ class AttributesCacheSpec forAll(list) { case (path, attr) => computation(path, config.algorithm) shouldReturn - Task { counter.incrementAndGet(); attr } - attributesCache.get(path).runToFuture.futureValue shouldEqual attributesEmpty(path) + IO { counter.incrementAndGet(); attr } + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributesEmpty(path) } eventually(counter.get() shouldEqual 20) forAll(list.takeRight(10)) { case (path, attr) => - attributesCache.get(path).runToFuture.futureValue shouldEqual attr + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attr } forAll(list.take(10)) { case (path, _) => - attributesCache.get(path).runToFuture.futureValue shouldEqual attributesEmpty(path) + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributesEmpty(path) } } @@ -149,15 +149,15 @@ class AttributesCacheSpec forAll(list) { case (path, attr) => if (attr.bytes == 0L) - computation(path, config.algorithm) shouldReturn Task.raiseError(new RuntimeException) + computation(path, config.algorithm) shouldReturn IO.raiseError(new RuntimeException) else - computation(path, config.algorithm) shouldReturn Task(attr) + computation(path, config.algorithm) shouldReturn IO(attr) - attributesCache.get(path).runToFuture.futureValue shouldEqual attributesEmpty(path) + attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attributesEmpty(path) } forAll(list.drop(1)) { case (path, attr) => - eventually(attributesCache.get(path).runToFuture.futureValue shouldEqual attr) + eventually(attributesCache.get(path).unsafeToFuture().futureValue shouldEqual attr) } } } diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala index 828fdbb5d9..cc7f40d0d5 100644 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala @@ -3,13 +3,13 @@ package ch.epfl.bluebrain.nexus.storage.routes import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.server.Route import akka.http.scaladsl.testkit.ScalatestRouteTest +import cats.effect.IO import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} import ch.epfl.bluebrain.nexus.storage.routes.instances._ import ch.epfl.bluebrain.nexus.storage.utils.Resources import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} import io.circe.Json -import monix.eval.Task import org.mockito.IdiomaticMockito import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike @@ -27,7 +27,7 @@ class AppInfoRoutesSpec implicit val config: AppConfig = Settings(system).appConfig implicit val authorizationMethod: AuthorizationMethod = AuthorizationMethod.Anonymous - val route: Route = Routes(mock[Storages[Task, AkkaSource]]) + val route: Route = Routes(mock[Storages[IO, AkkaSource]]) "return application information" in { Get("/") ~> route ~> check { diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala index 
749133e89b..e7615bd4dd 100644 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala +++ b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala @@ -12,6 +12,7 @@ import akka.http.scaladsl.server.Route import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.stream.scaladsl.Source import akka.util.ByteString +import cats.effect.IO import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} import ch.epfl.bluebrain.nexus.storage.Rejection.PathNotFound import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError @@ -24,7 +25,6 @@ import ch.epfl.bluebrain.nexus.storage.routes.instances._ import ch.epfl.bluebrain.nexus.storage.utils.{Randomness, Resources} import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} import io.circe.Json -import monix.eval.Task import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} import org.scalatest.OptionValues import org.scalatest.concurrent.ScalaFutures @@ -50,7 +50,7 @@ class StorageRoutesSpec implicit val appConfig: AppConfig = Settings(system).appConfig implicit val authorizationMethod: AuthorizationMethod = AuthorizationMethod.Anonymous - val storages: Storages[Task, AkkaSource] = mock[Storages[Task, AkkaSource]] + val storages: Storages[IO, AkkaSource] = mock[Storages[IO, AkkaSource]] val route: Route = Routes(storages) trait Ctx { @@ -149,7 +149,7 @@ class StorageRoutesSpec eqTo(BucketExists), eqTo(PathDoesNotExist) ) shouldReturn - Task.raiseError(InternalError("something went wrong")) + IO.raiseError(InternalError("something went wrong")) Put(s"/v1/buckets/$name/files/path/to/file/$filename", multipartForm) ~> route ~> check { status shouldEqual InternalServerError @@ -176,7 +176,7 @@ class StorageRoutesSpec storages.createFile(eqTo(name), eqTo(filePathUri), any[AkkaSource])( eqTo(BucketExists), eqTo(PathDoesNotExist) - ) shouldReturn Task( + ) shouldReturn IO( attributes ) @@ -223,7 +223,7 @@ class StorageRoutesSpec val source = "source/dir" val dest = "dest/dir" storages.moveFile(name, Uri.Path(source), Uri.Path(dest))(BucketExists) shouldReturn - Task.raiseError(InternalError("something went wrong")) + IO.raiseError(InternalError("something went wrong")) val json = jsonContentOf("/file-link.json", Map(quote("{source}") -> source)) @@ -268,7 +268,7 @@ class StorageRoutesSpec val dest = "dest/dir" val attributes = FileAttributes(s"file://some/prefix/$dest", 12L, Digest.empty, `application/octet-stream`) storages.moveFile(name, Uri.Path(source), Uri.Path(dest))(BucketExists) shouldReturn - Task.pure(Right(attributes)) + IO.pure(Right(attributes)) val json = jsonContentOf("/file-link.json", Map(quote("{source}") -> source)) @@ -390,7 +390,7 @@ class StorageRoutesSpec storages.exists(name) shouldReturn BucketExists val attributes = FileAttributes(s"file://$filePathUri", genInt().toLong, Digest("SHA-256", genString()), `image/jpeg`) - storages.getAttributes(name, filePathUri) shouldReturn Task(attributes) + storages.getAttributes(name, filePathUri) shouldReturn IO(attributes) storages.pathExists(name, filePathUri) shouldReturn PathExists Get(s"/v1/buckets/$name/attributes/$filename") ~> Accept(`*/*`) ~> route ~> check { @@ -416,7 +416,7 @@ class StorageRoutesSpec "return empty attributes" in new RandomFile { val filePathUri = Uri.Path(s"$filename") storages.exists(name) shouldReturn BucketExists - storages.getAttributes(name, filePathUri) shouldReturn Task( + storages.getAttributes(name, filePathUri) shouldReturn IO( 
FileAttributes(s"file://$filePathUri", 0L, Digest.empty, `application/octet-stream`) ) storages.pathExists(name, filePathUri) shouldReturn PathExists From 7e7c822f3f2801855ac99ea7b1e0583af4195f68 Mon Sep 17 00:00:00 2001 From: Oliver <20188437+olivergrabinski@users.noreply.github.com> Date: Wed, 11 Oct 2023 11:57:48 +0200 Subject: [PATCH 11/13] Add aggregation for specific resource listings (#4348) Co-authored-by: Daniel Bell --- .../routes/ElasticSearchQueryRoutes.scala | 27 ++++++---- .../routes/ElasticSearchViewsDirectives.scala | 9 ++-- .../routes/ElasticSearchQueryRoutesSpec.scala | 14 ++++-- .../main/paradox/docs/delta/api/files-api.md | 4 ++ .../paradox/docs/delta/api/resolvers-api.md | 4 ++ .../paradox/docs/delta/api/schemas-api.md | 5 +- .../paradox/docs/delta/api/storages-api.md | 4 ++ .../paradox/docs/delta/api/views/index.md | 5 ++ .../docs/releases/v1.9-release-notes.md | 2 + .../aggregations/resolvers-aggregation.json | 28 +++++++++++ .../kg/aggregations/schemas-aggregation.json | 22 ++++++++ .../kg/aggregations/storages-aggregation.json | 28 +++++++++++ .../kg/aggregations/views-aggregation.json | 36 +++++++++++++ .../nexus/tests/kg/AggregationsSpec.scala | 50 +++++++++++++++++++ 14 files changed, 221 insertions(+), 17 deletions(-) create mode 100644 tests/src/test/resources/kg/aggregations/resolvers-aggregation.json create mode 100644 tests/src/test/resources/kg/aggregations/schemas-aggregation.json create mode 100644 tests/src/test/resources/kg/aggregations/storages-aggregation.json create mode 100644 tests/src/test/resources/kg/aggregations/views-aggregation.json diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutes.scala b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutes.scala index 92321872e2..9eae8a83e0 100644 --- a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutes.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutes.scala @@ -52,13 +52,13 @@ class ElasticSearchQueryRoutes( // List/aggregate all resources pathEndOrSingleSlash { concat( - aggregated { _ => aggregate(RootSearch(params)) }, + aggregate(RootSearch(params)), list(RootSearch(params, page, sort)) ) }, (label & pathEndOrSingleSlash) { org => concat( - aggregated { _ => aggregate(OrgSearch(org, params)) }, + aggregate(OrgSearch(org, params)), list(OrgSearch(org, params, page, sort)) ) } @@ -71,7 +71,7 @@ class ElasticSearchQueryRoutes( // List/aggregate all resources inside a project pathEndOrSingleSlash { concat( - aggregated { _ => aggregate(ProjectSearch(ref, params)) }, + aggregate(ProjectSearch(ref, params)), list(ProjectSearch(ref, params, page, sort)) ) }, @@ -81,12 +81,12 @@ class ElasticSearchQueryRoutes( underscoreToOption(schema) match { case None => concat( - aggregated { _ => aggregate(ProjectSearch(ref, params)) }, + aggregate(ProjectSearch(ref, params)), list(ProjectSearch(ref, params, page, sort)) ) case Some(value) => concat( - aggregated { _ => aggregate(ProjectSearch(ref, params, value)(fetchContext)) }, + aggregate(ProjectSearch(ref, params, value)(fetchContext)), list(ProjectSearch(ref, params, page, sort, value)(fetchContext)) ) } @@ -110,12 +110,18 @@ class ElasticSearchQueryRoutes( // List all resources of type resourceSegment pathEndOrSingleSlash { val request = 
DefaultSearchRequest.RootSearch(params, page, sort, resourceSchema)(fetchContext) - list(request) + concat( + aggregate(request), + list(request) + ) }, // List all resources of type resourceSegment inside an organization (label & pathEndOrSingleSlash) { org => val request = DefaultSearchRequest.OrgSearch(org, params, page, sort, resourceSchema)(fetchContext) - list(request) + concat( + aggregate(request), + list(request) + ) } ) }, @@ -125,7 +131,10 @@ class ElasticSearchQueryRoutes( (searchParametersInProject & paginated & pathEndOrSingleSlash) { (params, sort, page) => val request = DefaultSearchRequest.ProjectSearch(ref, params, page, sort, resourceSchema)(fetchContext) - list(request) + concat( + aggregate(request), + list(request) + ) } } } @@ -149,7 +158,7 @@ class ElasticSearchQueryRoutes( aggregate(IO.pure(request)) private def aggregate(request: IO[ElasticSearchQueryError, DefaultSearchRequest])(implicit caller: Caller): Route = - get { + (get & aggregated) { implicit val searchJsonLdEncoder: JsonLdEncoder[AggregationResult] = aggregationResultJsonLdEncoder(ContextValue(contexts.aggregations)) diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsDirectives.scala b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsDirectives.scala index 14689bcf17..aacc599f72 100644 --- a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsDirectives.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsDirectives.scala @@ -2,7 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.routes import akka.http.scaladsl.model.Uri import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.{Directive, Directive1, MalformedQueryParamRejection} +import akka.http.scaladsl.server.{Directive, Directive0, Directive1, MalformedQueryParamRejection} import akka.http.scaladsl.unmarshalling.{FromStringUnmarshaller, Unmarshaller} import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.ResourcesSearchParams import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.ResourcesSearchParams.TypeOperator.Or @@ -45,8 +45,11 @@ trait ElasticSearchViewsDirectives extends UriDirectives { /** * Matches only if the ''aggregations'' parameter is set to ''true'' */ - def aggregated: Directive1[Option[Boolean]] = - parameter("aggregations".as[Boolean].?).filter(_.contains(true)) + def aggregated: Directive0 = + parameter("aggregations".as[Boolean].?).flatMap { + case Some(true) => pass + case _ => reject + } /** * Extract the ''sort'' query parameter(s) and provide a [[SortList]] diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala index 34f352dcb6..a068a0d417 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchQueryRoutesSpec.scala @@ -128,10 +128,16 @@ class ElasticSearchQueryRoutesSpec extends ElasticSearchViewsRoutesFixtures with } List( - 
("aggregate at project level", "/v1/resources/myorg/myproject?aggregations=true"), - ("aggregate at project level with schema", "/v1/resources/myorg/myproject/schema?aggregations=true"), - ("aggregate at org level", "/v1/resources/myorg?aggregations=true"), - ("aggregate at root level", "/v1/resources?aggregations=true") + ("aggregate generic resources at project level", "/v1/resources/myorg/myproject?aggregations=true"), + ( + "aggregate generic resources at project level with schema", + "/v1/resources/myorg/myproject/schema?aggregations=true" + ), + ("aggregate generic resources at org level", "/v1/resources/myorg?aggregations=true"), + ("aggregate generic resources at root level", "/v1/resources?aggregations=true"), + ("aggregate views at root level", "/v1/views?aggregations=true"), + ("aggregate views at org level", "/v1/views/myorg?aggregations=true"), + ("aggregate views at project level", "/v1/views/myorg/myproject?aggregations=true") ).foreach { case (testName, path) => testName in { Get(path) ~> routes ~> check { diff --git a/docs/src/main/paradox/docs/delta/api/files-api.md b/docs/src/main/paradox/docs/delta/api/files-api.md index 116c159574..e74707d4bd 100644 --- a/docs/src/main/paradox/docs/delta/api/files-api.md +++ b/docs/src/main/paradox/docs/delta/api/files-api.md @@ -314,6 +314,7 @@ GET /v1/files/{org_label}/{project_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within an organization @@ -330,6 +331,7 @@ GET /v1/files/{org_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within all projects @@ -346,6 +348,7 @@ GET /v1/files?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Parameter description @@ -362,6 +365,7 @@ GET /v1/files?from={from} (containing) the provided string - `{sort}`: String - can be used to sort files based on a payloads' field. This parameter can appear multiple times to enable sorting by multiple fields. The default is done by `_createdBy` and `@id`. +- `{aggregations}`: Boolean - if `true` then the response will only contain aggregations of the `@type` and `_project` fields; defaults to `false`. See @ref:[Aggregations](resources-api.md#aggregations). **Example** diff --git a/docs/src/main/paradox/docs/delta/api/resolvers-api.md b/docs/src/main/paradox/docs/delta/api/resolvers-api.md index 3e66e7e3e5..a8096f1aea 100644 --- a/docs/src/main/paradox/docs/delta/api/resolvers-api.md +++ b/docs/src/main/paradox/docs/delta/api/resolvers-api.md @@ -302,6 +302,7 @@ GET /v1/resolvers/{org_label}/{project_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within an organization @@ -318,6 +319,7 @@ GET /v1/resolvers/{org_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within all projects @@ -334,6 +336,7 @@ GET /v1/resolvers?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Parameter description @@ -350,6 +353,7 @@ GET /v1/resolvers?from={from} matching (containing) the provided string - `{sort}`: String - can be used to sort resolvers based on a payloads' field. This parameter can appear multiple times to enable sorting by multiple fields. The default is done by `_createdBy` and `@id`. +- `{aggregations}`: Boolean - if `true` then the response will only contain aggregations of the `@type` and `_project` fields; defaults to `false`. 
See @ref:[Aggregations](resources-api.md#aggregations). **Example** diff --git a/docs/src/main/paradox/docs/delta/api/schemas-api.md b/docs/src/main/paradox/docs/delta/api/schemas-api.md index 98ed2044ea..aa75c93e42 100644 --- a/docs/src/main/paradox/docs/delta/api/schemas-api.md +++ b/docs/src/main/paradox/docs/delta/api/schemas-api.md @@ -286,6 +286,7 @@ GET /v1/schemas/{org_label}/{project_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within an organization @@ -302,6 +303,7 @@ GET /v1/schemas/{org_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within all projects @@ -318,6 +320,7 @@ GET /v1/schemas?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Parameter description @@ -334,7 +337,7 @@ GET /v1/schemas?from={from} (containing) the provided string - `{sort}`: String - can be used to sort schemas based on a payloads' field. This parameter can appear multiple times to enable sorting by multiple fields. The default is done by `_createdBy` and `@id`. - +- `{aggregations}`: Boolean - if `true` then the response will only contain aggregations of the `@type` and `_project` fields; defaults to `false`. See @ref:[Aggregations](resources-api.md#aggregations). **Example** diff --git a/docs/src/main/paradox/docs/delta/api/storages-api.md b/docs/src/main/paradox/docs/delta/api/storages-api.md index 3fbbbb05b8..39fcfcdcf2 100644 --- a/docs/src/main/paradox/docs/delta/api/storages-api.md +++ b/docs/src/main/paradox/docs/delta/api/storages-api.md @@ -332,6 +332,7 @@ GET /v1/storages/{org_label}/{project_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within an organization @@ -348,6 +349,7 @@ GET /v1/storages/{org_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within all projects @@ -364,6 +366,7 @@ GET /v1/storages?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Parameter description @@ -380,6 +383,7 @@ GET /v1/storages?from={from} (containing) the provided string - `{sort}`: String - can be used to sort storages based on a payloads' field. This parameter can appear multiple times to enable sorting by multiple fields. The default is done by `_createdBy` and `@id`. +- `{aggregations}`: Boolean - if `true` then the response will only contain aggregations of the `@type` and `_project` fields; defaults to `false`. See @ref:[Aggregations](resources-api.md#aggregations). 
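As a minimal sketch of how the `aggregations` parameter described in the bullet above can be exercised against any of these listing endpoints, assuming a Delta instance at `http://localhost:8080` and a `myorg/myproject` project (both placeholders, not values from this patch):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import scala.concurrent.Future

object AggregationsQuerySketch {
  implicit val system: ActorSystem = ActorSystem("aggregations-example")

  // The listing endpoints documented above switch to aggregation mode when
  // `aggregations=true`; the response then only carries the `@type` and `_project` buckets.
  val request: HttpRequest =
    HttpRequest(uri = "http://localhost:8080/v1/storages/myorg/myproject?aggregations=true")

  def run(): Future[HttpResponse] = Http().singleRequest(request)
}
```
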
**Example** diff --git a/docs/src/main/paradox/docs/delta/api/views/index.md b/docs/src/main/paradox/docs/delta/api/views/index.md index c8b92b2159..e665bdf8f9 100644 --- a/docs/src/main/paradox/docs/delta/api/views/index.md +++ b/docs/src/main/paradox/docs/delta/api/views/index.md @@ -96,6 +96,7 @@ GET /v1/views/{org_label}/{project_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within an organization @@ -112,6 +113,7 @@ GET /v1/views/{org_label}?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` ### Within all projects @@ -128,6 +130,7 @@ GET /v1/views?from={from} &updatedBy={updatedBy} &q={search} &sort={sort} + &aggregations={aggregations} ``` #### Parameter description @@ -144,6 +147,8 @@ GET /v1/views?from={from} (containing) the provided string - `{sort}`: String - can be used to sort views based on a payloads' field. This parameter can appear multiple times to enable sorting by multiple fields. The default is done by `_createdBy` and `@id`. +- `{aggregations}`: Boolean - if `true` then the response will only contain aggregations of the `@type` and `_project` fields; defaults to `false`. See @ref:[Aggregations](../resources-api.md#aggregations). + **Example** diff --git a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md index b34cb08310..6f8066543b 100644 --- a/docs/src/main/paradox/docs/releases/v1.9-release-notes.md +++ b/docs/src/main/paradox/docs/releases/v1.9-release-notes.md @@ -48,6 +48,8 @@ Listings by types can now be controlled with the `typeOperator` query parameter. It is now possible to aggregate resources by `@type` or `project`. +This feature is also available on files, resolvers, schemas, storages, and views. 
+ @ref:[More information](../delta/api/resources-api.md#aggregations) #### Remote contexts diff --git a/tests/src/test/resources/kg/aggregations/resolvers-aggregation.json b/tests/src/test/resources/kg/aggregations/resolvers-aggregation.json new file mode 100644 index 0000000000..a2b3351c1e --- /dev/null +++ b/tests/src/test/resources/kg/aggregations/resolvers-aggregation.json @@ -0,0 +1,28 @@ +{ + "@context": "https://bluebrain.github.io/nexus/contexts/aggregations.json", + "aggregations": { + "projects": { + "buckets": { + "doc_count": 1, + "key": "http://delta:8080/v1/projects/{{org}}/{{project}}" + }, + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + }, + "types": { + "buckets": [ + { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/Resolver" + }, + { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/InProject" + } + ], + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + } + }, + "total": 1 +} \ No newline at end of file diff --git a/tests/src/test/resources/kg/aggregations/schemas-aggregation.json b/tests/src/test/resources/kg/aggregations/schemas-aggregation.json new file mode 100644 index 0000000000..c6b7570667 --- /dev/null +++ b/tests/src/test/resources/kg/aggregations/schemas-aggregation.json @@ -0,0 +1,22 @@ +{ + "@context" : "https://bluebrain.github.io/nexus/contexts/aggregations.json", + "aggregations" : { + "projects" : { + "buckets" : { + "doc_count" : 1, + "key" : "http://delta:8080/v1/projects/{{org}}/{{project}}" + }, + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0 + }, + "types" : { + "buckets" : { + "doc_count" : 1, + "key" : "https://bluebrain.github.io/nexus/vocabulary/Schema" + }, + "doc_count_error_upper_bound" : 0, + "sum_other_doc_count" : 0 + } + }, + "total" : 1 +} \ No newline at end of file diff --git a/tests/src/test/resources/kg/aggregations/storages-aggregation.json b/tests/src/test/resources/kg/aggregations/storages-aggregation.json new file mode 100644 index 0000000000..a2d58b034c --- /dev/null +++ b/tests/src/test/resources/kg/aggregations/storages-aggregation.json @@ -0,0 +1,28 @@ +{ + "@context": "https://bluebrain.github.io/nexus/contexts/aggregations.json", + "aggregations": { + "projects": { + "buckets": { + "doc_count": 1, + "key": "http://delta:8080/v1/projects/{{org}}/{{project}}" + }, + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + }, + "types": { + "buckets": [ + { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/Storage" + }, + { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/DiskStorage" + } + ], + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + } + }, + "total": 1 +} \ No newline at end of file diff --git a/tests/src/test/resources/kg/aggregations/views-aggregation.json b/tests/src/test/resources/kg/aggregations/views-aggregation.json new file mode 100644 index 0000000000..23145129b6 --- /dev/null +++ b/tests/src/test/resources/kg/aggregations/views-aggregation.json @@ -0,0 +1,36 @@ +{ + "@context": "https://bluebrain.github.io/nexus/contexts/aggregations.json", + "aggregations": { + "projects": { + "buckets": { + "doc_count": 3, + "key": "http://delta:8080/v1/projects/{{org}}/{{project}}" + }, + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + }, + "types": { + "buckets": [ + { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/CompositeView" + }, + { + "doc_count": 3, + "key": "https://bluebrain.github.io/nexus/vocabulary/View" + }, 
+ { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/ElasticSearchView" + }, + { + "doc_count": 1, + "key": "https://bluebrain.github.io/nexus/vocabulary/SparqlView" + } + ], + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0 + } + }, + "total": 3 +} \ No newline at end of file diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala index bb35a19b34..7a36de912a 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/AggregationsSpec.scala @@ -57,6 +57,8 @@ final class AggregationsSpec extends BaseSpec with Inspectors with EitherValuabl "Aggregating resources within a project" should { + List("resources", "files", "schemas", "storages", "views") + "get an error if the user has no access" in { deltaClient.get[Json](s"/resources/$ref11?aggregations=true", Rose) { (_, response) => @@ -76,6 +78,54 @@ final class AggregationsSpec extends BaseSpec with Inspectors with EitherValuabl } } + "aggregate resolvers" in { + val expected = jsonContentOf( + "/kg/aggregations/resolvers-aggregation.json", + "org" -> org1, + "project" -> proj11 + ) + deltaClient.get[Json](s"/resolvers/$ref11?aggregations=true", Charlie) { (json, response) => + response.status shouldEqual StatusCodes.OK + json should equalIgnoreArrayOrder(expected) + } + } + + "aggregate views" in { + val expected = jsonContentOf( + "/kg/aggregations/views-aggregation.json", + "org" -> org1, + "project" -> proj11 + ) + deltaClient.get[Json](s"/views/$ref11?aggregations=true", Charlie) { (json, response) => + response.status shouldEqual StatusCodes.OK + json should equalIgnoreArrayOrder(expected) + } + } + + "aggregate schemas" in { + val expected = jsonContentOf( + "/kg/aggregations/schemas-aggregation.json", + "org" -> org1, + "project" -> proj11 + ) + deltaClient.get[Json](s"/schemas/$ref11?aggregations=true", Charlie) { (json, response) => + response.status shouldEqual StatusCodes.OK + json should equalIgnoreArrayOrder(expected) + } + } + + "aggregate storages" in { + val expected = jsonContentOf( + "/kg/aggregations/storages-aggregation.json", + "org" -> org1, + "project" -> proj11 + ) + deltaClient.get[Json](s"/storages/$ref11?aggregations=true", Charlie) { (json, response) => + response.status shouldEqual StatusCodes.OK + json should equalIgnoreArrayOrder(expected) + } + } + } "Aggregating resources within an org" should { From cfa7f5ef88966380b8395e380d3405364a1d2a0b Mon Sep 17 00:00:00 2001 From: Simon Date: Wed, 11 Oct 2023 12:39:29 +0200 Subject: [PATCH 12/13] Migrate archives to Cats Effect (#4352) * Migrate archives to Cats Effect --------- Co-authored-by: Simon Dumas --- .../nexus/delta/wiring/DeltaModule.scala | 2 + .../nexus/delta/wiring/StreamModule.scala | 7 +- .../plugins/archive/ArchiveDownload.scala | 115 +++++++++++------- .../plugins/archive/ArchivePluginConfig.scala | 6 +- .../plugins/archive/ArchivePluginModule.scala | 33 +++-- .../delta/plugins/archive/Archives.scala | 93 ++++++-------- .../delta/plugins/archive/FileSelf.scala | 34 +++--- .../archive/model/ArchiveRejection.scala | 3 +- .../delta/plugins/archive/model/Zip.scala | 12 +- .../archive/routes/ArchiveRoutes.scala | 98 +++++++-------- .../plugins/archive/ArchiveDownloadSpec.scala | 24 ++-- .../plugins/archive/ArchiveRoutesSpec.scala | 29 +++-- .../plugins/archive/ArchivesSTMSpec.scala | 5 +- 
.../delta/plugins/archive/ArchivesSpec.scala | 50 +++----- .../delta/plugins/archive/FileSelfSuite.scala | 23 ++-- .../sdk/directives/ResponseToRedirect.scala | 9 +- .../sdk/stream/CatsStreamConverter.scala | 89 ++++++++++++++ .../nexus/delta/sourcing/DeleteExpired.scala | 39 +++--- .../delta/sourcing/EphemeralDefinition.scala | 18 +-- .../nexus/delta/sourcing/EphemeralLog.scala | 27 ++-- .../execution/EvaluationExecution.scala | 5 + .../sourcing/state/EphemeralStateStore.scala | 9 +- .../delta/sourcing/EphemeralLogSuite.scala | 30 ++--- .../nexus/delta/sourcing/Message.scala | 9 +- .../state/EphemeralStateStoreSuite.scala | 11 +- .../nexus/testkit/ce/CatsEffectSuite.scala | 4 +- .../nexus/testkit/ce/CatsIOValues.scala | 5 +- .../epfl/bluebrain/nexus/tests/BaseSpec.scala | 5 +- 28 files changed, 443 insertions(+), 351 deletions(-) create mode 100644 delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/stream/CatsStreamConverter.scala create mode 100644 delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/execution/EvaluationExecution.scala diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala index a22575c37e..e5d12a4401 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala @@ -30,6 +30,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.plugin.PluginDef import ch.epfl.bluebrain.nexus.delta.sdk.projects.{OwnerPermissionsScopeInitialization, ProjectsConfig} import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors import ch.epfl.bluebrain.nexus.delta.sourcing.config.{DatabaseConfig, ProjectionConfig, QueryConfig} +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.megard.akka.http.cors.scaladsl.settings.CorsSettings import com.typesafe.config.Config import izumi.distage.model.definition.{Id, ModuleDef} @@ -106,6 +107,7 @@ class DeltaModule(appCfg: AppConfig, config: Config)(implicit classLoader: Class make[Clock[IO]].from(Clock.create[IO]) make[Timer[IO]].from(IO.timer(ExecutionContext.global)) make[ContextShift[IO]].from(IO.contextShift(ExecutionContext.global)) + make[EvaluationExecution].from(EvaluationExecution(_, _)) make[UUIDF].from(UUIDF.random) make[Scheduler].from(scheduler) make[JsonKeyOrdering].from( diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/StreamModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/StreamModule.scala index 22a1b2e6a9..41d1c22b09 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/StreamModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/StreamModule.scala @@ -1,6 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.wiring -import cats.effect.{Clock, Sync} +import cats.effect.{Clock, IO, Sync, Timer} import ch.epfl.bluebrain.nexus.delta.sdk.ResourceShifts import ch.epfl.bluebrain.nexus.delta.sdk.stream.GraphResourceStream import ch.epfl.bluebrain.nexus.delta.sourcing.config.{ProjectionConfig, QueryConfig} @@ -9,6 +9,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.stream._ import ch.epfl.bluebrain.nexus.delta.sourcing.stream.pipes._ import ch.epfl.bluebrain.nexus.delta.sourcing.{DeleteExpired, PurgeElemFailures, Transactors} import izumi.distage.model.definition.ModuleDef +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import monix.bio.{Task, UIO} 
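The wiring in this hunk (and several call sites later in the patch) leans on the interop helpers from `ch.epfl.bluebrain.nexus.delta.kernel.effect.migration` (`toCatsIO`, `.toUIO`) to cross between monix BIO and cats-effect IO. Below is a minimal sketch of one possible shape for such helpers, assuming monix-bio and cats-effect 2 on the classpath; the real implementations in the kernel module may differ in signature and behaviour.

```scala
import cats.effect.{ContextShift, IO}
import monix.bio.{IO => BIO, UIO}
import monix.execution.Scheduler.Implicits.global

object MigrationShimSketch {
  implicit val cs: ContextShift[IO] = IO.contextShift(global)

  // monix BIO -> cats-effect IO: run the BIO and surface its typed error as a Throwable.
  def toCatsIO[E <: Throwable, A](bio: BIO[E, A]): IO[A] =
    IO.fromFuture(IO(bio.runToFuture))

  // cats-effect IO -> monix UIO, for wiring that still expects monix
  // (any failure escapes UIO's error channel as an unhandled error).
  implicit class CatsIOOps[A](private val io: IO[A]) extends AnyVal {
    def toUIO: UIO[A] =
      BIO.deferFuture(io.unsafeToFuture()).hideErrors
  }
}
```
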
/** @@ -55,8 +56,8 @@ object StreamModule extends ModuleDef { } make[DeleteExpired].fromEffect { - (supervisor: Supervisor, config: ProjectionConfig, xas: Transactors, clock: Clock[UIO]) => - DeleteExpired(supervisor, config, xas)(clock) + (supervisor: Supervisor, config: ProjectionConfig, xas: Transactors, clock: Clock[IO], timer: Timer[IO]) => + DeleteExpired(supervisor, config, xas)(clock, timer).toUIO } make[PurgeElemFailures].fromEffect { diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala index 43a7115c8c..876ae894c6 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownload.scala @@ -1,14 +1,18 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive +import akka.stream.alpakka.file.ArchiveMetadata import akka.stream.scaladsl.Source import akka.util.ByteString -import cats.implicits._ +import cats.effect.{ContextShift, IO} +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveReference.{FileReference, FileSelfReference, ResourceReference} import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection._ -import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceRepresentation._ import ch.epfl.bluebrain.nexus.delta.plugins.archive.model._ +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.Files import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection -import ch.epfl.bluebrain.nexus.delta.rdf.RdfError import ch.epfl.bluebrain.nexus.delta.rdf.implicits._ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.{JsonLdApi, JsonLdJavaApi} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution @@ -22,20 +26,18 @@ import ch.epfl.bluebrain.nexus.delta.sdk.error.SDKError import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.JsonLdContent import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.AnnotatedSource -import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, ResourceRepresentation} +import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceRepresentation._ +import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegmentRef, ResourceRepresentation} import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions.resources -import ch.epfl.bluebrain.nexus.delta.sdk.stream.StreamConverter -import ch.epfl.bluebrain.nexus.delta.sdk.{AkkaSource, JsonLdValue} +import ch.epfl.bluebrain.nexus.delta.sdk.stream.CatsStreamConverter +import ch.epfl.bluebrain.nexus.delta.sdk.{AkkaSource, JsonLdValue, ResourceShifts} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} -import com.typesafe.scalalogging.Logger import fs2.Stream import io.circe.{Json, Printer} -import monix.bio.{IO, Task, UIO} -import monix.execution.Scheduler +import monix.bio.{Task, UIO} import java.nio.ByteBuffer import java.nio.charset.StandardCharsets -import akka.stream.alpakka.file.ArchiveMetadata /** * Archive download functionality. 
@@ -60,13 +62,13 @@ trait ArchiveDownload { value: ArchiveValue, project: ProjectRef, ignoreNotFound: Boolean - )(implicit caller: Caller, scheduler: Scheduler): IO[ArchiveRejection, AkkaSource] + )(implicit caller: Caller): IO[AkkaSource] } object ArchiveDownload { - implicit private val logger: Logger = Logger[ArchiveDownload] + private val logger = Logger.cats[ArchiveDownload] case class ArchiveDownloadError(filename: String, response: Complete[JsonLdValue]) extends SDKError { override def getMessage: String = { @@ -86,10 +88,15 @@ object ArchiveDownload { */ def apply( aclCheck: AclCheck, - fetchResource: (ResourceRef, ProjectRef) => UIO[Option[JsonLdContent[_, _]]], - fetchFileContent: (ResourceRef, ProjectRef, Caller) => IO[FileRejection, FileResponse], + fetchResource: (ResourceRef, ProjectRef) => IO[Option[JsonLdContent[_, _]]], + fetchFileContent: (ResourceRef, ProjectRef, Caller) => IO[FileResponse], fileSelf: FileSelf - )(implicit sort: JsonKeyOrdering, baseUri: BaseUri, rcr: RemoteContextResolution): ArchiveDownload = + )(implicit + sort: JsonKeyOrdering, + baseUri: BaseUri, + rcr: RemoteContextResolution, + contextShift: ContextShift[IO] + ): ArchiveDownload = new ArchiveDownload { implicit private val api: JsonLdApi = JsonLdJavaApi.lenient @@ -100,17 +107,17 @@ object ArchiveDownload { value: ArchiveValue, project: ProjectRef, ignoreNotFound: Boolean - )(implicit caller: Caller, scheduler: Scheduler): IO[ArchiveRejection, AkkaSource] = { + )(implicit caller: Caller): IO[AkkaSource] = { for { references <- value.resources.toList.traverse(toFullReference) _ <- checkResourcePermissions(references, project) contentStream <- resolveReferencesAsStream(references, project, ignoreNotFound) } yield { - Source.fromGraph(StreamConverter(contentStream)).via(Zip.writeFlow) + Source.fromGraph(CatsStreamConverter(contentStream)).via(Zip.writeFlow) } } - private def toFullReference(archiveReference: ArchiveReference): IO[ArchiveRejection, FullArchiveReference] = { + private def toFullReference(archiveReference: ArchiveReference): IO[FullArchiveReference] = { archiveReference match { case reference: FullArchiveReference => IO.pure(reference) case reference: FileSelfReference => @@ -119,7 +126,9 @@ object ArchiveDownload { .map { case (projectRef, resourceRef) => FileReference(resourceRef, Some(projectRef), reference.path) } - .mapError(InvalidFileSelf) + .adaptError { case e: ParsingError => + InvalidFileSelf(e) + } } } @@ -127,7 +136,7 @@ object ArchiveDownload { references: List[FullArchiveReference], project: ProjectRef, ignoreNotFound: Boolean - )(implicit caller: Caller): IO[ArchiveRejection, Stream[Task, (ArchiveMetadata, AkkaSource)]] = { + )(implicit caller: Caller): IO[Stream[IO, (ArchiveMetadata, AkkaSource)]] = { references .traverseFilter { case ref: FileReference => fileEntry(ref, project, ignoreNotFound) @@ -137,12 +146,12 @@ object ArchiveDownload { .map(asStream) } - private def sortWith(list: List[(ArchiveMetadata, Task[AkkaSource])]): List[(ArchiveMetadata, Task[AkkaSource])] = + private def sortWith(list: List[(ArchiveMetadata, IO[AkkaSource])]): List[(ArchiveMetadata, IO[AkkaSource])] = list.sortBy { case (entry, _) => entry }(Zip.ordering) private def asStream( - list: List[(ArchiveMetadata, Task[AkkaSource])] - ): Stream[Task, (ArchiveMetadata, AkkaSource)] = + list: List[(ArchiveMetadata, IO[AkkaSource])] + ): Stream[IO, (ArchiveMetadata, AkkaSource)] = Stream.iterable(list).evalMap { case (metadata, source) => source.map(metadata -> _) } @@ -150,15 +159,16 @@ object 
ArchiveDownload { private def checkResourcePermissions( refs: List[FullArchiveReference], project: ProjectRef - )(implicit caller: Caller): IO[AuthorizationFailed, Unit] = + )(implicit caller: Caller): IO[Unit] = toCatsIO { aclCheck .mapFilterOrRaise( refs, (a: FullArchiveReference) => AclAddress.Project(a.project.getOrElse(project)) -> resources.read, identity[ArchiveReference], - address => IO.raiseError(AuthorizationFailed(address, resources.read)) + address => Task.raiseError(AuthorizationFailed(address, resources.read)) ) .void + } private def fileEntry( ref: FileReference, @@ -166,32 +176,30 @@ object ArchiveDownload { ignoreNotFound: Boolean )(implicit caller: Caller - ): IO[ArchiveRejection, Option[(ArchiveMetadata, Task[AkkaSource])]] = { + ): IO[Option[(ArchiveMetadata, IO[AkkaSource])]] = { val refProject = ref.project.getOrElse(project) // the required permissions are checked for each file content fetch val entry = fetchFileContent(ref.ref, refProject, caller) - .mapError { + .adaptError { case _: FileRejection.FileNotFound => ResourceNotFound(ref.ref, project) case _: FileRejection.TagNotFound => ResourceNotFound(ref.ref, project) case _: FileRejection.RevisionNotFound => ResourceNotFound(ref.ref, project) case FileRejection.AuthorizationFailed(addr, perm) => AuthorizationFailed(addr, perm) - case other => WrappedFileRejection(other) + case other: FileRejection => WrappedFileRejection(other) } .map { case FileResponse(fileMetadata, content) => - val path = pathOf(ref, project, fileMetadata.filename) - val archiveMetadata = Zip.metadata(path) - val contentTask: Task[AkkaSource] = content + val path = pathOf(ref, project, fileMetadata.filename) + val archiveMetadata = Zip.metadata(path) + val contentTask: IO[AkkaSource] = content .tapError(response => - UIO.delay( - logger - .error(s"Error streaming file '${fileMetadata.filename}' for archive: ${response.value.value}") - ) + logger + .error(s"Error streaming file '${fileMetadata.filename}' for archive: ${response.value.value}") + .toUIO ) .mapError(response => ArchiveDownloadError(fileMetadata.filename, response)) - Some((archiveMetadata, contentTask)) - + Option((archiveMetadata, contentTask)) } - if (ignoreNotFound) entry.onErrorRecover { case _: ResourceNotFound => None } + if (ignoreNotFound) entry.recover { case _: ResourceNotFound => None } else entry } @@ -209,34 +217,34 @@ object ArchiveDownload { ref: ResourceReference, project: ProjectRef, ignoreNotFound: Boolean - ): IO[ArchiveRejection, Option[(ArchiveMetadata, Task[AkkaSource])]] = { + ): IO[Option[(ArchiveMetadata, IO[AkkaSource])]] = { val archiveEntry = resourceRefToByteString(ref, project).map { content => val path = pathOf(ref, project) val metadata = Zip.metadata(path) - Some((metadata, Task.pure(Source.single(content)))) + Option((metadata, IO.pure(Source.single(content)))) } - if (ignoreNotFound) archiveEntry.onErrorHandle { _: ResourceNotFound => None } + if (ignoreNotFound) archiveEntry.recover { _: ResourceNotFound => None } else archiveEntry } private def resourceRefToByteString( ref: ResourceReference, project: ProjectRef - ): IO[ResourceNotFound, ByteString] = { + ): IO[ByteString] = { val p = ref.project.getOrElse(project) for { valueOpt <- fetchResource(ref.ref, p) - value <- IO.fromOption(valueOpt, ResourceNotFound(ref.ref, project)) - bytes <- valueToByteString(value, ref.representationOrDefault).logAndDiscardErrors( - "serialize resource to ByteString" - ) + value <- IO.fromOption(valueOpt)(ResourceNotFound(ref.ref, project)) + bytes <- 
valueToByteString(value, ref.representationOrDefault).onError { error => + logger.error(error)(s"Serializing resource '$ref' to ByteString failed.") + } } yield bytes } private def valueToByteString[A]( value: JsonLdContent[A, _], repr: ResourceRepresentation - ): IO[RdfError, ByteString] = { + ): IO[ByteString] = toCatsIO { implicit val encoder: JsonLdEncoder[A] = value.encoder repr match { case SourceJson => UIO.pure(ByteString(prettyPrintSource(value.source))) @@ -265,4 +273,17 @@ object ArchiveDownload { } } + def apply(aclCheck: AclCheck, shifts: ResourceShifts, files: Files, fileSelf: FileSelf)(implicit + sort: JsonKeyOrdering, + baseUri: BaseUri, + rcr: RemoteContextResolution, + contextShift: ContextShift[IO] + ): ArchiveDownload = + ArchiveDownload( + aclCheck, + shifts.fetch, + (id: ResourceRef, project: ProjectRef, caller: Caller) => files.fetchContent(IdSegmentRef(id), project)(caller), + fileSelf + ) + } diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginConfig.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginConfig.scala index f14a7bb82b..f74d0baf5f 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginConfig.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginConfig.scala @@ -1,8 +1,8 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.sourcing.config.EphemeralLogConfig import com.typesafe.config.Config -import monix.bio.UIO import pureconfig.generic.semiauto.deriveReader import pureconfig.{ConfigReader, ConfigSource} @@ -23,8 +23,8 @@ object ArchivePluginConfig { /** * Converts a [[Config]] into an [[ArchivePluginConfig]] */ - def load(config: Config): UIO[ArchivePluginConfig] = - UIO.delay { + def load(config: Config): IO[ArchivePluginConfig] = + IO.delay { ConfigSource .fromConfig(config) .at("plugins.archive") diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginModule.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginModule.scala index 91a06c2214..70360672ee 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginModule.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivePluginModule.scala @@ -1,6 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive -import cats.effect.Clock +import cats.effect.{Clock, ContextShift, IO} import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection.ProjectContextRejection import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.contexts @@ -13,17 +13,15 @@ import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities -import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller -import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegmentRef, MetadataContextValue} +import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, MetadataContextValue} import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection 
import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sourcing.Transactors -import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import com.typesafe.config.Config import izumi.distage.model.definition.{Id, ModuleDef} -import monix.bio.UIO -import monix.execution.Scheduler +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ /** * Archive plugin wiring. @@ -31,24 +29,20 @@ import monix.execution.Scheduler object ArchivePluginModule extends ModuleDef { implicit private val classLoader: ClassLoader = getClass.getClassLoader - make[ArchivePluginConfig].fromEffect { cfg: Config => ArchivePluginConfig.load(cfg) } + make[ArchivePluginConfig].fromEffect { cfg: Config => ArchivePluginConfig.load(cfg).toUIO } make[ArchiveDownload].from { ( aclCheck: AclCheck, shifts: ResourceShifts, files: Files, + fileSelf: FileSelf, sort: JsonKeyOrdering, baseUri: BaseUri, rcr: RemoteContextResolution @Id("aggregate"), - fileSelf: FileSelf + contextShift: ContextShift[IO] ) => - ArchiveDownload( - aclCheck, - shifts.fetch, - (id: ResourceRef, project: ProjectRef, caller: Caller) => files.fetchContent(IdSegmentRef(id), project)(caller), - fileSelf - )(sort, baseUri, rcr) + ArchiveDownload(aclCheck, shifts, files, fileSelf)(sort, baseUri, rcr, contextShift) } make[FileSelf].from { (fetchContext: FetchContext[ContextRejection], baseUri: BaseUri) => @@ -64,13 +58,15 @@ object ArchivePluginModule extends ModuleDef { api: JsonLdApi, uuidF: UUIDF, rcr: RemoteContextResolution @Id("aggregate"), - clock: Clock[UIO] + clock: Clock[IO], + ec: EvaluationExecution ) => Archives(fetchContext.mapRejection(ProjectContextRejection), archiveDownload, cfg, xas)( api, uuidF, rcr, - clock + clock, + ec ) } @@ -82,10 +78,9 @@ object ArchivePluginModule extends ModuleDef { schemeDirectives: DeltaSchemeDirectives, baseUri: BaseUri, rcr: RemoteContextResolution @Id("aggregate"), - jko: JsonKeyOrdering, - sc: Scheduler + jko: JsonKeyOrdering ) => - new ArchiveRoutes(archives, identities, aclCheck, schemeDirectives)(baseUri, rcr, jko, sc) + new ArchiveRoutes(archives, identities, aclCheck, schemeDirectives)(baseUri, rcr, jko) } many[PriorityRoute].add { (cfg: ArchivePluginConfig, routes: ArchiveRoutes) => diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala index c99b3d5a04..12822dc8b7 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/Archives.scala @@ -1,15 +1,16 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive -import cats.effect.Clock +import cats.effect.{Clock, IO} import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.syntax._ -import ch.epfl.bluebrain.nexus.delta.kernel.utils.{IOUtils, UUIDF} +import ch.epfl.bluebrain.nexus.delta.kernel.utils.{IOInstant, UUIDF} import ch.epfl.bluebrain.nexus.delta.plugins.archive.Archives.{entityType, expandIri, ArchiveLog} import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.archive.model._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import 
ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource import ch.epfl.bluebrain.nexus.delta.sdk.instances._ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller @@ -19,12 +20,11 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.IdSegment import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sourcing.config.EphemeralLogConfig +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import ch.epfl.bluebrain.nexus.delta.sourcing.model.{EntityType, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.{EphemeralDefinition, EphemeralLog, Transactors} import io.circe.Json -import monix.bio.{IO, UIO} -import monix.execution.Scheduler /** * Archives module. @@ -50,30 +50,14 @@ class Archives( archiveDownload: ArchiveDownload, sourceDecoder: JsonLdSourceDecoder[ArchiveRejection, ArchiveValue], config: EphemeralLogConfig -)(implicit uuidF: UUIDF, rcr: RemoteContextResolution) { +)(implicit rcr: RemoteContextResolution) { implicit private val kamonComponent: KamonMetricComponent = KamonMetricComponent(entityType.value) - /** - * Creates an archive with a system generated id. - * - * @param project - * the archive parent project - * @param value - * the archive value - * @param subject - * the subject that initiated the action - */ - def create( - project: ProjectRef, - value: ArchiveValue - )(implicit subject: Subject): IO[ArchiveRejection, ArchiveResource] = - uuidF().flatMap(uuid => create(uuid.toString, project, value)) - /** * Creates an archive with a specific id. * - * @param id + * @param iri * the archive identifier * @param project * the archive parent project @@ -82,16 +66,8 @@ class Archives( * @param subject * the subject that initiated the action */ - def create( - id: IdSegment, - project: ProjectRef, - value: ArchiveValue - )(implicit subject: Subject): IO[ArchiveRejection, ArchiveResource] = - (for { - p <- fetchContext.onRead(project) - iri <- expandIri(id, p) - res <- eval(CreateArchive(iri, project, value, subject)) - } yield res).span("createArchive") + def create(iri: Iri, project: ProjectRef, value: ArchiveValue)(implicit subject: Subject): IO[ArchiveResource] = + eval(CreateArchive(iri, project, value, subject)).span("createArchive") /** * Creates an archive from a json-ld representation. If an id is detected in the source document it will be used. 
@@ -104,11 +80,11 @@ class Archives( * @param subject * the subject that initiated the action */ - def create(project: ProjectRef, source: Json)(implicit subject: Subject): IO[ArchiveRejection, ArchiveResource] = + def create(project: ProjectRef, source: Json)(implicit subject: Subject): IO[ArchiveResource] = (for { - p <- fetchContext.onRead(project) - (iri, value) <- sourceDecoder(p, source) - res <- eval(CreateArchive(iri, project, value, subject)) + p <- toCatsIO(fetchContext.onRead(project)) + (iri, value) <- toCatsIO(sourceDecoder(p, source)) + res <- create(iri, project, value) } yield res).span("createArchive") /** @@ -129,12 +105,11 @@ class Archives( id: IdSegment, project: ProjectRef, source: Json - )(implicit subject: Subject): IO[ArchiveRejection, ArchiveResource] = + )(implicit subject: Subject): IO[ArchiveResource] = (for { - p <- fetchContext.onRead(project) - iri <- expandIri(id, p) - value <- sourceDecoder(p, iri, source) - res <- eval(CreateArchive(iri, project, value, subject)) + (iri, p) <- expandWithContext(id, project) + value <- toCatsIO(sourceDecoder(p, iri, source)) + res <- create(iri, project, value) } yield res).span("createArchive") /** @@ -145,13 +120,13 @@ class Archives( * @param project * the archive parent project */ - def fetch(id: IdSegment, project: ProjectRef): IO[ArchiveRejection, ArchiveResource] = - (for { - p <- fetchContext.onRead(project) - iri <- expandIri(id, p) - state <- log.stateOr(project, iri, ArchiveNotFound(iri, project)) - res = state.toResource(config.ttl) - } yield res).span("fetchArchive") + def fetch(id: IdSegment, project: ProjectRef): IO[ArchiveResource] = { + for { + (iri, _) <- expandWithContext(id, project) + state <- log.stateOr(project, iri, ArchiveNotFound(iri, project)) + res = state.toResource(config.ttl) + } yield res + }.span("fetchArchive") /** * Provides an [[AkkaSource]] for streaming an archive content. 
@@ -167,14 +142,21 @@ class Archives( id: IdSegment, project: ProjectRef, ignoreNotFound: Boolean - )(implicit caller: Caller, scheduler: Scheduler): IO[ArchiveRejection, AkkaSource] = + )(implicit caller: Caller): IO[AkkaSource] = (for { resource <- fetch(id, project) value = resource.value source <- archiveDownload(value.value, project, ignoreNotFound) } yield source).span("downloadArchive") - private def eval(cmd: CreateArchive): IO[ArchiveRejection, ArchiveResource] = + private def expandWithContext(id: IdSegment, project: ProjectRef) = toCatsIO { + for { + p <- fetchContext.onRead(project) + iri <- expandIri(id, p) + } yield (iri, p) + } + + private def eval(cmd: CreateArchive): IO[ArchiveResource] = log.evaluate(cmd.project, cmd.id, cmd).map { _.toResource(config.ttl) } } @@ -211,7 +193,8 @@ object Archives { api: JsonLdApi, uuidF: UUIDF, rcr: RemoteContextResolution, - clock: Clock[UIO] + clock: Clock[IO], + execution: EvaluationExecution ): Archives = new Archives( EphemeralLog( definition, @@ -224,7 +207,7 @@ object Archives { cfg.ephemeral ) - private def definition(implicit clock: Clock[UIO]) = + private def definition(implicit clock: Clock[IO]) = EphemeralDefinition( entityType, evaluate, @@ -240,9 +223,9 @@ object Archives { private[archive] def evaluate( command: CreateArchive - )(implicit clock: Clock[UIO]): IO[ArchiveRejection, ArchiveState] = - IOUtils.instant.map { instant => - ArchiveState(command.id, command.project, command.value.resources, instant, command.subject) + )(implicit clock: Clock[IO]): IO[ArchiveState] = + IOInstant.now.map { now => + ArchiveState(command.id, command.project, command.value.resources, now, command.subject) } } diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala index 24f57b8896..a1ac940d44 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelf.scala @@ -1,15 +1,17 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive import akka.http.scaladsl.model.Uri +import cats.effect.IO +import cats.syntax.all._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils -import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError -import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError.{ExternalLink, InvalidFileId, InvalidPath, InvalidProject, InvalidProjectContext} +import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError._ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.implicits._ +import ch.epfl.bluebrain.nexus.delta.sdk.error.SDKError import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment} import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} -import monix.bio.IO /** * Attempts to parse an incoming iri/uri as in order to extract file identifiers if it is a valid file "_self". 
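A minimal usage sketch for the reworked `FileSelf.parse`, assuming an instance wired as in `ArchivePluginModule`; the base URI and the URL-encoded file identifier below are made-up placeholders matching the `{base}/v1/files/{org}/{project}/{id}` shape the parser expects.

```scala
import akka.http.scaladsl.model.Uri
import cats.effect.IO
import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf
import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef}

object FileSelfUsageSketch {
  // Succeeds with the project and resource reference encoded in the `_self` link,
  // or raises a FileSelf.ParsingError (e.g. InvalidProject, InvalidFileId).
  def resolve(fileSelf: FileSelf): IO[(ProjectRef, ResourceRef)] =
    fileSelf.parse(Uri("http://delta:8080/v1/files/myorg/myproject/http%3A%2F%2Fexample.com%2Ffile1"))
}
```
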
@@ -18,10 +20,10 @@ import monix.bio.IO */ trait FileSelf { - def parse(input: Uri): IO[ParsingError, (ProjectRef, ResourceRef)] = + def parse(input: Uri): IO[(ProjectRef, ResourceRef)] = parse(input.toIri) - def parse(input: Iri): IO[ParsingError, (ProjectRef, ResourceRef)] + def parse(input: Iri): IO[(ProjectRef, ResourceRef)] } object FileSelf { @@ -29,8 +31,10 @@ object FileSelf { /** * Enumeration of errors that can be raised while attempting to resolve a self */ - sealed trait ParsingError extends Product with Serializable { + sealed trait ParsingError extends SDKError { def message: String + + override def getMessage: String = message } object ParsingError { @@ -83,7 +87,7 @@ object FileSelf { val filePrefixIri = baseUri.iriEndpoint / "files" / "" new FileSelf { - override def parse(input: Iri): IO[ParsingError, (ProjectRef, ResourceRef)] = + override def parse(input: Iri): IO[(ProjectRef, ResourceRef)] = validateSelfPrefix(input) >> parseSelf(input) private def validateSelfPrefix(self: Iri) = @@ -92,18 +96,18 @@ object FileSelf { else IO.raiseError(ParsingError.NonAbsoluteLink(self)) - private def parseSelf(self: Iri): IO[ParsingError, (ProjectRef, ResourceRef)] = + private def parseSelf(self: Iri): IO[(ProjectRef, ResourceRef)] = self.stripPrefix(filePrefixIri).split('/') match { case Array(org, project, id) => for { - project <- IO.fromEither(ProjectRef.parse(org, project)).mapError(_ => InvalidProject(self)) - projectContext <- fetchContext.onRead(project).mapError { _ => InvalidProjectContext(self, project) } + project <- IO.fromEither(ProjectRef.parse(org, project).leftMap(_ => InvalidProject(self))) + projectContext <- toCatsIO( + fetchContext.onRead(project).mapError { _ => InvalidProjectContext(self, project) } + ) decodedId = UrlUtils.decode(id) - resourceRef <- - IO.fromOption( - IdSegment(decodedId).toIri(projectContext.apiMappings, projectContext.base).map(ResourceRef(_)), - InvalidFileId(self) - ) + iriOption = + IdSegment(decodedId).toIri(projectContext.apiMappings, projectContext.base).map(ResourceRef(_)) + resourceRef <- IO.fromOption(iriOption)(InvalidFileId(self)) } yield { (project, resourceRef) } diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala index 49637d95b6..daaf6a9411 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/ArchiveRejection.scala @@ -19,6 +19,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContext.ContextRejection import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ProjectRef, ResourceRef} +import ch.epfl.bluebrain.nexus.delta.sourcing.rejection.Rejection import io.circe.syntax.EncoderOps import io.circe.{Encoder, JsonObject} @@ -28,7 +29,7 @@ import io.circe.{Encoder, JsonObject} * @param reason * a descriptive message as to why the rejection occurred */ -sealed abstract class ArchiveRejection(val reason: String) extends Product with Serializable +sealed abstract class ArchiveRejection(val reason: String) extends Rejection object ArchiveRejection { diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala 
b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala index 7477eb6a19..ce6f87e8da 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/model/Zip.scala @@ -1,9 +1,11 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive.model import akka.NotUsed -import akka.http.scaladsl.model.{ContentType, HttpRequest, MediaTypes} -import akka.stream.alpakka.file.scaladsl.Archive +import akka.http.scaladsl.model.{ContentType, MediaTypes} +import akka.http.scaladsl.server.Directive +import akka.http.scaladsl.server.Directives.extractRequest import akka.stream.alpakka.file.ArchiveMetadata +import akka.stream.alpakka.file.scaladsl.Archive import akka.stream.scaladsl.{Flow, Source} import akka.util.ByteString import ch.epfl.bluebrain.nexus.delta.sdk.utils.HeadersUtils @@ -25,5 +27,9 @@ object Zip { def metadata(filename: String): ArchiveMetadata = ArchiveMetadata.create(filename) - def checkHeader(req: HttpRequest): Boolean = HeadersUtils.matches(req.headers, Zip.contentType.mediaType) + def checkHeader: Directive[Tuple1[Boolean]] = + extractRequest.map { req => + HeadersUtils.matches(req.headers, Zip.contentType.mediaType) + } + } diff --git a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala index f4cba81633..1a06913a5f 100644 --- a/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala +++ b/delta/plugins/archive/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/routes/ArchiveRoutes.scala @@ -1,23 +1,26 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive.routes +import akka.http.scaladsl.model.StatusCode import akka.http.scaladsl.model.StatusCodes.{Created, SeeOther} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route +import cats.effect.IO +import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.plugins.archive.Archives -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.permissions -import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.Zip +import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{permissions, ArchiveRejection, ArchiveResource, Zip} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck +import ch.epfl.bluebrain.nexus.delta.sdk.ce.DeltaDirectives._ import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling -import ch.epfl.bluebrain.nexus.delta.sdk.directives.{AuthDirectives, DeltaDirectives, DeltaSchemeDirectives, FileResponse} +import ch.epfl.bluebrain.nexus.delta.sdk.directives.{AuthDirectives, DeltaSchemeDirectives, FileResponse} import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities +import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ -import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri +import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import io.circe.Json -import kamon.instrumentation.akka.http.TracingDirectives.operationName -import 
monix.execution.Scheduler /** * The Archive routes. @@ -36,59 +39,39 @@ class ArchiveRoutes( identities: Identities, aclCheck: AclCheck, schemeDirectives: DeltaSchemeDirectives -)(implicit baseUri: BaseUri, rcr: RemoteContextResolution, jko: JsonKeyOrdering, sc: Scheduler) +)(implicit baseUri: BaseUri, rcr: RemoteContextResolution, jko: JsonKeyOrdering) extends AuthDirectives(identities, aclCheck) - with CirceUnmarshalling - with DeltaDirectives { + with CirceUnmarshalling { - private val prefix = baseUri.prefixSegment import schemeDirectives._ def routes: Route = baseUriPrefix(baseUri.prefix) { pathPrefix("archives") { extractCaller { implicit caller => - resolveProjectRef.apply { implicit ref => + resolveProjectRef.apply { implicit project => concat( // create an archive without an id (post & entity(as[Json]) & pathEndOrSingleSlash) { json => - operationName(s"$prefix/archives/{org}/{project}") { - authorizeFor(ref, permissions.write).apply { - archiveResponse( - emitRedirect(SeeOther, archives.create(ref, json).map(_.uris.accessUri)), - emit(Created, archives.create(ref, json).mapValue(_.metadata)) - ) - } + authorizeFor(project, permissions.write).apply { + emitCreatedArchive(archives.create(project, json)) } }, (idSegment & pathEndOrSingleSlash) { id => - operationName(s"$prefix/archives/{org}/{project}/{id}") { - concat( - // create an archive with an id - (put & entity(as[Json]) & pathEndOrSingleSlash) { json => - authorizeFor(ref, permissions.write).apply { - archiveResponse( - emitRedirect(SeeOther, archives.create(id, ref, json).map(_.uris.accessUri)), - emit(Created, archives.create(id, ref, json).mapValue(_.metadata)) - ) - } - }, - // fetch or download an archive - (get & pathEndOrSingleSlash) { - authorizeFor(ref, permissions.read).apply { - archiveResponse( - parameter("ignoreNotFound".as[Boolean] ? 
false) { ignoreNotFound => - val response = archives.download(id, ref, ignoreNotFound).map { source => - sourceToFileResponse(source) - } - emit(response) - }, - emit(archives.fetch(id, ref)) - ) - } + concat( + // create an archive with an id + (put & entity(as[Json]) & pathEndOrSingleSlash) { json => + authorizeFor(project, permissions.write).apply { + emitCreatedArchive(archives.create(id, project, json)) } - ) - } + }, + // fetch or download an archive + (get & pathEndOrSingleSlash) { + authorizeFor(project, permissions.read).apply { + emitArchiveDownload(id, project) + } + } + ) } ) } @@ -96,9 +79,28 @@ class ArchiveRoutes( } } - private def archiveResponse(validResp: Route, invalidResp: Route): Route = - extractRequest.map(Zip.checkHeader(_)).apply(valid => if (valid) validResp else invalidResp) + private def emitMetadata(statusCode: StatusCode, io: IO[ArchiveResource]): Route = + emit(statusCode, io.mapValue(_.metadata).attemptNarrow[ArchiveRejection]) + + private def emitArchiveFile(source: IO[AkkaSource]) = { + val response = source.map { s => + FileResponse(s"archive.zip", Zip.contentType, 0L, s) + } + emit(response.attemptNarrow[ArchiveRejection]) + } + + private def emitCreatedArchive(io: IO[ArchiveResource]): Route = + Zip.checkHeader { + case true => emitRedirect(SeeOther, io.map(_.uris.accessUri).attemptNarrow[ArchiveRejection]) + case false => emitMetadata(Created, io) + } - private def sourceToFileResponse(source: AkkaSource): FileResponse = - FileResponse(s"archive.zip", Zip.contentType, 0L, source) + private def emitArchiveDownload(id: IdSegment, project: ProjectRef)(implicit caller: Caller): Route = + Zip.checkHeader { + case true => + parameter("ignoreNotFound".as[Boolean] ? false) { ignoreNotFound => + emitArchiveFile(archives.download(id, project, ignoreNotFound)) + } + case false => emit(archives.fetch(id, project).attemptNarrow[ArchiveRejection]) + } } diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala index 3d75c62a63..93d03ee0a4 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala @@ -7,16 +7,16 @@ import akka.stream.scaladsl.Source import akka.testkit.TestKit import akka.util.ByteString import cats.data.NonEmptySet +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils.encode import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveReference.{FileReference, FileSelfReference, ResourceReference} import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection.{AuthorizationFailed, InvalidFileSelf, ResourceNotFound} -import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceRepresentation.{CompactedJsonLd, Dot, ExpandedJsonLd, NQuads, NTriples, SourceJson} import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.{ArchiveRejection, ArchiveValue} import ch.epfl.bluebrain.nexus.delta.plugins.storage.RemoteContextResolutionFixture import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin.Client import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.FileNotFound -import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{Digest, FileAttributes, FileRejection} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{Digest, FileAttributes} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.{schemas, FileGen} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StorageFixtures import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.AbsolutePath @@ -32,16 +32,16 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.JsonLdContent import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri +import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceRepresentation.{CompactedJsonLd, Dot, ExpandedJsonLd, NQuads, NTriples, SourceJson} import ch.epfl.bluebrain.nexus.delta.sdk.permissions.Permissions import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef, ResourceRef} import ch.epfl.bluebrain.nexus.testkit.archive.ArchiveHelpers -import ch.epfl.bluebrain.nexus.testkit.{EitherValuable, IOValues, TestHelpers} +import ch.epfl.bluebrain.nexus.testkit.ce.CatsIOValues +import ch.epfl.bluebrain.nexus.testkit.{EitherValuable, TestHelpers} import io.circe.syntax.EncoderOps -import monix.bio.{IO, UIO} -import monix.execution.Scheduler.Implicits.global import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import org.scalatest.{Inspectors, OptionValues} @@ -55,7 +55,7 @@ class ArchiveDownloadSpec with AnyWordSpecLike with Inspectors with EitherValuable - with IOValues + with CatsIOValues with OptionValues with TestHelpers with StorageFixtures @@ -109,13 +109,13 @@ class ArchiveDownloadSpec val file2 = FileGen.resourceFor(id2, projectRef, storageRef, fileAttributes(file2Name, file2Size)) val file2Content: String = "file content 2" - val fetchResource: (Iri, ProjectRef) => UIO[Option[JsonLdContent[_, _]]] = { + val fetchResource: (Iri, ProjectRef) => IO[Option[JsonLdContent[_, _]]] = { case (`id1`, `projectRef`) => - UIO.some(JsonLdContent(file1, file1.value.asJson, None)) + IO.pure(Some(JsonLdContent(file1, file1.value.asJson, None))) case (`id2`, `projectRef`) => - UIO.some(JsonLdContent(file2, file2.value.asJson, None)) + IO.pure(Some(JsonLdContent(file2, file2.value.asJson, None))) case _ => - UIO.none + IO.none } val file1SelfIri: Iri = file1Self.toIri @@ -124,7 +124,7 @@ class ArchiveDownloadSpec case other => IO.raiseError(ParsingError.InvalidPath(other)) } - val fetchFileContent: (Iri, ProjectRef) => IO[FileRejection, FileResponse] = { + val fetchFileContent: (Iri, ProjectRef) => IO[FileResponse] = { case (`id1`, `projectRef`) => IO.pure( FileResponse(file1Name, ContentTypes.`text/plain(UTF-8)`, file1Size, Source.single(ByteString(file1Content))) @@ -154,7 +154,7 @@ class ArchiveDownloadSpec def rejectedAccess(value: ArchiveValue) = { archiveDownload - .apply(value, project.ref, ignoreNotFound = true)(Caller.Anonymous, global) + .apply(value, project.ref, ignoreNotFound = true)(Caller.Anonymous) .rejectedWith[AuthorizationFailed] } diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala 
index 0e061ee440..590f4fc502 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala @@ -8,6 +8,7 @@ import akka.http.scaladsl.model.{ContentTypes, StatusCodes, Uri} import akka.http.scaladsl.server.Route import akka.stream.scaladsl.Source import akka.util.ByteString +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils.encode import ch.epfl.bluebrain.nexus.delta.kernel.utils.{StatefulUUIDF, UUIDF} import ch.epfl.bluebrain.nexus.delta.plugins.archive.FileSelf.ParsingError.InvalidPath @@ -41,26 +42,32 @@ import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sdk.utils.BaseRouteSpec import ch.epfl.bluebrain.nexus.delta.sourcing.config.EphemeralLogConfig +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Identity, Label, ProjectRef, ResourceRef} import ch.epfl.bluebrain.nexus.testkit.archive.ArchiveHelpers +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsIOValues, IOFixedClock} import io.circe.Json import io.circe.syntax.EncoderOps -import monix.bio.{IO, UIO} -import monix.execution.Scheduler import org.scalatest.TryValues import java.util.UUID import scala.concurrent.duration._ -class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with TryValues with ArchiveHelpers { - - implicit private val scheduler: Scheduler = Scheduler.global +class ArchiveRoutesSpec + extends BaseRouteSpec + with StorageFixtures + with IOFixedClock + with TryValues + with ArchiveHelpers + with CatsIOValues { private val uuid = UUID.fromString("8249ba90-7cc6-4de5-93a1-802c04200dcc") implicit private val uuidF: StatefulUUIDF = UUIDF.stateful(uuid).accepted + implicit private val ee: EvaluationExecution = EvaluationExecution(timer, contextShift) + implicit override def rcr: RemoteContextResolution = RemoteContextResolutionFixture.rcr private val subject: Subject = Identity.User("user", Label.unsafe("realm")) @@ -90,7 +97,7 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with TryValue private val acceptAll = Accept(`*/*`) private val fetchContext = FetchContextDummy(List(project)) - private val groupDirectives = DeltaSchemeDirectives(fetchContext, _ => UIO.none, _ => UIO.none) + private val groupDirectives = DeltaSchemeDirectives(fetchContext) private val storageRef = ResourceRef.Revision(iri"http://localhost/${genString()}", 5) @@ -116,14 +123,14 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with TryValue private val generatedId = project.base.iri / uuid.toString - val fetchResource: (Iri, ProjectRef) => UIO[Option[JsonLdContent[_, _]]] = { + val fetchResource: (Iri, ProjectRef) => IO[Option[JsonLdContent[_, _]]] = { case (`fileId`, `projectRef`) => - UIO.some(JsonLdContent(file, file.value.asJson, None)) + IO.pure(Some(JsonLdContent(file, file.value.asJson, None))) case _ => - UIO.none + IO.none } - val fetchFileContent: (Iri, ProjectRef, Caller) => IO[FileRejection, FileResponse] = (id, p, c) => { + val fetchFileContent: (Iri, ProjectRef, Caller) => IO[FileResponse] = (id, p, c) => { val s = c.subject (id, p, s) match { case (_, 
_, `subjectNoFilePerms`) => @@ -290,7 +297,7 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with TryValue ) ~> routes ~> check { status shouldEqual StatusCodes.OK header[`Content-Type`].value.value() shouldEqual `application/zip`.value - val result = fromZip(responseEntity.dataBytes) + val result = fromZip(responseEntity.dataBytes)(materializer, executor) result.keySet shouldEqual Set( s"${project.ref}/file/file.txt", diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSTMSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSTMSpec.scala index bed5a9c895..7e124f683f 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSTMSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSTMSpec.scala @@ -8,7 +8,8 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.AbsolutePath import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, ResourceRef} -import ch.epfl.bluebrain.nexus.testkit.{EitherValuable, IOFixedClock, IOValues, TestHelpers} +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsIOValues, IOFixedClock} +import ch.epfl.bluebrain.nexus.testkit.{EitherValuable, TestHelpers} import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike @@ -18,7 +19,7 @@ import java.time.Instant class ArchivesSTMSpec extends AnyWordSpecLike with Matchers - with IOValues + with CatsIOValues with IOFixedClock with EitherValuable with TestHelpers { diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala index 273d3ee9dd..48e63df7a8 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchivesSpec.scala @@ -2,6 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.archive import akka.stream.scaladsl.Source import cats.data.NonEmptySet +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveReference.{FileReference, ResourceReference} import ch.epfl.bluebrain.nexus.delta.plugins.archive.model.ArchiveRejection.{ArchiveNotFound, ProjectContextRejection} @@ -16,14 +17,14 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceUris.EphemeralResourceInP import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sdk.projects.model.ApiMappings import ch.epfl.bluebrain.nexus.delta.sourcing.config.EphemeralLogConfig +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Subject, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.DoobieScalaTestFixture -import ch.epfl.bluebrain.nexus.testkit._ +import ch.epfl.bluebrain.nexus.testkit.{EitherValuable, TestHelpers} +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsIOValues, IOFixedClock} import 
io.circe.literal._ -import monix.bio.IO -import monix.execution.Scheduler import org.scalatest.matchers.should.Matchers import java.net.URLEncoder @@ -35,15 +36,14 @@ import scala.concurrent.duration._ class ArchivesSpec extends DoobieScalaTestFixture with Matchers - with IOValues with IOFixedClock + with CatsIOValues with EitherValuable with TestHelpers with RemoteContextResolutionFixture { - private val uuid = UUID.randomUUID() - implicit private val uuidF: UUIDF = UUIDF.random - implicit private val sc: Scheduler = Scheduler.global + private val uuid = UUID.randomUUID() + implicit private val uuidF: UUIDF = UUIDF.random implicit private val api: JsonLdApi = JsonLdJavaApi.strict @@ -61,15 +61,16 @@ class ArchivesSpec ProjectContextRejection ) - private val cfg = ArchivePluginConfig(1, EphemeralLogConfig(5.seconds, 5.hours)) - private val download = new ArchiveDownload { + private val cfg = ArchivePluginConfig(1, EphemeralLogConfig(5.seconds, 5.hours)) + private val download = new ArchiveDownload { override def apply(value: ArchiveValue, project: ProjectRef, ignoreNotFound: Boolean)(implicit - caller: Caller, - scheduler: Scheduler - ): IO[ArchiveRejection, AkkaSource] = + caller: Caller + ): IO[AkkaSource] = IO.pure(Source.empty) } - private lazy val archives = Archives(fetchContext, download, cfg, xas) + + implicit val ee: EvaluationExecution = EvaluationExecution(timer, contextShift) + private lazy val archives = Archives(fetchContext, download, cfg, xas) "An Archives module" should { "create an archive from source" in { @@ -180,29 +181,6 @@ class ArchivesSpec ) } - "create an archive from value" in { - val resourceId = iri"http://localhost/${genString()}" - val fileId = iri"http://localhost/${genString()}" - val value = ArchiveValue.unsafe( - NonEmptySet.of( - ResourceReference(Latest(resourceId), None, None, None), - FileReference(Latest(fileId), None, None) - ) - ) - - val resource = archives.create(project.ref, value).accepted - - val id = resource.id - val encodedId = URLEncoder.encode(id.toString, StandardCharsets.UTF_8) - resource.uris shouldEqual EphemeralResourceInProjectUris( - project.ref, - s"archives/${project.ref}/$encodedId" - ) - - resource.id shouldEqual id - resource.value shouldEqual Archive(id, project.ref, value.resources, 5.hours.toSeconds) - } - "create an archive from value with a fixed id" in { val id = iri"http://localhost/${genString()}" val resourceId = iri"http://localhost/${genString()}" diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelfSuite.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelfSuite.scala index e4c00c644c..8ec4181e6a 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelfSuite.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/FileSelfSuite.scala @@ -11,10 +11,9 @@ import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import ch.epfl.bluebrain.nexus.delta.sdk.projects.FetchContextDummy import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ResourceRef} -import ch.epfl.bluebrain.nexus.testkit.bio.BioSuite -import monix.bio.UIO +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite -class FileSelfSuite extends BioSuite { +class FileSelfSuite extends CatsEffectSuite { implicit private val baseUri: BaseUri = BaseUri("http://bbp.epfl.ch", Label.unsafe("v1")) @@ -34,46 +33,46 @@ 
class FileSelfSuite extends BioSuite { test("An expanded self should be parsed") { val input = iri"http://bbp.epfl.ch/v1/files/$org/$project/${encode(expandedResourceId)}" - fileSelf.parse(input).tapError { p => UIO.delay(println(p)) }.assert((projectRef, latestRef)) + fileSelf.parse(input).assertEquals((projectRef, latestRef)) } test("An expanded self with a revision should be parsed") { val input = iri"http://bbp.epfl.ch/v1/files/$org/$project/${encode(expandedResourceId)}?rev=$rev" - fileSelf.parse(input).tapError { p => UIO.delay(println(p)) }.assert((projectRef, revisionRef)) + fileSelf.parse(input).assertEquals((projectRef, revisionRef)) } test("An expanded self with a tag should be parsed") { val input = iri"http://bbp.epfl.ch/v1/files/$org/$project/${encode(expandedResourceId)}?tag=${tag.value}" - fileSelf.parse(input).tapError { p => UIO.delay(println(p)) }.assert((projectRef, tagRef)) + fileSelf.parse(input).assertEquals((projectRef, tagRef)) } test("A curie self should be parsed") { val input = iri"http://bbp.epfl.ch/v1/files/$org/$project/nxv:$compactResourceId" - fileSelf.parse(input).assert((projectRef, latestRef)) + fileSelf.parse(input).assertEquals((projectRef, latestRef)) } test("A relative self should not be parsed") { val input = iri"/$org/$project/$compactResourceId" - fileSelf.parse(input).error(NonAbsoluteLink(input)) + fileSelf.parse(input).intercept(NonAbsoluteLink(input)) } test("A self from an external website should not be parsed") { val input = iri"http://localhost/v1/files/$org/$project/$compactResourceId" - fileSelf.parse(input).error(ExternalLink(input)) + fileSelf.parse(input).intercept(ExternalLink(input)) } test("A self with an incorrect path should not be parsed") { val input = iri"http://bbp.epfl.ch/v1/files/$org/$project/$compactResourceId/extra" - fileSelf.parse(input).error(InvalidPath(input)) + fileSelf.parse(input).intercept(InvalidPath(input)) } test("A self with an incorrect project label should not be parsed") { val input = iri"http://bbp.epfl.ch/v1/files/%illegal/$project/$compactResourceId" - fileSelf.parse(input).error(InvalidProject(input)) + fileSelf.parse(input).intercept(InvalidProject(input)) } test("A self with an incorrect id should not resolve") { val input = iri"""http://bbp.epfl.ch/v1/files/$org/$project/badcurie:$compactResourceId")}""" - fileSelf.parse(input).error(InvalidFileId(input)) + fileSelf.parse(input).intercept(InvalidFileId(input)) } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToRedirect.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToRedirect.scala index 6c648b9267..c2823877c1 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToRedirect.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/ResponseToRedirect.scala @@ -5,7 +5,6 @@ import akka.http.scaladsl.model.Uri import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import cats.effect.IO -import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering @@ -14,8 +13,6 @@ import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.HttpResponseFields import monix.bio.{IO => BIO, UIO} import monix.execution.Scheduler -import scala.reflect.ClassTag - /** * Redirection response magnet. 
*/ @@ -52,12 +49,12 @@ object ResponseToRedirect { } } - implicit def ioRedirectWithError[E <: Throwable: ClassTag: JsonLdEncoder: HttpResponseFields]( - io: IO[Uri] + implicit def ioRedirectWithError[E <: Throwable: JsonLdEncoder: HttpResponseFields]( + io: IO[Either[E, Uri]] )(implicit cr: RemoteContextResolution, jo: JsonKeyOrdering): ResponseToRedirect = new ResponseToRedirect { override def apply(redirection: Redirection): Route = - onSuccess(io.attemptNarrow[E].unsafeToFuture()) { + onSuccess(io.unsafeToFuture()) { case Left(value) => CatsResponseToJsonLd.valueWithHttpResponseFields[E](value).apply(None) case Right(location) => redirect(location, redirection) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/stream/CatsStreamConverter.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/stream/CatsStreamConverter.scala new file mode 100644 index 0000000000..33d3c241ef --- /dev/null +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/stream/CatsStreamConverter.scala @@ -0,0 +1,89 @@ +package ch.epfl.bluebrain.nexus.delta.sdk.stream + +import akka.NotUsed +import akka.stream._ +import akka.stream.scaladsl.{Sink => AkkaSink, Source => AkkaSource, _} +import cats.effect._ +import cats.syntax.all._ +import fs2._ + +/** + * Converts a fs2 stream to an Akka source Original code from the streamz library from Martin Krasser (published under + * Apache License 2.0): + * https://github.com/krasserm/streamz/blob/master/streamz-converter/src/main/scala/streamz/converter/Converter.scala + */ +object CatsStreamConverter { + + private def publisherStream[A](publisher: SourceQueueWithComplete[A], stream: Stream[IO, A])(implicit + contextShift: ContextShift[IO] + ): Stream[IO, Unit] = { + def publish(a: A): IO[Option[Unit]] = IO + .fromFuture(IO(publisher.offer(a))) + .flatMap { + case QueueOfferResult.Enqueued => IO.pure(Some(())) + case QueueOfferResult.Failure(cause) => IO.raiseError[Option[Unit]](cause) + case QueueOfferResult.QueueClosed => IO.none + case QueueOfferResult.Dropped => + IO.raiseError[Option[Unit]]( + new IllegalStateException("This should never happen because we use OverflowStrategy.backpressure") + ) + } + .recover { + // This handles a race condition between `interruptWhen` and `publish`. + // There's no guarantee that, when the akka sink is terminated, we will observe the + // `interruptWhen` termination before calling publish one last time. 
+ // Such a call fails with StreamDetachedException + case _: StreamDetachedException => None + } + + def watchCompletion: IO[Unit] = IO.fromFuture(IO(publisher.watchCompletion())).void + def fail(e: Throwable): IO[Unit] = IO.delay(publisher.fail(e)) >> watchCompletion + def complete: IO[Unit] = IO.delay(publisher.complete()) >> watchCompletion + + stream + .interruptWhen(watchCompletion.attempt) + .evalMap(publish) + .unNoneTerminate + .onFinalizeCase { + case ExitCase.Completed | ExitCase.Canceled => complete + case ExitCase.Error(e) => fail(e) + } + } + + def apply[A](stream: Stream[IO, A])(implicit contextShift: ContextShift[IO]): Graph[SourceShape[A], NotUsed] = { + val source = AkkaSource.queue[A](0, OverflowStrategy.backpressure) + // A sink that runs an FS2 publisherStream when consuming the publisher actor (= materialized value) of source + val sink = AkkaSink.foreach[SourceQueueWithComplete[A]] { p => + // Fire and forget Future so it runs in the background + publisherStream[A](p, stream).compile.drain.unsafeToFuture() + () + } + + AkkaSource + .fromGraph(GraphDSL.createGraph(source) { implicit builder => source => + import GraphDSL.Implicits._ + builder.materializedValue ~> sink + SourceShape(source.out) + }) + .mapMaterializedValue(_ => NotUsed) + } + + def apply[A]( + source: Graph[SourceShape[A], NotUsed] + )(implicit materializer: Materializer, contextShift: ContextShift[IO]): Stream[IO, A] = + Stream.force { + IO.delay { + val subscriber = AkkaSource.fromGraph(source).toMat(AkkaSink.queue[A]())(Keep.right).run() + subscriberStream[A](subscriber) + } + } + + private def subscriberStream[A]( + subscriber: SinkQueueWithCancel[A] + )(implicit contextShift: ContextShift[IO]): Stream[IO, A] = { + val pull = IO.fromFuture(IO(subscriber.pull())) + val cancel = IO.delay(subscriber.cancel()) + Stream.repeatEval(pull).unNoneTerminate.onFinalize(cancel) + } + +} diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/DeleteExpired.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/DeleteExpired.scala index 18b5f0eb6c..fe2333ed9d 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/DeleteExpired.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/DeleteExpired.scala @@ -1,35 +1,35 @@ package ch.epfl.bluebrain.nexus.delta.sourcing -import cats.effect.Clock -import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOUtils +import cats.effect.{Clock, IO, Timer} +import ch.epfl.bluebrain.nexus.delta.kernel.Logger +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ +import ch.epfl.bluebrain.nexus.delta.kernel.utils.IOInstant import ch.epfl.bluebrain.nexus.delta.sourcing.DeleteExpired.logger import ch.epfl.bluebrain.nexus.delta.sourcing.config.ProjectionConfig import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{CompiledProjection, ExecutionStrategy, ProjectionMetadata, Supervisor} -import com.typesafe.scalalogging.Logger import doobie.implicits._ import doobie.postgres.implicits._ import fs2.Stream -import monix.bio.{Task, UIO} /** * Allow to delete expired ephemeral states */ -final class DeleteExpired private[sourcing] (xas: Transactors)(implicit clock: Clock[UIO]) { +final class DeleteExpired private[sourcing] (xas: Transactors)(implicit clock: Clock[IO]) { - def apply(): UIO[Unit] = { + def apply(): IO[Unit] = { for { - instant <- IOUtils.instant + instant <- IOInstant.now deleted <- sql""" | DELETE FROM public.ephemeral_states | WHERE expires < $instant 
- """.stripMargin.update.run.transact(xas.write).hideErrors - _ <- UIO.when(deleted > 0)(UIO.delay(logger.info(s"Deleted $deleted expired ephemeral states"))) + """.stripMargin.update.run.transact(xas.writeCE) + _ <- IO.whenA(deleted > 0)(logger.info(s"Deleted $deleted expired ephemeral states")) } yield () } } object DeleteExpired { - private val logger: Logger = Logger[DeleteExpired] + private val logger = Logger.cats[DeleteExpired] private val metadata: ProjectionMetadata = ProjectionMetadata("system", "delete-expired", None, None) @@ -37,23 +37,18 @@ object DeleteExpired { * Creates a [[DeleteExpired]] instance and schedules in the supervisor the deletion of expired ephemeral states */ def apply(supervisor: Supervisor, config: ProjectionConfig, xas: Transactors)(implicit - clock: Clock[UIO] - ): Task[DeleteExpired] = { + clock: Clock[IO], + timer: Timer[IO] + ): IO[DeleteExpired] = { val deleteExpired = new DeleteExpired(xas) val stream = Stream - .awakeEvery[Task](config.deleteExpiredEvery) + .awakeEvery[IO](config.deleteExpiredEvery) .evalTap(_ => deleteExpired()) .drain - supervisor - .run( - CompiledProjection.fromStream( - metadata, - ExecutionStrategy.TransientSingleNode, - _ => stream - ) - ) - .as(deleteExpired) + val deleteExpiredProjection = + CompiledProjection.fromStream(metadata, ExecutionStrategy.TransientSingleNode, _ => stream.translate(ioToUioK)) + supervisor.run(deleteExpiredProjection).as(deleteExpired) } } diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralDefinition.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralDefinition.scala index 945c5b5ed9..48b80b0289 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralDefinition.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralDefinition.scala @@ -1,26 +1,30 @@ package ch.epfl.bluebrain.nexus.delta.sourcing +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.sourcing.EvaluationError.EvaluationTimeout +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.epfl.bluebrain.nexus.delta.sourcing.model.EntityType +import ch.epfl.bluebrain.nexus.delta.sourcing.rejection.Rejection import ch.epfl.bluebrain.nexus.delta.sourcing.state.State.EphemeralState -import monix.bio.IO import scala.concurrent.duration.FiniteDuration -final case class EphemeralDefinition[Id, S <: EphemeralState, Command, Rejection]( +final case class EphemeralDefinition[Id, S <: EphemeralState, Command, +R <: Rejection]( tpe: EntityType, - evaluate: Command => IO[Rejection, S], + evaluate: Command => IO[S], stateSerializer: Serializer[Id, S], - onUniqueViolation: (Id, Command) => Rejection + onUniqueViolation: (Id, Command) => R ) { /** * Fetches the current state and attempt to apply an incoming command on it */ - def evaluate(command: Command, maxDuration: FiniteDuration): IO[Rejection, S] = + def evaluate(command: Command, maxDuration: FiniteDuration)(implicit execution: EvaluationExecution): IO[S] = evaluate(command).attempt - .timeoutWith(maxDuration, EvaluationTimeout(command, maxDuration)) - .hideErrors + .timeoutTo(maxDuration, IO.raiseError(EvaluationTimeout(command, maxDuration)))( + execution.timer, + execution.contextShift + ) .flatMap(IO.fromEither) } diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLog.scala 
b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLog.scala index d6ffb8cd3d..cc9ca19c64 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLog.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLog.scala @@ -1,12 +1,14 @@ package ch.epfl.bluebrain.nexus.delta.sourcing +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.sourcing.config.EphemeralLogConfig +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef +import ch.epfl.bluebrain.nexus.delta.sourcing.rejection.Rejection import ch.epfl.bluebrain.nexus.delta.sourcing.state.EphemeralStateStore import ch.epfl.bluebrain.nexus.delta.sourcing.state.State.EphemeralState import doobie.implicits._ import doobie.postgres.sqlstate -import monix.bio.IO /** * Event log for ephemeral entities that can be controlled through commands; @@ -17,7 +19,7 @@ import monix.bio.IO * Unsuccessful commands result in rejections returned to the caller context without any events being generated or * state transitions applied. */ -trait EphemeralLog[Id, S <: EphemeralState, Command, Rejection] { +trait EphemeralLog[Id, S <: EphemeralState, Command, R <: Rejection] { /** * Get the current state for the entity with the given __id__ @@ -28,7 +30,7 @@ trait EphemeralLog[Id, S <: EphemeralState, Command, Rejection] { * @param notFound * if no state is found, fails with this rejection */ - def stateOr[R <: Rejection](ref: ProjectRef, id: Id, notFound: => R): IO[R, S] + def stateOr[R2 <: R](ref: ProjectRef, id: Id, notFound: => R2): IO[S] /** * Evaluates the argument __command__ in the context of entity identified by __id__. 
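The EphemeralDefinition and EphemeralLog hunks above illustrate the recurring shape of this migration: Monix BIO's typed error channel (`IO[Rejection, S]`) is collapsed into Cats Effect's `IO[S]`, rejections travel through the single `Throwable` channel, and the evaluation timeout is enforced with `timeoutTo` against the `Timer`/`ContextShift` pair carried by `EvaluationExecution`. The sketch below reproduces that shape in isolation; it is not part of the patch: `GreetingRejection`, `TooLong`, `EvaluationTimedOut`, `sayHello` and `evaluateCapped` are invented names, and having rejections extend `Exception` is an assumption that mirrors (but is not copied from) the `Rejection` trait introduced in this series.

  // Minimal sketch of the BIO -> Cats Effect 2 evaluation pattern shown above;
  // all names here are invented for illustration and are not part of the Nexus codebase.
  import cats.effect.{ContextShift, IO, Timer}
  import cats.syntax.all._
  import scala.concurrent.ExecutionContext
  import scala.concurrent.duration._

  object EvaluationSketch {
    // Rejections extend Exception so they can be raised through IO's single error channel
    // (an assumption mirroring how rejections are raised in this patch series).
    sealed abstract class GreetingRejection(reason: String) extends Exception(reason)
    final case class TooLong(max: Int) extends GreetingRejection(s"Text is longer than $max characters")
    final case class EvaluationTimedOut(d: FiniteDuration) extends GreetingRejection(s"Evaluation timed out after $d")

    implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
    implicit val timer: Timer[IO]     = IO.timer(ExecutionContext.global)

    // Monix BIO would type this as `IO[GreetingRejection, String]`; with Cats Effect the
    // rejection is simply raised as the error of a plain IO[String].
    def sayHello(text: String): IO[String] =
      IO.raiseWhen(text.length > 10)(TooLong(10)).as(s"Hello, $text!")

    // Same shape as EphemeralDefinition.evaluate: cap the evaluation and surface a typed
    // rejection when the deadline is exceeded, using the implicit Timer/ContextShift.
    def evaluateCapped(text: String, maxDuration: FiniteDuration): IO[String] =
      sayHello(text).timeoutTo(maxDuration, IO.raiseError(EvaluationTimedOut(maxDuration)))
  }

Under these assumptions, `evaluateCapped("this text is far too long", 1.second)` fails with `TooLong(10)` before the timeout fires, which is the same behaviour the EphemeralLogSuite hunks further down exercise through `intercept(MessageTooLong(id, proj))`.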
@@ -43,7 +45,7 @@ trait EphemeralLog[Id, S <: EphemeralState, Command, Rejection] { * the newly generated state if the command was evaluated successfully, or the rejection of the __command__ * otherwise */ - def evaluate(ref: ProjectRef, id: Id, command: Command): IO[Rejection, S] + def evaluate(ref: ProjectRef, id: Id, command: Command): IO[S] } @@ -52,20 +54,20 @@ object EphemeralLog { /** * Creates on a ephemeral log for the given definition and config */ - def apply[Id, S <: EphemeralState, Command, Rejection]( - definition: EphemeralDefinition[Id, S, Command, Rejection], + def apply[Id, S <: EphemeralState, Command, R <: Rejection]( + definition: EphemeralDefinition[Id, S, Command, R], config: EphemeralLogConfig, xas: Transactors - ): EphemeralLog[Id, S, Command, Rejection] = { + )(implicit execution: EvaluationExecution): EphemeralLog[Id, S, Command, R] = { val stateStore = EphemeralStateStore(definition.tpe, definition.stateSerializer, config.ttl, xas) - new EphemeralLog[Id, S, Command, Rejection] { + new EphemeralLog[Id, S, Command, R] { - override def stateOr[R <: Rejection](ref: ProjectRef, id: Id, notFound: => R): IO[R, S] = + override def stateOr[R2 <: R](ref: ProjectRef, id: Id, notFound: => R2): IO[S] = stateStore.get(ref, id).flatMap { - IO.fromOption(_, notFound) + IO.fromOption(_)(notFound) } - override def evaluate(ref: ProjectRef, id: Id, command: Command): IO[Rejection, S] = { + override def evaluate(ref: ProjectRef, id: Id, command: Command): IO[S] = { for { newState <- definition.evaluate(command, config.maxDuration) res <- stateStore @@ -73,8 +75,7 @@ object EphemeralLog { .attemptSomeSqlState { case sqlstate.class23.UNIQUE_VIOLATION => definition.onUniqueViolation(id, command) } - .transact(xas.write) - .hideErrors + .transact(xas.writeCE) _ <- IO.fromEither(res) } yield newState } diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/execution/EvaluationExecution.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/execution/EvaluationExecution.scala new file mode 100644 index 0000000000..727a6b763d --- /dev/null +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/execution/EvaluationExecution.scala @@ -0,0 +1,5 @@ +package ch.epfl.bluebrain.nexus.delta.sourcing.execution + +import cats.effect.{ContextShift, IO, Timer} + +final case class EvaluationExecution(timer: Timer[IO], contextShift: ContextShift[IO]) diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStore.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStore.scala index d055f0d3b1..06736445ae 100644 --- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStore.scala +++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStore.scala @@ -1,5 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.sourcing.state +import cats.effect.IO import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.sourcing.{Serializer, Transactors} import ch.epfl.bluebrain.nexus.delta.sourcing.model._ @@ -7,7 +8,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.state.State.EphemeralState import doobie._ import doobie.implicits._ import doobie.postgres.implicits._ -import monix.bio.UIO import ch.epfl.bluebrain.nexus.delta.sourcing.implicits.IriInstances import scala.concurrent.duration.FiniteDuration @@ -25,7 +25,7 @@ trait EphemeralStateStore[Id, S <: 
EphemeralState] { /** * Returns the state */ - def get(ref: ProjectRef, id: Id): UIO[Option[S]] + def get(ref: ProjectRef, id: Id): IO[Option[S]] } object EphemeralStateStore { @@ -66,12 +66,11 @@ object EphemeralStateStore { """.stripMargin }.update.run.void - override def get(ref: ProjectRef, id: Id): UIO[Option[S]] = + override def get(ref: ProjectRef, id: Id): IO[Option[S]] = sql"""SELECT value FROM public.ephemeral_states WHERE type = $tpe AND org = ${ref.organization} AND project = ${ref.project} AND id = $id""" .query[S] .option - .transact(xas.read) - .hideErrors + .transact(xas.readCE) } } diff --git a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLogSuite.scala b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLogSuite.scala index f965355d51..af893e2f00 100644 --- a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLogSuite.scala +++ b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/EphemeralLogSuite.scala @@ -5,16 +5,17 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.Message.MessageRejection.{AlreadyE import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv import ch.epfl.bluebrain.nexus.delta.sourcing.Message.{CreateMessage, MessageRejection, MessageState} import ch.epfl.bluebrain.nexus.delta.sourcing.config.EphemeralLogConfig +import ch.epfl.bluebrain.nexus.delta.sourcing.execution.EvaluationExecution import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import ch.epfl.bluebrain.nexus.testkit.bio.BioSuite import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite import munit.AnyFixture import java.time.Instant import scala.concurrent.duration._ -class EphemeralLogSuite extends BioSuite with Doobie.Fixture with Doobie.Assertions { +class EphemeralLogSuite extends CatsEffectSuite with Doobie.Fixture with Doobie.Assertions { override def munitFixtures: Seq[AnyFixture[_]] = List(doobie) private lazy val xas = doobie() @@ -27,6 +28,8 @@ class EphemeralLogSuite extends BioSuite with Doobie.Fixture with Doobie.Asserti (_, command) => AlreadyExists(command.id, command.project) ) + implicit val ee: EvaluationExecution = EvaluationExecution(timer, contextShift) + private lazy val log = EphemeralLog( definition, EphemeralLogConfig(100.millis, 5.hours), @@ -39,36 +42,35 @@ class EphemeralLogSuite extends BioSuite with Doobie.Fixture with Doobie.Asserti private val alice = User("Alice", Label.unsafe("Wonderland")) private val message = MessageState(id, proj, text, alice, Instant.EPOCH, Anonymous) + private def createMessage(text: String) = + log.evaluate(proj, id, CreateMessage(id, proj, text, alice)) + test("Raise an error with a non-existent project") { - log.stateOr(ProjectRef.unsafe("xxx", "xxx"), id, NotFound).error(NotFound) + log.stateOr(ProjectRef.unsafe("xxx", "xxx"), id, NotFound).intercept(NotFound) } test("Raise an error with a non-existent id") { - log.stateOr(proj, nxv + "xxx", NotFound).error(NotFound) + log.stateOr(proj, nxv + "xxx", NotFound).intercept(NotFound) } test("Raise an error if the text message is too long and save nothing") { for { - _ <- log - .evaluate(proj, id, CreateMessage(id, proj, "Hello, World !", alice)) - .error(MessageTooLong(id, proj)) - _ <- log.stateOr(proj, id, NotFound).error(NotFound) + _ <- createMessage("Hello, World !").intercept(MessageTooLong(id, proj)) 
+ _ <- log.stateOr(proj, id, NotFound).intercept(NotFound) } yield () } test("Evaluate successfully the command and save the message") { for { - _ <- log.evaluate(proj, id, CreateMessage(id, proj, text, alice)).assert(message) - _ <- log.stateOr(proj, id, NotFound).assert(message) + _ <- createMessage(text).assertEquals(message) + _ <- log.stateOr(proj, id, NotFound).assertEquals(message) } yield () } test("Raise an error if id already exists and save nothing") { for { - _ <- log - .evaluate(proj, id, CreateMessage(id, proj, "Bye", alice)) - .error(AlreadyExists(id, proj)) - _ <- log.stateOr(proj, id, NotFound).assert(message) + _ <- createMessage("Bye").intercept(AlreadyExists(id, proj)) + _ <- log.stateOr(proj, id, NotFound).assertEquals(message) } yield () } } diff --git a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/Message.scala b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/Message.scala index ff55241f32..186e5c9faa 100644 --- a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/Message.scala +++ b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/Message.scala @@ -1,16 +1,17 @@ package ch.epfl.bluebrain.nexus.delta.sourcing +import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{nxv, schemas} import ch.epfl.bluebrain.nexus.delta.sourcing.Message.MessageRejection.MessageTooLong import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, Subject} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ResourceRef.Latest import ch.epfl.bluebrain.nexus.delta.sourcing.model.{EntityType, ProjectRef, ResourceRef} +import ch.epfl.bluebrain.nexus.delta.sourcing.rejection.Rejection import ch.epfl.bluebrain.nexus.delta.sourcing.state.State.EphemeralState import io.circe.Codec import io.circe.generic.extras.Configuration import io.circe.generic.extras.semiauto.deriveConfiguredCodec -import monix.bio.IO import java.time.Instant import scala.annotation.nowarn @@ -18,7 +19,7 @@ import scala.annotation.nowarn object Message { val entityType: EntityType = EntityType("message") - def evaluate(c: CreateMessage): IO[MessageRejection, MessageState] = + def evaluate(c: CreateMessage): IO[MessageState] = IO.raiseWhen(c.text.length > 10)(MessageTooLong(c.id, c.project)) .as(MessageState(c.id, c.project, c.text, c.from, Instant.EPOCH, Anonymous)) @@ -37,7 +38,9 @@ object Message { override def types: Set[Iri] = Set(nxv + "Message") } - sealed trait MessageRejection extends Product with Serializable + sealed trait MessageRejection extends Rejection { + override def reason: String = "Something bad happened." 
+ } object MessageRejection { final case object NotFound extends MessageRejection diff --git a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStoreSuite.scala b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStoreSuite.scala index 8db1579544..de9af45b71 100644 --- a/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStoreSuite.scala +++ b/delta/sourcing-psql/src/test/scala/ch/epfl/bluebrain/nexus/delta/sourcing/state/EphemeralStateStoreSuite.scala @@ -6,16 +6,15 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.{DeleteExpired, Message} import ch.epfl.bluebrain.nexus.delta.sourcing.Message.MessageState import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Anonymous, User} import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} -import ch.epfl.bluebrain.nexus.testkit.IOFixedClock -import ch.epfl.bluebrain.nexus.testkit.bio.BioSuite import ch.epfl.bluebrain.nexus.delta.sourcing.postgres.Doobie +import ch.epfl.bluebrain.nexus.testkit.ce.{CatsEffectSuite, IOFixedClock} import doobie.implicits._ import munit.AnyFixture import java.time.Instant import scala.concurrent.duration._ -class EphemeralStateStoreSuite extends BioSuite with Doobie.Fixture with Doobie.Assertions { +class EphemeralStateStoreSuite extends CatsEffectSuite with Doobie.Fixture with Doobie.Assertions { override def munitFixtures: Seq[AnyFixture[_]] = List(doobie) private lazy val xas = doobie() @@ -37,12 +36,12 @@ class EphemeralStateStoreSuite extends BioSuite with Doobie.Fixture with Doobie. private val m2 = nxv + "m2" private val message2 = MessageState(m2, project1, "Bye !", alice, Instant.EPOCH.plusSeconds(60L), Anonymous) - private lazy val deleteExpired = new DeleteExpired(xas)(IOFixedClock.ioClock(Instant.EPOCH.plusSeconds(6L))) + private lazy val deleteExpired = new DeleteExpired(xas)(IOFixedClock.ceClock(Instant.EPOCH.plusSeconds(6L))) test("save the states") { for { - _ <- store.save(message1).transact(xas.write).assert(()) - _ <- store.save(message2).transact(xas.write).assert(()) + _ <- store.save(message1).transact(xas.writeCE).assert + _ <- store.save(message2).transact(xas.writeCE).assert } yield () } diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala index 5a618b9925..39a87deca8 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsEffectSuite.scala @@ -22,14 +22,12 @@ abstract class CatsEffectSuite protected val ioTimeout: FiniteDuration = 45.seconds implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) + implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global) override def munitValueTransforms: List[ValueTransform] = super.munitValueTransforms ++ List(munitIOTransform, munitBIOTransform) private val munitIOTransform: ValueTransform = { - implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) - implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global) - new ValueTransform( "IO", { case io: IO[_] => diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala index f1c5cdcc63..6daa66fdf7 100644 --- 
a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/ce/CatsIOValues.scala @@ -1,6 +1,6 @@ package ch.epfl.bluebrain.nexus.testkit.ce -import cats.effect.IO +import cats.effect.{ContextShift, IO, Timer} import org.scalactic.source import org.scalatest.Assertion import org.scalatest.Assertions._ @@ -10,6 +10,9 @@ import scala.reflect.ClassTag trait CatsIOValues extends CatsIOValuesLowPrio { + implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) + implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global) + implicit def ioToFutureAssertion(io: IO[Assertion]): Future[Assertion] = io.unsafeToFuture() implicit def futureListToFutureAssertion(future: Future[List[Assertion]])(implicit diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala index be13ddc138..13eb936f20 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/BaseSpec.scala @@ -6,8 +6,8 @@ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers._ import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.util.ByteString +import cats.effect.IO import cats.effect.concurrent.Ref -import cats.effect.{ContextShift, IO} import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.testkit._ @@ -30,7 +30,6 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AsyncWordSpecLike import org.scalatest.{Assertion, BeforeAndAfterAll, OptionValues} -import scala.concurrent.ExecutionContext import scala.concurrent.duration._ trait BaseSpec @@ -49,8 +48,6 @@ trait BaseSpec with ScalaFutures with Matchers { - implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global) - private val logger = Logger.cats[this.type] implicit val config: TestsConfig = load[TestsConfig](ConfigFactory.load(), "tests") From 09f3df631f9edef92157e90eb72bea5b50da015a Mon Sep 17 00:00:00 2001 From: Simon Date: Wed, 11 Oct 2023 22:17:26 +0200 Subject: [PATCH 13/13] Migrate indexing actions to Cats Effect (#4355) Co-authored-by: Simon Dumas --- .../nexus/delta/routes/ResolversRoutes.scala | 1 - .../nexus/delta/routes/ResourcesRoutes.scala | 21 ++-- .../nexus/delta/routes/SchemasRoutes.scala | 1 - .../nexus/delta/wiring/DeltaModule.scala | 5 +- .../nexus/delta/wiring/ResolversModule.scala | 5 +- .../nexus/delta/wiring/ResourcesModule.scala | 5 +- .../nexus/delta/wiring/SchemasModule.scala | 5 +- .../blazegraph/BlazegraphIndexingAction.scala | 5 +- .../blazegraph/BlazegraphPluginModule.scala | 5 +- .../routes/BlazegraphViewsRoutes.scala | 18 ++-- .../BlazegraphIndexingActionSuite.scala | 13 ++- .../ElasticSearchIndexingAction.scala | 13 +-- .../ElasticSearchPluginModule.scala | 10 +- .../routes/ElasticSearchViewsRoutes.scala | 15 ++- .../ElasticSearchIndexingActionSuite.scala | 13 ++- .../plugins/storage/StoragePluginModule.scala | 9 +- .../storage/files/routes/FilesRoutes.scala | 26 +++-- .../storages/routes/StoragesRoutes.scala | 19 ++-- .../nexus/delta/sdk/IndexingAction.scala | 100 ++++++++---------- .../delta/sourcing/config/BatchConfig.scala | 2 +- 20 files changed, 153 insertions(+), 138 deletions(-) diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala 
b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala index 95826f8a29..7031e7f0a5 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResolversRoutes.scala @@ -6,7 +6,6 @@ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ import cats.effect.IO import cats.implicits._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.{contexts, schemas} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala index f4c17f9785..db803992b7 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/ResourcesRoutes.scala @@ -10,6 +10,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.routes.ResourcesRoutes.asSourceWithMetadata import ch.epfl.bluebrain.nexus.delta.sdk._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ @@ -25,6 +26,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.resources.NexusSource.DecodingOption import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.ResourceRejection.{InvalidJsonLdFormat, InvalidSchemaRejection, ResourceNotFound} import ch.epfl.bluebrain.nexus.delta.sdk.resources.model.{Resource, ResourceRejection} import ch.epfl.bluebrain.nexus.delta.sdk.resources.{NexusSource, Resources} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import io.circe.{Json, Printer} import monix.bio.IO import monix.execution.Scheduler @@ -67,6 +69,9 @@ final class ResourcesRoutes( implicit private def resourceFAJsonLdEncoder[A: JsonLdEncoder]: JsonLdEncoder[ResourceF[A]] = ResourceF.resourceFAJsonLdEncoder(ContextValue.empty) + private def indexUIO(project: ProjectRef, resource: ResourceF[Resource], mode: IndexingMode) = + index(project, resource, mode).toUIO + def routes: Route = baseUriPrefix(baseUri.prefix) { pathPrefix("resources") { @@ -79,7 +84,7 @@ final class ResourcesRoutes( authorizeFor(ref, Write).apply { emit( Created, - resources.create(ref, resourceSchema, source.value).tapEval(index(ref, _, mode)).map(_.void) + resources.create(ref, resourceSchema, source.value).tapEval(indexUIO(ref, _, mode)).map(_.void) ) } }, @@ -94,7 +99,7 @@ final class ResourcesRoutes( Created, resources .create(ref, schema, source.value) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .map(_.void) .rejectWhen(wrongJsonOrNotFound) ) @@ -115,7 +120,7 @@ final class ResourcesRoutes( Created, resources .create(id, ref, schema, source.value) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .map(_.void) .rejectWhen(wrongJsonOrNotFound) ) @@ -124,7 +129,7 @@ final class ResourcesRoutes( emit( resources .update(id, ref, schemaOpt, rev, source.value) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) 
.map(_.void) .rejectWhen(wrongJsonOrNotFound) ) @@ -137,7 +142,7 @@ final class ResourcesRoutes( emit( resources .deprecate(id, ref, schemaOpt, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .map(_.void) .rejectWhen(wrongJsonOrNotFound) ) @@ -166,7 +171,7 @@ final class ResourcesRoutes( OK, resources .refresh(id, ref, schemaOpt) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .map(_.void) .rejectWhen(wrongJsonOrNotFound) ) @@ -214,7 +219,7 @@ final class ResourcesRoutes( Created, resources .tag(id, ref, schemaOpt, tag, tagRev, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .map(_.void) .rejectWhen(wrongJsonOrNotFound) ) @@ -229,7 +234,7 @@ final class ResourcesRoutes( emit( resources .deleteTag(id, ref, schemaOpt, tag, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .map(_.void) .rejectOn[ResourceNotFound] ) diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutes.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutes.scala index f93d41a7ee..1e908f1f0f 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutes.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/routes/SchemasRoutes.scala @@ -6,7 +6,6 @@ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ import cats.effect.IO import cats.implicits._ -import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.contexts import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.schemas.shacl import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala index e5d12a4401..8bfba3c365 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/DeltaModule.scala @@ -73,8 +73,9 @@ class DeltaModule(appCfg: AppConfig, config: Config)(implicit classLoader: Class many[MetadataContextValue].addEffect(MetadataContextValue.fromFile("contexts/metadata.json")) - make[IndexingAction].named("aggregate").from { (internal: Set[IndexingAction]) => - AggregateIndexingAction(NonEmptyList.fromListUnsafe(internal.toList)) + make[AggregateIndexingAction].from { + (internal: Set[IndexingAction], contextShift: ContextShift[IO], cr: RemoteContextResolution @Id("aggregate")) => + AggregateIndexingAction(NonEmptyList.fromListUnsafe(internal.toList))(contextShift, cr) } make[RemoteContextResolution].named("aggregate").fromEffect { (otherCtxResolutions: Set[RemoteContextResolution]) => diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala index a9f0cf6a18..c34e912a59 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResolversModule.scala @@ -9,6 +9,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.routes.ResolversRoutes +import 
ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives @@ -70,7 +71,7 @@ object ResolversModule extends ModuleDef { aclCheck: AclCheck, resolvers: Resolvers, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, shift: Resolver.Shift, multiResolution: MultiResolution, baseUri: BaseUri, @@ -84,7 +85,7 @@ object ResolversModule extends ModuleDef { resolvers, multiResolution, schemeDirectives, - indexingAction(_, _, _)(shift, cr) + indexingAction(_, _, _)(shift) )( baseUri, cr, diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResourcesModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResourcesModule.scala index d4ec23b9b8..4f158debde 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResourcesModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/ResourcesModule.scala @@ -8,6 +8,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.routes.ResourcesRoutes +import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives @@ -81,7 +82,7 @@ object ResourcesModule extends ModuleDef { aclCheck: AclCheck, resources: Resources, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, shift: Resource.Shift, baseUri: BaseUri, s: Scheduler, @@ -95,7 +96,7 @@ object ResourcesModule extends ModuleDef { aclCheck, resources, schemeDirectives, - indexingAction(_, _, _)(shift, cr) + indexingAction(_, _, _)(shift) )( baseUri, s, diff --git a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala index 550cc08b2a..40f4ac41b0 100644 --- a/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala +++ b/delta/app/src/main/scala/ch/epfl/bluebrain/nexus/delta/wiring/SchemasModule.scala @@ -9,6 +9,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering import ch.epfl.bluebrain.nexus.delta.routes.SchemasRoutes +import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives @@ -71,14 +72,14 @@ object SchemasModule extends ModuleDef { aclCheck: AclCheck, schemas: Schemas, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, shift: Schema.Shift, baseUri: BaseUri, cr: RemoteContextResolution @Id("aggregate"), ordering: JsonKeyOrdering, fusionConfig: FusionConfig ) => - new SchemasRoutes(identities, aclCheck, schemas, schemeDirectives, 
indexingAction(_, _, _)(shift, cr))( + new SchemasRoutes(identities, aclCheck, schemas, schemeDirectives, indexingAction(_, _, _)(shift))( baseUri, cr, ordering, diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingAction.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingAction.scala index 02606a1585..c5a864023c 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingAction.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingAction.scala @@ -3,7 +3,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.blazegraph import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.client.BlazegraphClient import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing.IndexingViewDef.{ActiveViewDef, DeprecatedViewDef} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.indexing.{BlazegraphSink, IndexingViewDef} -import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri import ch.epfl.bluebrain.nexus.delta.sourcing.config.BatchConfig @@ -49,9 +48,7 @@ final class BlazegraphIndexingAction( case _: DeprecatedViewDef => UIO.none } - def projections(project: ProjectRef, elem: Elem[GraphResource])(implicit - cr: RemoteContextResolution - ): ElemStream[CompiledProjection] = + def projections(project: ProjectRef, elem: Elem[GraphResource]): ElemStream[CompiledProjection] = fetchCurrentViews(project).evalMap { _.evalMapFilter(compile(_, elem)) } } diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala index 15b85899d7..f37b107cf3 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphPluginModule.scala @@ -13,6 +13,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.slowqueries.{BlazegraphS import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering +import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.deletion.ProjectDeletionTask @@ -180,7 +181,7 @@ class BlazegraphPluginModule(priority: Int) extends ModuleDef { views: BlazegraphViews, viewsQuery: BlazegraphViewsQuery, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, shift: BlazegraphView.Shift, baseUri: BaseUri, cfg: BlazegraphViewsConfig, @@ -195,7 +196,7 @@ class BlazegraphPluginModule(priority: Int) extends ModuleDef { identities, aclCheck, schemeDirectives, - indexingAction(_, _, _)(shift, cr) + indexingAction(_, _, _)(shift) )( baseUri, s, diff --git a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutes.scala 
b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutes.scala index dea871edd3..fce82669c3 100644 --- a/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutes.scala +++ b/delta/plugins/blazegraph/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/routes/BlazegraphViewsRoutes.scala @@ -4,6 +4,7 @@ import akka.http.scaladsl.model.StatusCodes.Created import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{Directive0, Route} import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphView._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.BlazegraphViewRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model._ import ch.epfl.bluebrain.nexus.delta.plugins.blazegraph.model.permissions.{read => Read, write => Write} @@ -13,7 +14,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteCon import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder import ch.epfl.bluebrain.nexus.delta.rdf.query.SparqlQuery import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering -import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction +import ch.epfl.bluebrain.nexus.delta.sdk.{IndexingAction, IndexingMode} import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling import ch.epfl.bluebrain.nexus.delta.sdk.directives.{AuthDirectives, DeltaDirectives, DeltaSchemeDirectives} @@ -25,7 +26,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfMarshalling import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag import ch.epfl.bluebrain.nexus.delta.sdk.model.search.SearchResults._ import ch.epfl.bluebrain.nexus.delta.sdk.model.search.{PaginationConfig, SearchResults} -import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment} +import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment, ResourceF} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import io.circe.Json import monix.execution.Scheduler @@ -66,6 +67,9 @@ class BlazegraphViewsRoutes( import schemeDirectives._ + private def indexUIO(project: ProjectRef, resource: ResourceF[BlazegraphView], mode: IndexingMode) = + index(project, resource, mode).toUIO + def routes: Route = concat( pathPrefix("views") { @@ -79,7 +83,7 @@ class BlazegraphViewsRoutes( Created, views .create(ref, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -98,7 +102,7 @@ class BlazegraphViewsRoutes( Created, views .create(id, ref, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -107,7 +111,7 @@ class BlazegraphViewsRoutes( emit( views .update(id, ref, rev, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -120,7 +124,7 @@ class BlazegraphViewsRoutes( emit( views .deprecate(id, ref, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectOn[ViewNotFound] ) @@ -163,7 +167,7 @@ class BlazegraphViewsRoutes( Created, views .tag(id, ref, tag, tagRev, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectOn[ViewNotFound] ) diff 
--git a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingActionSuite.scala b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingActionSuite.scala index fcb89af841..f5c89610bf 100644 --- a/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingActionSuite.scala +++ b/delta/plugins/blazegraph/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/blazegraph/BlazegraphIndexingActionSuite.scala @@ -20,13 +20,15 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem.{DroppedElem, FailedElem, SuccessElem} import ch.epfl.bluebrain.nexus.delta.sourcing.stream.ProjectionErr.CouldNotFindPipeErr import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{NoopSink, PipeChain, PipeRef} -import ch.epfl.bluebrain.nexus.testkit.bio.{BioSuite, PatienceConfig} +import ch.epfl.bluebrain.nexus.testkit.bio.PatienceConfig +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import fs2.Stream import java.time.Instant import scala.concurrent.duration._ -class BlazegraphIndexingActionSuite extends BioSuite with Fixtures { +class BlazegraphIndexingActionSuite extends CatsEffectSuite with Fixtures { implicit private val patienceConfig: PatienceConfig = PatienceConfig(5.seconds, 10.millis) @@ -162,6 +164,7 @@ class BlazegraphIndexingActionSuite extends BioSuite with Fixtures { indexingAction .projections(project, elem) + .translate(taskToIoK) .fold(emptyAcc) { case (acc, s: SuccessElem[_]) => acc.success(s.id) case (acc, d: DroppedElem) => acc.drop(d.id) @@ -169,11 +172,11 @@ class BlazegraphIndexingActionSuite extends BioSuite with Fixtures { } .compile .lastOrError - .assert(expected) + .assertEquals(expected) } test("A valid elem should be indexed") { - indexingAction.apply(project, elem).assert(List.empty) + indexingAction.apply(project, elem).assertEquals(List.empty) } test("A failed elem should be returned") { @@ -187,7 +190,7 @@ class BlazegraphIndexingActionSuite extends BioSuite with Fixtures { rev = 1 ) - indexingAction.apply(project, failed).assert(List(failed)) + indexingAction.apply(project, failed).assertEquals(List(failed)) } } diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingAction.scala b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingAction.scala index 2d041aa4c3..d9b9649cd4 100644 --- a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingAction.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingAction.scala @@ -32,11 +32,10 @@ final class ElasticSearchIndexingAction( compilePipeChain: PipeChain => Either[ProjectionErr, Operation], sink: ActiveViewDef => Sink, override val timeout: FiniteDuration -) extends IndexingAction { +)(implicit cr: RemoteContextResolution) + extends IndexingAction { - private def compile(view: IndexingViewDef, elem: Elem[GraphResource])(implicit - cr: RemoteContextResolution - ): Task[Option[CompiledProjection]] = view match { + private def compile(view: IndexingViewDef, elem: Elem[GraphResource]): Task[Option[CompiledProjection]] = view match { // Synchronous indexing only applies to views that 
index the latest version case active: ActiveViewDef if active.selectFilter.tag == Tag.latest => IndexingViewDef @@ -51,9 +50,7 @@ final class ElasticSearchIndexingAction( case _: DeprecatedViewDef => UIO.none } - def projections(project: ProjectRef, elem: Elem[GraphResource])(implicit - cr: RemoteContextResolution - ): ElemStream[CompiledProjection] = + def projections(project: ProjectRef, elem: Elem[GraphResource]): ElemStream[CompiledProjection] = fetchCurrentViews(project).evalMap { _.evalMapFilter(compile(_, elem)) } } object ElasticSearchIndexingAction { @@ -64,7 +61,7 @@ object ElasticSearchIndexingAction { client: ElasticSearchClient, timeout: FiniteDuration, syncIndexingRefresh: Refresh - ): ElasticSearchIndexingAction = { + )(implicit cr: RemoteContextResolution): ElasticSearchIndexingAction = { val batchConfig = BatchConfig.individual new ElasticSearchIndexingAction( views.currentIndexingViews, diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchPluginModule.scala b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchPluginModule.scala index a7de7a4604..f76e602bc4 100644 --- a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchPluginModule.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchPluginModule.scala @@ -16,6 +16,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue.ContextObject import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering +import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.deletion.ProjectDeletionTask @@ -171,7 +172,7 @@ class ElasticSearchPluginModule(priority: Int) extends ModuleDef { aclCheck: AclCheck, views: ElasticSearchViews, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, viewsQuery: ElasticSearchViewsQuery, shift: ElasticSearchView.Shift, baseUri: BaseUri, @@ -186,7 +187,7 @@ class ElasticSearchPluginModule(priority: Int) extends ModuleDef { views, viewsQuery, schemeDirectives, - indexingAction(_, _, _)(shift, cr) + indexingAction(_, _, _)(shift) )( baseUri, s, @@ -371,9 +372,10 @@ class ElasticSearchPluginModule(priority: Int) extends ModuleDef { views: ElasticSearchViews, registry: ReferenceRegistry, client: ElasticSearchClient, - config: ElasticSearchViewsConfig + config: ElasticSearchViewsConfig, + cr: RemoteContextResolution @Id("aggregate") ) => - ElasticSearchIndexingAction(views, registry, client, config.syncIndexingTimeout, config.syncIndexingRefresh) + ElasticSearchIndexingAction(views, registry, client, config.syncIndexingTimeout, config.syncIndexingRefresh)(cr) } make[ElasticSearchView.Shift].fromEffect { (views: ElasticSearchViews, base: BaseUri) => diff --git a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutes.scala b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutes.scala index ab71f1fc9d..65cae6df62 100644 
--- a/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutes.scala +++ b/delta/plugins/elasticsearch/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/routes/ElasticSearchViewsRoutes.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.routes import akka.http.scaladsl.model.StatusCodes.Created import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.ElasticSearchViewRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model._ import ch.epfl.bluebrain.nexus.delta.plugins.elasticsearch.model.permissions.{read => Read, write => Write} @@ -20,6 +21,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfMarshalling import ch.epfl.bluebrain.nexus.delta.sdk.model._ import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import io.circe.{Json, JsonObject} import monix.execution.Scheduler @@ -59,6 +61,9 @@ final class ElasticSearchViewsRoutes( import schemeDirectives._ + private def indexUIO(project: ProjectRef, resource: ResourceF[ElasticSearchView], mode: IndexingMode) = + index(project, resource, mode).toUIO + def routes: Route = pathPrefix("views") { extractCaller { implicit caller => @@ -72,7 +77,7 @@ final class ElasticSearchViewsRoutes( Created, views .create(ref, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -93,7 +98,7 @@ final class ElasticSearchViewsRoutes( Created, views .create(id, ref, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -102,7 +107,7 @@ final class ElasticSearchViewsRoutes( emit( views .update(id, ref, rev, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -115,7 +120,7 @@ final class ElasticSearchViewsRoutes( emit( views .deprecate(id, ref, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) @@ -159,7 +164,7 @@ final class ElasticSearchViewsRoutes( Created, views .tag(id, ref, tag, tagRev, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectWhen(decodingFailedOrViewNotFound) ) diff --git a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingActionSuite.scala b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingActionSuite.scala index ca58fabf18..a28120c191 100644 --- a/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingActionSuite.scala +++ b/delta/plugins/elasticsearch/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/elasticsearch/ElasticSearchIndexingActionSuite.scala @@ -14,6 +14,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.PullRequest.PullRequestState import ch.epfl.bluebrain.nexus.delta.sourcing.PullRequest.PullRequestState.PullRequestActive import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Anonymous import 
ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ElemStream, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.offset.Offset import ch.epfl.bluebrain.nexus.delta.sourcing.query.SelectFilter @@ -21,14 +22,15 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem.{DroppedElem, FailedEl import ch.epfl.bluebrain.nexus.delta.sourcing.stream.ProjectionErr.CouldNotFindPipeErr import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{NoopSink, PipeChain, PipeRef} import ch.epfl.bluebrain.nexus.testkit.CirceLiteral -import ch.epfl.bluebrain.nexus.testkit.bio.{BioSuite, PatienceConfig} +import ch.epfl.bluebrain.nexus.testkit.bio.PatienceConfig +import ch.epfl.bluebrain.nexus.testkit.ce.CatsEffectSuite import fs2.Stream import io.circe.Json import java.time.Instant import scala.concurrent.duration._ -class ElasticSearchIndexingActionSuite extends BioSuite with CirceLiteral with Fixtures { +class ElasticSearchIndexingActionSuite extends CatsEffectSuite with CirceLiteral with Fixtures { implicit private val patienceConfig: PatienceConfig = PatienceConfig(5.seconds, 10.millis) @@ -174,6 +176,7 @@ class ElasticSearchIndexingActionSuite extends BioSuite with CirceLiteral with F indexingAction .projections(project, elem) + .translate(taskToIoK) .fold(emptyAcc) { case (acc, s: SuccessElem[_]) => acc.success(s.id) case (acc, d: DroppedElem) => acc.drop(d.id) @@ -181,11 +184,11 @@ class ElasticSearchIndexingActionSuite extends BioSuite with CirceLiteral with F } .compile .lastOrError - .assert(expected) + .assertEquals(expected) } test("A valid elem should be indexed") { - indexingAction.apply(project, elem).assert(List.empty) + indexingAction.apply(project, elem).assertEquals(List.empty) } test("A failed elem should be returned") { @@ -199,7 +202,7 @@ class ElasticSearchIndexingActionSuite extends BioSuite with CirceLiteral with F rev = 1 ) - indexingAction.apply(project, failed).assert(List(failed)) + indexingAction.apply(project, failed).assertEquals(List(failed)) } } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala index 7e98d6daa5..24374f0941 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala @@ -22,6 +22,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.{StorageDeletionTa import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering +import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction import ch.epfl.bluebrain.nexus.delta.sdk._ import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.auth.{AuthTokenProvider, Credentials} @@ -118,7 +119,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef { storages: Storages, storagesStatistics: StoragesStatistics, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, shift: Storage.Shift, baseUri: BaseUri, s: Scheduler, @@ -133,7 +134,7 @@ class 
StoragePluginModule(priority: Int) extends ModuleDef { storages, storagesStatistics, schemeDirectives, - indexingAction(_, _, _)(shift, cr) + indexingAction(_, _, _)(shift) )( baseUri, s, @@ -197,7 +198,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef { aclCheck: AclCheck, files: Files, schemeDirectives: DeltaSchemeDirectives, - indexingAction: IndexingAction @Id("aggregate"), + indexingAction: AggregateIndexingAction, shift: File.Shift, baseUri: BaseUri, s: Scheduler, @@ -206,7 +207,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef { fusionConfig: FusionConfig ) => val storageConfig = cfg.storages.storageTypeConfig - new FilesRoutes(identities, aclCheck, files, schemeDirectives, indexingAction(_, _, _)(shift, cr))( + new FilesRoutes(identities, aclCheck, files, schemeDirectives, indexingAction(_, _, _)(shift))( baseUri, storageConfig, s, diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala index d41f74e3cb..8f6ab8b69c 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala @@ -6,6 +6,7 @@ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.model.{ContentType, MediaRange} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{File, FileRejection} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.permissions.{read => Read, write => Write} @@ -24,7 +25,7 @@ import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag -import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment, IdSegmentRef} +import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment, IdSegmentRef, ResourceF} import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import io.circe.Decoder import io.circe.generic.extras.Configuration @@ -68,6 +69,9 @@ final class FilesRoutes( import baseUri.prefixSegment import schemeDirectives._ + private def indexUIO(project: ProjectRef, resource: ResourceF[File], mode: IndexingMode) = + index(project, resource, mode).toUIO + def routes: Route = (baseUriPrefix(baseUri.prefix) & replaceUri("files", schemas.files)) { pathPrefix("files") { @@ -83,12 +87,12 @@ final class FilesRoutes( entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => emit( Created, - files.createLink(storage, ref, filename, mediaType, path).tapEval(index(ref, _, mode)) + files.createLink(storage, ref, filename, mediaType, path).tapEval(indexUIO(ref, _, mode)) ) }, // Create a file without id segment extractRequestEntity { entity => - emit(Created, files.create(storage, ref, entity).tapEval(index(ref, _, mode))) + emit(Created, files.create(storage, ref, entity).tapEval(indexUIO(ref, _, mode))) } ) } @@ -108,12 +112,12 @@ final class FilesRoutes( Created, files .createLink(id, storage, ref, filename, mediaType, path) - .tapEval(index(ref, _, mode)) + 
.tapEval(indexUIO(ref, _, mode)) ) }, // Create a file with id segment extractRequestEntity { entity => - emit(Created, files.create(id, storage, ref, entity).tapEval(index(ref, _, mode))) + emit(Created, files.create(id, storage, ref, entity).tapEval(indexUIO(ref, _, mode))) } ) case (Some(rev), storage) => @@ -123,12 +127,12 @@ final class FilesRoutes( emit( files .updateLink(id, storage, ref, filename, mediaType, path, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) ) }, // Update a file extractRequestEntity { entity => - emit(files.update(id, storage, ref, rev, entity).tapEval(index(ref, _, mode))) + emit(files.update(id, storage, ref, rev, entity).tapEval(indexUIO(ref, _, mode))) } ) } @@ -136,7 +140,7 @@ final class FilesRoutes( // Deprecate a file (delete & parameter("rev".as[Int])) { rev => authorizeFor(ref, Write).apply { - emit(files.deprecate(id, ref, rev).tapEval(index(ref, _, mode)).rejectOn[FileNotFound]) + emit(files.deprecate(id, ref, rev).tapEval(indexUIO(ref, _, mode)).rejectOn[FileNotFound]) } }, // Fetch a file @@ -161,7 +165,7 @@ final class FilesRoutes( (post & parameter("rev".as[Int]) & pathEndOrSingleSlash) { rev => authorizeFor(ref, Write).apply { entity(as[Tag]) { case Tag(tagRev, tag) => - emit(Created, files.tag(id, ref, tag, tagRev, rev).tapEval(index(ref, _, mode))) + emit(Created, files.tag(id, ref, tag, tagRev, rev).tapEval(indexUIO(ref, _, mode))) } } }, @@ -170,7 +174,9 @@ final class FilesRoutes( ref, Write )) { (tag, rev) => - emit(files.deleteTag(id, ref, tag, rev).tapEval(index(ref, _, mode)).rejectOn[FileNotFound]) + emit( + files.deleteTag(id, ref, tag, rev).tapEval(indexUIO(ref, _, mode)).rejectOn[FileNotFound] + ) } ) } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutes.scala index 15e275efd8..3f96759601 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/routes/StoragesRoutes.scala @@ -2,6 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.routes import akka.http.scaladsl.model.StatusCodes.Created import akka.http.scaladsl.server.Directives._ +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import akka.http.scaladsl.server._ import cats.implicits._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages._ @@ -10,7 +11,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{Storage, St import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.permissions.{read => Read, write => Write} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering -import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction +import ch.epfl.bluebrain.nexus.delta.sdk.{IndexingAction, IndexingMode} import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck import ch.epfl.bluebrain.nexus.delta.sdk.circe.CirceUnmarshalling import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._ @@ -19,8 +20,9 @@ import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ import 
ch.epfl.bluebrain.nexus.delta.sdk.marshalling.RdfMarshalling -import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri +import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, ResourceF} import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag +import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import io.circe.Json import kamon.instrumentation.akka.http.TracingDirectives.operationName import monix.execution.Scheduler @@ -59,6 +61,9 @@ final class StoragesRoutes( import baseUri.prefixSegment import schemeDirectives._ + private def indexUIO(project: ProjectRef, resource: ResourceF[Storage], mode: IndexingMode) = + index(project, resource, mode).toUIO + def routes: Route = (baseUriPrefix(baseUri.prefix) & replaceUri("storages", schemas.storage)) { pathPrefix("storages") { @@ -71,7 +76,7 @@ final class StoragesRoutes( authorizeFor(ref, Write).apply { emit( Created, - storages.create(ref, source).tapEval(index(ref, _, mode)).mapValue(_.metadata) + storages.create(ref, source).tapEval(indexUIO(ref, _, mode)).mapValue(_.metadata) ) } } @@ -91,7 +96,7 @@ final class StoragesRoutes( Created, storages .create(id, ref, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) ) case (Some(rev), source) => @@ -99,7 +104,7 @@ final class StoragesRoutes( emit( storages .update(id, ref, rev, source) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) ) } @@ -111,7 +116,7 @@ final class StoragesRoutes( emit( storages .deprecate(id, ref, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) .rejectOn[StorageNotFound] ) @@ -162,7 +167,7 @@ final class StoragesRoutes( Created, storages .tag(id, ref, tag, tagRev, rev) - .tapEval(index(ref, _, mode)) + .tapEval(indexUIO(ref, _, mode)) .mapValue(_.metadata) ) } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/IndexingAction.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/IndexingAction.scala index bf74cc9da8..2682ad24b2 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/IndexingAction.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/IndexingAction.scala @@ -2,7 +2,10 @@ package ch.epfl.bluebrain.nexus.delta.sdk import cats.data.NonEmptyList import cats.effect.concurrent.Ref +import cats.effect.{ContextShift, IO} import cats.implicits._ +import ch.epfl.bluebrain.nexus.delta.kernel.Logger +import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.logger import ch.epfl.bluebrain.nexus.delta.sdk.IndexingMode.{Async, Sync} @@ -13,9 +16,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.{ElemStream, ProjectRef} import ch.epfl.bluebrain.nexus.delta.sourcing.state.GraphResource import ch.epfl.bluebrain.nexus.delta.sourcing.stream.Elem.{DroppedElem, FailedElem, SuccessElem} import ch.epfl.bluebrain.nexus.delta.sourcing.stream.{CompiledProjection, Elem, Projection} -import com.typesafe.scalalogging.Logger -import monix.bio.{IO, Task, UIO} -import fs2.Stream +import monix.bio.UIO import scala.concurrent.duration._ @@ -36,62 +37,33 @@ trait IndexingAction { * @param elem * the element to index */ - def projections(project: ProjectRef, elem: Elem[GraphResource])(implicit - cr: RemoteContextResolution - ): ElemStream[CompiledProjection] - - /** - * Perform an indexing action based on the indexing parameter. 
- * - * @param project - * the project in which the resource is located - * @param res - * the resource to perform the indexing action for - * @param indexingMode - * the execution type - */ - def apply[A](project: ProjectRef, res: ResourceF[A], indexingMode: IndexingMode)(implicit - shift: ResourceShift[_, A, _], - cr: RemoteContextResolution - ): UIO[Unit] = { - indexingMode match { - case Async => UIO.unit - case Sync => - for { - _ <- UIO.delay(logger.debug("Synchronous indexing of resource '{}/{}' has been requested.", project, res.id)) - // We create the GraphResource wrapped in an `Elem` - elem <- shift.toGraphResourceElem(project, res) - errors <- apply(project, elem) - _ <- IO.raiseWhen(errors.nonEmpty)(IndexingFailed(res.void, errors.map(_.throwable))) - } yield () - } - }.hideErrors + def projections(project: ProjectRef, elem: Elem[GraphResource]): ElemStream[CompiledProjection] def apply(project: ProjectRef, elem: Elem[GraphResource])(implicit - cr: RemoteContextResolution - ): Task[List[FailedElem]] = { + contextShift: ContextShift[IO] + ): IO[List[FailedElem]] = { for { // To collect the errors - errorsRef <- Ref.of[Task, List[FailedElem]](List.empty) + errorsRef <- Ref.of[IO, List[FailedElem]](List.empty) // We build and start the projections where the resource will apply _ <- projections(project, elem) + .translate(taskToIoK) // TODO make this configurable .parEvalMap(5) { case s: SuccessElem[CompiledProjection] => - runProjection(s.value, failed => errorsRef.update(_ ++ failed).hideErrors) - case _: DroppedElem => UIO.unit - case f: FailedElem => UIO.delay(logger.error(s"Fetching '$f' returned an error.", f.throwable)).as(None) + runProjection(s.value, failed => errorsRef.update(_ ++ failed)) + case _: DroppedElem => IO.unit + case f: FailedElem => logger.error(f.throwable)(s"Fetching '$f' returned an error.").as(None) } .compile .toList - .hideErrors - errors <- errorsRef.get.hideErrors + errors <- errorsRef.get } yield errors } - private def runProjection(compiled: CompiledProjection, saveFailedElems: List[FailedElem] => UIO[Unit]) = + private def runProjection(compiled: CompiledProjection, saveFailedElems: List[FailedElem] => IO[Unit]) = for { - projection <- Projection(compiled, UIO.none, _ => UIO.unit, saveFailedElems) + projection <- Projection(compiled, UIO.none, _ => UIO.unit, saveFailedElems(_).toUIO) _ <- projection.waitForCompletion(timeout) // We stop the projection if it has not complete yet _ <- projection.stop() @@ -100,32 +72,44 @@ trait IndexingAction { object IndexingAction { - type Execute[A] = (ProjectRef, ResourceF[A], IndexingMode) => UIO[Unit] + type Execute[A] = (ProjectRef, ResourceF[A], IndexingMode) => IO[Unit] /** * Does not perform any action */ - def noop[A]: Execute[A] = (_, _, _) => UIO.unit - - private val logger: Logger = Logger[IndexingAction] + def noop[A]: Execute[A] = (_, _, _) => IO.unit - private val noProjection: ElemStream[CompiledProjection] = Stream.empty + private val logger = Logger.cats[IndexingAction] /** * An instance of [[IndexingAction]] which executes other [[IndexingAction]] s in parallel. 
   */
-  final class AggregateIndexingAction(private val internal: NonEmptyList[IndexingAction]) extends IndexingAction {
-
-    // We pick the maximum timeout of all
-    override val timeout: FiniteDuration = internal.maximumBy(_.timeout).timeout
-
-    override def projections(project: ProjectRef, elem: Elem[GraphResource])(implicit
-        cr: RemoteContextResolution
-    ): ElemStream[CompiledProjection] =
-      internal.foldLeft(noProjection) { case (acc, action) => acc.merge(action.projections(project, elem)) }
+  final class AggregateIndexingAction(private val internal: NonEmptyList[IndexingAction])(implicit
+      contextShift: ContextShift[IO],
+      cr: RemoteContextResolution
+  ) {
+
+    def apply[A](project: ProjectRef, res: ResourceF[A], indexingMode: IndexingMode)(implicit
+        shift: ResourceShift[_, A, _]
+    ): IO[Unit] =
+      indexingMode match {
+        case Async => IO.unit
+        case Sync  =>
+          for {
+            _               <- logger.debug(s"Synchronous indexing of resource '$project/${res.id}' has been requested.")
+            // We create the GraphResource wrapped in an `Elem`
+            elem            <- toCatsIO(shift.toGraphResourceElem(project, res))
+            errorsPerAction <- internal.traverse(_.apply(project, elem))
+            errors           = errorsPerAction.toList.flatMap(_.map(_.throwable))
+            _               <- IO.raiseWhen(errors.nonEmpty)(IndexingFailed(res.void, errors))
+          } yield ()
+      }
   }
   object AggregateIndexingAction {
-    def apply(internal: NonEmptyList[IndexingAction]): AggregateIndexingAction = new AggregateIndexingAction(internal)
+    def apply(
+        internal: NonEmptyList[IndexingAction]
+    )(implicit contextShift: ContextShift[IO], cr: RemoteContextResolution): AggregateIndexingAction =
+      new AggregateIndexingAction(internal)
   }
 }
diff --git a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/config/BatchConfig.scala b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/config/BatchConfig.scala
index 33a7f73422..63eb1569d1 100644
--- a/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/config/BatchConfig.scala
+++ b/delta/sourcing-psql/src/main/scala/ch/epfl/bluebrain/nexus/delta/sourcing/config/BatchConfig.scala
@@ -17,7 +17,7 @@ final case class BatchConfig(maxElements: Int, maxInterval: FiniteDuration)
 object BatchConfig {
-  val individual = BatchConfig(1, 5.millis)
+  val individual = BatchConfig(1, 200.millis)
   implicit final val batchConfigReader: ConfigReader[BatchConfig] = deriveReader[BatchConfig]
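
Taken together, the changes above move synchronous indexing onto Cats Effect. The central piece is that the resource-level entry point no longer lives as a default method on IndexingAction but on the new AggregateIndexingAction, which captures ContextShift[IO] and RemoteContextResolution in its constructor; this is why every wiring module now injects AggregateIndexingAction directly and drops the extra cr argument from indexingAction(_, _, _)(shift). The sketch below only illustrates the aggregation shape (run each internal action, flatten the reported failures, raise a single error when any remain); Action, Failed and Aggregate are simplified stand-ins, not the repository's types:

    import cats.data.NonEmptyList
    import cats.effect.IO
    import cats.syntax.all._

    final case class Failed(reason: String)

    // Each action reports the elements it failed to index instead of raising immediately.
    trait Action {
      def run(elem: String): IO[List[Failed]]
    }

    final class Aggregate(internal: NonEmptyList[Action]) {
      def apply(elem: String): IO[Unit] =
        for {
          errorsPerAction <- internal.traverse(_.run(elem))  // run every internal action
          errors           = errorsPerAction.toList.flatten  // collect all reported failures
          _               <- IO.raiseWhen(errors.nonEmpty)(
                               new RuntimeException(s"indexing failed for: ${errors.map(_.reason).mkString(", ")}")
                             )
        } yield ()
    }

Running the actions with traverse (sequentially) mirrors the patch's internal.traverse(_.apply(project, elem)); each internal action already parallelises its own projections.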
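
Inside each IndexingAction, apply(project, elem) now returns a cats.effect.IO[List[FailedElem]] and accumulates failures in a cats.effect.concurrent.Ref while the compiled projections are evaluated in parallel. The following is a self-contained sketch of that error-collection pattern on the cats-effect 2 / fs2 2 stack used here; Failure, runAll and process are hypothetical placeholders, and the parallelism of 5 mirrors the hard-coded value the patch flags with a TODO:

    import cats.effect.concurrent.Ref
    import cats.effect.{ContextShift, IO}
    import fs2.Stream

    final case class Failure(message: String)

    // Evaluate every unit of work in parallel, remembering failures in a Ref
    // and returning them instead of failing the whole run.
    def runAll[A](work: Stream[IO, A])(process: A => IO[Either[Failure, Unit]])(implicit
        cs: ContextShift[IO]
    ): IO[List[Failure]] =
      for {
        errors <- Ref.of[IO, List[Failure]](List.empty)
        _      <- work
                    .parEvalMap(5) { item =>
                      process(item).flatMap {
                        case Left(failure) => errors.update(failure :: _)
                        case Right(())     => IO.unit
                      }
                    }
                    .compile
                    .drain
        result <- errors.get
      } yield result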
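
Because AggregateIndexingAction.apply now returns a cats.effect.IO[Unit] while the route implementations still compose Monix BIO values with tapEval, each routes class introduces a small adapter (indexUIO) that converts the result back with the .toUIO syntax from ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._. A minimal sketch of that shape, where ExampleResource and ExampleRoutes are placeholders for the concrete resource and routes types:

    import cats.effect.IO
    import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._
    import ch.epfl.bluebrain.nexus.delta.sdk.IndexingMode
    import ch.epfl.bluebrain.nexus.delta.sdk.model.ResourceF
    import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef

    final case class ExampleResource(name: String)

    // `index` is the injected action, already partially applied by the wiring:
    // indexingAction(_, _, _)(shift)
    class ExampleRoutes(index: (ProjectRef, ResourceF[ExampleResource], IndexingMode) => IO[Unit]) {

      // Adapter used by the routes so the result can still be chained with `tapEval`
      // on the existing Monix BIO code paths.
      private def indexUIO(project: ProjectRef, resource: ResourceF[ExampleResource], mode: IndexingMode) =
        index(project, resource, mode).toUIO
    }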
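
In the test suites, moving from BioSuite to CatsEffectSuite changes the assertion style (.assert(expected) becomes .assertEquals(expected)) and, since projections still produces an ElemStream backed by Monix's Task, the stream is first converted with translate(taskToIoK) before being compiled inside the Cats Effect test. A small sketch of that conversion, assuming taskToIoK is the monix.bio.Task ~> cats.effect.IO transformation exposed by the migration package:

    import cats.effect.IO
    import ch.epfl.bluebrain.nexus.delta.kernel.effect.migration._
    import fs2.Stream
    import monix.bio.Task

    // Count the elements of a Task-based stream from within a Cats Effect based test.
    def countElems[A](stream: Stream[Task, A]): IO[Long] =
      stream
        .translate(taskToIoK)           // Stream[Task, A] => Stream[IO, A]
        .fold(0L)((acc, _) => acc + 1)  // same fold-then-compile shape as the suites
        .compile
        .lastOrError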