Do not index storages in Blazegraph and Elasticsearch #5285

Merged: 1 commit, Feb 20, 2025
Changes from all commits
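
For orientation before the diff: this change drops the synchronous indexing step that storage write operations used to trigger. The sketch below is a rough, self-contained illustration in plain Scala, not the actual Nexus Delta API; `SyncIndexingSketch`, `StorageResource`, `create`, and `index` are hypothetical stand-ins for `Storages.create` and the removed `IndexingAction.Execute`. It only shows the shape of the pattern: the old route handlers chained an indexing side effect onto every write, while the new ones return the write result directly.

// Simplified illustration only: the real handlers return IO values inside akka-http routes.
object SyncIndexingSketch {
  final case class StorageResource(id: String, rev: Int)

  private def create(source: String): StorageResource =
    StorageResource(source, 1)

  // Stand-in for the removed indexing action (Blazegraph / Elasticsearch update).
  private def index(resource: StorageResource): Unit =
    println(s"indexing ${resource.id} rev ${resource.rev}")

  // Before this change: write, then index synchronously.
  def createAndIndex(source: String): StorageResource = {
    val resource = create(source)
    index(resource)
    resource
  }

  // After this change: write only; storages are no longer indexed.
  def createOnly(source: String): StorageResource =
    create(source)

  def main(args: Array[String]): Unit = {
    createAndIndex("disk-storage")
    createOnly("s3-storage")
  }
}
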
@@ -23,7 +23,6 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.clie
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.{S3FileOperations, S3LocationGenerator}
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.{FileOperations, LinkFileAction}
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.routes.StoragesRoutes
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.schemas.{storage => storagesSchemaId}
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution}
import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering
import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction.AggregateIndexingAction
@@ -124,8 +123,6 @@ class StoragePluginModule(priority: Int) extends ModuleDef {
storages: Storages,
storagesStatistics: StoragesStatistics,
schemeDirectives: DeltaSchemeDirectives,
indexingAction: AggregateIndexingAction,
shift: Storage.Shift,
baseUri: BaseUri,
cr: RemoteContextResolution @Id("aggregate"),
ordering: JsonKeyOrdering,
@@ -137,8 +134,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef {
aclCheck,
storages,
storagesStatistics,
schemeDirectives,
indexingAction(_, _, _)(shift)
schemeDirectives
)(
baseUri,
cr,
@@ -148,12 +144,6 @@
}
}

make[Storage.Shift].from { (storages: Storages, base: BaseUri) =>
Storage.shift(storages)(base)
}

many[ResourceShift[_, _, _]].ref[Storage.Shift]

make[FilesLog].from { (cfg: StoragePluginConfig, xas: Transactors, clock: Clock[IO]) =>
ScopedEventLog(Files.definition(clock), cfg.files.eventLog, xas)
}
@@ -299,7 +289,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef {
}

many[ResourceToSchemaMappings].add(
ResourceToSchemaMappings(Label.unsafe("storages") -> storagesSchemaId, Label.unsafe("files") -> filesSchemaId)
ResourceToSchemaMappings(Label.unsafe("files") -> filesSchemaId)
)

many[ApiMappings].add(Storages.mappings + Files.mappings)
@@ -310,7 +300,6 @@ class StoragePluginModule(priority: Int) extends ModuleDef {
}

many[ScopedEventMetricEncoder[_]].add { FileEvent.fileEventMetricEncoder }
many[ScopedEventMetricEncoder[_]].add { () => StorageEvent.storageEventMetricEncoder }

many[PriorityRoute].add { (storagesRoutes: StoragesRoutes) =>
PriorityRoute(priority, storagesRoutes.routes, requiresStrictEntity = true)
@@ -7,9 +7,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.ContextValue
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.encoder.JsonLdEncoder
import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.JsonLdContent
import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegmentRef}
import ch.epfl.bluebrain.nexus.delta.sdk.{OrderingFields, ResourceShift}
import ch.epfl.bluebrain.nexus.delta.sdk.OrderingFields
import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef
import io.circe.syntax._
import io.circe.{Encoder, Json, JsonObject}
@@ -108,15 +106,4 @@ object Storage {
OrderingFields { case "_algorithm" =>
Ordering[String] on (_.storageValue.algorithm.value)
}

type Shift = ResourceShift[StorageState, Storage, Metadata]

def shift(storages: Storages)(implicit baseUri: BaseUri): Shift =
ResourceShift.withMetadata[StorageState, Storage, Metadata](
Storages.entityType,
(ref, project) => storages.fetch(IdSegmentRef(ref), project),
state => state.toResource,
value => JsonLdContent(value, value.value.source, Some(value.value.metadata))
)

}
@@ -9,9 +9,7 @@ import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords
import ch.epfl.bluebrain.nexus.delta.sdk.instances._
import ch.epfl.bluebrain.nexus.delta.sdk.jsonld.IriEncoder
import ch.epfl.bluebrain.nexus.delta.sdk.model.BaseUri
import ch.epfl.bluebrain.nexus.delta.sdk.model.metrics.EventMetric._
import ch.epfl.bluebrain.nexus.delta.sdk.model.metrics.ScopedEventMetricEncoder
import ch.epfl.bluebrain.nexus.delta.sdk.sse.{resourcesSelector, SseEncoder}
import ch.epfl.bluebrain.nexus.delta.sdk.sse.SseEncoder
import ch.epfl.bluebrain.nexus.delta.sourcing.Serializer
import ch.epfl.bluebrain.nexus.delta.sourcing.event.Event.ScopedEvent
import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.Subject
@@ -194,34 +192,12 @@ object StorageEvent {
Serializer.dropNulls()
}

def storageEventMetricEncoder: ScopedEventMetricEncoder[StorageEvent] =
new ScopedEventMetricEncoder[StorageEvent] {
override def databaseDecoder: Decoder[StorageEvent] = serializer.codec

override def entityType: EntityType = Storages.entityType

override def eventToMetric: StorageEvent => ProjectScopedMetric = event =>
ProjectScopedMetric.from(
event,
event match {
case _: StorageCreated => Created
case _: StorageUpdated => Updated
case _: StorageTagAdded => Tagged
case _: StorageDeprecated => Deprecated
case _: StorageUndeprecated => Undeprecated
},
event.id,
event.tpe.types,
JsonObject.empty
)
}

def sseEncoder(implicit base: BaseUri): SseEncoder[StorageEvent] = new SseEncoder[StorageEvent] {
override val databaseDecoder: Decoder[StorageEvent] = serializer.codec

override def entityType: EntityType = Storages.entityType

override val selectors: Set[Label] = Set(Label.unsafe("storages"), resourcesSelector)
override val selectors: Set[Label] = Set(Label.unsafe("storages"))

override val sseEncoder: Encoder.AsObject[StorageEvent] = {
val context = ContextValue(Vocabulary.contexts.metadata, contexts.storages)
@@ -6,12 +6,11 @@ import cats.implicits._
import ch.epfl.bluebrain.nexus.delta.kernel.circe.CirceUnmarshalling
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragePluginExceptionHandler.handleStorageExceptions
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages._
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection._
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{Storage, StorageRejection}
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.permissions.{read => Read, write => Write}
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.RemoteContextResolution
import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering
import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction
import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclCheck
import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaDirectives._
import ch.epfl.bluebrain.nexus.delta.sdk.directives.{AuthDirectives, DeltaSchemeDirectives}
@@ -33,16 +32,13 @@ import io.circe.Json
* the storages module
* @param schemeDirectives
* directives related to orgs and projects
* @param index
* the indexing action on write operations
*/
final class StoragesRoutes(
identities: Identities,
aclCheck: AclCheck,
storages: Storages,
storagesStatistics: StoragesStatistics,
schemeDirectives: DeltaSchemeDirectives,
index: IndexingAction.Execute[Storage]
schemeDirectives: DeltaSchemeDirectives
)(implicit
baseUri: BaseUri,
cr: RemoteContextResolution,
@@ -62,16 +58,16 @@
concat(
pathEndOrSingleSlash {
// Create a storage without id segment
(post & noParameter("rev") & entity(as[Json]) & indexingMode) { (source, mode) =>
(post & noParameter("rev") & entity(as[Json])) { source =>
authorizeFor(project, Write).apply {
emit(
Created,
storages.create(project, source).flatTap(index(project, _, mode)).mapValue(_.metadata)
storages.create(project, source).mapValue(_.metadata)
)
}
}
},
(idSegment & indexingMode) { (id, mode) =>
idSegment { id =>
concat(
pathEndOrSingleSlash {
concat(
@@ -85,15 +81,13 @@
Created,
storages
.create(id, project, source)
.flatTap(index(project, _, mode))
.mapValue(_.metadata)
)
case (Some(rev), source) =>
// Update a storage
emit(
storages
.update(id, project, rev, source)
.flatTap(index(project, _, mode))
.mapValue(_.metadata)
)
}
@@ -105,7 +99,6 @@
emit(
storages
.deprecate(id, project, rev)
.flatTap(index(project, _, mode))
.mapValue(_.metadata)
.attemptNarrow[StorageRejection]
.rejectOn[StorageNotFound]
@@ -135,7 +128,6 @@
emit(
storages
.undeprecate(id, project, rev)
.flatTap(index(project, _, mode))
.mapValue(_.metadata)
.attemptNarrow[StorageRejection]
.rejectOn[StorageNotFound]
@@ -176,14 +168,13 @@ object StoragesRoutes {
aclCheck: AclCheck,
storages: Storages,
storagesStatistics: StoragesStatistics,
schemeDirectives: DeltaSchemeDirectives,
index: IndexingAction.Execute[Storage]
schemeDirectives: DeltaSchemeDirectives
)(implicit
baseUri: BaseUri,
cr: RemoteContextResolution,
ordering: JsonKeyOrdering,
fusionConfig: FusionConfig
): Route =
new StoragesRoutes(identities, aclCheck, storages, storagesStatistics, schemeDirectives, index).routes
new StoragesRoutes(identities, aclCheck, storages, storagesStatistics, schemeDirectives).routes

}
@@ -5,12 +5,10 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StorageFixtures
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageEvent._
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageType.{DiskStorage => DiskStorageType}
import ch.epfl.bluebrain.nexus.delta.sdk.SerializationSuite
import ch.epfl.bluebrain.nexus.delta.sdk.model.metrics.EventMetric._
import ch.epfl.bluebrain.nexus.delta.sdk.sse.SseEncoder.SseData
import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.{Subject, User}
import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag
import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef}
import io.circe.JsonObject

import java.time.Instant
import scala.collection.immutable.VectorMap
@@ -33,20 +31,19 @@ class StorageSerializationSuite extends SerializationSuite with StorageFixtures
private val diskUndeprecated = StorageUndeprecated(dId, projectRef, DiskStorageType, 5, instant, subject)

private val storagesMapping = List(
(diskCreated, loadEvents("storages", "disk-storage-created.json"), Created),
(s3Created, loadEvents("storages", "s3-storage-created.json"), Created),
(diskUpdated, loadEvents("storages", "disk-storage-updated.json"), Updated),
(s3Updated, loadEvents("storages", "s3-storage-updated.json"), Updated),
(diskTagged, loadEvents("storages", "storage-tag-added.json"), Tagged),
(diskDeprecated, loadEvents("storages", "storage-deprecated.json"), Deprecated),
(diskUndeprecated, loadEvents("storages", "storage-undeprecated.json"), Undeprecated)
(diskCreated, loadEvents("storages", "disk-storage-created.json")),
(s3Created, loadEvents("storages", "s3-storage-created.json")),
(diskUpdated, loadEvents("storages", "disk-storage-updated.json")),
(s3Updated, loadEvents("storages", "s3-storage-updated.json")),
(diskTagged, loadEvents("storages", "storage-tag-added.json")),
(diskDeprecated, loadEvents("storages", "storage-deprecated.json")),
(diskUndeprecated, loadEvents("storages", "storage-undeprecated.json"))
)

private val storageEventSerializer = StorageEvent.serializer
private val storageSseEncoder = StorageEvent.sseEncoder
private val storageEventMetricEncoder = StorageEvent.storageEventMetricEncoder
private val storageEventSerializer = StorageEvent.serializer
private val storageSseEncoder = StorageEvent.sseEncoder

storagesMapping.foreach { case (event, (database, sse), action) =>
storagesMapping.foreach { case (event, (database, sse)) =>
test(s"Correctly serialize ${event.getClass.getSimpleName} for ${event.tpe}") {
assertEquals(storageEventSerializer.codec(event), database)
}
@@ -60,22 +57,6 @@
.decodeJson(database)
.assertRight(SseData(ClassUtils.simpleName(event), Some(projectRef), sse))
}

test(s"Correctly encode ${event.getClass.getSimpleName} for ${event.tpe} to metric") {
storageEventMetricEncoder.toMetric.decodeJson(database).assertRight {
ProjectScopedMetric(
instant,
subject,
event.rev,
Set(action),
projectRef,
Label.unsafe("myorg"),
event.id,
event.tpe.types,
JsonObject.empty
)
}
}
}

private val statesMapping = VectorMap(
@@ -14,7 +14,6 @@ import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary
import ch.epfl.bluebrain.nexus.delta.rdf.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution}
import ch.epfl.bluebrain.nexus.delta.sdk.IndexingAction
import ch.epfl.bluebrain.nexus.delta.sdk.acls.AclSimpleCheck
import ch.epfl.bluebrain.nexus.delta.sdk.acls.model.AclAddress
import ch.epfl.bluebrain.nexus.delta.sdk.directives.DeltaSchemeDirectives
@@ -110,7 +109,7 @@ class StoragesRoutesSpec extends BaseRouteSpec with StorageFixtures with UUIDFFi

private lazy val routes =
Route.seal(
StoragesRoutes(identities, aclCheck, storages, storageStatistics, schemeDirectives, IndexingAction.noop)
StoragesRoutes(identities, aclCheck, storages, storageStatistics, schemeDirectives)
)

override def beforeAll(): Unit = {

This file was deleted.

66 changes: 0 additions & 66 deletions docs/src/main/paradox/docs/delta/api/assets/storages/listed.json

This file was deleted.
