Skip to content

Commit

Permalink
Allow a description to be specified at point of file creation (#4724)
Browse files Browse the repository at this point in the history
Co-authored-by: Oliver <[email protected]>
  • Loading branch information
shinyhappydan and olivergrabinski authored Feb 27, 2024
1 parent d2ff9c5 commit c167efe
Show file tree
Hide file tree
Showing 39 changed files with 615 additions and 232 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,8 @@ class ArchiveDownloadSpec
filename,
Some(`text/plain(UTF-8)`),
Map.empty,
None,
None,
bytes,
Digest.NotComputedDigest,
Client
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,8 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with ArchiveH
"myfile",
Some(`text/plain(UTF-8)`),
Map.empty,
None,
None,
12L,
ComputedDigest(DigestAlgorithm.default, "digest"),
Client
Expand Down
2 changes: 2 additions & 0 deletions delta/plugins/storage/src/main/resources/contexts/files.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@
"_keywords": "https://bluebrain.github.io/nexus/vocabulary/keywords",
"_location": "https://bluebrain.github.io/nexus/vocabulary/location",
"_filename": "https://bluebrain.github.io/nexus/vocabulary/filename",
"description": "http://schema.org/description",
"name": "http://schema.org/name",
"_mediaType": "https://bluebrain.github.io/nexus/vocabulary/mediaType",
"_uuid": "https://bluebrain.github.io/nexus/vocabulary/uuid",
"_storage": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files
import akka.actor.typed.ActorSystem
import akka.actor.{ActorSystem => ClassicActorSystem}
import akka.http.scaladsl.model.ContentTypes.`application/octet-stream`
import akka.http.scaladsl.model.{BodyPartEntity, ContentType, HttpEntity, Uri}
import akka.http.scaladsl.model.{BodyPartEntity, HttpEntity, Uri}
import cats.effect.{Clock, IO}
import cats.syntax.all._
import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent
Expand Down Expand Up @@ -68,7 +68,7 @@ final class Files(
// format: off
private val testStorageRef = ResourceRef.Revision(iri"http://localhost/test", 1)
private val testStorageType = StorageType.DiskStorage
private val testAttributes = FileAttributes(UUID.randomUUID(), "http://localhost", Uri.Path.Empty, "", None, Map.empty, 0, ComputedDigest(DigestAlgorithm.default, "value"), Client)
private val testAttributes = FileAttributes(UUID.randomUUID(), "http://localhost", Uri.Path.Empty, "", None, Map.empty, None, None, 0, ComputedDigest(DigestAlgorithm.default, "value"), Client)
// format: on

/**
Expand All @@ -94,8 +94,8 @@ final class Files(
iri <- generateId(pc)
_ <- test(CreateFile(iri, projectRef, testStorageRef, testStorageType, testAttributes, caller.subject, tag))
(storageRef, storage) <- fetchAndValidateActiveStorage(storageId, projectRef, pc)
metadata <- saveFileToStorage(iri, entity, storage)
res <- eval(CreateFile(iri, projectRef, storageRef, storage.tpe, metadata, caller.subject, tag))
attributes <- saveFileToStorage(iri, entity, storage)
res <- eval(CreateFile(iri, projectRef, storageRef, storage.tpe, attributes, caller.subject, tag))
} yield res
}.span("createFile")

Expand Down Expand Up @@ -147,15 +147,14 @@ final class Files(
def createLink(
storageId: Option[IdSegment],
projectRef: ProjectRef,
filename: Option[String],
mediaType: Option[ContentType],
description: FileDescription,
path: Uri.Path,
tag: Option[UserTag]
)(implicit caller: Caller): IO[FileResource] = {
for {
pc <- fetchContext.onCreate(projectRef)
iri <- generateId(pc)
res <- createLink(iri, projectRef, pc, storageId, filename, mediaType, path, tag)
res <- createLink(iri, projectRef, pc, storageId, description, path, tag)
} yield res
}.span("createLink")

Expand All @@ -180,14 +179,13 @@ final class Files(
def createLink(
id: FileId,
storageId: Option[IdSegment],
filename: Option[String],
mediaType: Option[ContentType],
description: FileDescription,
path: Uri.Path,
tag: Option[UserTag]
)(implicit caller: Caller): IO[FileResource] = {
for {
(iri, pc) <- id.expandIri(fetchContext.onCreate)
res <- createLink(iri, id.project, pc, storageId, filename, mediaType, path, tag)
res <- createLink(iri, id.project, pc, storageId, description, path, tag)
} yield res
}.span("createLink")

Expand Down Expand Up @@ -242,8 +240,7 @@ final class Files(
def updateLink(
id: FileId,
storageId: Option[IdSegment],
filename: Option[String],
mediaType: Option[ContentType],
description: FileDescription,
path: Uri.Path,
rev: Int,
tag: Option[UserTag]
Expand All @@ -252,16 +249,15 @@ final class Files(
(iri, pc) <- id.expandIri(fetchContext.onModify)
_ <- test(UpdateFile(iri, id.project, testStorageRef, testStorageType, testAttributes, rev, caller.subject, tag))
(storageRef, storage) <- fetchAndValidateActiveStorage(storageId, id.project, pc)
resolvedFilename <- IO.fromOption(filename.orElse(path.lastSegment))(InvalidFileLink(iri))
metadata <- linkFile(storage, path, resolvedFilename, iri)
metadata <- linkFile(storage, path, description.filename, iri)
res <- eval(
UpdateFile(
iri,
id.project,
storageRef,
storage.tpe,
FileAttributes.from(
FileDescription(resolvedFilename, Map.empty, mediaType),
description,
metadata
),
rev,
Expand Down Expand Up @@ -407,24 +403,25 @@ final class Files(
ref: ProjectRef,
pc: ProjectContext,
storageId: Option[IdSegment],
filename: Option[String],
mediaType: Option[ContentType],
description: FileDescription,
path: Uri.Path,
tag: Option[UserTag]
)(implicit caller: Caller): IO[FileResource] =
for {
_ <- test(CreateFile(iri, ref, testStorageRef, testStorageType, testAttributes, caller.subject, tag))
(storageRef, storage) <- fetchAndValidateActiveStorage(storageId, ref, pc)
resolvedFilename <- IO.fromOption(filename.orElse(path.lastSegment))(InvalidFileLink(iri))
fileMetadata <- linkFile(storage, path, resolvedFilename, iri)
storageMetadata <- linkFile(storage, path, description.filename, iri)
res <- eval(
CreateFile(
iri,
ref,
storageRef,
storage.tpe,
FileAttributes
.from(FileDescription(resolvedFilename, Map.empty, mediaType), fileMetadata),
.from(
description,
storageMetadata
),
caller.subject,
tag
)
Expand Down Expand Up @@ -474,10 +471,10 @@ final class Files(
storage: Storage
): IO[FileAttributes] =
for {
info <- extractFormData(iri, storage, entity)
userSuppliedMetadata = FileDescription.from(info)
fileMetadata <- saveFile(iri, storage, userSuppliedMetadata, info.contents)
} yield FileAttributes.from(userSuppliedMetadata, fileMetadata)
info <- extractFormData(iri, storage, entity)
description = FileDescription.from(info)
storageMetadata <- saveFile(iri, storage, description, info.contents)
} yield FileAttributes.from(description, storageMetadata)

private def extractFormData(iri: Iri, storage: Storage, entity: HttpEntity): IO[UploadedFileInformation] =
for {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,21 @@ import cats.syntax.all._
import ch.epfl.bluebrain.nexus.delta.kernel.error.NotARejection
import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig
import ch.epfl.bluebrain.nexus.delta.kernel.utils.FileUtils
import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidKeywords, InvalidMultipartFieldName, WrappedAkkaRejection}
import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection
import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidCustomMetadata, InvalidMultipartFieldName, WrappedAkkaRejection}
import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label
import io.circe.parser
import io.circe.generic.semiauto.deriveDecoder
import io.circe.{parser, Decoder}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

sealed trait FormDataExtractor {

/**
* Extracts the part with fieldName ''file'' from the passed ''entity'' MultiPart/FormData
* Extracts the part with fieldName ''file'' from the passed ''entity'' MultiPart/FormData. Any other part is
* discarded.
*
* @param id
* the file id
Expand All @@ -35,7 +38,7 @@ sealed trait FormDataExtractor {
* @param storageAvailableSpace
* the remaining available space on the storage
* @return
* the file description plus the entity with the file content
* the file metadata plus the entity with the file content
*/
def apply(
id: Iri,
Expand All @@ -48,6 +51,8 @@ sealed trait FormDataExtractor {
/**
  * Information extracted from the ''file'' part of a multipart/form-data upload request.
  *
  * @param filename
  *   the file name taken from the part's Content-Disposition (upstream code falls back to "file"
  *   when the part carries no filename — see the `part.filename.getOrElse("file")` call site)
  * @param keywords
  *   user-supplied keywords parsed from the JSON carried in the "metadata" disposition parameter;
  *   empty when no metadata was provided
  * @param description
  *   optional user-supplied description from the same "metadata" JSON object
  * @param name
  *   optional user-supplied display name from the same "metadata" JSON object
  * @param suppliedContentType
  *   the content type for the part (upstream code may refine the client-supplied type via
  *   filename-based detection — NOTE(review): confirm against `detectContentType`)
  * @param contents
  *   the body-part entity holding the actual file bytes
  */
case class UploadedFileInformation(
    filename: String,
    keywords: Map[Label, String],
    description: Option[String],
    name: Option[String],
    suppliedContentType: ContentType,
    contents: BodyPartEntity
)
Expand Down Expand Up @@ -136,27 +141,41 @@ object FormDataExtractor {
val filename = part.filename.getOrElse("file")
val contentType = detectContentType(filename, part.entity.contentType)

val result = for {
keywords <- extractKeywords(part)
} yield {
Some(UploadedFileInformation(filename, keywords, contentType, part.entity))
val result = extractMetadata(part).map { md =>
UploadedFileInformation(
filename,
md.keywords.getOrElse(Map.empty),
md.description,
md.name,
contentType,
part.entity
).some
}

Future.fromTry(result.toTry)
case part =>
part.entity.discardBytes().future.as(None)
}

private def extractKeywords(
/**
  * User-supplied custom metadata sent alongside a file upload as a JSON object in the
  * "metadata" Content-Disposition parameter. Every field is optional; absent fields leave
  * the corresponding file attribute unset.
  */
private case class FileCustomMetadata(
    name: Option[String],
    description: Option[String],
    keywords: Option[Map[Label, String]]
)
// Circe decoder for the "metadata" form-field JSON, derived from the case class shape.
// Decoding failures are surfaced to callers as InvalidCustomMetadata (see extractMetadata).
implicit private val fileUploadMetadataDecoder: Decoder[FileCustomMetadata] =
  deriveDecoder[FileCustomMetadata]

private def extractMetadata(
part: Multipart.FormData.BodyPart
): Either[InvalidKeywords, Map[Label, String]] = {
part.dispositionParams.get("keywords") match {
): Either[FileRejection, FileCustomMetadata] = {
val metadata = part.dispositionParams.get("metadata").filter(_.nonEmpty)
metadata match {
case Some(value) =>
parser
.parse(value)
.flatMap(_.as[Map[Label, String]])
.leftMap(err => InvalidKeywords(err.getMessage))
case None => Right(Map.empty)
.flatMap(_.as[FileCustomMetadata])
.leftMap(err => InvalidCustomMetadata(err.getMessage))
case None => Right(FileCustomMetadata(None, None, None))
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ object File {
)
val attrEncoder: Encoder.AsObject[FileAttributes] = FileAttributes.createConfiguredEncoder(
Configuration.default,
underscoreFields = true,
underscoreFieldsForMetadata = true,
removePath = true,
removeLocation = !showLocation.types.contains(storageType)
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ final case class FileAttributes(
mediaType: Option[ContentType],
// TODO: Remove default after ??? migration
keywords: Map[Label, String] = Map.empty,
description: Option[String] = None,
name: Option[String],
bytes: Long,
digest: Digest,
origin: FileAttributesOrigin
Expand All @@ -51,24 +53,28 @@ trait LimitedFileAttributes {
def filename: String
def mediaType: Option[ContentType]
def keywords: Map[Label, String]
def description: Option[String]
def name: Option[String]
def bytes: Long
def digest: Digest
def origin: FileAttributesOrigin
}

object FileAttributes {

def from(userSuppliedMetadata: FileDescription, metadata: FileStorageMetadata): FileAttributes = {
def from(description: FileDescription, storageMetadata: FileStorageMetadata): FileAttributes = {
FileAttributes(
metadata.uuid,
metadata.location,
metadata.path,
userSuppliedMetadata.filename,
userSuppliedMetadata.mediaType,
userSuppliedMetadata.keywords,
metadata.bytes,
metadata.digest,
metadata.origin
storageMetadata.uuid,
storageMetadata.location,
storageMetadata.path,
description.filename,
description.mediaType,
description.keywords,
description.description,
description.name,
storageMetadata.bytes,
storageMetadata.digest,
storageMetadata.origin
)
}

Expand Down Expand Up @@ -96,34 +102,46 @@ object FileAttributes {

def createConfiguredEncoder(
originalConfig: Configuration,
underscoreFields: Boolean = false,
underscoreFieldsForMetadata: Boolean = false,
removePath: Boolean = false,
removeLocation: Boolean = false
)(implicit @nowarn("cat=unused") digestEncoder: Encoder.AsObject[Digest]): Encoder.AsObject[FileAttributes] = {
@nowarn("cat=unused")
implicit val config: Configuration = underscoreFields match {
case true => withUnderscoreFields(originalConfig)
implicit val config: Configuration = underscoreFieldsForMetadata match {
case true => withUnderscoreMetadataFields(originalConfig)
case false => originalConfig
}

object Key {
def unapply(key: String): Option[String] = {
if (underscoreFields && key.startsWith("_")) Some(key.drop(1))
if (underscoreFieldsForMetadata && key.startsWith("_")) Some(key.drop(1))
else Some(key)
}
}

deriveConfiguredEncoder[FileAttributes].mapJsonObject { json =>
json.filter {
case (Key("location"), _) => !removeLocation
case (Key("path"), _) => !removePath
case (Key("keywords"), value) => !value.isEmpty()
case _ => true
case (Key("location"), _) => !removeLocation
case (Key("path"), _) => !removePath
case (Key("keywords"), value) => !value.isEmpty()
case (Key("description"), value) => !value.isNull
case (Key("name"), value) => !value.isNull
case _ => true
}
}
}

private def withUnderscoreFields(configuration: Configuration): Configuration = {
configuration.copy(transformMemberNames = key => s"_$key")
object NonMetadataKey {
private val keys = Set("description", "name")
def unapply(key: String): Option[String] = {
Option.when(keys.contains(key))(key)
}
}

private def withUnderscoreMetadataFields(configuration: Configuration): Configuration = {
configuration.copy(transformMemberNames = {
case NonMetadataKey(key) => key
case metadataKey => s"_$metadataKey"
})
}
}
Loading

0 comments on commit c167efe

Please sign in to comment.