From c167efe645981c77b4798554f8562936fb2435c6 Mon Sep 17 00:00:00 2001 From: Daniel Bell Date: Tue, 27 Feb 2024 13:55:47 +0000 Subject: [PATCH] Allow a description to be specified at point of file creation (#4724) Co-authored-by: Oliver <20188437+olivergrabinski@users.noreply.github.com> --- .../plugins/archive/ArchiveDownloadSpec.scala | 2 + .../plugins/archive/ArchiveRoutesSpec.scala | 2 + .../src/main/resources/contexts/files.json | 2 + .../delta/plugins/storage/files/Files.scala | 45 +++++----- .../storage/files/FormDataExtractor.scala | 47 +++++++---- .../plugins/storage/files/model/File.scala | 2 +- .../storage/files/model/FileAttributes.scala | 58 ++++++++----- .../storage/files/model/FileDescription.scala | 21 +++-- .../storage/files/model/FileEvent.scala | 2 +- .../storage/files/model/FileRejection.scala | 20 +---- .../storage/files/routes/FilesRoutes.scala | 58 +++++++++---- .../disk/DiskStorageCopyFiles.scala | 2 + .../remote/RemoteDiskStorageCopyFiles.scala | 18 ++-- ...=> file-created-tagged-with-metadata.json} | 2 + ...s.json => file-created-with-metadata.json} | 2 + ...rds.json => file-state-with-metadata.json} | 2 + .../files/errors/file-link-no-filename.json | 6 ++ ...=> file-created-tagged-with-metadata.json} | 2 + ...s.json => file-created-with-metadata.json} | 2 + .../FileAttributesUpdateStreamSuite.scala | 2 + .../plugins/storage/files/FileFixtures.scala | 6 +- .../plugins/storage/files/FilesSpec.scala | 50 ++++++----- .../plugins/storage/files/FilesStmSpec.scala | 2 + .../storage/files/FormDataExtractorSpec.scala | 81 ++++++++++++------ .../storage/files/batch/BatchCopySuite.scala | 17 ++-- .../storage/files/generators/FileGen.scala | 16 +++- .../files/model/FileSerializationSuite.scala | 47 ++++++----- .../files/routes/FilesRoutesSpec.scala | 76 +++++++++++------ .../sdk/directives/DeltaDirectives.scala | 10 +++ .../sdk/marshalling/RdfRejectionHandler.scala | 4 +- .../testkit/scalatest/FileMatchers.scala | 40 +++++++++ 
.../testkit/scalatest/ResourceMatchers.scala | 1 + .../main/paradox/docs/delta/api/files-api.md | 22 +++-- .../docs/releases/v1.10-release-notes.md | 9 ++ .../bluebrain/nexus/tests/HttpClient.scala | 15 +++- .../nexus/tests/kg/files/BatchCopySpec.scala | 32 ++++++-- .../nexus/tests/kg/files/FilesDsl.scala | 10 ++- .../nexus/tests/kg/files/FilesSpec.scala | 82 ++++++++++++++++++- .../tests/kg/files/model/FileInput.scala | 30 +++++-- 39 files changed, 615 insertions(+), 232 deletions(-) rename delta/plugins/storage/src/test/resources/files/database/{file-created-tagged-with-keywords.json => file-created-tagged-with-metadata.json} (92%) rename delta/plugins/storage/src/test/resources/files/database/{file-created-with-keywords.json => file-created-with-metadata.json} (92%) rename delta/plugins/storage/src/test/resources/files/database/{file-state-with-keywords.json => file-state-with-metadata.json} (94%) create mode 100644 delta/plugins/storage/src/test/resources/files/errors/file-link-no-filename.json rename delta/plugins/storage/src/test/resources/files/sse/{file-created-tagged-with-keywords.json => file-created-tagged-with-metadata.json} (94%) rename delta/plugins/storage/src/test/resources/files/sse/{file-created-with-keywords.json => file-created-with-metadata.json} (94%) create mode 100644 delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/FileMatchers.scala diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala index 4e2754e5df..1a3fc7a729 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveDownloadSpec.scala @@ -85,6 +85,8 @@ class ArchiveDownloadSpec filename, Some(`text/plain(UTF-8)`), Map.empty, + 
None, + None, bytes, Digest.NotComputedDigest, Client diff --git a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala index cb5441084c..9bbc3f8776 100644 --- a/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala +++ b/delta/plugins/archive/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/archive/ArchiveRoutesSpec.scala @@ -101,6 +101,8 @@ class ArchiveRoutesSpec extends BaseRouteSpec with StorageFixtures with ArchiveH "myfile", Some(`text/plain(UTF-8)`), Map.empty, + None, + None, 12L, ComputedDigest(DigestAlgorithm.default, "digest"), Client diff --git a/delta/plugins/storage/src/main/resources/contexts/files.json b/delta/plugins/storage/src/main/resources/contexts/files.json index c97b6d02cc..1146b67c00 100644 --- a/delta/plugins/storage/src/main/resources/contexts/files.json +++ b/delta/plugins/storage/src/main/resources/contexts/files.json @@ -22,6 +22,8 @@ "_keywords": "https://bluebrain.github.io/nexus/vocabulary/keywords", "_location": "https://bluebrain.github.io/nexus/vocabulary/location", "_filename": "https://bluebrain.github.io/nexus/vocabulary/filename", + "description": "http://schema.org/description", + "name": "http://schema.org/name", "_mediaType": "https://bluebrain.github.io/nexus/vocabulary/mediaType", "_uuid": "https://bluebrain.github.io/nexus/vocabulary/uuid", "_storage": { diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala index adc9a0761f..7775fc3714 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala +++ 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala @@ -3,7 +3,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files import akka.actor.typed.ActorSystem import akka.actor.{ActorSystem => ClassicActorSystem} import akka.http.scaladsl.model.ContentTypes.`application/octet-stream` -import akka.http.scaladsl.model.{BodyPartEntity, ContentType, HttpEntity, Uri} +import akka.http.scaladsl.model.{BodyPartEntity, HttpEntity, Uri} import cats.effect.{Clock, IO} import cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent @@ -68,7 +68,7 @@ final class Files( // format: off private val testStorageRef = ResourceRef.Revision(iri"http://localhost/test", 1) private val testStorageType = StorageType.DiskStorage - private val testAttributes = FileAttributes(UUID.randomUUID(), "http://localhost", Uri.Path.Empty, "", None, Map.empty, 0, ComputedDigest(DigestAlgorithm.default, "value"), Client) + private val testAttributes = FileAttributes(UUID.randomUUID(), "http://localhost", Uri.Path.Empty, "", None, Map.empty, None, None, 0, ComputedDigest(DigestAlgorithm.default, "value"), Client) // format: on /** @@ -94,8 +94,8 @@ final class Files( iri <- generateId(pc) _ <- test(CreateFile(iri, projectRef, testStorageRef, testStorageType, testAttributes, caller.subject, tag)) (storageRef, storage) <- fetchAndValidateActiveStorage(storageId, projectRef, pc) - metadata <- saveFileToStorage(iri, entity, storage) - res <- eval(CreateFile(iri, projectRef, storageRef, storage.tpe, metadata, caller.subject, tag)) + attributes <- saveFileToStorage(iri, entity, storage) + res <- eval(CreateFile(iri, projectRef, storageRef, storage.tpe, attributes, caller.subject, tag)) } yield res }.span("createFile") @@ -147,15 +147,14 @@ final class Files( def createLink( storageId: Option[IdSegment], projectRef: ProjectRef, - filename: Option[String], - mediaType: Option[ContentType], + description: FileDescription, 
path: Uri.Path, tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = { for { pc <- fetchContext.onCreate(projectRef) iri <- generateId(pc) - res <- createLink(iri, projectRef, pc, storageId, filename, mediaType, path, tag) + res <- createLink(iri, projectRef, pc, storageId, description, path, tag) } yield res }.span("createLink") @@ -180,14 +179,13 @@ final class Files( def createLink( id: FileId, storageId: Option[IdSegment], - filename: Option[String], - mediaType: Option[ContentType], + description: FileDescription, path: Uri.Path, tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = { for { (iri, pc) <- id.expandIri(fetchContext.onCreate) - res <- createLink(iri, id.project, pc, storageId, filename, mediaType, path, tag) + res <- createLink(iri, id.project, pc, storageId, description, path, tag) } yield res }.span("createLink") @@ -242,8 +240,7 @@ final class Files( def updateLink( id: FileId, storageId: Option[IdSegment], - filename: Option[String], - mediaType: Option[ContentType], + description: FileDescription, path: Uri.Path, rev: Int, tag: Option[UserTag] @@ -252,8 +249,7 @@ final class Files( (iri, pc) <- id.expandIri(fetchContext.onModify) _ <- test(UpdateFile(iri, id.project, testStorageRef, testStorageType, testAttributes, rev, caller.subject, tag)) (storageRef, storage) <- fetchAndValidateActiveStorage(storageId, id.project, pc) - resolvedFilename <- IO.fromOption(filename.orElse(path.lastSegment))(InvalidFileLink(iri)) - metadata <- linkFile(storage, path, resolvedFilename, iri) + metadata <- linkFile(storage, path, description.filename, iri) res <- eval( UpdateFile( iri, @@ -261,7 +257,7 @@ final class Files( storageRef, storage.tpe, FileAttributes.from( - FileDescription(resolvedFilename, Map.empty, mediaType), + description, metadata ), rev, @@ -407,16 +403,14 @@ final class Files( ref: ProjectRef, pc: ProjectContext, storageId: Option[IdSegment], - filename: Option[String], - mediaType: Option[ContentType], + 
description: FileDescription, path: Uri.Path, tag: Option[UserTag] )(implicit caller: Caller): IO[FileResource] = for { _ <- test(CreateFile(iri, ref, testStorageRef, testStorageType, testAttributes, caller.subject, tag)) (storageRef, storage) <- fetchAndValidateActiveStorage(storageId, ref, pc) - resolvedFilename <- IO.fromOption(filename.orElse(path.lastSegment))(InvalidFileLink(iri)) - fileMetadata <- linkFile(storage, path, resolvedFilename, iri) + storageMetadata <- linkFile(storage, path, description.filename, iri) res <- eval( CreateFile( iri, @@ -424,7 +418,10 @@ final class Files( storageRef, storage.tpe, FileAttributes - .from(FileDescription(resolvedFilename, Map.empty, mediaType), fileMetadata), + .from( + description, + storageMetadata + ), caller.subject, tag ) @@ -474,10 +471,10 @@ final class Files( storage: Storage ): IO[FileAttributes] = for { - info <- extractFormData(iri, storage, entity) - userSuppliedMetadata = FileDescription.from(info) - fileMetadata <- saveFile(iri, storage, userSuppliedMetadata, info.contents) - } yield FileAttributes.from(userSuppliedMetadata, fileMetadata) + info <- extractFormData(iri, storage, entity) + description = FileDescription.from(info) + storageMetadata <- saveFile(iri, storage, description, info.contents) + } yield FileAttributes.from(description, storageMetadata) private def extractFormData(iri: Iri, storage: Storage, entity: HttpEntity): IO[UploadedFileInformation] = for { diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala index d13a304e17..a08930867e 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala @@ -13,10 +13,12 @@ import 
cats.syntax.all._ import ch.epfl.bluebrain.nexus.delta.kernel.error.NotARejection import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.FileUtils -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidKeywords, InvalidMultipartFieldName, WrappedAkkaRejection} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidCustomMetadata, InvalidMultipartFieldName, WrappedAkkaRejection} import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label -import io.circe.parser +import io.circe.generic.semiauto.deriveDecoder +import io.circe.{parser, Decoder} import scala.concurrent.{ExecutionContext, Future} import scala.util.Try @@ -24,7 +26,8 @@ import scala.util.Try sealed trait FormDataExtractor { /** - * Extracts the part with fieldName ''file'' from the passed ''entity'' MultiPart/FormData + * Extracts the part with fieldName ''file'' from the passed ''entity'' MultiPart/FormData. Any other part is + * discarded. * * @param id * the file id @@ -35,7 +38,7 @@ sealed trait FormDataExtractor { * @param storageAvailableSpace * the remaining available space on the storage * @return - * the file description plus the entity with the file content + * the file metadata. 
plus the entity with the file content */ def apply( id: Iri, @@ -48,6 +51,8 @@ sealed trait FormDataExtractor { case class UploadedFileInformation( filename: String, keywords: Map[Label, String], + description: Option[String], + name: Option[String], suppliedContentType: ContentType, contents: BodyPartEntity ) @@ -136,10 +141,15 @@ object FormDataExtractor { val filename = part.filename.getOrElse("file") val contentType = detectContentType(filename, part.entity.contentType) - val result = for { - keywords <- extractKeywords(part) - } yield { - Some(UploadedFileInformation(filename, keywords, contentType, part.entity)) + val result = extractMetadata(part).map { md => + UploadedFileInformation( + filename, + md.keywords.getOrElse(Map.empty), + md.description, + md.name, + contentType, + part.entity + ).some } Future.fromTry(result.toTry) @@ -147,16 +157,25 @@ object FormDataExtractor { part.entity.discardBytes().future.as(None) } - private def extractKeywords( + private case class FileCustomMetadata( + name: Option[String], + description: Option[String], + keywords: Option[Map[Label, String]] + ) + implicit private val fileUploadMetadataDecoder: Decoder[FileCustomMetadata] = + deriveDecoder[FileCustomMetadata] + + private def extractMetadata( part: Multipart.FormData.BodyPart - ): Either[InvalidKeywords, Map[Label, String]] = { - part.dispositionParams.get("keywords") match { + ): Either[FileRejection, FileCustomMetadata] = { + val metadata = part.dispositionParams.get("metadata").filter(_.nonEmpty) + metadata match { case Some(value) => parser .parse(value) - .flatMap(_.as[Map[Label, String]]) - .leftMap(err => InvalidKeywords(err.getMessage)) - case None => Right(Map.empty) + .flatMap(_.as[FileCustomMetadata]) + .leftMap(err => InvalidCustomMetadata(err.getMessage)) + case None => Right(FileCustomMetadata(None, None, None)) } } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/File.scala 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/File.scala index ac0b324c72..a145514fb8 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/File.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/File.scala @@ -59,7 +59,7 @@ object File { ) val attrEncoder: Encoder.AsObject[FileAttributes] = FileAttributes.createConfiguredEncoder( Configuration.default, - underscoreFields = true, + underscoreFieldsForMetadata = true, removePath = true, removeLocation = !showLocation.types.contains(storageType) ) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala index 41b16cbf16..9aeba03e56 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileAttributes.scala @@ -40,6 +40,8 @@ final case class FileAttributes( mediaType: Option[ContentType], // TODO: Remove default after ??? 
migration keywords: Map[Label, String] = Map.empty, + description: Option[String] = None, + name: Option[String], bytes: Long, digest: Digest, origin: FileAttributesOrigin @@ -51,6 +53,8 @@ trait LimitedFileAttributes { def filename: String def mediaType: Option[ContentType] def keywords: Map[Label, String] + def description: Option[String] + def name: Option[String] def bytes: Long def digest: Digest def origin: FileAttributesOrigin @@ -58,17 +62,19 @@ trait LimitedFileAttributes { object FileAttributes { - def from(userSuppliedMetadata: FileDescription, metadata: FileStorageMetadata): FileAttributes = { + def from(description: FileDescription, storageMetadata: FileStorageMetadata): FileAttributes = { FileAttributes( - metadata.uuid, - metadata.location, - metadata.path, - userSuppliedMetadata.filename, - userSuppliedMetadata.mediaType, - userSuppliedMetadata.keywords, - metadata.bytes, - metadata.digest, - metadata.origin + storageMetadata.uuid, + storageMetadata.location, + storageMetadata.path, + description.filename, + description.mediaType, + description.keywords, + description.description, + description.name, + storageMetadata.bytes, + storageMetadata.digest, + storageMetadata.origin ) } @@ -96,34 +102,46 @@ object FileAttributes { def createConfiguredEncoder( originalConfig: Configuration, - underscoreFields: Boolean = false, + underscoreFieldsForMetadata: Boolean = false, removePath: Boolean = false, removeLocation: Boolean = false )(implicit @nowarn("cat=unused") digestEncoder: Encoder.AsObject[Digest]): Encoder.AsObject[FileAttributes] = { @nowarn("cat=unused") - implicit val config: Configuration = underscoreFields match { - case true => withUnderscoreFields(originalConfig) + implicit val config: Configuration = underscoreFieldsForMetadata match { + case true => withUnderscoreMetadataFields(originalConfig) case false => originalConfig } object Key { def unapply(key: String): Option[String] = { - if (underscoreFields && key.startsWith("_")) 
Some(key.drop(1)) + if (underscoreFieldsForMetadata && key.startsWith("_")) Some(key.drop(1)) else Some(key) } } deriveConfiguredEncoder[FileAttributes].mapJsonObject { json => json.filter { - case (Key("location"), _) => !removeLocation - case (Key("path"), _) => !removePath - case (Key("keywords"), value) => !value.isEmpty() - case _ => true + case (Key("location"), _) => !removeLocation + case (Key("path"), _) => !removePath + case (Key("keywords"), value) => !value.isEmpty() + case (Key("description"), value) => !value.isNull + case (Key("name"), value) => !value.isNull + case _ => true } } } - private def withUnderscoreFields(configuration: Configuration): Configuration = { - configuration.copy(transformMemberNames = key => s"_$key") + object NonMetadataKey { + private val keys = Set("description", "name") + def unapply(key: String): Option[String] = { + Option.when(keys.contains(key))(key) + } + } + + private def withUnderscoreMetadataFields(configuration: Configuration): Configuration = { + configuration.copy(transformMemberNames = { + case NonMetadataKey(key) => key + case metadataKey => s"_$metadataKey" + }) } } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileDescription.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileDescription.scala index 75854216d8..766015d807 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileDescription.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileDescription.scala @@ -4,7 +4,13 @@ import akka.http.scaladsl.model.ContentType import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.UploadedFileInformation import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label -case class FileDescription(filename: String, keywords: Map[Label, String], mediaType: Option[ContentType]) +case class 
FileDescription( + filename: String, + keywords: Map[Label, String], + mediaType: Option[ContentType], + description: Option[String], + name: Option[String] +) object FileDescription { def from(file: File): FileDescription = { @@ -12,13 +18,16 @@ object FileDescription { } def from(fileAttributes: FileAttributes): FileDescription = { - FileDescription(fileAttributes.filename, fileAttributes.keywords, fileAttributes.mediaType) + FileDescription( + fileAttributes.filename, + fileAttributes.keywords, + fileAttributes.mediaType, + fileAttributes.description, + fileAttributes.name + ) } def from(info: UploadedFileInformation): FileDescription = { - FileDescription(info.filename, info.keywords, info.suppliedContentType) + FileDescription(info.filename, info.keywords, Some(info.suppliedContentType), info.description, info.name) } - - def apply(filename: String, keywords: Map[Label, String], mediaType: ContentType): FileDescription = - FileDescription(filename, keywords, Some(mediaType)) } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileEvent.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileEvent.scala index 6f217cfcca..0377d1ae05 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileEvent.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileEvent.scala @@ -370,7 +370,7 @@ object FileEvent { implicit val attributesEncoder: Encoder[FileAttributes] = FileAttributes.createConfiguredEncoder( implicitly[Configuration], - underscoreFields = true, + underscoreFieldsForMetadata = true, removePath = true, removeLocation = !showLocation.types.contains(storageType) ) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala index 897f8fb0f2..50d51cb781 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala @@ -134,18 +134,6 @@ object FileRejection { */ final case class FileIsNotDeprecated(id: Iri) extends FileRejection(s"File '$id' is not deprecated.") - /** - * Rejection returned when attempting to link a file without providing a filename or a path that ends with a - * filename. - * - * @param id - * the file identifier - */ - final case class InvalidFileLink(id: Iri) - extends FileRejection( - s"Linking a file '$id' cannot be performed without a 'filename' or a 'path' that does not end with a filename." - ) - /** * Rejection returned when attempting to create/update a file with a Multipart/Form-Data payload that does not * contain a ''file'' fieldName @@ -154,11 +142,11 @@ object FileRejection { extends FileRejection(s"File '$id' payload a Multipart/Form-Data without a 'file' part.") /** - * Rejection returned when attempting to create/update a file with a Multipart/Form-Data payload that has keywords - * which cannot be parsed + * Rejection returned when attempting to create/update a file with a Multipart/Form-Data payload that contains + * invalid metadata */ - final case class InvalidKeywords(err: String) - extends FileRejection(s"File payload contained keywords which could not be parsed: $err") + final case class InvalidCustomMetadata(err: String) + extends FileRejection(s"File payload contained metadata which could not be parsed: $err") /** * Rejection returned when attempting to create/update a file with a Multipart/Form-Data payload that does not diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala 
b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala index 4ce0e138e1..b10bbc8486 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutes.scala @@ -1,5 +1,6 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes +import akka.http.scaladsl.model.MediaTypes.`multipart/form-data` import akka.http.scaladsl.model.StatusCodes.Created import akka.http.scaladsl.model.Uri.Path import akka.http.scaladsl.model.headers.Accept @@ -7,9 +8,8 @@ import akka.http.scaladsl.model.{ContentType, MediaRange} import akka.http.scaladsl.server._ import cats.effect.IO import cats.syntax.all._ - import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{File, FileId, FileRejection} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{File, FileDescription, FileId, FileRejection} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.permissions.{read => Read, write => Write} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes.FilesRoutes._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.{schemas, FileResource, Files} @@ -26,9 +26,9 @@ import ch.epfl.bluebrain.nexus.delta.sdk.fusion.FusionConfig import ch.epfl.bluebrain.nexus.delta.sdk.identities.Identities import ch.epfl.bluebrain.nexus.delta.sdk.identities.model.Caller import ch.epfl.bluebrain.nexus.delta.sdk.implicits._ - import ch.epfl.bluebrain.nexus.delta.sdk.model.routes.Tag import ch.epfl.bluebrain.nexus.delta.sdk.model.{BaseUri, IdSegment} +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag import io.circe.Decoder import io.circe.generic.extras.Configuration @@ -85,17 +85,17 
@@ final class FilesRoutes( operationName(s"$prefixSegment/files/{org}/{project}") { concat( // Link a file without id segment - entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => + entity(as[LinkFile]) { case LinkFile(path, description) => emit( Created, files - .createLink(storage, project, filename, mediaType, path, tag) + .createLink(storage, project, description, path, tag) .index(mode) .attemptNarrow[FileRejection] ) }, // Create a file without id segment - extractRequestEntity { entity => + (contentType(`multipart/form-data`) & extractRequestEntity) { entity => emit( Created, files.create(storage, project, entity, tag).index(mode).attemptNarrow[FileRejection] @@ -116,10 +116,10 @@ final class FilesRoutes( case (rev, storage, tag) => concat( // Update a Link - entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => + entity(as[LinkFile]) { case LinkFile(path, description) => emit( files - .updateLink(fileId, storage, filename, mediaType, path, rev, tag) + .updateLink(fileId, storage, description, path, rev, tag) .index(mode) .attemptNarrow[FileRejection] ) @@ -138,11 +138,11 @@ final class FilesRoutes( parameters("storage".as[IdSegment].?, "tag".as[UserTag].?) 
{ case (storage, tag) => concat( // Link a file with id segment - entity(as[LinkFile]) { case LinkFile(filename, mediaType, path) => + entity(as[LinkFile]) { case LinkFile(path, description) => emit( Created, files - .createLink(fileId, storage, filename, mediaType, path, tag) + .createLink(fileId, storage, description, path, tag) .index(mode) .attemptNarrow[FileRejection] ) @@ -275,11 +275,41 @@ object FilesRoutes { fusionConfig: FusionConfig ): Route = new FilesRoutes(identities, aclCheck, files, schemeDirectives, index).routes - final case class LinkFile(filename: Option[String], mediaType: Option[ContentType], path: Path) + final case class LinkFileRequest( + path: Path, + filename: Option[String], + mediaType: Option[ContentType], + keywords: Map[Label, String] = Map.empty, + description: Option[String], + name: Option[String] + ) + final case class LinkFile(path: Path, fileDescription: FileDescription) object LinkFile { - import ch.epfl.bluebrain.nexus.delta.rdf.instances._ @nowarn("cat=unused") - implicit private val config: Configuration = Configuration.default.withStrictDecoding - implicit val linkFileDecoder: Decoder[LinkFile] = deriveConfiguredDecoder[LinkFile] + implicit private val config: Configuration = Configuration.default.withStrictDecoding.withDefaults + implicit val linkFileDecoder: Decoder[LinkFile] = { + deriveConfiguredDecoder[LinkFileRequest] + .flatMap { case LinkFileRequest(path, filename, mediaType, keywords, description, name) => + filename.orElse(path.lastSegment) match { + case Some(derivedFilename) => + Decoder.const( + LinkFile( + path, + FileDescription( + derivedFilename, + keywords, + mediaType, + description, + name + ) + ) + ) + case None => + Decoder.failedWithMessage( + "Linking a file cannot be performed without a 'filename' or a 'path' that does not end with a filename." 
+ ) + } + } + } } } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageCopyFiles.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageCopyFiles.scala index fdde33747d..84e2e79fe6 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageCopyFiles.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageCopyFiles.scala @@ -50,6 +50,8 @@ object DiskStorageCopyFiles { location = Uri(destPath.toUri.toString), path = Uri.Path(destRelativePath.toString), filename = cd.sourceAttributes.filename, + description = cd.sourceAttributes.description, + name = cd.sourceAttributes.name, mediaType = cd.sourceAttributes.mediaType, keywords = cd.sourceAttributes.keywords, bytes = cd.sourceAttributes.bytes, diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageCopyFiles.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageCopyFiles.scala index 7dfeb897d8..4fc3e52ade 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageCopyFiles.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageCopyFiles.scala @@ -40,18 +40,20 @@ object RemoteDiskStorageCopyFiles { relativeDestPath: Path, absoluteDestPath: Uri ): FileAttributes = { - val sourceAttr = cd.sourceMetadata - val bob = cd.sourceUserSuppliedMetadata + val sourceFileMetadata = cd.sourceMetadata + val sourceFileDescription = cd.sourceUserSuppliedMetadata FileAttributes( uuid = cd.destUuid, location = 
absoluteDestPath, path = relativeDestPath, - filename = bob.filename, - mediaType = bob.mediaType, - keywords = bob.keywords, - bytes = sourceAttr.bytes, - digest = sourceAttr.digest, - origin = sourceAttr.origin + filename = sourceFileDescription.filename, + mediaType = sourceFileDescription.mediaType, + keywords = sourceFileDescription.keywords, + description = sourceFileDescription.description, + name = sourceFileDescription.name, + bytes = sourceFileMetadata.bytes, + digest = sourceFileMetadata.digest, + origin = sourceFileMetadata.origin ) } diff --git a/delta/plugins/storage/src/test/resources/files/database/file-created-tagged-with-keywords.json b/delta/plugins/storage/src/test/resources/files/database/file-created-tagged-with-metadata.json similarity index 92% rename from delta/plugins/storage/src/test/resources/files/database/file-created-tagged-with-keywords.json rename to delta/plugins/storage/src/test/resources/files/database/file-created-tagged-with-metadata.json index 0a71317365..895a4b31b5 100644 --- a/delta/plugins/storage/src/test/resources/files/database/file-created-tagged-with-keywords.json +++ b/delta/plugins/storage/src/test/resources/files/database/file-created-tagged-with-metadata.json @@ -15,6 +15,8 @@ "digest" : { "@type" : "NotComputedDigest" }, + "description" : "A description", + "name" : "A name", "keywords": { "key": "value" } diff --git a/delta/plugins/storage/src/test/resources/files/database/file-created-with-keywords.json b/delta/plugins/storage/src/test/resources/files/database/file-created-with-metadata.json similarity index 92% rename from delta/plugins/storage/src/test/resources/files/database/file-created-with-keywords.json rename to delta/plugins/storage/src/test/resources/files/database/file-created-with-metadata.json index e919fe4f4c..a039acc69f 100644 --- a/delta/plugins/storage/src/test/resources/files/database/file-created-with-keywords.json +++ 
b/delta/plugins/storage/src/test/resources/files/database/file-created-with-metadata.json @@ -13,6 +13,8 @@ "keywords": { "key": "value" }, + "description" : "A description", + "name" : "A name", "bytes" : 12, "digest" : { "@type" : "NotComputedDigest" diff --git a/delta/plugins/storage/src/test/resources/files/database/file-state-with-keywords.json b/delta/plugins/storage/src/test/resources/files/database/file-state-with-metadata.json similarity index 94% rename from delta/plugins/storage/src/test/resources/files/database/file-state-with-keywords.json rename to delta/plugins/storage/src/test/resources/files/database/file-state-with-metadata.json index 7794a5fa87..43c795d43f 100644 --- a/delta/plugins/storage/src/test/resources/files/database/file-state-with-keywords.json +++ b/delta/plugins/storage/src/test/resources/files/database/file-state-with-metadata.json @@ -19,6 +19,8 @@ "keywords": { "key": "value" }, + "description" : "A description", + "name" : "A name", "origin": "Client", "path": "file.txt", "uuid": "8049ba90-7cc6-4de5-93a1-802c04200dcc" diff --git a/delta/plugins/storage/src/test/resources/files/errors/file-link-no-filename.json b/delta/plugins/storage/src/test/resources/files/errors/file-link-no-filename.json new file mode 100644 index 0000000000..839268a033 --- /dev/null +++ b/delta/plugins/storage/src/test/resources/files/errors/file-link-no-filename.json @@ -0,0 +1,6 @@ +{ + "@context" : "https://bluebrain.github.io/nexus/contexts/error.json", + "@type" : "MalformedRequestContentRejection", + "reason" : "The request content was malformed.", + "details" : "DecodingFailure at : Linking a file cannot be performed without a 'filename' or a 'path' that does not end with a filename." 
+} diff --git a/delta/plugins/storage/src/test/resources/files/sse/file-created-tagged-with-keywords.json b/delta/plugins/storage/src/test/resources/files/sse/file-created-tagged-with-metadata.json similarity index 94% rename from delta/plugins/storage/src/test/resources/files/sse/file-created-tagged-with-keywords.json rename to delta/plugins/storage/src/test/resources/files/sse/file-created-tagged-with-metadata.json index fa8e93e55e..a7c39814a1 100644 --- a/delta/plugins/storage/src/test/resources/files/sse/file-created-tagged-with-keywords.json +++ b/delta/plugins/storage/src/test/resources/files/sse/file-created-tagged-with-metadata.json @@ -14,6 +14,8 @@ "_keywords": { "key": "value" }, + "description" : "A description", + "name" : "A name", "_mediaType": "text/plain; charset=UTF-8", "_origin": "Client", "_uuid": "8049ba90-7cc6-4de5-93a1-802c04200dcc" diff --git a/delta/plugins/storage/src/test/resources/files/sse/file-created-with-keywords.json b/delta/plugins/storage/src/test/resources/files/sse/file-created-with-metadata.json similarity index 94% rename from delta/plugins/storage/src/test/resources/files/sse/file-created-with-keywords.json rename to delta/plugins/storage/src/test/resources/files/sse/file-created-with-metadata.json index e42fd00f16..b70f50f922 100644 --- a/delta/plugins/storage/src/test/resources/files/sse/file-created-with-keywords.json +++ b/delta/plugins/storage/src/test/resources/files/sse/file-created-with-metadata.json @@ -13,6 +13,8 @@ "_keywords": { "key": "value" }, + "description" : "A description", + "name" : "A name", "_mediaType": "text/plain; charset=UTF-8", "_origin": "Client", "_uuid": "8049ba90-7cc6-4de5-93a1-802c04200dcc" diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileAttributesUpdateStreamSuite.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileAttributesUpdateStreamSuite.scala index 2f76118936..db783221c1 100644 --- 
a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileAttributesUpdateStreamSuite.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileAttributesUpdateStreamSuite.scala @@ -36,6 +36,8 @@ class FileAttributesUpdateStreamSuite extends NexusSuite with StorageFixtures { filename = "myfile.txt", mediaType = mediaType, keywords = Map(Label.unsafe("key") -> "value"), + None, + None, bytes = 10, NotComputedDigest, Client diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala index 2b686fd380..32bd3e994d 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FileFixtures.scala @@ -51,8 +51,10 @@ trait FileFixtures extends Generators { size: Long = 12, id: UUID = uuid, projRef: ProjectRef = projectRef, - keywords: Map[Label, String] = Map.empty - ): FileAttributes = FileGen.attributes(filename, size, id, projRef, path, keywords) + keywords: Map[Label, String] = Map.empty, + description: Option[String] = None, + name: Option[String] = None + ): FileAttributes = FileGen.attributes(filename, size, id, projRef, path, keywords, description, name) def genKeywords(): Map[Label, String] = Map(Label.unsafe(genString()) -> genString()) diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala index 6651605799..696f63250a 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala +++ 
b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesSpec.scala @@ -6,13 +6,14 @@ import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` import akka.http.scaladsl.model.Uri import akka.testkit.TestKit import cats.effect.IO +import akka.http.scaladsl.model.ContentType import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig import ch.epfl.bluebrain.nexus.delta.plugins.storage.RemoteContextResolutionFixture import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.generators.FileGen import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.Digest.NotComputedDigest import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin.Storage import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection._ -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{FileAttributes, FileId} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{FileAttributes, FileDescription, FileId} import ch.epfl.bluebrain.nexus.delta.plugins.storage.remotestorage.RemoteStorageClientFixtures import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotFound import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageType.{RemoteDiskStorage => RemoteStorageType} @@ -60,6 +61,14 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) private val bob = User("Bob", realm) private val alice = User("Alice", realm) + def description(filename: String): FileDescription = { + FileDescription(filename, Map.empty, None, None, None) + } + + def description(filename: String, contentType: ContentType): FileDescription = { + FileDescription(filename, Map.empty, Some(contentType), None, None) + } + "The Files operations bundle" when { implicit val typedSystem: typed.ActorSystem[Nothing] = system.toTyped implicit val caller: Caller = Caller(bob, Set(bob, Group("mygroup", realm), Authenticated(realm))) @@ -254,7 
+263,7 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) "reject if no write permissions" in { files - .createLink(fileId("file2"), Some(remoteId), None, None, Uri.Path.Empty, None) + .createLink(fileId("file2"), Some(remoteId), description("myfile.txt"), Uri.Path.Empty, None) .rejectedWith[AuthorizationFailed] } @@ -272,7 +281,7 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) mkResource(file2, projectRef, remoteRev, attr, storageType = RemoteStorageType, tags = Tags(tag -> 1)) val result = files - .createLink(fileId("file2"), Some(remoteId), Some("myfile.txt"), None, path, Some(tag)) + .createLink(fileId("file2"), Some(remoteId), description("myfile.txt"), path, Some(tag)) .accepted val fileByTag = files.fetch(FileId("file2", tag, projectRef)).accepted @@ -280,34 +289,30 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) fileByTag.value.tags.tags should contain(tag) } - "reject if no filename" in { - files - .createLink(fileId("file3"), Some(remoteId), None, None, Uri.Path("a/b/"), None) - .rejectedWith[InvalidFileLink] - } - "reject if file id already exists" in { files - .createLink(fileId("file2"), Some(remoteId), None, None, Uri.Path.Empty, None) + .createLink(fileId("file2"), Some(remoteId), description("myfile.txt"), Uri.Path.Empty, None) .rejected shouldEqual ResourceAlreadyExists(file2, projectRef) } "reject if storage does not exist" in { files - .createLink(fileId("file3"), Some(storage), None, None, Uri.Path.Empty, None) + .createLink(fileId("file3"), Some(storage), description("myfile.txt"), Uri.Path.Empty, None) .rejected shouldEqual WrappedStorageRejection(StorageNotFound(storageIri, projectRef)) } "reject if project does not exist" in { val projectRef = ProjectRef(org, Label.unsafe("other")) - files.createLink(None, projectRef, None, None, Uri.Path.Empty, None).rejectedWith[ProjectNotFound] + files + .createLink(None, projectRef, description("myfile.txt"), Uri.Path.Empty, None) + .rejectedWith[ProjectNotFound] } "reject if 
project is deprecated" in { files - .createLink(Some(remoteId), deprecatedProject.ref, None, None, Uri.Path.Empty, None) + .createLink(Some(remoteId), deprecatedProject.ref, description("myfile.txt"), Uri.Path.Empty, None) .rejectedWith[ProjectIsDeprecated] } } @@ -385,7 +390,14 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) tags = Tags(tag -> 1, newTag -> 3) ) val actual = files - .updateLink(fileId("file2"), Some(remoteId), None, Some(`text/plain(UTF-8)`), path, 2, Some(newTag)) + .updateLink( + fileId("file2"), + Some(remoteId), + description("file-4.txt", `text/plain(UTF-8)`), + path, + 2, + Some(newTag) + ) .accepted val byTag = files.fetch(FileId("file2", newTag, projectRef)).accepted @@ -395,20 +407,20 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) "reject if file doesn't exists" in { files - .updateLink(fileIdIri(nxv + "other"), None, None, None, Uri.Path.Empty, 1, None) + .updateLink(fileIdIri(nxv + "other"), None, description("myfile.txt"), Uri.Path.Empty, 1, None) .rejectedWith[FileNotFound] } "reject if digest is not computed" in { files - .updateLink(fileId("file2"), None, None, None, Uri.Path.Empty, 3, None) + .updateLink(fileId("file2"), None, description("myfile.txt"), Uri.Path.Empty, 3, None) .rejectedWith[DigestNotComputed] } "reject if storage does not exist" in { val storage = nxv + "other-storage" files - .updateLink(fileId("file1"), Some(storage), None, None, Uri.Path.Empty, 2, None) + .updateLink(fileId("file1"), Some(storage), description("myfile.txt"), Uri.Path.Empty, 2, None) .rejected shouldEqual WrappedStorageRejection(StorageNotFound(storage, projectRef)) } @@ -417,13 +429,13 @@ class FilesSpec(fixture: RemoteStorageClientFixtures) val projectRef = ProjectRef(org, Label.unsafe("other")) files - .updateLink(FileId(file1, projectRef), None, None, None, Uri.Path.Empty, 2, None) + .updateLink(FileId(file1, projectRef), None, description("myfile.txt"), Uri.Path.Empty, 2, None) .rejectedWith[ProjectNotFound] } "reject if 
project is deprecated" in { files - .updateLink(FileId(file1, deprecatedProject.ref), None, None, None, Uri.Path.Empty, 2, None) + .updateLink(FileId(file1, deprecatedProject.ref), None, description("myfile.txt"), Uri.Path.Empty, 2, None) .rejectedWith[ProjectIsDeprecated] } } diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesStmSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesStmSpec.scala index 60222fb2f3..cc183f7100 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesStmSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FilesStmSpec.scala @@ -42,6 +42,8 @@ class FilesStmSpec extends CatsEffectSpec with FileFixtures with StorageFixtures filename = "myfile.txt", mediaType = mediaType, keywords = Map(Label.unsafe("key") -> "value"), + description = Some("A description"), + name = Some("A name"), bytes = 10, dig, Client diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala index 582fea3228..93a7858f5a 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala @@ -5,12 +5,12 @@ import akka.http.scaladsl.model.ContentTypes._ import akka.http.scaladsl.model._ import akka.testkit.TestKit import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidKeywords, InvalidMultipartFieldName} +import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidCustomMetadata, InvalidMultipartFieldName} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.AkkaSourceHelpers import ch.epfl.bluebrain.nexus.delta.sdk.syntax._ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import io.circe.syntax.KeyOps +import io.circe.syntax.{EncoderOps, KeyOps} import io.circe.{Json, JsonObject} class FormDataExtractorSpec @@ -34,45 +34,69 @@ class FormDataExtractorSpec createEntity("file", NoContentType, Some("file.custom"), keywords.toMap) } + def entityWithDescription(description: String) = { + createEntity("file", NoContentType, Some("file.custom"), description = Some(description)) + } + + def entityWithName(name: String) = { + createEntity("file", NoContentType, Some("file.custom"), name = Some(name)) + } + def createEntity( bodyPart: String, contentType: ContentType, filename: Option[String], - keywords: Map[String, Json] = Map.empty + keywords: Map[String, Json] = Map.empty, + description: Option[String] = None, + name: Option[String] = None ) = Multipart .FormData( Multipart.FormData - .BodyPart(bodyPart, HttpEntity(contentType, content.getBytes), dispositionParameters(filename, keywords)) + .BodyPart( + bodyPart, + HttpEntity(contentType, content.getBytes), + dispositionParameters(filename, keywords, description, name) + ) ) .toEntity() - def dispositionParameters(filename: Option[String], keywords: Map[String, Json]): Map[String, String] = { + def dispositionParameters( + filename: Option[String], + keywords: Map[String, Json], + description: Option[String], + name: Option[String] + ): Map[String, String] = { + + val metadata = JsonObject( + "name" -> name.asJson, + "description" -> description.asJson, + "keywords" -> JsonObject.fromMap(keywords).toJson + ).toJson + Map.from( - filename.map("filename" -> _) ++ - 
Option.when(keywords.nonEmpty)("keywords" -> JsonObject.fromMap(keywords).toJson.noSpaces) + filename.map("filename" -> _) ++ + Option.when(!metadata.isEmpty())("metadata" -> metadata.noSpaces) ) } "be extracted with the default content type" in { val entity = createEntity("file", NoContentType, Some("filename")) - val UploadedFileInformation(filename, keywords, contentType, contents) = - extractor(iri, entity, 179, None).accepted + val UploadedFileInformation(filename, _, _, _, contentType, contents) = + extractor(iri, entity, 250, None).accepted filename shouldEqual "filename" - keywords shouldEqual Map.empty contentType shouldEqual `application/octet-stream` consume(contents.dataBytes) shouldEqual content } "be extracted with the custom media type from the config" in { - val entity = createEntity("file", NoContentType, Some("file.custom")) - val UploadedFileInformation(filename, keywords, contentType, contents) = + val entity = createEntity("file", NoContentType, Some("file.custom")) + val UploadedFileInformation(filename, _, _, _, contentType, contents) = extractor(iri, entity, 2000, None).accepted filename shouldEqual "file.custom" - keywords shouldEqual Map.empty contentType shouldEqual customContentType consume(contents.dataBytes) shouldEqual content } @@ -80,38 +104,49 @@ class FormDataExtractorSpec "be extracted with the akka detection from the extension" in { val entity = createEntity("file", NoContentType, Some("file.txt")) - val UploadedFileInformation(filename, keywords, contentType, contents) = - extractor(iri, entity, 179, None).accepted + val UploadedFileInformation(filename, _, _, _, contentType, contents) = + extractor(iri, entity, 250, None).accepted filename shouldEqual "file.txt" - keywords shouldEqual Map.empty contentType shouldEqual `text/plain(UTF-8)` consume(contents.dataBytes) shouldEqual content } "be extracted with the provided content type header" in { - val entity = createEntity("file", `text/plain(UTF-8)`, Some("file.custom")) - val 
UploadedFileInformation(filename, keywords, contentType, contents) = + val entity = createEntity("file", `text/plain(UTF-8)`, Some("file.custom")) + val UploadedFileInformation(filename, _, _, _, contentType, contents) = extractor(iri, entity, 2000, None).accepted filename shouldEqual "file.custom" - keywords shouldEqual Map.empty contentType shouldEqual `text/plain(UTF-8)` consume(contents.dataBytes) shouldEqual content } + "be extracted with a description" in { + val entity = entityWithDescription("this file is cool") + val UploadedFileInformation(_, _, description, _, _, _) = extractor(iri, entity, 2000, None).accepted + description shouldEqual Some("this file is cool") + } + + "be extracted with a name" in { + val entity = entityWithName("File One") + val UploadedFileInformation(_, _, _, name, _, _) = extractor(iri, entity, 2000, None).accepted + name shouldEqual Some("File One") + } + "be extracted with keywords" in { - val entity = entityWithKeywords("key" := "value") - val UploadedFileInformation(_, keywords, _, _) = extractor(iri, entity, 2000, None).accepted + val entity = entityWithKeywords("key" := "value") + val UploadedFileInformation(_, keywords, _, _, _, _) = extractor(iri, entity, 2000, None).accepted keywords shouldEqual Map(Label.unsafe("key") -> "value") } "fail to be extracted if the custom user metadata has invalid keywords" in { val entity = entityWithKeywords(KeyThatIsTooLong := "value") - extractor(iri, entity, 2000, None).rejectedWith[InvalidKeywords] + val rej = extractor(iri, entity, 2000, None).rejectedWith[InvalidCustomMetadata] + println(rej) } "fail to be extracted if no file part exists found" in { val entity = createEntity("other", NoContentType, None) - extractor(iri, entity, 179, None).rejectedWith[InvalidMultipartFieldName] + extractor(iri, entity, 250, None).rejectedWith[InvalidMultipartFieldName] } "fail to be extracted if payload size is too large" in { diff --git 
a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/batch/BatchCopySuite.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/batch/BatchCopySuite.scala index d62af97cdc..b985f1b15b 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/batch/BatchCopySuite.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/batch/BatchCopySuite.scala @@ -41,12 +41,15 @@ class BatchCopySuite extends NexusSuite with StorageFixtures with Generators wit private val source = CopyFileSource(sourceProj.ref, NonEmptyList.of(sourceFileId)) private val storageStatEntry = StorageStatEntry(files = 10L, spaceUsed = 5L) private val keywords = genKeywords() - private val stubbedFileAttr = attributes(genString(), keywords = keywords) + private val description = genString() + private val name = genString() + private val stubbedFileAttr = + attributes(genString(), keywords = keywords, description = Some(description), name = Some(name)) test("successfully perform disk copy") { val events = ListBuffer.empty[Event] val (sourceFileRes, sourceStorage) = - genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords) + genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, description, name) val (user, aclCheck) = userAuthorizedOnProjectStorage(sourceStorage.value) val batchCopy = mkBatchCopy( @@ -75,7 +78,7 @@ class BatchCopySuite extends NexusSuite with StorageFixtures with Generators wit test("successfully perform remote disk copy") { val events = ListBuffer.empty[Event] val (sourceFileRes, sourceStorage) = - genFileResourceAndStorage(sourceFileId, sourceProj.context, remoteVal, keywords) + genFileResourceAndStorage(sourceFileId, sourceProj.context, remoteVal, keywords, description, name) val (user, aclCheck) = userAuthorizedOnProjectStorage(sourceStorage.value) val batchCopy = 
mkBatchCopy( @@ -111,7 +114,7 @@ class BatchCopySuite extends NexusSuite with StorageFixtures with Generators wit test("fail if a source storage is different to destination storage") { val events = ListBuffer.empty[Event] val (sourceFileRes, sourceStorage) = - genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords) + genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, description, name) val (user, aclCheck) = userAuthorizedOnProjectStorage(sourceStorage.value) val batchCopy = mkBatchCopy( @@ -131,7 +134,7 @@ class BatchCopySuite extends NexusSuite with StorageFixtures with Generators wit test("fail if user does not have read access on a source file's storage") { val events = ListBuffer.empty[Event] val (sourceFileRes, sourceStorage) = - genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords) + genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, description, name) val user = genUser() val aclCheck = AclSimpleCheck((user, AclAddress.fromProject(sourceProj.ref), Set())).accepted @@ -151,7 +154,7 @@ class BatchCopySuite extends NexusSuite with StorageFixtures with Generators wit test("fail if a single source file exceeds max size for destination storage") { val events = ListBuffer.empty[Event] val (sourceFileRes, sourceStorage) = - genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, 1000L) + genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, description, name, 1000L) val (user, aclCheck) = userAuthorizedOnProjectStorage(sourceStorage.value) val batchCopy = mkBatchCopy( @@ -176,7 +179,7 @@ class BatchCopySuite extends NexusSuite with StorageFixtures with Generators wit val statEntry = StorageStatEntry(files = 10L, spaceUsed = 1L) val spaceLeft = capacity - statEntry.spaceUsed val (sourceFileRes, sourceStorage) = - genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, fileSize) + 
genFileResourceAndStorage(sourceFileId, sourceProj.context, diskVal, keywords, description, name, fileSize) val (user, aclCheck) = userAuthorizedOnProjectStorage(sourceStorage.value) val batchCopy = mkBatchCopy( diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/generators/FileGen.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/generators/FileGen.scala index 915f86b9dc..82a4c83f47 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/generators/FileGen.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/generators/FileGen.scala @@ -62,20 +62,22 @@ trait FileGen { self: Generators with FileFixtures => def genOption[A](genA: => A): Option[A] = if (Random.nextInt(2) % 2 == 0) Some(genA) else None def genFileResource(fileId: FileId, context: ProjectContext): FileResource = - genFileResourceWithStorage(fileId, context, genRevision(), genKeywords(), 1L) + genFileResourceWithStorage(fileId, context, genRevision(), genKeywords(), genString(), genString(), 1L) def genFileResourceWithStorage( fileId: FileId, context: ProjectContext, storageRef: ResourceRef.Revision, keywords: Map[Label, String], + description: String, + name: String, fileSize: Long ): FileResource = genFileResourceWithIri( fileId.id.value.toIri(context.apiMappings, context.base).getOrElse(throw new Exception(s"Bad file $fileId")), fileId.project, storageRef, - attributes(genString(), size = fileSize, keywords = keywords) + attributes(genString(), size = fileSize, keywords = keywords, description = Some(description), name = Some(name)) ) def genFileResourceAndStorage( @@ -83,11 +85,13 @@ trait FileGen { self: Generators with FileFixtures => context: ProjectContext, storageVal: StorageValue, keywords: Map[Label, String], + description: String, + name: String, fileSize: Long = 1L ): (FileResource, StorageResource) = { 
val storageRes = StorageGen.resourceFor(genIri(), fileId.project, storageVal) val storageRef = ResourceRef.Revision(storageRes.id, storageRes.id, storageRes.rev) - (genFileResourceWithStorage(fileId, context, storageRef, keywords, fileSize), storageRes) + (genFileResourceWithStorage(fileId, context, storageRef, keywords, description, name, fileSize), storageRes) } def genFileResourceWithIri( @@ -176,7 +180,9 @@ object FileGen { id: UUID, projRef: ProjectRef, path: AbsolutePath, - keywords: Map[Label, String] + keywords: Map[Label, String], + description: Option[String], + name: Option[String] ): FileAttributes = { val uuidPathSegment = id.toString.take(8).mkString("/") FileAttributes( @@ -186,6 +192,8 @@ object FileGen { filename, Some(`text/plain(UTF-8)`), keywords, + description, + name, size, digest, Client diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileSerializationSuite.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileSerializationSuite.scala index 3bfabad47d..874479635d 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileSerializationSuite.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileSerializationSuite.scala @@ -31,12 +31,14 @@ class FileSerializationSuite extends SerializationSuite with StorageFixtures { private val tag = UserTag.unsafe("mytag") private val projectRef = ProjectRef.unsafe("myorg", "myproj") - private val storageRef = ResourceRef.Revision(iri"$dId?rev=1", dId, 1) - private val fileId = nxv + "file" - private val digest = ComputedDigest(DigestAlgorithm.default, "digest-value") - private val uuid = UUID.fromString("8049ba90-7cc6-4de5-93a1-802c04200dcc") - private val keywords = Map(Label.unsafe("key") -> "value") - private val attributes = + private val storageRef = 
ResourceRef.Revision(iri"$dId?rev=1", dId, 1) + private val fileId = nxv + "file" + private val digest = ComputedDigest(DigestAlgorithm.default, "digest-value") + private val uuid = UUID.fromString("8049ba90-7cc6-4de5-93a1-802c04200dcc") + private val keywords = Map(Label.unsafe("key") -> "value") + private val description = "A description" + private val name = "A name" + private val attributes = FileAttributes( uuid, "http://localhost/file.txt", @@ -44,18 +46,21 @@ class FileSerializationSuite extends SerializationSuite with StorageFixtures { "file.txt", Some(`text/plain(UTF-8)`), Map.empty, + None, + None, 12, digest, Client ) - private val attributesWithKeywords = attributes.copy(keywords = keywords) + private val attributesWithMetadata = + attributes.copy(keywords = keywords, description = Some(description), name = Some(name)) // format: off private val created = FileCreated(fileId, projectRef, storageRef, DiskStorageType, attributes.copy(digest = NotComputedDigest), 1, instant, subject, None) - private val createdWithKeywords = FileCreated(fileId, projectRef, storageRef, DiskStorageType, attributesWithKeywords.copy(digest = NotComputedDigest), 1, instant, subject, None) + private val createdWithMetadata = FileCreated(fileId, projectRef, storageRef, DiskStorageType, attributesWithMetadata.copy(digest = NotComputedDigest), 1, instant, subject, None) private val createdTagged = created.copy(tag = Some(tag)) - private val createdTaggedWithKeywords = createdWithKeywords.copy(tag = Some(tag)) + private val createdTaggedWithMetadata = createdWithMetadata.copy(tag = Some(tag)) private val updated = FileUpdated(fileId, projectRef, storageRef, DiskStorageType, attributes, 2, instant, subject, Some(tag)) private val updatedAttr = FileAttributesUpdated(fileId, projectRef, storageRef, DiskStorageType, Some(`text/plain(UTF-8)`), 12, digest, 3, instant, subject) private val tagged = FileTagAdded(fileId, projectRef, storageRef, DiskStorageType, targetRev = 1, tag, 4, instant, 
subject) @@ -83,9 +88,9 @@ class FileSerializationSuite extends SerializationSuite with StorageFixtures { expected(created, Json.fromInt(1), Json.Null, Json.Null, Json.fromString("Client")) ), ( - "FileCreated with keywords", - createdWithKeywords, - loadEvents("files", "file-created-with-keywords.json"), + "FileCreated with metadata", + createdWithMetadata, + loadEvents("files", "file-created-with-metadata.json"), Created, expected(created, Json.fromInt(1), Json.Null, Json.Null, Json.fromString("Client")) ), @@ -98,10 +103,10 @@ class FileSerializationSuite extends SerializationSuite with StorageFixtures { ), ( "FileCreated with tags and keywords", - createdTaggedWithKeywords, - loadEvents("files", "file-created-tagged-with-keywords.json"), + createdTaggedWithMetadata, + loadEvents("files", "file-created-tagged-with-metadata.json"), Created, - expected(createdTaggedWithKeywords, Json.fromInt(1), Json.Null, Json.Null, Json.fromString("Client")) + expected(createdTaggedWithMetadata, Json.fromInt(1), Json.Null, Json.Null, Json.fromString("Client")) ), ( "FileUpdated", @@ -206,10 +211,10 @@ class FileSerializationSuite extends SerializationSuite with StorageFixtures { subject ) - private val stateWithKeywords = state.copy(attributes = attributesWithKeywords) + private val stateWithMetadata = state.copy(attributes = attributesWithMetadata) private val fileState = jsonContentOf("files/database/file-state.json") - private val fileStateWithKeywords = jsonContentOf("files/database/file-state-with-keywords.json") + private val fileStateWithMetadata = jsonContentOf("files/database/file-state-with-metadata.json") test(s"Correctly serialize a FileState") { assertEquals(FileState.serializer.codec(state), fileState) @@ -219,12 +224,12 @@ class FileSerializationSuite extends SerializationSuite with StorageFixtures { assertEquals(FileState.serializer.codec.decodeJson(fileState), Right(state)) } - test(s"Correctly serialize a FileState with keywords") { - 
assertEquals(FileState.serializer.codec(stateWithKeywords), fileStateWithKeywords) + test(s"Correctly serialize a FileState with metadata") { + assertEquals(FileState.serializer.codec(stateWithMetadata), fileStateWithMetadata) } - test(s"Correctly deserialize a FileState with keywords") { - assertEquals(FileState.serializer.codec.decodeJson(fileStateWithKeywords), Right(stateWithKeywords)) + test(s"Correctly deserialize a FileState with metadata") { + assertEquals(FileState.serializer.codec.decodeJson(fileStateWithMetadata), Right(stateWithMetadata)) } } diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala index f664132fb0..89057a1d02 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/routes/FilesRoutesSpec.scala @@ -1,11 +1,11 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.files.routes import akka.actor.typed -import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` +import akka.http.scaladsl.model.ContentTypes.{`application/json`, `text/plain(UTF-8)`} import akka.http.scaladsl.model.MediaRanges._ -import akka.http.scaladsl.model.MediaTypes.`text/html` -import akka.http.scaladsl.model.headers.{Accept, Location, OAuth2BearerToken, RawHeader} -import akka.http.scaladsl.model.{StatusCodes, Uri} +import akka.http.scaladsl.model.MediaTypes.{`multipart/form-data`, `text/html`} +import akka.http.scaladsl.model.headers._ +import akka.http.scaladsl.model.{HttpRequest, RequestEntity, StatusCodes, Uri} import akka.http.scaladsl.server.Route import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig @@ -39,7 +39,7 @@ import 
ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef, Resource import ch.epfl.bluebrain.nexus.testkit.CirceLiteral import ch.epfl.bluebrain.nexus.testkit.errors.files.FileErrors.{fileAlreadyExistsError, fileIsNotDeprecatedError} import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsIOValues -import io.circe.syntax.EncoderOps +import io.circe.syntax.{EncoderOps, KeyOps} import io.circe.{Json, JsonObject} import org.scalatest._ @@ -160,16 +160,32 @@ class FilesRoutesSpec .accepted } + def postJson(path: String, json: Json): HttpRequest = { + Post(path, json.toEntity).withHeaders(`Content-Type`(`application/json`)) + } + + def postFile(path: String, entity: RequestEntity): HttpRequest = { + Post(path, entity).withHeaders(`Content-Type`(`multipart/form-data`)) + } + + def putJson(path: String, json: Json): HttpRequest = { + Put(path, json.toEntity).withHeaders(`Content-Type`(`application/json`)) + } + + def putFile(path: String, entity: RequestEntity): HttpRequest = { + Put(path, entity).withHeaders(`Content-Type`(`multipart/form-data`)) + } + "File routes" should { "fail to create a file without disk/write permission" in { - Post("/v1/files/org/proj", entity()) ~> routes ~> check { + postFile("/v1/files/org/proj", entity()) ~> routes ~> check { response.shouldBeForbidden } } "create a file" in { - Post("/v1/files/org/proj", entity()) ~> asWriter ~> routes ~> check { + postFile("/v1/files/org/proj", entity()) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.Created val attr = attributes() response.asJson shouldEqual fileMetadata(projectRef, generatedId, attr, diskIdRev) @@ -178,7 +194,7 @@ class FilesRoutesSpec "create and tag a file" in { withUUIDF(uuid2) { - Post("/v1/files/org/proj?tag=mytag", entity()) ~> asWriter ~> routes ~> check { + postFile("/v1/files/org/proj?tag=mytag", entity()) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.Created val attr = attributes(id = uuid2) val expected = fileMetadata(projectRef, 
generatedId2, attr, diskIdRev) @@ -192,22 +208,31 @@ class FilesRoutesSpec "fail to create a file link using a storage that does not allow it" in { val payload = json"""{"filename": "my.txt", "path": "my/file.txt", "mediaType": "text/plain"}""" - Put("/v1/files/org/proj/file1", payload.toEntity) ~> asWriter ~> routes ~> check { + putJson("/v1/files/org/proj/file1", payload) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.BadRequest response.asJson shouldEqual jsonContentOf("files/errors/unsupported-operation.json", "id" -> file1, "storageId" -> dId) } } + "fail to create a file link if no filename is specified either explicitly or in the path" in { + val payload = json"""{"path": "my/", "mediaType": "text/plain"}""" + postJson("/v1/files/org/proj", payload) ~> asWriter ~> routes ~> check { + status shouldEqual StatusCodes.BadRequest + response.asJson shouldEqual + jsonContentOf("files/errors/file-link-no-filename.json") + } + } + "fail to create a file without s3/write permission" in { - Put("/v1/files/org/proj/file1?storage=s3-storage", entity()) ~> asWriter ~> routes ~> check { + putFile("/v1/files/org/proj/file1?storage=s3-storage", entity()) ~> asWriter ~> routes ~> check { response.shouldBeForbidden } } "create a file on s3 with an authenticated user and provided id" in { val id = genString() - Put(s"/v1/files/org/proj/$id?storage=s3-storage", entity(id)) ~> asS3Writer ~> routes ~> check { + putFile(s"/v1/files/org/proj/$id?storage=s3-storage", entity(id)) ~> asS3Writer ~> routes ~> check { status shouldEqual StatusCodes.Created val attr = attributes(id) response.asJson shouldEqual @@ -217,7 +242,7 @@ class FilesRoutesSpec "create and tag a file on s3 with an authenticated user and provided id" in { withUUIDF(uuid2) { - Put( + putFile( "/v1/files/org/proj/fileTagged?storage=s3-storage&tag=mytag", entity("fileTagged.txt") ) ~> asS3Writer ~> routes ~> check { @@ -234,7 +259,7 @@ class FilesRoutesSpec "reject the creation of a file which already 
exists" in { givenAFile { id => - Put(s"/v1/files/org/proj/$id", entity()) ~> asWriter ~> routes ~> check { + putFile(s"/v1/files/org/proj/$id", entity()) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.Conflict response.asJson shouldEqual fileAlreadyExistsError(nxvBase(id)) } @@ -242,7 +267,7 @@ class FilesRoutesSpec } "reject the creation of a file that is too large" in { - Put( + putFile( "/v1/files/org/proj/file-too-large", randomEntity(filename = "large-file.txt", 1100) ) ~> asWriter ~> routes ~> check { @@ -252,7 +277,7 @@ class FilesRoutesSpec } "reject the creation of a file to a storage that does not exist" in { - Put("/v1/files/org/proj/file2?storage=not-exist", entity()) ~> asWriter ~> routes ~> check { + putFile("/v1/files/org/proj/file2?storage=not-exist", entity()) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.NotFound response.asJson shouldEqual jsonContentOf("storages/errors/not-found.json", "id" -> (nxv + "not-exist"), "proj" -> projectRef) @@ -261,7 +286,7 @@ class FilesRoutesSpec "fail to update a file without disk/write permission" in { givenAFile { id => - Put(s"/v1/files/org/proj/$id?rev=1", s3FieldsJson.toEntity) ~> routes ~> check { + putJson(s"/v1/files/org/proj/$id?rev=1", s3FieldsJson) ~> routes ~> check { response.shouldBeForbidden } } @@ -275,7 +300,7 @@ class FilesRoutesSpec ) forAll(endpoints.zipWithIndex) { case (endpoint, idx) => val filename = s"file-idx-$idx.txt" - Put(s"$endpoint?rev=${idx + 1}", entity(filename)) ~> asWriter ~> routes ~> check { + putFile(s"$endpoint?rev=${idx + 1}", entity(filename)) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.OK val attr = attributes(filename) response.asJson shouldEqual @@ -287,7 +312,7 @@ class FilesRoutesSpec "update and tag a file in one request" in { givenAFile { id => - Put(s"/v1/files/org/proj/$id?rev=1&tag=mytag", entity(s"$id.txt")) ~> asWriter ~> routes ~> check { + putFile(s"/v1/files/org/proj/$id?rev=1&tag=mytag", 
entity(s"$id.txt")) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.OK } Get(s"/v1/files/org/proj/$id?tag=mytag") ~> Accept(`*/*`) ~> asWriter ~> routes ~> check { @@ -299,7 +324,7 @@ class FilesRoutesSpec "fail to update a file link using a storage that does not allow it" in { givenAFile { id => val payload = json"""{"filename": "my.txt", "path": "my/file.txt", "mediaType": "text/plain"}""" - Put(s"/v1/files/org/proj/$id?rev=1", payload.toEntity) ~> asWriter ~> routes ~> check { + putJson(s"/v1/files/org/proj/$id?rev=1", payload) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.BadRequest response.asJson shouldEqual jsonContentOf("files/errors/unsupported-operation.json", "id" -> (nxv + id), "storageId" -> dId) @@ -309,7 +334,7 @@ class FilesRoutesSpec "reject the update of a non-existent file" in { val nonExistentFile = genString() - Put(s"/v1/files/org/proj/$nonExistentFile?rev=1", entity("other.txt")) ~> asWriter ~> routes ~> check { + putFile(s"/v1/files/org/proj/$nonExistentFile?rev=1", entity("other.txt")) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.NotFound response.asJson shouldEqual jsonContentOf("files/errors/not-found.json", "id" -> (nxv + nonExistentFile), "proj" -> "org/proj") @@ -318,7 +343,7 @@ class FilesRoutesSpec "reject the update of a non-existent file storage" in { givenAFile { id => - Put(s"/v1/files/org/proj/$id?rev=1&storage=not-exist", entity("other.txt")) ~> asWriter ~> routes ~> check { + putFile(s"/v1/files/org/proj/$id?rev=1&storage=not-exist", entity("other.txt")) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.NotFound response.asJson shouldEqual jsonContentOf("storages/errors/not-found.json", "id" -> (nxv + "not-exist"), "proj" -> projectRef) @@ -328,7 +353,7 @@ class FilesRoutesSpec "reject the update of a file at a non-existent revision" in { givenAFile { id => - Put(s"/v1/files/org/proj/$id?rev=10", entity("other.txt")) ~> asWriter ~> routes ~> check { + 
putFile(s"/v1/files/org/proj/$id?rev=10", entity("other.txt")) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.Conflict response.asJson shouldEqual jsonContentOf("files/errors/incorrect-rev.json", "provided" -> 10, "expected" -> 1) @@ -415,7 +440,7 @@ class FilesRoutesSpec "tag a file" in { givenAFile { id => val payload = json"""{"tag": "mytag", "rev": 1}""" - Post(s"/v1/files/org/proj/$id/tags?rev=1", payload.toEntity) ~> asWriter ~> routes ~> check { + postJson(s"/v1/files/org/proj/$id/tags?rev=1", payload) ~> asWriter ~> routes ~> check { status shouldEqual StatusCodes.Created val attr = attributes(id) response.asJson shouldEqual fileMetadata(projectRef, nxv + id, attr, diskIdRev, rev = 2) @@ -688,6 +713,8 @@ object FilesRoutesSpec extends CirceLiteral { ) case true => Json.obj() } + val descriptionJson = attributes.description.map(desc => Json.obj("description" := desc)) + val nameJson = attributes.name.map(name => Json.obj("name" := name)) val mainJson = json""" { @@ -725,6 +752,7 @@ object FilesRoutesSpec extends CirceLiteral { } """ - mainJson deepMerge (keywordsJson) + (List(mainJson, keywordsJson) ++ nameJson ++ descriptionJson) + .reduce(_.deepMerge(_)) } } diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala index 11bf717f3b..f7c1085fe0 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/directives/DeltaDirectives.scala @@ -114,6 +114,16 @@ trait DeltaDirectives extends UriDirectives { } } + def contentType(mediaType: MediaType): Directive0 = { + headerValueByName("Content-Type").flatMap { contentType => + if (contentType == mediaType.value) { + pass + } else { + reject() + } + } + } + /** * If the `Accept` header is set to `text/html`, redirect to the matching resource page in 
fusion if the feature is * enabled diff --git a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfRejectionHandler.scala b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfRejectionHandler.scala index 1e84b38ea9..2a53d2bc51 100644 --- a/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfRejectionHandler.scala +++ b/delta/sdk/src/main/scala/ch/epfl/bluebrain/nexus/delta/sdk/marshalling/RdfRejectionHandler.scala @@ -187,7 +187,7 @@ object RdfRejectionHandler { val unsupported = rejection.contentType.fold("")(_.toString) val supported = rejection.supported.mkString(" or ") val expected = if (supported.isEmpty) "" else s" Expected: $supported" - jsonObj(rejection, s"The request's Content-Type$unsupported is not supported.$expected") + jsonObj(rejection, s"The request's Content-Type $unsupported is not supported.$expected") } implicit private[marshalling] val unsupportedReqCtSeqEncoder @@ -196,7 +196,7 @@ object RdfRejectionHandler { val unsupported = rejections.find(_.contentType.isDefined).flatMap(_.contentType).fold("")(" [" + _ + "]") val supported = rejections.flatMap(_.supported).mkString(" or ") val expected = if (supported.isEmpty) "" else s" Expected: $supported" - jsonObj(rejections.head, s"The request's Content-Type$unsupported is not supported.$expected") + jsonObj(rejections.head, s"The request's Content-Type $unsupported is not supported.$expected") } implicit private val unsupportedReqCtResponseFields: HttpResponseFields[UnsupportedRequestContentTypeRejection] = diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/FileMatchers.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/FileMatchers.scala new file mode 100644 index 0000000000..0243a5b545 --- /dev/null +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/FileMatchers.scala @@ -0,0 +1,40 @@ +package ch.epfl.bluebrain.nexus.testkit.scalatest + 
+import io.circe.Json +import org.scalatest.matchers.{HavePropertyMatchResult, HavePropertyMatcher} + +object FileMatchers { + + def keywords(expected: (String, String)*): HavePropertyMatcher[Json, Map[String, String]] = keywords(expected.toMap) + + def keywords(expected: Map[String, String]): HavePropertyMatcher[Json, Map[String, String]] = HavePropertyMatcher { + json => + val actual = json.hcursor.downField("_keywords").as[Map[String, String]].toOption + HavePropertyMatchResult( + actual.contains(expected), + "keywords", + expected, + actual.orNull + ) + } + + def description(expected: String): HavePropertyMatcher[Json, String] = HavePropertyMatcher { json => + val actual = json.hcursor.downField("description").as[String].toOption + HavePropertyMatchResult( + actual.contains(expected), + "description", + expected, + actual.orNull + ) + } + + def name(expected: String): HavePropertyMatcher[Json, String] = HavePropertyMatcher { json => + val actual = json.hcursor.downField("name").as[String].toOption + HavePropertyMatchResult( + actual.contains(expected), + "name", + expected, + actual.orNull + ) + } +} diff --git a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/ResourceMatchers.scala b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/ResourceMatchers.scala index c16da79568..1ef74dc0d7 100644 --- a/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/ResourceMatchers.scala +++ b/delta/testkit/src/main/scala/ch/epfl/bluebrain/nexus/testkit/scalatest/ResourceMatchers.scala @@ -15,4 +15,5 @@ object ResourceMatchers { actualId.orNull ) } + } diff --git a/docs/src/main/paradox/docs/delta/api/files-api.md b/docs/src/main/paradox/docs/delta/api/files-api.md index b5fe65cbe5..f3d66cfb5a 100644 --- a/docs/src/main/paradox/docs/delta/api/files-api.md +++ b/docs/src/main/paradox/docs/delta/api/files-api.md @@ -51,11 +51,16 @@ POST /v1/files/{org_label}/{project_label}?storage={storageId}&tag={tagName} When not 
specified, the default storage of the project is used.
- `{tagName}` an optional label given to the file on its first revision.
-The json payload:
+The request body:
-- If the `@id` value is found on the payload, this @id will be used.
-- If the `@id` value is not found on the payload, an @id will be generated as follows: `base:{UUID}`. The `base` is
-the `prefix` defined on the resource's project (`{project_label}`).
+The body should be a multipart form, to allow file upload. The form should contain one part named `file`. This part can be given a content-type header, which will be used if specified. If not specified, the content-type will be inferred from the file's extension.
+
+This part can contain the following disposition parameters:
+- `filename`: the filename which will be used in the back-end file system
+- `metadata`: a JSON object containing one or more of the following fields:
+ - `name`: a string which is a descriptive name for the file. It will be indexed in the full-text search.
+ - `description`: a string that describes the file. It will be indexed in the full-text search.
+ - `keywords`: a JSON object with `Label` keys and `string` values. These keywords will be indexed and can be used to search for the file.
 **Example**
@@ -79,7 +84,14 @@ PUT /v1/files/{org_label}/{project_label}/{file_id}?storage={storageId}&tag={tag
 When not specified, the default storage of the project is used.
- `{tagName}` an optional label given to the file on its first revision.
-Note that if the payload contains an @id different from the `{file_id}`, the request will fail.
+The body should be a multipart form, to allow file upload. The form should contain one part named `file`. This part can be given a content-type header, which will be used if specified. If not specified, the content-type will be inferred from the file's extension. 
+
+This part can contain the following disposition parameters:
+- `filename`: the filename which will be used in the back-end file system
+- `metadata`: a JSON object containing one or more of the following fields:
+ - `name`: a string which is a descriptive name for the file. It will be indexed in the full-text search.
+ - `description`: a string that describes the file. It will be indexed in the full-text search.
+ - `keywords`: a JSON object with `Label` keys and `string` values. These keywords will be indexed and can be used to search for the file.
 **Example**
diff --git a/docs/src/main/paradox/docs/releases/v1.10-release-notes.md b/docs/src/main/paradox/docs/releases/v1.10-release-notes.md
index 9aee98db14..4e3bf760f4 100644
--- a/docs/src/main/paradox/docs/releases/v1.10-release-notes.md
+++ b/docs/src/main/paradox/docs/releases/v1.10-release-notes.md
@@ -58,6 +58,15 @@ Previously deprecated views can now be undeprecated. This is available for all v
 The default Elasticsearch view now uses a new mapping and settings which improves the ability to search for resources using the listing endpoints.
+### Files
+
+#### Custom metadata
+
+It is now possible to add custom metadata when creating files. This metadata will be indexed for full-text
+search. 
+ +@ref:[More information](../delta/api/files-api.md#create-using-post) + ### Storages #### Disabling default storage diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala index 4e874c7723..b6d86e87ea 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/HttpClient.scala @@ -144,12 +144,14 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit ) } - def uploadFileWithKeywords( + def uploadFileWithMetadata( requestPath: String, fileContents: String, contentType: ContentType, fileName: String, identity: Identity, + description: Option[String], + name: Option[String], keywords: Map[String, String] )(implicit um: FromEntityUnmarshaller[Json]): IO[(Json, HttpResponse)] = { @@ -163,7 +165,16 @@ class HttpClient private (baseUrl: Uri, httpExt: HttpExt)(implicit BodyPart.Strict( "file", HttpEntity(contentType, s.getBytes), - Map("filename" -> fileName, "keywords" -> keywords.asJson.noSpaces) + Map( + "filename" -> fileName, + "metadata" -> Json + .obj( + "name" -> name.asJson, + "description" -> description.asJson, + "keywords" -> keywords.asJson + ) + .noSpaces + ) ) ).toEntity() }, diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/BatchCopySpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/BatchCopySpec.scala index f8d9396cd3..947e9765cf 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/BatchCopySpec.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/BatchCopySpec.scala @@ -6,6 +6,7 @@ import cats.effect.IO import cats.effect.unsafe.implicits.global import cats.implicits.{catsSyntaxParallelTraverse1, toTraverseOps} import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils +import ch.epfl.bluebrain.nexus.testkit.scalatest.FileMatchers.{description => descriptionField, keywords => 
keywordsField, name => nameField} import ch.epfl.bluebrain.nexus.tests.HttpClient._ import ch.epfl.bluebrain.nexus.tests.Identity.storages.Coyote import ch.epfl.bluebrain.nexus.tests.kg.files.BatchCopySpec.{CopyStorageType, Response, StorageDetails} @@ -86,7 +87,15 @@ class BatchCopySpec extends BaseIntegrationSpec { } def genTextFileInput(): FileInput = - FileInput(genId(), genString(), ContentTypes.`text/plain(UTF-8)`, genString(), Map(genString() -> genString())) + FileInput( + genId(), + genString(), + ContentTypes.`text/plain(UTF-8)`, + genString(), + Map(genString() -> genString()), + genString(), + genString() + ) def mkPayload(sourceProjRef: String, sourceFiles: List[FileInput]): Json = { val sourcePayloads = sourceFiles.map(f => Json.obj("sourceFileId" := f.fileId)) @@ -95,7 +104,15 @@ class BatchCopySpec extends BaseIntegrationSpec { def uploadFile(file: FileInput, storage: StorageDetails): IO[Assertion] = filesDsl - .uploadFileWithKeywords(file, storage.projRef, storage.storageId, None, file.keywords) + .uploadFileWithMetadata( + file, + storage.projRef, + storage.storageId, + None, + file.keywords, + Some(file.description), + Some(file.name) + ) .map { case (_, response) => response.status shouldEqual StatusCodes.Created } def copyFilesAndCheckSavedResourcesAndContents( @@ -119,7 +136,7 @@ class BatchCopySpec extends BaseIntegrationSpec { } def checkFileContentsAreCopiedCorrectly(destProjRef: String, sourceFiles: List[FileInput], ids: List[String]) = - ids.zip(sourceFiles).traverse { case (destId, FileInput(_, filename, contentType, contents, _)) => + ids.zip(sourceFiles).traverse { case (destId, FileInput(_, filename, contentType, contents, _, _, _)) => deltaClient .get[ByteString](s"/files/$destProjRef/${UrlUtils.encode(destId)}", Coyote, acceptAll) { filesDsl.expectFileContentAndMetadata(filename, contentType, contents) @@ -133,13 +150,12 @@ class BatchCopySpec extends BaseIntegrationSpec { ): IO[Assertion] = { ids .zip(sourceFiles) - .parTraverse { 
case (id, FileInput(_, _, _, _, keywords)) => + .parTraverse { case (id, FileInput(_, _, _, _, keywords, description, name)) => deltaClient.get[Json](s"/files/$destProjRef/${UrlUtils.encode(id)}", Coyote) { (json, response) => response.status shouldEqual StatusCodes.OK - json.hcursor.downField("_keywords").as[Map[String, String]].toOption match { - case Some(value) => value shouldEqual keywords - case None => fail("keywords missing") - } + json should have(keywordsField(keywords)) + json should have(descriptionField(description)) + json should have(nameField(name)) } } .map(_ => succeed) diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesDsl.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesDsl.scala index 04ccbcf628..cafd7ba563 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesDsl.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesDsl.scala @@ -40,20 +40,24 @@ class FilesDsl(deltaClient: HttpClient)(implicit mat: Materializer, ec: Executio ) } - def uploadFileWithKeywords( + def uploadFileWithMetadata( fileInput: FileInput, projRef: String, storage: String, rev: Option[Int], - keywords: Map[String, String] + keywords: Map[String, String], + description: Option[String], + name: Option[String] ): IO[(Json, HttpResponse)] = { val revString = rev.map(r => s"&rev=$r").getOrElse("") - deltaClient.uploadFileWithKeywords( + deltaClient.uploadFileWithMetadata( s"/files/$projRef/${fileInput.fileId}?storage=nxv:$storage$revString", fileInput.contents, fileInput.ct, fileInput.filename, Coyote, + description, + name, keywords ) } diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesSpec.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesSpec.scala index 7bc8d69e6c..879a556739 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesSpec.scala +++ 
b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/FilesSpec.scala @@ -3,6 +3,7 @@ package ch.epfl.bluebrain.nexus.tests.kg.files import akka.http.scaladsl.model.{ContentTypes, StatusCodes} import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UrlUtils +import ch.epfl.bluebrain.nexus.testkit.scalatest.FileMatchers.{description => descriptionField, keywords, name => nameField} import ch.epfl.bluebrain.nexus.testkit.scalatest.ResourceMatchers.`@id` import ch.epfl.bluebrain.nexus.tests.BaseIntegrationSpec import ch.epfl.bluebrain.nexus.tests.Identity.files.Writer @@ -73,6 +74,26 @@ class FilesSpec extends BaseIntegrationSpec { exactly(1, results) should have(`@id`(cerebellumId)) no(results) should have(`@id`(cortexId)) } + + "allow a file to be found via the description" in { + val coolId = givenAFileWithDescription("A really cool file") + val warmId = givenAFileWithDescription("A really warm file") + + val results = queryForFilesWithFreeText("cool").accepted + + exactly(1, results) should have(`@id`(coolId)) + no(results) should have(`@id`(warmId)) + } + + "allow a file to be found via the name" in { + val faxId = givenAFileWithName("File o fax") + val fishId = givenAFileWithName("File et o fish") + + val results = queryForFilesWithFreeText("fish").accepted + + exactly(1, results) should have(`@id`(fishId)) + no(results) should have(`@id`(faxId)) + } } private def assertListingTotal(id: String, expectedTotal: Int) = @@ -111,17 +132,45 @@ class FilesSpec extends BaseIntegrationSpec { private def givenAFileWithBrainRegion(brainRegion: String): String = { val id = genString() val fullId = deltaClient - .uploadFileWithKeywords( + .uploadFileWithMetadata( s"/files/$org/$project/$id", "file content", ContentTypes.`text/plain(UTF-8)`, s"$id.json", Writer, + None, + None, Map("brainRegion" -> brainRegion) ) .map { case (json, response) => response.status shouldEqual StatusCodes.Created - 
json.hcursor.downField("@id").as[String].getOrElse(fail("Could not extract @id from response")) + json should have(keywords("brainRegion" -> brainRegion)) + extractId(json) + } + .accepted + + eventually { assertFileIsInListing(id) } + + fullId + } + + private def givenAFileWithName(name: String): String = { + val id = genString() + val fullId = deltaClient + .uploadFileWithMetadata( + s"/files/$org/$project/$id", + "file content", + ContentTypes.`text/plain(UTF-8)`, + s"$id.json", + Writer, + None, + Some(name), + Map.empty + ) + .map { case (json, response) => + response.status shouldEqual StatusCodes.Created + json should have(nameField(name)) + extractId(json) } .accepted @@ -130,6 +179,35 @@ class FilesSpec extends BaseIntegrationSpec { fullId } + private def givenAFileWithDescription(description: String): String = { + val id = genString() + val fullId = deltaClient + .uploadFileWithMetadata( + s"/files/$org/$project/$id", + "file content", + ContentTypes.`text/plain(UTF-8)`, + s"$id.json", + Writer, + Some(description), + None, + Map.empty + ) + .map { case (json, response) => + response.status shouldEqual StatusCodes.Created + json should have(descriptionField(description)) + extractId(json) + } + .accepted + + eventually { assertFileIsInListing(id) } + + fullId + } + + private def extractId(json: Json) = { + json.hcursor.downField("@id").as[String].getOrElse(fail("Could not extract @id from response")) + } + /** Provides a file in the default storage */ private def givenAFile(assertion: String => Assertion): Assertion = { val id = genString() diff --git a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/model/FileInput.scala b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/model/FileInput.scala index c4825c1c78..79a4097fa6 100644 --- a/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/model/FileInput.scala +++ b/tests/src/test/scala/ch/epfl/bluebrain/nexus/tests/kg/files/model/FileInput.scala @@ -8,7 +8,9 @@ final case 
class FileInput( filename: String, ct: ContentType, contents: String, - keywords: Map[String, String] + keywords: Map[String, String], + description: String, + name: String ) object FileInput { @@ -17,14 +19,24 @@ object FileInput { val updatedJsonFileContent = """{ "updated": ["is", "a", "test", "file"] }""" val emptyTextFile = - FileInput("empty", "empty", ContentTypes.`text/plain(UTF-8)`, emptyFileContent, Map("brainRegion" -> "cortex")) + FileInput( + "empty", + "empty", + ContentTypes.`text/plain(UTF-8)`, + emptyFileContent, + Map("brainRegion" -> "cortex"), + "A cortex file", + "Ctx 1" + ) val jsonFileNoContentType = FileInput( "attachment.json", "attachment.json", ContentTypes.NoContentType, jsonFileContent, - Map("brainRegion" -> "cerebellum") + Map("brainRegion" -> "cerebellum"), + "A cerebellum file", + "Crb 2" ) val updatedJsonFileWithContentType = jsonFileNoContentType.copy(contents = updatedJsonFileContent, ct = ContentTypes.`application/json`) @@ -33,7 +45,9 @@ object FileInput { "attachment2", ContentTypes.NoContentType, "text file", - Map("brainRegion" -> "hippocampus") + Map("brainRegion" -> "hippocampus"), + "A hippocampus file", + "Hpc 3" ) val textFileWithContentType = FileInput( @@ -41,7 +55,9 @@ object FileInput { "attachment2", ContentTypes.`application/octet-stream`, "text file", - Map("brainRegion" -> "hippocampus") + Map("brainRegion" -> "hippocampus"), + "A cerebellum file", + "Crb 4" ) val customBinaryContent = @@ -50,6 +66,8 @@ object FileInput { "custom-binary", ContentType.Binary(MediaType.applicationBinary("obj", NotCompressible)), "text file", - Map("brainRegion" -> "hippocampus") + Map("brainRegion" -> "hippocampus"), + "A custom file", + "custom-binary" ) }