From 22b5bafa779f5d17ef9b12ae16fa5593a7b1c742 Mon Sep 17 00:00:00 2001
From: Simon
Date: Thu, 17 Oct 2024 10:14:26 +0200
Subject: [PATCH] Separate access check from other operations (#5190)

* Separate access check from other operations

---------

Co-authored-by: Simon Dumas
---
 .../plugins/storage/StoragePluginModule.scala | 11 +++--
 .../delta/plugins/storage/files/Files.scala | 2 +-
 .../storage/files/FormDataExtractor.scala | 17 ++-----
 .../plugins/storage/files/model/FileId.scala | 1 -
 .../storage/files/model/FileRejection.scala | 4 +-
 .../plugins/storage/storages/Storages.scala | 2 +-
 .../storages/access/DiskStorageAccess.scala | 26 ++++++++++
 .../storages/access/RemoteStorageAccess.scala | 28 +++++++++++
 .../storages/access/S3StorageAccess.scala | 21 +++++++++
 .../storages/access/StorageAccess.scala | 27 +++++++++++
 .../storages/operations/FileOperations.scala | 13 ++---
 .../storages/operations/StorageAccess.scala | 16 -------
 .../operations/disk/DiskFileOperations.scala | 21 ---------
 .../remote/RemoteDiskFileOperations.scala | 12 -----
 .../operations/s3/S3FileOperations.scala | 8 ----
 .../storage/files/FormDataExtractorSpec.scala | 20 ++++----
 .../files/RemoteStorageFilesSpec.scala | 3 ++
 .../files/mocks/FileOperationsMock.scala | 23 ++-------
 .../storage/storages/StoragesStmSpec.scala | 2 +-
 .../access/DiskStorageAccessSuite.scala | 35 ++++++++++++++
 .../RemoteDiskStorageAccessSpec.scala | 16 +++---
 .../access/S3StorageAccessSuite.scala | 30 ++++++++++++
 .../disk/DiskStorageAccessSpec.scala | 47 -------------------
 .../operations/remote/RemoteStorageSpec.scala | 1 +
 .../operations/s3/S3FileOperationsSuite.scala | 11 -----
 .../nexus/ship/files/FileWiring.scala | 6 +--
 .../nexus/ship/storages/StorageWiring.scala | 11 +----
 27 files changed, 212 insertions(+), 202 deletions(-)
 create mode 100644 delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccess.scala
 create mode 100644 delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteStorageAccess.scala
 create mode 100644 delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccess.scala
 create mode 100644 delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/StorageAccess.scala
 delete mode 100644 delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/StorageAccess.scala
 create mode 100644 delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/RemoteStorageFilesSpec.scala
 create mode 100644 delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccessSuite.scala
 rename delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/{operations/remote => access}/RemoteDiskStorageAccessSpec.scala (56%)
 create mode 100644 delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccessSuite.scala
 delete mode 100644 delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageAccessSpec.scala

diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala
index 479576d1a0..a0fc704894 100644
---
a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/StoragePluginModule.scala @@ -25,6 +25,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.{S3F import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.routes.StoragesRoutes import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.schemas.{storage => storagesSchemaId} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages._ +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access.{RemoteStorageAccess, S3StorageAccess, StorageAccess} import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.{ContextValue, RemoteContextResolution} import ch.epfl.bluebrain.nexus.delta.rdf.utils.JsonKeyOrdering @@ -78,12 +79,16 @@ class StoragePluginModule(priority: Int) extends ModuleDef { new S3LocationGenerator(prefix) } + make[StorageAccess].from { (remoteClient: RemoteDiskStorageClient, s3Client: S3StorageClient) => + StorageAccess(RemoteStorageAccess(remoteClient), S3StorageAccess(s3Client)) + } + make[Storages] .fromEffect { ( fetchContext: FetchContext, contextResolution: ResolverContextResolution, - fileOperations: FileOperations, + storageAccess: StorageAccess, permissions: Permissions, xas: Transactors, cfg: StoragePluginConfig, @@ -96,7 +101,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef { fetchContext, contextResolution, permissions.fetchPermissionSet, - fileOperations, + storageAccess, xas, cfg.storages, serviceAccount, @@ -183,7 +188,7 @@ class StoragePluginModule(priority: Int) extends ModuleDef { } make[FileOperations].from { (disk: DiskFileOperations, remoteDisk: RemoteDiskFileOperations, s3: S3FileOperations) => - FileOperations.mk(disk, remoteDisk, s3) + FileOperations.apply(disk, remoteDisk, s3) } make[Files].from { diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala index 7f52c0b713..a6c0c10cb8 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/Files.scala @@ -513,7 +513,7 @@ final class Files( private def saveFileToStorage(iri: Iri, storage: Storage, uploadRequest: FileUploadRequest): IO[FileAttributes] = { for { - info <- formDataExtractor(iri, uploadRequest.entity, storage.storageValue.maxFileSize) + info <- formDataExtractor(uploadRequest.entity, storage.storageValue.maxFileSize) description = FileDescription.from(info, uploadRequest.metadata) storageMetadata <- fileOperations.save(storage, info, uploadRequest.contentLength) } yield FileAttributes.from(description, storageMetadata) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala index 498fb8cc52..8e056fcdfd 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractor.scala @@ -14,7 +14,6 @@ import 
ch.epfl.bluebrain.nexus.delta.kernel.error.NotARejection import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig import ch.epfl.bluebrain.nexus.delta.kernel.utils.FileUtils import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileRejection.{FileTooLarge, InvalidMultipartFieldName, WrappedAkkaRejection} -import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import scala.concurrent.{ExecutionContext, Future} import scala.util.Try @@ -25,8 +24,6 @@ trait FormDataExtractor { * Extracts the part with fieldName ''file'' from the passed ''entity'' MultiPart/FormData. Any other part is * discarded. * - * @param id - * the file id * @param entity * the Multipart/FormData payload * @param maxFileSize @@ -34,11 +31,7 @@ trait FormDataExtractor { * @return * the file metadata. plus the entity with the file content */ - def apply( - id: Iri, - entity: HttpEntity, - maxFileSize: Long - ): IO[UploadedFileInformation] + def apply(entity: HttpEntity, maxFileSize: Long): IO[UploadedFileInformation] } case class UploadedFileInformation( @@ -74,15 +67,11 @@ object FormDataExtractor { new FormDataExtractor { implicit val ec: ExecutionContext = as.getDispatcher - override def apply( - id: Iri, - entity: HttpEntity, - maxFileSize: Long - ): IO[UploadedFileInformation] = { + override def apply(entity: HttpEntity, maxFileSize: Long): IO[UploadedFileInformation] = { for { formData <- unmarshall(entity, maxFileSize) fileOpt <- extractFile(formData, maxFileSize) - file <- IO.fromOption(fileOpt)(InvalidMultipartFieldName(id)) + file <- IO.fromOption(fileOpt)(InvalidMultipartFieldName) } yield file } diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala index 256cefd87e..f670813aa4 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileId.scala @@ -17,7 +17,6 @@ final case class FileId(id: IdSegmentRef, project: ProjectRef) { def expandRef(fetchContext: ProjectRef => IO[ProjectContext]): IO[ResourceRef] = fetchContext(project).flatMap { pc => iriExpander(id.value, pc).map { iri => - (iri, pc) id match { case IdSegmentRef.Latest(_) => ResourceRef.Latest(iri) case IdSegmentRef.Revision(_, rev) => ResourceRef.Revision(iri, rev) diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala index ca1f75cef1..1ff3b9be3e 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/model/FileRejection.scala @@ -135,8 +135,8 @@ object FileRejection { * Rejection returned when attempting to create/update a file with a Multipart/Form-Data payload that does not * contain a ''file'' fieldName */ - final case class InvalidMultipartFieldName(id: Iri) - extends FileRejection(s"File '$id' payload a Multipart/Form-Data without a 'file' part.") + final case object InvalidMultipartFieldName + extends FileRejection(s"Multipart/Form-Data payload does not contain a 'file' part.") final case object EmptyCustomMetadata extends 
FileRejection(s"No metadata was provided") diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala index d3a616ffbc..c260b66bae 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/Storages.scala @@ -7,12 +7,12 @@ import ch.epfl.bluebrain.nexus.delta.kernel.kamon.KamonMetricComponent import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.Storages._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access.StorageAccess import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageCommand._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageEvent._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue.DiskStorageValue import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model._ -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageAccess import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.schemas.{storage => storageSchema} import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.api.JsonLdApi diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccess.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccess.scala new file mode 100644 index 0000000000..4640ad43ca --- /dev/null +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccess.scala @@ -0,0 +1,26 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access + +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.AbsolutePath +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible + +import java.nio.file.Files + +object DiskStorageAccess { + + def checkVolumeExists(path: AbsolutePath): IO[Unit] = { + def failWhen(condition: Boolean, err: => String) = { + IO.raiseWhen(condition)(StorageNotAccessible(err)) + } + + for { + exists <- IO.blocking(Files.exists(path.value)) + _ <- failWhen(!exists, s"Volume '${path.value}' does not exist.") + isDirectory <- IO.blocking(Files.isDirectory(path.value)) + _ <- failWhen(!isDirectory, s"Volume '${path.value}' is not a directory.") + isWritable <- IO.blocking(Files.isWritable(path.value)) + _ <- failWhen(!isWritable, s"Volume '${path.value}' does not have write access.") + } yield () + } + +} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteStorageAccess.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteStorageAccess.scala new file mode 100644 index 0000000000..68152f2ec3 --- /dev/null +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteStorageAccess.scala @@ -0,0 +1,28 @@ 
+package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access + +import cats.syntax.all._ +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient +import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError +import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label + +trait RemoteStorageAccess { + + def checkFolderExists(folder: Label): IO[Unit] + +} + +object RemoteStorageAccess { + + def apply(client: RemoteDiskStorageClient): RemoteStorageAccess = + (folder: Label) => + client + .exists(folder) + .adaptError { case err: HttpClientError => + StorageNotAccessible( + err.details.fold(s"Folder '$folder' does not exist")(d => s"${err.reason}: $d") + ) + } + +} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccess.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccess.scala new file mode 100644 index 0000000000..c8fafabced --- /dev/null +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccess.scala @@ -0,0 +1,21 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access + +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient + +trait S3StorageAccess { + + def checkBucketExists(bucket: String): IO[Unit] + +} + +object S3StorageAccess { + + def apply(client: S3StorageClient): S3StorageAccess = + (bucket: String) => + client.bucketExists(bucket).flatMap { exists => + IO.raiseUnless(exists)(StorageNotAccessible(s"Bucket $bucket does not exist")) + } + +} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/StorageAccess.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/StorageAccess.scala new file mode 100644 index 0000000000..6e2ed5a583 --- /dev/null +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/StorageAccess.scala @@ -0,0 +1,27 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access + +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue.{DiskStorageValue, RemoteDiskStorageValue, S3StorageValue} + +trait StorageAccess { + + /** + * Checks whether the system has access to the passed ''storage'' + * + * @return + * a [[Unit]] if access has been verified successfully or signals an error [[StorageNotAccessible]] with the + * details about why the storage is not accessible + */ + def validateStorageAccess(storage: StorageValue): IO[Unit] +} + +object StorageAccess { + + def apply(remoteAccess: RemoteStorageAccess, s3Access: S3StorageAccess): StorageAccess = { + case d: DiskStorageValue => DiskStorageAccess.checkVolumeExists(d.volume) + case s: RemoteDiskStorageValue => remoteAccess.checkFolderExists(s.folder) + case s: S3StorageValue => s3Access.checkBucketExists(s.bucket) + } + +} diff --git 
a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/FileOperations.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/FileOperations.scala index 6296238995..c1db7dc9d4 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/FileOperations.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/FileOperations.scala @@ -4,9 +4,8 @@ import akka.http.scaladsl.model.Uri import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.UploadedFileInformation import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{ComputedFileAttributes, FileAttributes, FileDelegationRequest, FileStorageMetadata} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.Storage import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.Storage.{DiskStorage, RemoteDiskStorage, S3Storage} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue.{DiskStorageValue, RemoteDiskStorageValue, S3StorageValue} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{Storage, StorageValue} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.{DelegateFileOperation, FetchAttributeRejection, LinkFileRejection, MoveFileRejection} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.UploadingFile.{DiskUploadingFile, RemoteUploadingFile, S3UploadingFile} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.disk.DiskFileOperations @@ -18,7 +17,7 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import java.util.UUID -trait FileOperations extends StorageAccess { +trait FileOperations { def save( storage: Storage, info: UploadedFileInformation, @@ -37,18 +36,12 @@ trait FileOperations extends StorageAccess { } object FileOperations { - def mk( + def apply( diskFileOps: DiskFileOperations, remoteDiskFileOps: RemoteDiskFileOperations, s3FileOps: S3FileOperations ): FileOperations = new FileOperations { - override def validateStorageAccess(storage: StorageValue): IO[Unit] = storage match { - case s: DiskStorageValue => diskFileOps.checkVolumeExists(s.volume) - case s: S3StorageValue => s3FileOps.checkBucketExists(s.bucket) - case s: RemoteDiskStorageValue => remoteDiskFileOps.checkFolderExists(s.folder) - } - override def save( storage: Storage, info: UploadedFileInformation, diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/StorageAccess.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/StorageAccess.scala deleted file mode 100644 index 726d976644..0000000000 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/StorageAccess.scala +++ /dev/null @@ -1,16 +0,0 @@ -package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations - -import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue - -trait StorageAccess { - - /** - * Checks whether the system has access to the passed ''storage'' - * - * @return - * a [[Unit]] if access has been verified successfully or signals an error [[StorageNotAccessible]] with the - * details about why the storage is not accessible - */ - def 
validateStorageAccess(storage: StorageValue): IO[Unit] -} diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskFileOperations.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskFileOperations.scala index b36e4c1f7b..f45209c034 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskFileOperations.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskFileOperations.scala @@ -6,18 +6,12 @@ import akka.stream.scaladsl.FileIO import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileStorageMetadata -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.AbsolutePath -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection.UnexpectedLocationFormat import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.UploadingFile.DiskUploadingFile import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource -import java.nio.file.Files - trait DiskFileOperations { - def checkVolumeExists(path: AbsolutePath): IO[Unit] - def fetch(path: Uri.Path): IO[AkkaSource] def save(uploading: DiskUploadingFile): IO[FileStorageMetadata] @@ -28,21 +22,6 @@ object DiskFileOperations { private val saveFile = new DiskStorageSaveFile() - override def checkVolumeExists(path: AbsolutePath): IO[Unit] = { - def failWhen(condition: Boolean, err: => String) = { - IO.raiseWhen(condition)(StorageNotAccessible(err)) - } - - for { - exists <- IO.blocking(Files.exists(path.value)) - _ <- failWhen(!exists, s"Volume '${path.value}' does not exist.") - isDirectory <- IO.blocking(Files.isDirectory(path.value)) - _ <- failWhen(!isDirectory, s"Volume '${path.value}' is not a directory.") - isWritable <- IO.blocking(Files.isWritable(path.value)) - _ <- failWhen(!isWritable, s"Volume '${path.value}' does not have write access.") - } yield () - } - override def fetch(path: Uri.Path): IO[AkkaSource] = absoluteDiskPath(path).redeemWith( e => IO.raiseError(UnexpectedLocationFormat(s"file://$path", e.getMessage)), path => diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskFileOperations.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskFileOperations.scala index ef5f081f95..c56f4feff2 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskFileOperations.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskFileOperations.scala @@ -7,7 +7,6 @@ import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{ComputedFileAttributes, FileStorageMetadata} import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.Storage.RemoteDiskStorage -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.FileOperations.intermediateFolders import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchAttributeRejection.WrappedFetchRejection import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection @@ -15,13 +14,11 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.Uploadi import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.model.RemoteDiskStorageFileAttributes import ch.epfl.bluebrain.nexus.delta.sdk.AkkaSource -import ch.epfl.bluebrain.nexus.delta.sdk.http.HttpClientError import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef} import java.util.UUID trait RemoteDiskFileOperations { - def checkFolderExists(folder: Label): IO[Unit] def legacyLink(storage: RemoteDiskStorage, sourcePath: Uri.Path, filename: String): IO[FileStorageMetadata] @@ -36,15 +33,6 @@ object RemoteDiskFileOperations { def mk(client: RemoteDiskStorageClient)(implicit uuidf: UUIDF): RemoteDiskFileOperations = new RemoteDiskFileOperations { - override def checkFolderExists(folder: Label): IO[Unit] = - client - .exists(folder) - .adaptError { case err: HttpClientError => - StorageNotAccessible( - err.details.fold(s"Folder '$folder' does not exist")(d => s"${err.reason}: $d") - ) - } - override def fetch(folder: Label, path: Uri.Path): IO[AkkaSource] = client.getFile(folder, path) override def save(uploading: RemoteUploadingFile): IO[FileStorageMetadata] = diff --git a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperations.scala b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperations.scala index 533079dd5e..b9d4f1ba97 100644 --- a/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperations.scala +++ b/delta/plugins/storage/src/main/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperations.scala @@ -9,7 +9,6 @@ import ch.epfl.bluebrain.nexus.delta.kernel.Logger import ch.epfl.bluebrain.nexus.delta.kernel.utils.{UUIDF, UrlUtils} import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileStorageMetadata -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection.UnexpectedFetchError import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.UploadingFile.S3UploadingFile @@ -21,7 +20,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef import software.amazon.awssdk.services.s3.model.NoSuchKeyException trait S3FileOperations { - def checkBucketExists(bucket: String): IO[Unit] def fetch(bucket: String, path: Uri.Path): IO[AkkaSource] @@ -45,12 +43,6 @@ object 
S3FileOperations { private lazy val saveFile = new S3StorageSaveFile(client, locationGenerator) - override def checkBucketExists(bucket: String): IO[Unit] = { - client.bucketExists(bucket).flatMap { exists => - IO.raiseUnless(exists)(StorageNotAccessible(s"Bucket $bucket does not exist")) - } - } - override def fetch(bucket: String, path: Uri.Path): IO[AkkaSource] = IO.delay { StreamConverter( diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala index 527636a40a..5eb32f87c6 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/FormDataExtractorSpec.scala @@ -20,7 +20,6 @@ class FormDataExtractorSpec "A Form Data HttpEntity" should { val content = "file content" - val iri = iri"http://localhost/file" val customMediaType = MediaType.parse("application/custom").rightValue val customContentType = ContentType(customMediaType, () => HttpCharsets.`UTF-8`) @@ -69,7 +68,7 @@ class FormDataExtractorSpec val entity = createEntity("file", NoContentType, Some("filename")) val UploadedFileInformation(filename, contentType, contents) = - extractor(iri, entity, 250).accepted + extractor(entity, 250).accepted filename shouldEqual "filename" contentType shouldEqual `application/octet-stream` @@ -78,8 +77,7 @@ class FormDataExtractorSpec "be extracted with the custom media type from the config" in { val entity = createEntity("file", NoContentType, Some("file.custom")) - val UploadedFileInformation(filename, contentType, contents) = - extractor(iri, entity, 2000).accepted + val UploadedFileInformation(filename, contentType, contents) = extractor(entity, 2000).accepted filename shouldEqual "file.custom" contentType shouldEqual customContentType @@ -89,8 +87,7 @@ class FormDataExtractorSpec "be extracted with the akka detection from the extension" in { val entity = createEntity("file", NoContentType, Some("file.txt")) - val UploadedFileInformation(filename, contentType, contents) = - extractor(iri, entity, 250).accepted + val UploadedFileInformation(filename, contentType, contents) = extractor(entity, 250).accepted filename shouldEqual "file.txt" contentType shouldEqual `text/plain(UTF-8)` consume(contents.dataBytes) shouldEqual content @@ -99,21 +96,20 @@ class FormDataExtractorSpec "be extracted with the default filename when none is provided" in { val entity = createEntity("file", NoContentType, None) - val filename = extractor(iri, entity, 250).accepted.filename + val filename = extractor(entity, 250).accepted.filename filename shouldEqual "file" } "be extracted with the default filename when an empty string is provided" in { val entity = createEntity("file", NoContentType, Some("")) - val filename = extractor(iri, entity, 250).accepted.filename + val filename = extractor(entity, 250).accepted.filename filename shouldEqual "file" } "be extracted with the provided content type header" in { val entity = createEntity("file", `text/plain(UTF-8)`, Some("file.custom")) - val UploadedFileInformation(filename, contentType, contents) = - extractor(iri, entity, 2000).accepted + val UploadedFileInformation(filename, contentType, contents) = extractor(entity, 2000).accepted filename shouldEqual "file.custom" contentType shouldEqual `text/plain(UTF-8)` 
consume(contents.dataBytes) shouldEqual content @@ -121,12 +117,12 @@ class FormDataExtractorSpec "fail to be extracted if no file part exists found" in { val entity = createEntity("other", NoContentType, None) - extractor(iri, entity, 250).rejectedWith[InvalidMultipartFieldName] + extractor(entity, 250).rejectedWith[InvalidMultipartFieldName.type] } "fail to be extracted if payload size is too large" in { val entity = createEntity("other", `text/plain(UTF-8)`, None) - extractor(iri, entity, 10).rejected shouldEqual FileTooLarge(10L) + extractor(entity, 10).rejected shouldEqual FileTooLarge(10L) } } } diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/RemoteStorageFilesSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/RemoteStorageFilesSpec.scala new file mode 100644 index 0000000000..06bf44fab3 --- /dev/null +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/RemoteStorageFilesSpec.scala @@ -0,0 +1,3 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.files + +class RemoteStorageFilesSpec {} diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/mocks/FileOperationsMock.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/mocks/FileOperationsMock.scala index 03f2098f8b..a6e7992f21 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/mocks/FileOperationsMock.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/files/mocks/FileOperationsMock.scala @@ -5,9 +5,7 @@ import akka.http.scaladsl.model.Uri import akka.http.scaladsl.model.Uri.Path import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.UploadedFileInformation -import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{ComputedFileAttributes, FileAttributes, FileDelegationRequest, FileStorageMetadata} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{AbsolutePath, Storage, StorageValue} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileStorageMetadata import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.FileOperations import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.UploadingFile.{DiskUploadingFile, S3UploadingFile} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.disk.DiskFileOperations @@ -22,37 +20,24 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.ProjectRef object FileOperationsMock { def forRemoteDisk(client: RemoteDiskStorageClient)(implicit uuidf: UUIDF): FileOperations = - FileOperations.mk(diskUnimplemented, RemoteDiskFileOperations.mk(client), s3Unimplemented) + FileOperations.apply(diskUnimplemented, RemoteDiskFileOperations.mk(client), s3Unimplemented) def forDiskAndRemoteDisk(client: RemoteDiskStorageClient)(implicit as: ActorSystem, uuidf: UUIDF): FileOperations = - FileOperations.mk(DiskFileOperations.mk, RemoteDiskFileOperations.mk(client), s3Unimplemented) + FileOperations.apply(DiskFileOperations.mk, RemoteDiskFileOperations.mk(client), s3Unimplemented) def disabled(implicit as: ActorSystem, uuidf: UUIDF): FileOperations = - FileOperations.mk( + FileOperations.apply( DiskFileOperations.mk, RemoteDiskFileOperations.mk(RemoteDiskStorageClientDisabled), 
S3FileOperations.mk(S3StorageClient.disabled, new S3LocationGenerator(Path.Empty)) ) - def unimplemented: FileOperations = new FileOperations { - def validateStorageAccess(storage: StorageValue): IO[Unit] = ??? - def save(storage: Storage, info: UploadedFileInformation, contentLength: Option[Long]): IO[FileStorageMetadata] = - ??? - def legacyLink(storage: Storage, sourcePath: Uri.Path, filename: String): IO[FileStorageMetadata] = ??? - def fetch(storage: Storage, attributes: FileAttributes): IO[AkkaSource] = ??? - def fetchAttributes(storage: Storage, attributes: FileAttributes): IO[ComputedFileAttributes] = ??? - def link(storage: Storage, path: Uri.Path): IO[S3FileOperations.S3FileMetadata] = ??? - def delegate(storage: Storage, filename: String): IO[FileDelegationRequest.TargetLocation] = ??? - } - def diskUnimplemented: DiskFileOperations = new DiskFileOperations { - def checkVolumeExists(path: AbsolutePath): IO[Unit] = ??? def fetch(path: Uri.Path): IO[AkkaSource] = ??? def save(uploading: DiskUploadingFile): IO[FileStorageMetadata] = ??? } def s3Unimplemented: S3FileOperations = new S3FileOperations { - def checkBucketExists(bucket: String): IO[Unit] = ??? def fetch(bucket: String, path: Uri.Path): IO[AkkaSource] = ??? def save(uploading: S3UploadingFile): IO[FileStorageMetadata] = ??? def link(bucket: String, path: Uri.Path): IO[S3FileOperations.S3FileMetadata] = ??? diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesStmSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesStmSpec.scala index c88634fc57..7a3cc589f6 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesStmSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/StoragesStmSpec.scala @@ -4,13 +4,13 @@ import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StorageGen.storageState import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.Storages.{evaluate, next} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.{DiskStorageConfig, StorageTypeConfig} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access.StorageAccess import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageCommand.{CreateStorage, DeprecateStorage, UndeprecateStorage, UpdateStorage} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageEvent._ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.{DifferentStorageType, IncorrectRev, InvalidMaxFileSize, InvalidStorageType, PermissionsAreNotDefined, ResourceAlreadyExists, StorageIsDeprecated, StorageIsNotDeprecated, StorageNotAccessible, StorageNotFound} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageType.{DiskStorage => DiskStorageType} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue.{DiskStorageValue, RemoteDiskStorageValue, S3StorageValue} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{AbsolutePath, DigestAlgorithm} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageAccess import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission import ch.epfl.bluebrain.nexus.delta.sourcing.model.Identity.User import ch.epfl.bluebrain.nexus.delta.sourcing.model.Tag.UserTag diff --git 
a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccessSuite.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccessSuite.scala new file mode 100644 index 0000000000..701e13b486 --- /dev/null +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/DiskStorageAccessSuite.scala @@ -0,0 +1,35 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access + +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access.DiskStorageAccess.checkVolumeExists +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.AbsolutePath +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible +import ch.epfl.bluebrain.nexus.testkit.mu.NexusSuite + +import java.nio.file.{Files, Path} + +class DiskStorageAccessSuite extends NexusSuite { + + test("succeed verifying the volume") { + val volume = AbsolutePath(Files.createTempDirectory("disk-access")).rightValue + checkVolumeExists(volume) + } + + test("fail when volume does not exist") { + val volume = AbsolutePath(Path.of("/random", genString())).rightValue + val expectedError = StorageNotAccessible(s"Volume '$volume' does not exist.") + checkVolumeExists(volume).interceptEquals(expectedError) + } + + test("fail when volume is not a directory") { + val volume = AbsolutePath(Files.createTempFile(genString(), genString())).rightValue + val expectedError = StorageNotAccessible(s"Volume '$volume' is not a directory.") + checkVolumeExists(volume).interceptEquals(expectedError) + } + + test("fail when volume does not have write access") { + val volume = AbsolutePath(Files.createTempDirectory("disk-not-access")).rightValue + volume.value.toFile.setReadOnly() + val expectedError = StorageNotAccessible(s"Volume '$volume' does not have write access.") + checkVolumeExists(volume).interceptEquals(expectedError) + } +} diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteDiskStorageAccessSpec.scala similarity index 56% rename from delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala rename to delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteDiskStorageAccessSpec.scala index efa60daca3..b3fc265d41 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteDiskStorageAccessSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/RemoteDiskStorageAccessSpec.scala @@ -1,36 +1,32 @@ -package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote +package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access import akka.actor.ActorSystem import akka.testkit.TestKit import ch.epfl.bluebrain.nexus.delta.plugins.storage.remotestorage.RemoteStorageClientFixtures import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.{StorageFixtures, UUIDFFixtures} import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label import 
ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec +import org.scalatest.DoNotDiscover import org.scalatest.concurrent.Eventually -import org.scalatest.{BeforeAndAfterAll, DoNotDiscover} @DoNotDiscover class RemoteDiskStorageAccessSpec(fixture: RemoteStorageClientFixtures) extends TestKit(ActorSystem("RemoteDiskStorageAccessSpec")) with CatsEffectSpec with Eventually - with StorageFixtures - with UUIDFFixtures.Random - with BeforeAndAfterAll with RemoteStorageClientFixtures { private lazy val remoteDiskStorageClient = fixture.init - private lazy val fileOps = RemoteDiskFileOperations.mk(remoteDiskStorageClient) + private lazy val remoteAccess = RemoteStorageAccess(remoteDiskStorageClient) - "A RemoteDiskStorage access operations" should { + "A RemoteDiskStorage access" should { "succeed verifying the folder" in eventually { - fileOps.checkFolderExists(Label.unsafe(RemoteStorageClientFixtures.BucketName)).accepted + remoteAccess.checkFolderExists(Label.unsafe(RemoteStorageClientFixtures.BucketName)).accepted } "fail when folder does not exist" in { - fileOps.checkFolderExists(Label.unsafe(genString())).rejectedWith[StorageNotAccessible] + remoteAccess.checkFolderExists(Label.unsafe(genString())).rejectedWith[StorageNotAccessible] } } diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccessSuite.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccessSuite.scala new file mode 100644 index 0000000000..d4169387c9 --- /dev/null +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/access/S3StorageAccessSuite.scala @@ -0,0 +1,30 @@ +package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access + +import cats.effect.IO +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.S3StorageConfig +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.{LocalStackS3StorageClient, S3Helpers} +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient +import ch.epfl.bluebrain.nexus.testkit.mu.NexusSuite +import io.laserdisc.pure.s3.tagless.S3AsyncClientOp +import munit.AnyFixture + +class S3StorageAccessSuite extends NexusSuite with LocalStackS3StorageClient.Fixture with S3Helpers { + + override def munitFixtures: Seq[AnyFixture[_]] = List(localStackS3Client) + + implicit private lazy val (s3Client: S3StorageClient, underlying: S3AsyncClientOp[IO], _: S3StorageConfig) = + localStackS3Client() + + private lazy val s3Access = S3StorageAccess(s3Client) + + test("Succeed for an existing bucket") { + givenAnS3Bucket { bucket => + s3Access.checkBucketExists(bucket) + } + } + + test("Fail when a bucket doesn't exist") { + s3Access.checkBucketExists(genString()).intercept[StorageNotAccessible] + } +} diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageAccessSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageAccessSpec.scala deleted file mode 100644 index 8548f18e84..0000000000 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/disk/DiskStorageAccessSpec.scala +++ /dev/null @@ -1,47 +0,0 @@ -package 
ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.disk - -import akka.actor.ActorSystem -import akka.testkit.TestKit -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.UUIDFFixtures -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.AbsolutePath -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec - -import java.nio.file.{Files, Path} - -class DiskStorageAccessSpec - extends TestKit(ActorSystem("DiskStorageAccessSpec")) - with CatsEffectSpec - with UUIDFFixtures.Random { - - private val fileOps = DiskFileOperations.mk - - "A DiskStorage access operations" should { - - "succeed verifying the volume" in { - val volume = AbsolutePath(Files.createTempDirectory("disk-access")).rightValue - fileOps.checkVolumeExists(volume).accepted - } - - "fail when volume does not exist" in { - val volume = AbsolutePath(Path.of("/random", genString())).rightValue - fileOps.checkVolumeExists(volume).rejected shouldEqual StorageNotAccessible(s"Volume '$volume' does not exist.") - } - - "fail when volume is not a directory" in { - val volume = AbsolutePath(Files.createTempFile(genString(), genString())).rightValue - fileOps.checkVolumeExists(volume).rejected shouldEqual StorageNotAccessible( - s"Volume '$volume' is not a directory." - ) - } - - "fail when volume does not have write access" in { - val volume = AbsolutePath(Files.createTempDirectory("disk-not-access")).rightValue - volume.value.toFile.setReadOnly() - fileOps.checkVolumeExists(volume).rejected shouldEqual StorageNotAccessible( - s"Volume '$volume' does not have write access." - ) - } - } - -} diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSpec.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSpec.scala index 704cdbb078..084bc4f8c1 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSpec.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/remote/RemoteStorageSpec.scala @@ -2,6 +2,7 @@ package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.FilesSpec import ch.epfl.bluebrain.nexus.delta.plugins.storage.remotestorage.RemoteStorageClientFixtures +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access.RemoteDiskStorageAccessSpec import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteStorageClientSpec import ch.epfl.bluebrain.nexus.testkit.scalatest.BaseSpec import org.scalatest.Suite diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperationsSuite.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperationsSuite.scala index c3ddd57be7..2b70562e23 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperationsSuite.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/storages/operations/s3/S3FileOperationsSuite.scala @@ -10,7 +10,6 @@ import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes. import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileStorageMetadata import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.S3StorageConfig import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.DigestAlgorithm -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.AkkaSourceHelpers import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.UploadingFile.S3UploadingFile @@ -57,16 +56,6 @@ class S3FileOperationsSuite private def expectedLocation(proj: ProjectRef, filename: String): Uri = Uri.Empty / conf.prefixPath / expectedPath(proj, filename) - test("List objects in an existing bucket") { - givenAnS3Bucket { bucket => - fileOps.checkBucketExists(bucket) - } - } - - test("Fail to list objects when bucket doesn't exist") { - fileOps.checkBucketExists(genString()).intercept[StorageNotAccessible] - } - test("Save and fetch an object in a bucket") { givenAnS3Bucket { bucket => val project = ProjectRef.unsafe("org", "project") diff --git a/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/files/FileWiring.scala b/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/files/FileWiring.scala index e20f3f9c60..8f0bed6c11 100644 --- a/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/files/FileWiring.scala +++ b/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/files/FileWiring.scala @@ -5,20 +5,18 @@ import cats.effect.IO import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.FormDataExtractor import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.FileOperations import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient -import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode import ch.epfl.bluebrain.nexus.ship.storages.StorageWiring.{failingDiskFileOperations, failingRemoteDiskFileOperations, linkS3FileOperationOnly} object FileWiring { def linkOperationOnly(s3StorageClient: S3StorageClient): FileOperations = - FileOperations.mk( + FileOperations.apply( failingDiskFileOperations, failingRemoteDiskFileOperations, linkS3FileOperationOnly(s3StorageClient) ) def failingFormDataExtractor: FormDataExtractor = - (_: IriOrBNode.Iri, _: HttpEntity, _: Long) => - IO.raiseError(new IllegalArgumentException("FormDataExtractor should not be called")) + (_: HttpEntity, _: Long) => IO.raiseError(new IllegalArgumentException("FormDataExtractor should not be called")) } diff --git a/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/storages/StorageWiring.scala b/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/storages/StorageWiring.scala index ea2a9ded2a..afee559b9c 100644 --- a/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/storages/StorageWiring.scala +++ b/ship/src/main/scala/ch/epfl/bluebrain/nexus/ship/storages/StorageWiring.scala @@ -7,10 +7,10 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.StorageScopeInitialization import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.{ComputedFileAttributes, FileStorageMetadata} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.Storages import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.S3StorageConfig +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access.StorageAccess import 
ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.Storage.RemoteDiskStorage import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageFields.S3StorageFields -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.{AbsolutePath, StorageValue} -import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageAccess +import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.UploadingFile.{DiskUploadingFile, RemoteUploadingFile, S3UploadingFile} import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.disk.DiskFileOperations import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.RemoteDiskFileOperations @@ -84,9 +84,6 @@ object StorageWiring { } def failingDiskFileOperations: DiskFileOperations = new DiskFileOperations { - override def checkVolumeExists(path: AbsolutePath): IO[Unit] = - IO.raiseError(new IllegalArgumentException("DiskFileOperations should not be called")) - override def fetch(path: Uri.Path): IO[AkkaSource] = IO.raiseError(new IllegalArgumentException("DiskFileOperations should not be called")) @@ -95,8 +92,6 @@ object StorageWiring { } def failingRemoteDiskFileOperations: RemoteDiskFileOperations = new RemoteDiskFileOperations { - override def checkFolderExists(folder: Label): IO[Unit] = - IO.raiseError(new IllegalArgumentException("RemoteDiskFileOperations should not be called")) override def fetch(folder: Label, path: Uri.Path): IO[AkkaSource] = IO.raiseError(new IllegalArgumentException("RemoteDiskFileOperations should not be called")) @@ -116,8 +111,6 @@ object StorageWiring { } def linkS3FileOperationOnly(s3Client: S3StorageClient): S3FileOperations = new S3FileOperations { - override def checkBucketExists(bucket: String): IO[Unit] = - IO.raiseError(new IllegalArgumentException("S3FileOperations should not be called")) override def fetch(bucket: String, path: Uri.Path): IO[AkkaSource] = IO.raiseError(new IllegalArgumentException("S3FileOperations should not be called"))
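
For reference, the composition introduced by this patch can be exercised end to end as in the sketch below. The object name StorageAccessSketch and the validate helper are illustrative only and not part of the change; the clients come from the wiring shown in StoragePluginModule above.

package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.access

import cats.effect.IO
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.remote.client.RemoteDiskStorageClient
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient

// Illustrative sketch: access checks now live behind StorageAccess, so Storages depends on
// this single capability while FileOperations keeps only save/fetch/link/delegate.
object StorageAccessSketch {

  def validate(
      remoteClient: RemoteDiskStorageClient,
      s3Client: S3StorageClient,
      storageValue: StorageValue
  ): IO[Unit] = {
    // Same composition as in StoragePluginModule above.
    val access: StorageAccess = StorageAccess(RemoteStorageAccess(remoteClient), S3StorageAccess(s3Client))
    // Dispatches to DiskStorageAccess.checkVolumeExists, RemoteStorageAccess.checkFolderExists
    // or S3StorageAccess.checkBucketExists depending on the concrete StorageValue.
    access.validateStorageAccess(storageValue)
  }
}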