Deal with S3 possibly not supplying content type
shinyhappydan committed Apr 23, 2024
1 parent 101a129 commit 6106f74
Showing 2 changed files with 12 additions and 7 deletions.
@@ -232,7 +232,7 @@ final class Files(
       s3Metadata <- fileOperations.register(storage, path)
       filename   <- IO.fromOption(path.lastSegment)(InvalidFilePath)
       attr        = FileAttributes.from(
-                      FileDescription(filename, Some(mediaType.getOrElse(s3Metadata.contentType)), metadata),
+                      FileDescription(filename, mediaType.orElse(s3Metadata.contentType), metadata),
                       s3Metadata.metadata
                     )
       res        <- eval(
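
In the change above, Some(mediaType.getOrElse(s3Metadata.contentType)) becomes mediaType.orElse(s3Metadata.contentType): the caller-supplied media type still wins, but the S3-reported content type is now itself an Option, so the resulting FileDescription may carry no content type at all. A minimal sketch of that fallback, where the two Option values are hypothetical stand-ins for mediaType and s3Metadata.contentType:

import akka.http.scaladsl.model.{ContentType, ContentTypes}

// Hypothetical stand-ins for the caller-supplied media type and the type reported by S3
val mediaType: Option[ContentType]     = None
val s3ContentType: Option[ContentType] = Some(ContentTypes.`application/json`)

// The caller value takes precedence; otherwise fall back to what S3 reported.
// If neither is present the result is simply None instead of an error.
val resolved: Option[ContentType] = mediaType.orElse(s3ContentType)
// resolved == Some(application/json); with both inputs None, resolved == None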

@@ -5,7 +5,6 @@ import akka.http.scaladsl.model.{BodyPartEntity, ContentType, Uri}
 import akka.stream.scaladsl.Source
 import akka.util.ByteString
 import cats.effect.IO
-import cats.syntax.all._
 import ch.epfl.bluebrain.nexus.delta.kernel.Logger
 import ch.epfl.bluebrain.nexus.delta.kernel.utils.UUIDF
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.FileAttributes.FileAttributesOrigin
@@ -14,7 +13,7 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.DigestAlgori
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.Storage.S3Storage
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageRejection.StorageNotAccessible
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.FetchFileRejection.UnexpectedFetchError
-import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.RegisterFileRejection.{InvalidContentType, MissingContentType}
+import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.RegisterFileRejection.InvalidContentType
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.S3FileOperations.S3FileMetadata
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient.HeadObject
@@ -43,7 +42,7 @@ trait S3FileOperations {
 }

 object S3FileOperations {
-  final case class S3FileMetadata(contentType: ContentType, metadata: FileStorageMetadata)
+  final case class S3FileMetadata(contentType: Option[ContentType], metadata: FileStorageMetadata)

   def mk(client: S3StorageClient)(implicit as: ActorSystem, uuidf: UUIDF): S3FileOperations = new S3FileOperations {

@@ -86,9 +85,15 @@ object S3FileOperations {
         log.error(e)(s"Failed fetching required attributes for S3 file registration. Bucket $bucket and path $path")
       }

-  private def parseContentType(raw: Option[String]): IO[ContentType] = {
-    IO.fromOption(raw)(MissingContentType)
-      .flatMap(raw => ContentType.parse(raw).map(_.pure[IO]).getOrElse(IO.raiseError(InvalidContentType(raw))))
+  private def parseContentType(raw: Option[String]): IO[Option[ContentType]] = {
+    raw match {
+      case Some(value) =>
+        ContentType.parse(value) match {
+          case Left(_)      => IO.raiseError(InvalidContentType(value))
+          case Right(value) => IO.pure(Some(value))
+        }
+      case None         => IO.none
+    }
   }

   private def mkS3Metadata(bucket: String, path: Uri.Path, resp: HeadObject) = {
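
For reference, the reworked parseContentType treats a missing Content-Type as a normal outcome rather than a MissingContentType rejection: no value yields None, an unparsable value still fails with InvalidContentType, and a valid value comes back wrapped in Some. A rough, self-contained sketch of those three outcomes, using a hypothetical InvalidContentTypeError in place of the plugin's rejection type so it compiles on its own:

import akka.http.scaladsl.model.ContentType
import cats.effect.IO

// Hypothetical stand-in for the plugin's InvalidContentType rejection
final case class InvalidContentTypeError(raw: String) extends Exception(s"Invalid content type: $raw")

def parseContentType(raw: Option[String]): IO[Option[ContentType]] =
  raw match {
    case Some(value) =>
      ContentType.parse(value) match {
        case Left(_)       => IO.raiseError(InvalidContentTypeError(value))
        case Right(parsed) => IO.pure(Some(parsed))
      }
    case None => IO.none
  }

// parseContentType(None)                       -> succeeds with None
// parseContentType(Some("application/json"))   -> succeeds with Some(application/json)
// parseContentType(Some("not a content type")) -> fails with InvalidContentTypeError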
