upload file returns location
shinyhappydan committed Apr 22, 2024
1 parent 0482337 commit d1dd9f7
Showing 2 changed files with 15 additions and 14 deletions.
@@ -16,7 +16,6 @@ import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.Storage.S3St
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.FileOperations.intermediateFolders
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.StorageFileRejection.SaveFileRejection._
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient
-import ch.epfl.bluebrain.nexus.delta.rdf.syntax.uriSyntax
 import ch.epfl.bluebrain.nexus.delta.sdk.stream.StreamConverter
 import fs2.Stream
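The removed import is the extension syntax that adds a `/` path-building operator to `Uri`; the second file in this commit gains the same import because the `baseEndpoint / bucket / Uri.Path(key)` expression moves there. A rough sketch of the kind of enrichment such syntax typically provides (an assumed shape, not the actual Nexus definition; `Uri` here is akka-http's):

```scala
import akka.http.scaladsl.model.Uri

object UriSyntaxSketch {
  // Assumed shape, not the actual Nexus definition: enrich Uri with `/`
  // so callers can write `baseEndpoint / bucket / Uri.Path(key)`.
  implicit final class UriOps(private val uri: Uri) extends AnyVal {
    def /(segment: String): Uri = uri.withPath(uri.path / segment)
    def /(path: Uri.Path): Uri  = uri.withPath(uri.path ++ (Uri.Path.SingleSlash ++ path))
  }
}
```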

@@ -52,31 +51,31 @@ final class S3StorageSaveFile(s3StorageClient: S3StorageClient)(implicit
     val fileData: Stream[IO, Byte] = convertStream(entity.dataBytes)

     (for {
-      _                  <- log(bucket, key, s"Checking for object existence")
-      _                  <- validateObjectDoesNotExist(bucket, key)
-      _                  <- log(bucket, key, s"Beginning upload")
-      (digest, fileSize) <- s3StorageClient.uploadFile(fileData, bucket, key, algorithm)
-      _                  <- log(bucket, key, s"Finished upload. Digest: $digest")
-      attr                = fileMetadata(bucket, key, uuid, fileSize, algorithm, digest)
+      _                            <- log(bucket, key, s"Checking for object existence")
+      _                            <- validateObjectDoesNotExist(bucket, key)
+      _                            <- log(bucket, key, s"Beginning upload")
+      (digest, fileSize, location) <- s3StorageClient.uploadFile(fileData, bucket, key, algorithm)
+      _                            <- log(bucket, key, s"Finished upload. Digest: $digest")
+      attr                          = fileMetadata(key, uuid, fileSize, algorithm, digest, location)
     } yield attr)
       .onError(e => logger.error(e)("Unexpected error when storing file"))
       .adaptError { err => UnexpectedSaveError(key, err.getMessage) }
   }

   private def fileMetadata(
-      bucket: String,
       key: String,
       uuid: UUID,
       fileSize: Long,
       algorithm: DigestAlgorithm,
-      digest: String
+      digest: String,
+      location: Uri
   ): FileStorageMetadata =
     FileStorageMetadata(
       uuid = uuid,
       bytes = fileSize,
       digest = Digest.ComputedDigest(algorithm, digest),
       origin = Client,
-      location = s3StorageClient.baseEndpoint / bucket / Uri.Path(key),
+      location = location,
       path = Uri.Path(key)
     )
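Taken together, the hunk moves address construction out of `S3StorageSaveFile`: the client computes the object's location during upload and hands it back, so `fileMetadata` just records it. A trimmed-down sketch of the resulting shape (hypothetical `SketchClient` and `SketchMetadata` names; only the `(digest, size, location)` triple and the location field mirror the diff):

```scala
import akka.http.scaladsl.model.Uri
import cats.effect.IO

object LocationFlowSketch {
  // Hypothetical, simplified shapes. The point mirrored from the diff: the
  // client owns the base endpoint, so it alone turns (bucket, key) into a
  // public location and returns it alongside digest and size.
  final case class SketchMetadata(location: Uri, path: Uri.Path)

  final class SketchClient(baseEndpoint: Uri) {
    def uploadFile(bucket: String, key: String): IO[(String, Long, Uri)] =
      // digest and size would come from the real upload; stubbed here
      IO.pure(("<digest>", 0L, baseEndpoint.withPath(baseEndpoint.path / bucket / key)))
  }

  def save(client: SketchClient, bucket: String, key: String): IO[SketchMetadata] =
    client.uploadFile(bucket, key).map { case (_, _, location) =>
      SketchMetadata(location = location, path = Uri.Path(key))
    }
}
```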

@@ -5,6 +5,7 @@ import cats.effect.{IO, Ref, Resource}
 import cats.syntax.all._
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.S3StorageConfig
 import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.DigestAlgorithm
+import ch.epfl.bluebrain.nexus.delta.rdf.syntax.uriSyntax
 import ch.epfl.bluebrain.nexus.delta.sdk.error.ServiceError.FeatureDisabled
 import fs2.{Chunk, Pipe, Stream}
 import fs2.aws.s3.S3
@@ -37,7 +38,7 @@ trait S3StorageClient {
       bucket: String,
       key: String,
       algorithm: DigestAlgorithm
-  ): IO[(String, Long)]
+  ): IO[(String, Long, Uri)]

   def objectExists(bucket: String, key: String): IO[Boolean]
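Every implementation of the trait must follow the widened signature; the hunks below adapt the live implementation and the disabled stub accordingly. A hypothetical call site, using only names declared in this diff:

```scala
import cats.effect.IO
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.DigestAlgorithm
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.operations.s3.client.S3StorageClient
import fs2.Stream

object UploadCallSketch {
  // Hypothetical caller: destructure the new triple and report all three values.
  def describeUpload(
      client: S3StorageClient,
      data: Stream[IO, Byte],
      bucket: String,
      key: String,
      algorithm: DigestAlgorithm
  ): IO[String] =
    client.uploadFile(data, bucket, key, algorithm).map { case (digest, size, location) =>
      s"$size bytes stored at $location (digest: $digest)"
    }
}
```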

@@ -99,7 +100,7 @@ object S3StorageClient {
         bucket: String,
         key: String,
         algorithm: DigestAlgorithm
-    ): IO[(String, Long)] = {
+    ): IO[(String, Long, Uri)] = {
       for {
         fileSizeAcc <- Ref.of[IO, Long](0L)
         digest      <- fileData
@@ -110,7 +111,8 @@
                          .compile
                          .onlyOrError
         fileSize    <- fileSizeAcc.get
-      } yield (digest, fileSize)
+        location     = baseEndpoint / bucket / Uri.Path(key)
+      } yield (digest, fileSize, location)
     }

     private def uploadFilePipe(bucket: String, key: String, algorithm: DigestAlgorithm): Pipe[IO, Byte, String] = {
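For context, the implementation digests the stream and counts its bytes in a single pass: the pipe produced by `uploadFilePipe` yields the digest while the `fileSizeAcc` `Ref` accumulates the size. A minimal, self-contained sketch of that pattern (assumed helper `digestAndSize`, not the Nexus code):

```scala
import cats.effect.{IO, Ref}
import fs2.Stream
import java.security.MessageDigest

object DigestSizeSketch {
  // One pass over the bytes: feed a MessageDigest and tally the size in a Ref.
  def digestAndSize(data: Stream[IO, Byte]): IO[(String, Long)] =
    for {
      sizeAcc <- Ref.of[IO, Long](0L)
      md      <- IO(MessageDigest.getInstance("SHA-256"))
      _       <- data.chunks
                   .evalTap(chunk => sizeAcc.update(_ + chunk.size))
                   .evalMap(chunk => IO(md.update(chunk.toArray)))
                   .compile
                   .drain
      hex     <- IO(md.digest().map("%02x".format(_)).mkString)
      size    <- sizeAcc.get
    } yield (hex, size)
}
```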
@@ -174,6 +176,6 @@ object S3StorageClient {
         bucket: String,
         key: String,
         algorithm: DigestAlgorithm
-    ): IO[(String, Long)] = raiseDisabledErr
+    ): IO[(String, Long, Uri)] = raiseDisabledErr
   }
 }
