Remove endpoint and region from the S3 Storage instances
shinyhappydan committed Apr 9, 2024
1 parent 96a1c94 commit ed719b3
Showing 13 changed files with 2 additions and 55 deletions.
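At a glance, the commit drops the user-configurable `endpoint` and `region` fields from the S3 storage model; the bucket, permissions and maximum file size remain. A minimal sketch of the before/after shape (not the actual source — the class names below are hypothetical and the types are simplified; in the real code the permissions are `Option[Permission]`, the endpoint was `Option[Uri]` and the region was `Option[Region]`):

```scala
// Hypothetical, simplified stand-ins for the real case classes, shown only to
// summarise the change. Field order follows the diff hunks below.

// Before this commit: endpoint and region were supplied per storage.
final case class S3StorageFieldsBefore(
  name: Option[String],
  description: Option[String],
  default: Boolean,
  bucket: String,
  endpoint: Option[String],        // Option[Uri] in the real code
  region: Option[String],          // Option[Region] in the real code
  readPermission: Option[String],  // Option[Permission] in the real code
  writePermission: Option[String], // Option[Permission] in the real code
  maxFileSize: Option[Long]
)

// After this commit: endpoint and region are no longer configurable per storage.
final case class S3StorageFieldsAfter(
  name: Option[String],
  description: Option[String],
  default: Boolean,
  bucket: String,
  readPermission: Option[String],
  writePermission: Option[String],
  maxFileSize: Option[Long]
)
```

The same removal runs through the storage value model, the test fixtures and JSON expectations, and the storages API documentation, as shown in the hunks below.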
@@ -1,11 +1,9 @@
package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.delta.kernel.Secret
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.StoragesConfig.StorageTypeConfig
import ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model.StorageValue.{DiskStorageValue, RemoteDiskStorageValue, S3StorageValue}
import ch.epfl.bluebrain.nexus.delta.rdf.IriOrBNode.Iri
import ch.epfl.bluebrain.nexus.delta.sdk.implicits._
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.decoder.configuration.semiauto.deriveConfigJsonLdDecoder
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.decoder.{Configuration => JsonLdConfiguration, JsonLdDecoder}
@@ -131,14 +129,6 @@ object StorageFields {
* ''true'' if this store is the project's default, ''false'' otherwise
* @param bucket
* the S3 compatible bucket
* @param endpoint
* the endpoint, either a domain or a full URL
* @param accessKey
* the AWS access key ID
* @param secretKey
* the AWS secret key
* @param region
* the AWS region
* @param readPermission
* the permission required in order to download a file from this storage
* @param writePermission
@@ -151,8 +141,6 @@ object StorageFields {
description: Option[String],
default: Boolean,
bucket: String,
endpoint: Option[Uri],
region: Option[Region],
readPermission: Option[Permission],
writePermission: Option[Permission],
maxFileSize: Option[Long]
@@ -169,8 +157,6 @@ object StorageFields {
default,
cfg.digestAlgorithm,
bucket,
endpoint.orElse(Some(cfg.defaultEndpoint)),
region,
readPermission.getOrElse(cfg.defaultReadPermission),
writePermission.getOrElse(cfg.defaultWritePermission),
computeMaxFileSize(maxFileSize, cfg.defaultMaxFileSize)
@@ -1,9 +1,7 @@
package ch.epfl.bluebrain.nexus.delta.plugins.storage.storages.model

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.delta.plugins.storage.files.model.Digest
import ch.epfl.bluebrain.nexus.delta.rdf.jsonld.context.JsonLdContext.keywords
import ch.epfl.bluebrain.nexus.delta.sdk.implicits._
import ch.epfl.bluebrain.nexus.delta.sdk.permissions.model.Permission
import ch.epfl.bluebrain.nexus.delta.sourcing.model.{Label, ProjectRef}
import io.circe.generic.extras.Configuration
@@ -132,8 +130,6 @@ object StorageValue {
default: Boolean,
algorithm: DigestAlgorithm,
bucket: String,
endpoint: Option[Uri],
region: Option[Region],
readPermission: Permission,
writePermission: Permission,
maxFileSize: Long
@@ -153,8 +149,6 @@ object StorageValue {
default: Boolean,
algorithm: DigestAlgorithm,
bucket: String,
endpoint: Option[Uri],
region: Option[Region],
readPermission: Permission,
writePermission: Permission,
maxFileSize: Long
@@ -165,8 +159,6 @@ object StorageValue {
default,
algorithm,
bucket,
endpoint,
region,
readPermission,
writePermission,
maxFileSize
@@ -20,9 +20,7 @@
"description": "s3description",
"default" : true,
"bucket" : "mybucket",
"endpoint" : "http://localhost",
"readPermission" : "s3/read",
"region": "eu-west-1",
"writePermission" : "s3/write",
"maxFileSize" : 51
},
@@ -30,11 +30,6 @@
"@value": true
}
],
"https://bluebrain.github.io/nexus/vocabulary/endpoint": [
{
"@value": "http://localhost"
}
],
"https://bluebrain.github.io/nexus/vocabulary/maxFileSize": [
{
"@value": 51
@@ -45,11 +40,6 @@
"@value": "s3/read"
}
],
"https://bluebrain.github.io/nexus/vocabulary/region": [
{
"@value": "eu-west-1"
}
],
"https://bluebrain.github.io/nexus/vocabulary/writePermission": [
{
"@value": "s3/write"
@@ -24,9 +24,7 @@
"default" : true,
"name": "s3name",
"description": "s3description",
"endpoint" : "http://localhost",
"maxFileSize" : 51,
"readPermission" : "s3/read",
"region" : "eu-west-1",
"writePermission" : "s3/write"
}
@@ -7,9 +7,7 @@
"name": "s3name",
"description": "s3description",
"bucket": "mybucket",
"endpoint": "http://localhost",
"readPermission": "s3/read",
"region": "eu-west-1",
"writePermission": "s3/write",
"maxFileSize": 51
}
@@ -15,10 +15,8 @@
"description": "s3description",
"bucket": "mybucket",
"default": true,
"endpoint": "http://localhost",
"maxFileSize": 51,
"readPermission": "s3/read",
"region": "eu-west-1",
"writePermission": "s3/write"
},
"_storageId": "https://bluebrain.github.io/nexus/vocabulary/s3-storage",
@@ -15,10 +15,8 @@
"description": "s3description",
"bucket": "mybucket",
"default": true,
"endpoint": "http://localhost",
"maxFileSize": 51,
"readPermission": "s3/read",
"region": "eu-west-1",
"writePermission": "s3/write"
},
"_storageId": "https://bluebrain.github.io/nexus/vocabulary/s3-storage",
@@ -13,8 +13,6 @@
"description": "s3description",
"algorithm": "SHA-256",
"bucket": "mybucket",
"endpoint": "http://localhost",
"region": "eu-west-1",
"readPermission": "s3/read",
"writePermission": "s3/write",
"maxFileSize": 51,
@@ -26,9 +24,7 @@
"description": "s3description",
"default": true,
"bucket": "mybucket",
"endpoint": "http://localhost",
"readPermission": "s3/read",
"region": "eu-west-1",
"writePermission": "s3/write",
"maxFileSize": 51
},
@@ -13,7 +13,6 @@ import ch.epfl.bluebrain.nexus.delta.sourcing.model.Label
import ch.epfl.bluebrain.nexus.testkit.CirceLiteral
import ch.epfl.bluebrain.nexus.testkit.minio.MinioDocker
import ch.epfl.bluebrain.nexus.testkit.scalatest.ClasspathResources
import software.amazon.awssdk.regions.Region

import java.nio.file.{Files, Paths}
import scala.concurrent.duration._
@@ -42,7 +41,7 @@ trait StorageFixtures extends CirceLiteral {
val diskVal = diskFields.toValue(config).get
val diskFieldsUpdate = DiskStorageFields(Some("diskName"), Some("diskDescription"), default = false, Some(tmpVolume), Some(Permission.unsafe("disk/read")), Some(Permission.unsafe("disk/write")), Some(2000), Some(40))
val diskValUpdate = diskFieldsUpdate.toValue(config).get
val s3Fields = S3StorageFields(Some("s3name"), Some("s3description"), default = true, "mybucket", Some("http://localhost"), Some(Region.EU_WEST_1), Some(Permission.unsafe("s3/read")), Some(Permission.unsafe("s3/write")), Some(51))
val s3Fields = S3StorageFields(Some("s3name"), Some("s3description"), default = true, "mybucket", Some(Permission.unsafe("s3/read")), Some(Permission.unsafe("s3/write")), Some(51))
val s3Val = s3Fields.toValue(config).get
val remoteFields = RemoteDiskStorageFields(Some("remoteName"), Some("remoteDescription"), default = true, Label.unsafe("myfolder"), Some(Permission.unsafe("remote/read")), Some(Permission.unsafe("remote/write")), Some(52))
val remoteVal = remoteFields.toValue(config).get
@@ -63,7 +63,7 @@ class StorageFieldsSpec extends CatsEffectSpec with RemoteContextResolutionFixtu
"region"
)
sourceDecoder(pc, jsonNoDefaults).accepted._2 shouldEqual
S3StorageFields(None, None, default = true, "mybucket", None, None, None, None, None)
S3StorageFields(None, None, default = true, "mybucket", None, None, None)
}
}

@@ -48,8 +48,6 @@ class S3StorageFetchSaveSpec
default = false,
algorithm = DigestAlgorithm.default,
bucket = bucket,
endpoint = None,
region = None,
readPermission = read,
writePermission = write,
maxFileSize = 20
4 changes: 0 additions & 4 deletions docs/src/main/paradox/docs/delta/api/storages-api.md
@@ -96,8 +96,6 @@ In order to be able to use this storage, the configuration flag `plugins.storage
{
"@type": "S3Storage",
"default": "{default}",
"endpoint": "{endpoint}",
"region": "{region}",
"readPermission": "{read_permission}",
"writePermission": "{write_permission}",
"maxFileSize": {max_file_size}
@@ -107,8 +105,6 @@ In order to be able to use this storage, the configuration flag `plugins.storage
...where

- `{default}`: Boolean - the flag to decide whether this storage is going to become the default storage for the target project or not.
- `{endpoint}`: Uri - the Amazon S3 compatible service endpoint. This field is optional, defaulting to the configuration flag `plugins.storage.storages.amazon.default-endpoint`.
- `{region}`: String - the Amazon S3 compatible region. This field is optional, defaulting to the S3 default region configuration.
- `{read_permission}`: String - the permission a client must have in order to fetch files using this storage. This field is optional, defaulting to the configuration flag `plugins.storage.storages.amazon.default-read-permission` (`resources/read`).
- `{write_permission}`: String - the permission a client must have in order to create files using this storage. This field is optional, defaulting to the configuration flag `plugins.storage.storages.amazon.default-write-permission` (`files/write`).
- `{max_file_size}`: Long - the maximum allowed size in bytes for files uploaded using this storage. This field is optional, defaulting to the configuration flag `plugins.storage.storages.amazon.default-max-file-size` (10G).
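For illustration, a storage creation payload without the two removed fields might look like the sketch below, assembled with circe's JSON literal from the values used in this commit's test fixtures (the bucket name and permissions here are examples, not defaults):

```scala
import io.circe.Json
import io.circe.literal._

// Illustrative payload only: values are taken from the test fixtures in this
// commit. Note that `endpoint` and `region` are no longer accepted.
val s3StoragePayload: Json =
  json"""{
    "@type": "S3Storage",
    "default": true,
    "bucket": "mybucket",
    "readPermission": "s3/read",
    "writePermission": "s3/write",
    "maxFileSize": 51
  }"""
```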
