Commit
Update bucket key prefixes and add max number records config
TomJKing committed Aug 29, 2024
1 parent 29244f2 commit 0044abd
Showing 10 changed files with 42 additions and 26 deletions.
2 changes: 1 addition & 1 deletion project/Dependencies.scala
@@ -1,7 +1,7 @@
import sbt.*

object Dependencies {
private val http4sVersion = "0.23.26"
private val http4sVersion = "0.23.27"
private val mockitoVersion = "1.17.37"
private val pureConfigVersion = "0.17.7"
private val tapirVersion = "1.10.15"
5 changes: 5 additions & 0 deletions src/main/resources/application.conf
@@ -24,3 +24,8 @@ s3 {
schema {
dataLoadSharePointLocation = "/metadata-schema/dataLoadSharePointSchema.schema.json"
}

transferConfiguration {
maxNumberRecords = 3000
maxNumberRecords = ${?MAX_NUMBER_RECORDS}
}
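
Note on the added block: the second maxNumberRecords line uses HOCON's optional substitution, so the MAX_NUMBER_RECORDS environment variable only overrides the default of 3000 when it is actually set. A minimal sketch of how the resolved value could be read with the Typesafe Config library that sits under pureconfig (standalone loader shown for illustration only):

import com.typesafe.config.ConfigFactory

// Loads and resolves application.conf, including optional environment overrides.
// With MAX_NUMBER_RECORDS unset this yields 3000; with MAX_NUMBER_RECORDS=500 it yields 500.
val maxRecords: Int = ConfigFactory.load().getInt("transferConfiguration.maxNumberRecords")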
@@ -12,8 +12,9 @@ object ApplicationConfig {
case class Auth(url: String, realm: String)
case class S3(metadataUploadBucket: String, recordsUploadBucket: String)
case class Schema(dataLoadSharePointLocation: String)
case class TransferConfiguration(maxNumberRecords: Int)

case class Configuration(auth: Auth, transferServiceApi: TransferServiceApi, consignmentApi: ConsignmentApi, s3: S3, schema: Schema)
case class Configuration(auth: Auth, transferServiceApi: TransferServiceApi, consignmentApi: ConsignmentApi, s3: S3, schema: Schema, transferConfiguration: TransferConfiguration)

val appConfig: Configuration = ConfigSource.default.load[Configuration] match {
case Left(value) => throw new RuntimeException(s"Failed to load transfer service application configuration ${value.prettyPrint()}")
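
Since transferConfiguration is now a required field of Configuration, pureconfig only returns a Right when the matching block is present in application.conf; otherwise appConfig fails at start-up with the RuntimeException above. A small illustrative read of the new setting through the loaded configuration:

import uk.gov.nationalarchives.tdr.transfer.service.ApplicationConfig

// Assumes the transferConfiguration block shown earlier is present in application.conf.
val maxRecords: Int = ApplicationConfig.appConfig.transferConfiguration.maxNumberRecords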
@@ -10,6 +10,7 @@ object LoadModel {
case class CustomMetadataConfiguration(required: Boolean = false) extends MetadataPropertyModel
case class MetadataPropertyDetails(propertyName: String, required: Boolean) extends MetadataPropertyModel
case class TransferConfiguration(
maxNumberRecords: Int,
metadataPropertyDetails: Set[MetadataPropertyDetails] = Set(),
customMetadataConfiguration: CustomMetadataConfiguration = CustomMetadataConfiguration()
)
@@ -18,6 +19,6 @@
transferId: UUID,
recordsLoadDestination: AWSS3LoadDestination,
metadataLoadDestination: AWSS3LoadDestination,
transferConfiguration: TransferConfiguration = TransferConfiguration()
transferConfiguration: TransferConfiguration
) extends LoadModel
}
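
With maxNumberRecords added as a parameter without a default, and the TransferConfiguration() default removed from LoadDetails, callers can no longer fall back on an implicit empty configuration. A sketch of constructing the model under the new shape (the bucket names and the single metadata property are placeholder values, not taken from the service):

import java.util.UUID
import uk.gov.nationalarchives.tdr.transfer.service.api.model.LoadModel.{AWSS3LoadDestination, LoadDetails, MetadataPropertyDetails, TransferConfiguration}

val transferId = UUID.randomUUID()
// maxNumberRecords must now be stated explicitly on every TransferConfiguration.
val transferConfiguration = TransferConfiguration(
  maxNumberRecords = 3000,
  metadataPropertyDetails = Set(MetadataPropertyDetails("FileRef", required = true))
)
// Placeholder bucket names; the object keys mirror the new sourceSystem/transferId prefixes.
val loadDetails = LoadDetails(
  transferId,
  recordsLoadDestination = AWSS3LoadDestination("records-bucket", s"sharepoint/$transferId/records"),
  metadataLoadDestination = AWSS3LoadDestination("metadata-bucket", s"sharepoint/$transferId/metadata"),
  transferConfiguration = transferConfiguration
)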
@@ -6,7 +6,7 @@ import uk.gov.nationalarchives.tdr.transfer.service.ApplicationConfig
import uk.gov.nationalarchives.tdr.transfer.service.api.model.LoadModel.{AWSS3LoadDestination, LoadDetails, TransferConfiguration}
import uk.gov.nationalarchives.tdr.transfer.service.api.model.SourceSystem.SourceSystemEnum.SourceSystem
import uk.gov.nationalarchives.tdr.transfer.service.services.GraphQlApiService
import uk.gov.nationalarchives.tdr.transfer.service.services.dataload.DataLoadInitiation.s3Config
import uk.gov.nationalarchives.tdr.transfer.service.services.dataload.DataLoadInitiation.{s3Config, transferConfigurationConfig}

import java.util.UUID

@@ -16,20 +16,23 @@ class DataLoadInitiation(graphQlApiService: GraphQlApiService) {
addConsignmentResult <- graphQlApiService.addConsignment(token)
consignmentId = addConsignmentResult.consignmentid.get
_ <- graphQlApiService.startUpload(token, consignmentId)
result <- loadDetails(consignmentId, token.userId, sourceSystem)
result <- loadDetails(consignmentId, sourceSystem)
} yield result
}

private def loadDetails(transferId: UUID, userId: UUID, sourceSystem: SourceSystem): IO[LoadDetails] = {
val recordsS3Bucket = AWSS3LoadDestination(s"${s3Config.recordsUploadBucket}", s"$userId/$transferId")
val metadataS3Bucket = AWSS3LoadDestination(s"${s3Config.metadataUploadBucket}", s"$transferId/dataload")
private def loadDetails(transferId: UUID, sourceSystem: SourceSystem): IO[LoadDetails] = {
val s3KeyPrefix = s"$sourceSystem/$transferId"
val maxNumberRecords = transferConfigurationConfig.maxNumberRecords
val recordsS3Bucket = AWSS3LoadDestination(s"${s3Config.recordsUploadBucket}", s"$s3KeyPrefix/records")
val metadataS3Bucket = AWSS3LoadDestination(s"${s3Config.metadataUploadBucket}", s"$s3KeyPrefix/metadata")
val metadataProperties = MetadataLoadConfiguration.metadataLoadConfiguration(sourceSystem)
val transferConfiguration = TransferConfiguration(metadataProperties)
val transferConfiguration = TransferConfiguration(maxNumberRecords, metadataProperties)
IO(LoadDetails(transferId, recordsLoadDestination = recordsS3Bucket, metadataLoadDestination = metadataS3Bucket, transferConfiguration))
}
}

object DataLoadInitiation {
val s3Config: ApplicationConfig.S3 = ApplicationConfig.appConfig.s3
val transferConfigurationConfig: ApplicationConfig.TransferConfiguration = ApplicationConfig.appConfig.transferConfiguration
def apply() = new DataLoadInitiation(GraphQlApiService.service)
}
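
The net effect here is that both uploads now share a single source-system and transfer-id key prefix, rather than records being keyed by user id and metadata by a dataload suffix. A hypothetical helper (not part of the service code) illustrating the new layout:

import java.util.UUID

// Builds the records and metadata object keys from the shared prefix.
def s3Keys(sourceSystem: String, transferId: UUID): (String, String) = {
  val prefix = s"$sourceSystem/$transferId"
  (s"$prefix/records", s"$prefix/metadata")
}

// s3Keys("sharepoint", transferId) gives, for example:
//   records : sharepoint/<transferId>/records   (previously <userId>/<transferId>)
//   metadata: sharepoint/<transferId>/metadata  (previously <transferId>/dataload)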
4 changes: 4 additions & 0 deletions src/test/resources/application.conf
@@ -20,3 +20,7 @@ s3 {
schema {
dataLoadSharePointLocation = "/metadata-schema/dataLoadSharePointSchema.schema.json"
}

transferConfiguration {
maxNumberRecords = 3000
}
@@ -16,11 +16,11 @@ trait BaseSpec extends AnyFlatSpec with MockitoSugar with Matchers with EitherVa
implicit val tdrKeycloakDeployment: TdrKeycloakDeployment = TdrKeycloakDeployment("authUrl", "realm", 60)

val expectedMetadataPropertyDetails: Set[MetadataPropertyDetails] = Set(
MetadataPropertyDetails("Modified", true),
MetadataPropertyDetails("SHA256ClientSideChecksum", true),
MetadataPropertyDetails("File_x0020_Size", true),
MetadataPropertyDetails("FileRef", true)
MetadataPropertyDetails("Modified", required = true),
MetadataPropertyDetails("SHA256ClientSideChecksum", required = true),
MetadataPropertyDetails("File_x0020_Size", required = true),
MetadataPropertyDetails("FileRef", required = true)
)

val expectedTransferConfiguration: TransferConfiguration = TransferConfiguration(expectedMetadataPropertyDetails)
val expectedTransferConfiguration: TransferConfiguration = TransferConfiguration(3000, expectedMetadataPropertyDetails)
}
@@ -10,7 +10,7 @@ import org.http4s.implicits.http4sLiteralsSyntax
import org.http4s.{Header, Headers, Method, Request, Status}
import org.scalatest.matchers.should.Matchers
import org.typelevel.ci.CIString
import uk.gov.nationalarchives.tdr.transfer.service.TestUtils.{invalidToken, userId, validUserToken}
import uk.gov.nationalarchives.tdr.transfer.service.TestUtils.{invalidToken, validUserToken}
import uk.gov.nationalarchives.tdr.transfer.service.api.controllers.LoadController
import uk.gov.nationalarchives.tdr.transfer.service.api.model.LoadModel.{AWSS3LoadDestination, LoadDetails}
import uk.gov.nationalarchives.tdr.transfer.service.services.ExternalServicesSpec
@@ -45,15 +45,16 @@ class TransferServiceServerSpec extends ExternalServicesSpec with Matchers {
)
.unsafeRunSync()

val expectedRecordsDestination = AWSS3LoadDestination("s3BucketNameRecords", s"$userId/$transferId")
val expectedMetadataLoadDestination = AWSS3LoadDestination("s3BucketNameMetadata", s"$transferId/dataload")
val expectedRecordsDestination = AWSS3LoadDestination("s3BucketNameRecords", s"sharepoint/$transferId/records")
val expectedMetadataLoadDestination = AWSS3LoadDestination("s3BucketNameMetadata", s"sharepoint/$transferId/metadata")

response.status shouldBe Status.Ok
val body = response.as[Json].unsafeRunSync()
val loadDetails = body.as[LoadDetails].toOption.get
loadDetails.transferId.toString shouldBe transferId
loadDetails.metadataLoadDestination shouldEqual expectedMetadataLoadDestination
loadDetails.recordsLoadDestination shouldEqual expectedRecordsDestination
loadDetails.transferConfiguration.maxNumberRecords shouldBe 3000
loadDetails.transferConfiguration.customMetadataConfiguration.required shouldBe false
loadDetails.transferConfiguration.metadataPropertyDetails.size shouldBe 4
}
@@ -17,6 +17,7 @@ class DataLoadInitiationSpec extends BaseSpec {
private val mockBearerAccessToken = mock[BearerAccessToken]
private val consignmentId = UUID.fromString("6e3b76c4-1745-4467-8ac5-b4dd736e1b3e")
private val userId = UUID.randomUUID()
private val sourceSystem = SourceSystemEnum.SharePoint

"'initiateConsignmentLoad'" should "create a consignment and return expected 'LoadDetails' object" in {
val addConsignmentResponse = AddConsignment(Some(consignmentId), None)
@@ -30,13 +31,13 @@

val expectedResult = LoadDetails(
consignmentId,
AWSS3LoadDestination("s3BucketNameRecords", s"$userId/$consignmentId"),
AWSS3LoadDestination("s3BucketNameMetadata", s"$consignmentId/dataload"),
AWSS3LoadDestination("s3BucketNameRecords", s"$sourceSystem/$consignmentId/records"),
AWSS3LoadDestination("s3BucketNameMetadata", s"$sourceSystem/$consignmentId/metadata"),
expectedTransferConfiguration
)

val service = new DataLoadInitiation(mockGraphQlApiService)
val result = service.initiateConsignmentLoad(mockToken, SourceSystemEnum.SharePoint).unsafeRunSync()
val result = service.initiateConsignmentLoad(mockToken, sourceSystem).unsafeRunSync()
result shouldBe expectedResult
verify(mockGraphQlApiService, times(1)).addConsignment(mockToken)
verify(mockGraphQlApiService, times(1)).startUpload(mockToken, consignmentId, None)
@@ -49,7 +50,7 @@
val service = new DataLoadInitiation(mockGraphQlApiService)

val exception = intercept[RuntimeException] {
service.initiateConsignmentLoad(mockToken, SourceSystemEnum.SharePoint).attempt.unsafeRunSync()
service.initiateConsignmentLoad(mockToken, sourceSystem).attempt.unsafeRunSync()
}
exception.getMessage shouldBe "Error adding consignment"
verify(mockGraphQlApiService, times(1)).addConsignment(mockToken)
@@ -64,7 +65,7 @@
when(mockGraphQlApiService.startUpload(mockToken, consignmentId)).thenThrow(new RuntimeException("Error starting upload"))

val service = new DataLoadInitiation(mockGraphQlApiService)
val response = service.initiateConsignmentLoad(mockToken, SourceSystemEnum.SharePoint).attempt.unsafeRunSync()
val response = service.initiateConsignmentLoad(mockToken, sourceSystem).attempt.unsafeRunSync()

response.isLeft should equal(true)
response.left.value.getMessage should equal("Error starting upload")
@@ -8,10 +8,10 @@ class MetadataLoadConfigurationSpec extends BaseSpec {
"'metadataLoadConfiguration'" should "return the correct metadata configuration for the given source system" in {
val result = MetadataLoadConfiguration.metadataLoadConfiguration(SourceSystemEnum.SharePoint)
result.size shouldBe 4
result.contains(MetadataPropertyDetails("FileRef", true)) shouldBe true
result.contains(MetadataPropertyDetails("File_x0020_Size", true)) shouldBe true
result.contains(MetadataPropertyDetails("SHA256ClientSideChecksum", true)) shouldBe true
result.contains(MetadataPropertyDetails("Modified", true)) shouldBe true
result.contains(MetadataPropertyDetails("FileRef", required = true)) shouldBe true
result.contains(MetadataPropertyDetails("File_x0020_Size", required = true)) shouldBe true
result.contains(MetadataPropertyDetails("SHA256ClientSideChecksum", required = true)) shouldBe true
result.contains(MetadataPropertyDetails("Modified", required = true)) shouldBe true
}

"'metadataLoadConfiguration'" should "return an error if source system not mapped to a schema" in {
