Merge branch 'develop' into jdsika-patch-1
jdsika authored Dec 5, 2024
2 parents b0b2a21 + 5612c0b commit c00edb8
Showing 5 changed files with 91 additions and 20 deletions.
@@ -5,17 +5,20 @@ describe('common/aws/handlers/processAssetUpload', () => {
it('should extract and write metadata to a bucket', async () => {
// when ... we want to extract data from an asset and upload it to a different bucket
// then ... it should validate, extract and upload to a bucket
const uploadStub = jest.fn().mockResolvedValue('UPLOAD_URL')
const uploadDoneStub = jest.fn().mockResolvedValue('UPLOAD_URL')
const transformToByteArrayStub = jest.fn().mockResolvedValue('ASSET_BYTE_ARRAY')
const readFileStub = jest.fn().mockResolvedValue({
Body: {
transformToByteArray: transformToByteArrayStub,
},
}) as any
const validateShaclDataWithSchemaStub = jest.fn().mockResolvedValue({
const validateAndCreateMetadataStub = jest.fn().mockResolvedValue({
conforms: true,
reports: { conforms: true },
metadata: 'METADATA',
metadata: {
minter: 'MINTER_ADDRESS',
},
modifiedManifest: 'MODIFIED_MANIFEST',
assetCID: 'ASSET_CID',
metadataCID: 'METADATA_CID',
files: {
@@ -42,14 +45,26 @@ describe('common/aws/handlers/processAssetUpload', () => {
},
],
},
visualizationFiles: [
{
path: 'PATH',
arrayBuffer: 'FILE_BUFFER',
},
{
path: 'PATH_1',
arrayBuffer: 'FILE_BUFFER',
},
],
}) as any
const writeFileStub = jest.fn().mockReturnValue({
done: uploadStub,
done: uploadDoneStub,
}) as any
const copyFileStub = jest.fn().mockResolvedValue('COPIED') as any
const deleteFileStub = jest.fn().mockReturnValue('SHACL_DATA') as any
const getAssetStatusStub = jest.fn().mockReturnValue('ASSET_CID') as any
const updateAssetStatusStub = jest.fn().mockReturnValue('UPDATED') as any
const uploadFileStub = jest.fn().mockResolvedValue('ASSET_CID') as any
const createGroupStub = jest.fn().mockResolvedValue('GROUP_NAME') as any

const event = {
Records: [
@@ -73,23 +88,43 @@ describe('common/aws/handlers/processAssetUpload', () => {
writeFile: writeFileStub,
copyFile: copyFileStub,
deleteFile: deleteFileStub,
validateAndCreateMetadata: validateShaclDataWithSchemaStub,
validateAndCreateMetadata: validateAndCreateMetadataStub,
getAsset: getAssetStatusStub,
updateAsset: updateAssetStatusStub,
uploadFile: uploadFileStub,
createGroup: createGroupStub,
})(event as any, context, callback)

expect(result).toEqual(undefined)
expect(readFileStub).toHaveBeenCalledWith({ Bucket: 'BUCKET_NAME', Key: 'OBJECT_KEY' })
expect(validateShaclDataWithSchemaStub).toHaveBeenCalledWith('ASSET_BYTE_ARRAY', 'ASSET_CID')
expect(validateShaclDataWithSchemaStub).toHaveBeenCalledTimes(1)
expect(writeFileStub).toHaveBeenCalledTimes(3)
expect(validateAndCreateMetadataStub).toHaveBeenCalledWith('ASSET_BYTE_ARRAY', 'ASSET_CID')
expect(validateAndCreateMetadataStub).toHaveBeenCalledTimes(1)
expect(writeFileStub).toHaveBeenCalledTimes(5)
expect(deleteFileStub).toHaveBeenCalledTimes(1)
expect(updateAssetStatusStub).toHaveBeenCalledWith(
'ASSET_CID',
'OBJECT_KEY',
'pending',
{
minter: 'MINTER_ADDRESS',
},
'MODIFIED_MANIFEST',
)
expect(copyFileStub).toHaveBeenCalledTimes(1)
expect(createGroupStub).toHaveBeenCalledWith('MINTER_ADDRESS')
expect(uploadDoneStub).toHaveBeenCalledWith()
expect(uploadFileStub).toHaveBeenCalledWith({ arrayBuffer: 'FILE_BUFFER', filename: 'PATH', group: 'GROUP_NAME' })
expect(uploadFileStub).toHaveBeenCalledWith({
arrayBuffer: 'FILE_BUFFER',
filename: 'PATH_1',
group: 'GROUP_NAME',
})
})

it('should delete the asset if the validation does not conform', async () => {
// when ... the extracted asset data does not conform to the SHACL schema
// then ... it should mark the asset as not accepted and delete it instead of uploading
const uploadStub = jest.fn().mockResolvedValue('UPLOAD_URL')
const uploadDoneStub = jest.fn().mockResolvedValue('UPLOAD_URL')
const transformToByteArrayStub = jest.fn().mockResolvedValue('ASSET_BYTE_ARRAY')
const readFileStub = jest.fn().mockResolvedValue({
Body: {
@@ -99,17 +134,21 @@ describe('common/aws/handlers/processAssetUpload', () => {
const validateShaclDataWithSchemaStub = jest.fn().mockResolvedValue({
conforms: false,
reports: { conforms: false },
metadata: 'METADATA',
metadata: {
minter: 'MINTER_ADDRESS',
},
assetCID: 'ASSET_CID',
metadataCID: 'METADATA_CID',
}) as any
const writeFileStub = jest.fn().mockReturnValue({
done: uploadStub,
done: uploadDoneStub,
}) as any
const copyFileStub = jest.fn().mockResolvedValue('COPIED') as any
const deleteFileStub = jest.fn().mockReturnValue('SHACL_DATA') as any
const getAssetStatusStub = jest.fn().mockReturnValue('ASSET_CID') as any
const updateAssetStatusStub = jest.fn().mockReturnValue('UPDATED') as any
const uploadFileStub = jest.fn().mockResolvedValue('ASSET_CID') as any
const createGroupStub = jest.fn().mockResolvedValue('GROUP_NAME') as any

const event = {
Records: [
@@ -136,6 +175,8 @@ describe('common/aws/handlers/processAssetUpload', () => {
validateAndCreateMetadata: validateShaclDataWithSchemaStub,
getAsset: getAssetStatusStub,
updateAsset: updateAssetStatusStub,
uploadFile: uploadFileStub,
createGroup: createGroupStub,
})(event as any, context, callback)

expect(result).toEqual(undefined)
@@ -145,6 +186,10 @@ describe('common/aws/handlers/processAssetUpload', () => {
expect(updateAssetStatusStub).toHaveBeenCalledWith('OBJECT_KEY', 'OBJECT_KEY', 'not_accepted')
expect(writeFileStub).toHaveBeenCalledTimes(0)
expect(deleteFileStub).toHaveBeenCalledWith({ Bucket: 'BUCKET_NAME', Key: 'OBJECT_KEY' })
expect(uploadDoneStub).not.toHaveBeenCalledWith()
expect(copyFileStub).toHaveBeenCalledTimes(0)
expect(createGroupStub).toHaveBeenCalledTimes(0)
expect(uploadFileStub).toHaveBeenCalledTimes(0)
})
})
})
@@ -12,7 +12,8 @@ import ValidationReport from 'rdf-validate-shacl/src/validation-report'
import { getAsset, updateAsset, validateAndCreateMetadata } from '../../../asset'
import { ExtractedFileWithCID, ManifestExtractedFiles } from '../../../asset/types'
import { copyFile, deleteFile, readFile, writeFile } from '../../../aws'
import { uploadFile } from '../../../ipfs'
import { createGroup, uploadFile } from '../../../ipfs'
import { log } from '../../../logger'
import { Asset, AssetMetadata, AssetStatus } from '../../../types'

export const _main =
@@ -24,6 +25,8 @@ export const _main =
validateAndCreateMetadata,
getAsset,
updateAsset,
uploadFile,
createGroup,
}: {
readFile: ({ Bucket, Key }: { Bucket: string; Key: string }) => Promise<GetObjectCommandOutput>
writeFile: (params: PutObjectCommandInput) => Upload
@@ -57,6 +60,16 @@ export const _main =
metadata?: AssetMetadata | string,
manifest?: Record<string, unknown>,
) => Promise<Asset>
uploadFile: ({
arrayBuffer,
filename,
group,
}: {
arrayBuffer: ArrayBuffer
filename: string
group?: string
}) => Promise<string>
createGroup: (minter: string) => Promise<string>
}): S3Handler =>
async event => {
try {
@@ -128,8 +141,11 @@ export const _main =
Promise.all(writeFilesToIpfsPromises)

const pinataIpfsPromises = visualizationFiles.map(
async ({ path, arrayBuffer }: { path: string; arrayBuffer: ArrayBuffer }) =>
uploadFile({ arrayBuffer, filename: last(split('/', path)) as string }),
async ({ path, arrayBuffer }: { path: string; arrayBuffer: ArrayBuffer }) => {
log.info(`Uploading ${path} to IPFS`)
const group = await createGroup(metadata.minter)
return uploadFile({ arrayBuffer, filename: last(split('/', path)) as string, group })
},
)

Promise.all(pinataIpfsPromises)
@@ -171,4 +187,6 @@ export const main = _main({
validateAndCreateMetadata,
getAsset,
updateAsset,
uploadFile,
createGroup,
})
10 changes: 9 additions & 1 deletion apps/envited.ascs.digital/common/ipfs/ipfs.ts
@@ -20,7 +20,7 @@ export const uploadJson =

export const uploadFile =
(pinata: PinataSDK) =>
async ({ arrayBuffer, filename }: { arrayBuffer: ArrayBuffer; filename: string }) => {
async ({ arrayBuffer, filename, group = '' }: { arrayBuffer: ArrayBuffer; filename: string; group?: string }) => {
const buffer = Buffer.from(arrayBuffer)

const readable = new Readable({
@@ -30,6 +30,14 @@ export const uploadFile =
},
})

if (group) {
return pinata.upload
.stream(readable)
.addMetadata({ name: filename })
.group(group)
.then(data => pinata.gateways.convert(data.IpfsHash))
}

return pinata.upload
.stream(readable)
.addMetadata({ name: filename })
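As a minimal usage sketch of the group-aware uploadFile above: only the curried uploadFile({ arrayBuffer, filename, group }) shape comes from ipfs.ts; the SDK package name, constructor options, environment variable names, and 'GROUP_ID' are assumptions for illustration.

// Usage sketch; the PinataSDK construction, env var names, and 'GROUP_ID' are illustrative assumptions.
import { PinataSDK } from 'pinata-web3'

import { uploadFile } from './ipfs'

const pinata = new PinataSDK({
  pinataJwt: process.env.PINATA_JWT ?? '',
  pinataGateway: process.env.PINATA_GATEWAY ?? '',
})

const upload = uploadFile(pinata)

// With a group, the stream is pinned into that Pinata group before the gateway
// URL is resolved; with no group (it defaults to ''), the original upload path is used.
const groupedUrl = await upload({
  arrayBuffer: new ArrayBuffer(8), // placeholder bytes
  filename: 'visualization.png',
  group: 'GROUP_ID',
})
const plainUrl = await upload({
  arrayBuffer: new ArrayBuffer(8),
  filename: 'visualization.png',
})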
@@ -7,8 +7,8 @@ import { db } from '../../database/queries'
import { Database } from '../../database/types'
import { CreateGroup, UploadJson, createGroup, uploadJson } from '../../ipfs'
import { Log, log } from '../../logger'
import { Role, Session } from '../../types'
import { badRequestError, forbiddenError, notFoundError, unauthorizedError } from '../../utils'
import { Asset, Role, Session } from '../../types'
import { badRequestError, extractAddressFromDid, forbiddenError, notFoundError, unauthorizedError } from '../../utils'

export const uploadTokenMetadataToIPFS =
({
@@ -53,8 +53,8 @@ export const uploadTokenMetadataToIPFS =
throw forbiddenError({ resource: 'assets', message: 'No issuer found', userId: session.user.id })
}

const group = await createGroup(user.issuerId)
return uploadJson({ data: asset.metadata, filename: 'token_info.json', group })
const group = await createGroup(extractAddressFromDid(user.issuerId))
return uploadJson({ data: (asset as Asset).metadata, filename: 'token_info.json', group })
}

export const uploadTokenMetadata = uploadTokenMetadataToIPFS({
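The change above keys the Pinata group on the address extracted from the issuer DID rather than the raw issuerId, presumably so group names line up with the minter address used by the upload handler. A hypothetical sketch of such a helper, assuming issuer DIDs end in an address segment; the real extractAddressFromDid in ../../utils may be implemented differently.

// Hypothetical sketch only; the actual extractAddressFromDid lives in ../../utils.
// Assumes DIDs of the form 'did:<method>:<address>', e.g. 'did:ethr:0x1234abcd'.
export const extractAddressFromDid = (did: string): string => {
  const segments = did.split(':')
  return segments[segments.length - 1]
}

// extractAddressFromDid('did:ethr:0x1234abcd') === '0x1234abcd'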
2 changes: 1 addition & 1 deletion apps/envited.ascs.digital/common/types/types.ts
@@ -74,7 +74,7 @@ export enum CredentialType {
export interface Asset {
id: string
cid: string
metadata: string
metadata: AssetMetadata
status: AssetStatus
userId: string
}