From 936d900e24b4e2fdab3a6b0b9d4e31221b3df799 Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 17:11:40 -0700 Subject: [PATCH 1/3] Add dockerfile --- Dockerfile | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..dfe847a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM node:14 +WORKDIR /usr/src/app +ENV PRODUCTION=true +EXPOSE 8080 +EXPOSE 80 +EXPOSE 443 +EXPOSE 8443 +COPY package*.json ./ +RUN npm install +COPY . . + +CMD [ "node", "index.js" ] \ No newline at end of file From 39f773e95cd7a63004b8ded5dd86773011a9023c Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 17:19:49 -0700 Subject: [PATCH 2/3] Add config.js and api --- api/index.js | 16 + api/v1/enums.js | 8 + api/v1/environment.js | 7 + api/v1/index.js | 231 ++++++++++++++ api/v1/routes/auth.js | 45 +++ api/v1/routes/tokens.js | 655 ++++++++++++++++++++++++++++++++++++++++ api/v1/routes/wallet.js | 22 ++ config.js | 59 ++++ 8 files changed, 1043 insertions(+) create mode 100644 api/index.js create mode 100644 api/v1/enums.js create mode 100644 api/v1/environment.js create mode 100644 api/v1/index.js create mode 100644 api/v1/routes/auth.js create mode 100644 api/v1/routes/tokens.js create mode 100644 api/v1/routes/wallet.js create mode 100644 config.js diff --git a/api/index.js b/api/index.js new file mode 100644 index 0000000..5b1fbc5 --- /dev/null +++ b/api/index.js @@ -0,0 +1,16 @@ +require('dotenv-flow').config(); + +const express = require('express'); +const fileUpload = require('express-fileupload'); + +const {addV1Routes} = require("./v1/index.js"); + +const {HTTP_PORT} = require('../config.js'); + +const app = express(); + +app.use(fileUpload()); + +addV1Routes(app); + +app.listen(HTTP_PORT, () => console.log(`App listening at http://localhost:${HTTP_PORT}`)); \ No newline at end of file diff --git a/api/v1/enums.js b/api/v1/enums.js new file mode 100644 index
0000000..daa021a --- /dev/null +++ b/api/v1/enums.js @@ -0,0 +1,8 @@ +const ResponseStatus = { + Success: "success", + Error: "error" +} + +module.exports = { + ResponseStatus +} \ No newline at end of file diff --git a/api/v1/environment.js b/api/v1/environment.js new file mode 100644 index 0000000..bbcbeb2 --- /dev/null +++ b/api/v1/environment.js @@ -0,0 +1,7 @@ +const development = !process.env.PRODUCTION +const production = process != undefined && process.env.PRODUCTION + +module.exports = { + development, + production +} \ No newline at end of file diff --git a/api/v1/index.js b/api/v1/index.js new file mode 100644 index 0000000..2f8f554 --- /dev/null +++ b/api/v1/index.js @@ -0,0 +1,231 @@ +const expressJSDocSwagger = require('express-jsdoc-swagger'); + +const {createWallet} = require("./routes/wallet.js"); +const {handleServerSideAuth, authenticateToken} = require("./routes/auth.js"); +const {listTokens, createToken, readToken, deleteToken, sendToken, readTokenRange, signTransfer} = require("./routes/tokens.js"); + +const {getBlockchain} = require('../../blockchain.js'); + +let blockchain; + +(async () => { + blockchain = await getBlockchain(); +})() + +function addV1Routes(app){ + const swaggerOptions = { + info: { + version: "v1", + title: "Webaverse API Documentation", + description: "Documentation for the Webaverse API server", + }, + components: { + securitySchemes: { + bearerAuth: { + type: "http", + scheme: "bearer", + bearerFormat: "JWT" + } + } + }, + filesPattern: '*.js', + swaggerUIPath: '/v1/api-docs', + baseDir: __dirname, + exposeSwaggerUI: true, + exposeApiDocs: true, + apiDocsPath: '/api/v1/api-docs' + }; + + expressJSDocSwagger(app)(swaggerOptions); + +/** + * Authentication payload + * @typedef {object} AuthPayload + * @property {string} authSecretKey.required - Auth Secret Key + */ +/** + * Authentication response + * @typedef {object} AuthResponse + * @property {string} status - The status of the authentication request (success/error) + 
* @property {string} accessToken - JWT token for authentication + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * POST /api/v1/authorizeServer + * @summary Get authentication token + * @param {AuthPayload} request.body.required - AuthPayload object for authentication + * @return {AuthResponse} 200 - success response + */ +app.post('/api/v1/authorizeServer', async (req, res) => { + return await handleServerSideAuth(req, res); +}); + +// WALLETS + +/** + * Response for user account creation and retrieval + * @typedef {object} WalletCreationResponse + * @property {string} status - The status of the creation request (success/error) + * @property {string} userMnemonic - The private key for the user (to be stored and NEVER shared) + * @property {string} userAddress - The public key for the user (to be stored) + * @property {string} error - If the status is error, the error can be read from here +*/ + +/** + * POST /api/v1/wallet + * @summary Create a wallet for a user + * @security bearerAuth + * @return {WalletCreationResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + */ +app.post('/api/v1/wallet', authenticateToken, async (req, res) => { + return await createWallet(req, res); +}); + +// TOKENS + +/** + * Response for user account creation and retrieval + * @typedef {object} TokenResponse + * @property {string} status - The status of the list request (success/error) + * @property {object} token - Token object returned + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * Response for user account creation and retrieval + * @typedef {object} TokenIdResponse + * @property {string} status - The status of the list request (success/error) + * @property {string} tokenId - Token id returned + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * Response for user account creation and 
retrieval + * @typedef {object} TokenIdListResponse + * @property {string} status - The status of the list request (success/error) + * @property {object} tokenIds - Token id returned + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * Response for user account creation and retrieval + * @typedef {object} TokenListResponse + * @property {string} status - The status of the list request (success/error) + * @property {object} tokens - Array of token objects returned + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * Response for user account creation and retrieval + * @typedef {object} TokenStatusResponse + * @property {string} status - The status of the list request (success/error) + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * Response for user account creation and retrieval + * @typedef {object} TokenSignatureResponse + * @property {string} status - The status of the list request (success/error) + * @property {string} tokenId - The ID fo the token being signed + * @property {string} signature - The status of the list request (success/error) + * @property {string} error - If the status is error, the error can be read from here + */ + +/** + * GET /api/v1/tokens/:address/:mainnetAddress + * @summary List tokens for a user + * @security bearerAuth + * @return {TokenListResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + * @param {string} address.path.required - Address of the user to list tokens for + * @param {string} mainnetAddress.path.optional - Mainnet address of the user to list tokens for (optional) + */ +app.get('/api/v1/tokens/:address/:mainnetAddress?', authenticateToken, async (req, res) => { + return await listTokens(req, res, blockchain.web3); +}); + +/** + * GET /api/v1/token/:tokenId + * @summary Retrieve data for a non-fungible token + * @security 
bearerAuth + * @return {TokenResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + * @param {string} tokenId.path.required - Token to retrieve + */ +app.get('/api/v1/token/:tokenId', authenticateToken, async (req, res) => { + return await readToken(req, res); +}); + +/** + * GET /api/v1/token/:tokenStartId/:tokenEndId + * @summary Retrieve a range of tokens + * @security bearerAuth + * @return {TokenListResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + * @param {string} tokenStartId.path.required - First token to retrieve + * @param {string} tokenEndId.path.required - Last token in range to retrieve + */ +app.get('/api/v1/token/:tokenStartId/:tokenEndId', authenticateToken, async (req, res) => { + return await readTokenRange(req, res); +}); + +/** + * POST /api/v1/token + * @summary Create a non-fungible token with a file or IPFS hash + * @security bearerAuth + * @return {TokenListResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + * @param {string} userMnemonic.required - Mint the token using a user's private key + * @param {string} file.optional - File to upload to IPFS + * @param {string} resourceHash.optional - IPFS resource hash or other URI + * @param {number} quantity.optional; - Number of tokens to mint +*/ +app.post('/api/v1/token', authenticateToken, async (req, res) => { + return await createToken(req, res, blockchain); +}); + +/** + * DELETE /api/v1/token + * @summary Burn a token forever + * @security bearerAuth + * @param {string} tokenId.required - Token to delete + * @return {TokenStatusResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + */ +app.delete('/api/v1/token', authenticateToken, async (req, res) => { + return await deleteToken(req, res, blockchain); +}); + +/** + * POST /api/v1/token/send + * @summary Send this token from one user to another + * @security 
bearerAuth + * @return {TokenStatusResponse} 200 - success response + * @return {AuthResponse} 401 - authentication error response + * @param {string} tokenId.required - Token to be sent + * @param {string} fromUserAddress.required - Token sent by this user (public address) + * @param {string} toUserAddress.required - Token received by this user (public address) + */ +app.post('/api/v1/token/send', authenticateToken, async (req, res) => { + return await sendToken(req, res, blockchain); +}); + +/** + * POST /api/v1/token/signTransfer + * @summary Prepare a token to be transferred, either mainnet <-> sidechain or polygon <-> sidechain + * @return {TokenSignatureResponse} 200 - success response + * @return {object} 401 - forbidden request response + * @property {string} tokenId - Token to be sent + * @property {string} transferToChain - Transfer to this chain + */ + app.post('/api/v1/token/signTransfer', async (req, res) => { + return await signTransfer(req, res, blockchain); +}); + +} + +module.exports = { + addV1Routes +} \ No newline at end of file diff --git a/api/v1/routes/auth.js b/api/v1/routes/auth.js new file mode 100644 index 0000000..f3f8f21 --- /dev/null +++ b/api/v1/routes/auth.js @@ -0,0 +1,45 @@ +const {setCorsHeaders} = require("../../../utils.js"); +const {ResponseStatus} = require("../enums.js"); +const {development} = require("../environment.js"); +const {AUTH_SECRET_KEY, AUTH_TOKEN_SECRET} = require('../../../config.js'); +const jwt = require('jsonwebtoken'); + +function authenticateToken(req, res, next) { + const authHeader = req.headers['authorization']; + const token = authHeader && authHeader.split(' ')[1]; + + if (!token) + return res.status(401).send() + + jwt.verify(token, AUTH_TOKEN_SECRET, (error, data) => { + if (error) + return res.sendStatus(403); + + const {authSecretKey} = data; + if (AUTH_SECRET_KEY !== authSecretKey) + return res.sendStatus(403); + + next() + }); +} + +// Compares a shared secret key and +async function
handleServerSideAuth(req, res) { + if (development) setCorsHeaders(res); + const {authSecretKey} = req.body; + + if (!authSecretKey) + return res.json({status: ResponseStatus.Error, accessToken: null, error: "authSecretKey value was not found"}); + + if (authSecretKey != AUTH_SECRET_KEY) + return res.json({status: ResponseStatus.Error, accessToken: null, error: "authSecretKey value was invalid"}) + + const accessToken = jwt.sign({authSecretKey}, AUTH_TOKEN_SECRET); + + return res.json({status: ResponseStatus.Success, accessToken, error: null}) +} + +module.exports = { + handleServerSideAuth, + authenticateToken +} diff --git a/api/v1/routes/tokens.js b/api/v1/routes/tokens.js new file mode 100644 index 0000000..7201786 --- /dev/null +++ b/api/v1/routes/tokens.js @@ -0,0 +1,655 @@ +const path = require('path'); +const http = require('http'); +const bip39 = require('bip39'); +const {hdkey} = require('ethereumjs-wallet'); +const {getBlockchain, areAddressesCollaborator} = require('../../../blockchain.js'); +const {makePromise, setCorsHeaders} = require('../../../utils.js'); +const {getRedisItem, parseRedisItems, getRedisClient} = require('../../../redis.js'); +const { + proofOfAddressMessage, + unlockableMetadataKey, + encryptedMetadataKey, + redisPrefixes, + mainnetSignatureMessage, + nftIndexName, + burnAddress, + zeroAddress +} = require('../../../constants.js'); +const { + ENCRYPTION_MNEMONIC, + MINTING_FEE, + IPFS_HOST, + MAINNET_MNEMONIC, + PINATA_API_KEY, + PINATA_SECRET_API_KEY, + DEFAULT_TOKEN_DESCRIPTION +} = require('../../../config.js'); +const {ResponseStatus} = require("../enums.js"); +const {runSidechainTransaction} = require("../../../tokens.js"); +const {production, development} = require("../environment.js"); + +const {jsonParse} = require('../../../utils.js'); + +const {encodeSecret, decodeSecret} = require('../../../encryption.js'); + +const pinataSDK = require('@pinata/sdk'); +const pinata = (PINATA_API_KEY && PINATA_API_KEY !== "") ?
pinataSDK(PINATA_API_KEY, PINATA_SECRET_API_KEY) : null; + +const pinataOptions = { + pinataOptions: { + customPinPolicy: { + regions: [ + { + id: 'FRA1', + desiredReplicationCount: 1 + }, + { + id: 'NYC1', + desiredReplicationCount: 2 + } + ] + } + } +}; + +const redisClient = getRedisClient(); + +const network = production ? 'mainnet' : 'testnet'; + +const {Readable} = require('stream'); + +let web3, contracts; + +(async function () { + const blockchain = await getBlockchain(); + web3 = blockchain.web3; + contracts = blockchain.contracts; +})(); + + +// Takes an account as input +async function listTokens(req, res, web3) { + const {address, mainnetAddress} = req.params; + + if (development) setCorsHeaders(res); + try { + const [ + mainnetTokens, + sidechainTokens, + ] = await Promise.all([ + (async () => { + if (!mainnetAddress) return []; + const recoveredAddress = await web3[network].eth.accounts.recover(mainnetSignatureMessage, mainnetAddress); + if (!recoveredAddress) return []; + const p = makePromise(); + const args = `${nftIndexName} ${JSON.stringify(recoveredAddress)} INFIELDS 1 currentOwnerAddress LIMIT 0 1000000`.split(' ').concat([(err, result) => { + if (!err) { + const items = parseRedisItems(result); + p.accept({ + Items: items, + }); + } else { + p.reject(err); + } + }]); + redisClient.ft_search.apply(redisClient, args); + const o = await p; + + return (o && o.Items) || []; + })(), + (async () => { + const p = makePromise(); + const args = `${nftIndexName} ${JSON.stringify(address)} INFIELDS 1 currentOwnerAddress LIMIT 0 1000000`.split(' ').concat([(err, result) => { + if (!err) { + const items = parseRedisItems(result); + p.accept({ + Items: items, + }); + } else { + p.reject(err); + } + }]); + redisClient.ft_search.apply(redisClient, args); + const o = await p; + return (o && o.Items) || []; + })(), + ]); + const tokens = sidechainTokens + .concat(mainnetTokens) + .sort((a, b) => a.id - b.id) + .filter((token, i, arr) => { // filter unique hashes + if (token === "0" || (token.properties.hash === "" && token.owner.address === zeroAddress)) + return false; + + for (let j = 0; j < i; j++) { + if (arr[j].properties.hash === token.properties.hash && token.properties.hash !== "") + return false; + } + return true; + }); + return res.json({status: ResponseStatus.Success, tokens: JSON.stringify(tokens), error: null}); + } catch (error) { + return res.json({status: ResponseStatus.Error, tokens: null, error}); + } +} + +// Called by create token on successful resource upload +async function mintTokens(resHash, mnemonic, quantity, privateData, web3, contracts, res) { + let tokenIds, status; + const fullAmount = { + t: 'uint256', + v: new web3.utils.BN(1e9) + .mul(new web3.utils.BN(1e9)) + .mul(new web3.utils.BN(1e9)), + }; + + const fullAmountD2 = { + t: 'uint256', + v: fullAmount.v.div(new web3.utils.BN(2)), + }; + const wallet = hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(mnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(); + const address = wallet.getAddressString(); + + if (MINTING_FEE > 0) { + let allowance = await contracts['FT'].methods.allowance(address, contracts['NFT']._address).call(); + allowance = new web3.utils.BN(allowance, 0); + if (allowance.lt(fullAmountD2.v)) { + const result = await runSidechainTransaction(mnemonic)('FT', 'approve', contracts['NFT']._address, fullAmount.v); + status = result.status; + } else { + status = true; + } + } else status = true; + + if (status) { + const description = DEFAULT_TOKEN_DESCRIPTION; + + let fileName = resHash.split('/').pop(); + + let extName = path.extname(fileName).slice(1); + extName = extName === "" ? "png" : extName + extName = extName === "jpeg" ? "jpg" : extName + + fileName = extName ?
fileName.slice(0, -(extName.length + 1)) : fileName; + + const {hash} = JSON.parse(Buffer.from(resHash, 'utf8').toString('utf8')); + + const result = await runSidechainTransaction(mnemonic)('NFT', 'mint', address, hash, fileName, extName, description, quantity); + status = result.status; + + if(privateData) + { + const encryptedData = encodeSecret(privateData); + await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', hash, unlockableMetadataKey, encryptedData); + await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', hash, encryptedMetadataKey, encryptedData); + } + + const tokenId = new web3.utils.BN(result.logs[0].topics[3].slice(2), 16).toNumber(); + tokenIds = [tokenId, tokenId + quantity - 1]; + } + return res.json({status: ResponseStatus.Success, tokenIds, error: null}); +} + +async function createToken(req, res, {web3, contracts}) { + const {mnemonic, quantity, privateData} = req.body; + + try { + let {resourceHash} = req.body; + + const file = req.files && req.files[0]; + + if (!bip39.validateMnemonic(mnemonic)) { + return res.json({status: ResponseStatus.Error, error: "Invalid mnemonic"}); + } + + if (!resourceHash && !file) { + return res.json({status: ResponseStatus.Error, error: "POST did not include a file or resourceHash"}); + } + + // Check if there are any files -- if there aren't, check if there's a hash + if (resourceHash && file) { + return res.json({status: ResponseStatus.Error, error: "POST should include a resourceHash *or* file but not both"}); + } + + if (file) { + const readableStream = new Readable({ + read() { + this.push(Buffer.from(file)); + this.push(null); + } + }); + + // Pinata API keys are valid, so this is probably what the user wants + if (pinata) { + const {IpfsHash} = await pinata.pinFileToIPFS(readableStream, pinataOptions) + if (IpfsHash) mintTokens(IpfsHash, mnemonic, quantity, privateData, web3, contracts, res); + else res.json({status: ResponseStatus.Error, error: "Error pinning to Pinata service, hash was not returned"}); + }
else { + // Upload to our own IPFS node + const req = http.request(IPFS_HOST, {method: 'POST'}, res => { + const bufferString = []; + res.on('data', data => { + bufferString.push(data); + }); + res.on('end', async () => { + const buffer = Buffer.concat(bufferString); + const string = buffer.toString('utf8'); + const {hash} = JSON.parse(string); + if (hash) mintTokens(hash, mnemonic, quantity, privateData, web3, contracts, res); + else return res.json({status: ResponseStatus.Error, error: "Error getting hash back from IPFS node"}); + }); + res.on('error', err => { + console.warn(err.stack); + return res.json({status: ResponseStatus.Error, error: err.stack}); + }); + }); + req.on('error', err => { + console.warn(err.stack); + res.json({status: ResponseStatus.Error, error: err.stack}); + }); + file.pipe(req); + } + } else { + mintTokens(resourceHash, mnemonic, quantity, privateData, web3, contracts, res); + } + + } catch (error) { + console.warn(error.stack); + return res.json({status: ResponseStatus.Error, tokenIds: [], error}); + } +} + +async function readToken(req, res) { + const {tokenId} = req.params; + let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); + let token = o.Item; + + if (development) setCorsHeaders(res); + if (token) { + return res.json({status: ResponseStatus.Success, token, error: null}) + } else { + return res.json({status: ResponseStatus.Error, token: null, error: "The token could not be found"}) + } +} + +// Same as read token, but return unlockable in plaintext +async function readTokenWithUnlockable(req, res) { + const {tokenId} = req.params; + let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); + let token = o.Item; + + if (development) setCorsHeaders(res); + if (token) { + if(token[unlockableMetadataKey] !== undefined && token[unlockableMetadataKey] !== ""){ + let value = token[unlockableMetadataKey]; + value = jsonParse(value); + if (value !== null) { + let {ciphertext, tag} = value; + ciphertext =
Buffer.from(ciphertext, 'base64'); + tag = Buffer.from(tag, 'base64'); + value = decodeSecret(ENCRYPTION_MNEMONIC, {ciphertext, tag}); + } + token[unlockableMetadataKey] = value; + } + return res.json({status: ResponseStatus.Success, token, error: null}) + } else { + return res.json({status: ResponseStatus.Error, token: null, error: "The token could not be found"}) + } +} + +// async function readUnlockable(req, res) { +// const {tokenId} = req.params; +// let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); +// let token = o.Item; +// let value = ""; +// if (development) setCorsHeaders(res); +// if (token) { +// if(token[unlockableMetadataKey] !== undefined && token[unlockableMetadataKey] !== ""){ +// value = token[unlockableMetadataKey]; +// value = jsonParse(value); +// if (value !== null) { +// let {ciphertext, tag} = value; +// ciphertext = Buffer.from(ciphertext, 'base64'); +// tag = Buffer.from(tag, 'base64'); +// value = decodeSecret(ENCRYPTION_MNEMONIC, {ciphertext, tag}); +// } +// token[unlockableMetadataKey] = value; +// return res.json({status: ResponseStatus.Success, value, error: null}) +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token could not be unlocked"}) +// } +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token could not be found"}) +// } +// } + +// async function readEncryptedData(req, res) { +// const {tokenId} = req.params; +// let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); +// let token = o.Item; +// if (development) setCorsHeaders(res); +// if (token) { +// if(token[encryptedMetadataKey] !== undefined && token[encryptedMetadataKey] !== ""){ +// const url = token[encryptedMetadataKey]; +// await fetch(url).then(data => res.send(data)); +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token does not appear to have encrypted data"}) +// } +// } else { +// return 
res.json({status: ResponseStatus.Error, value: null, error: "The token could not be found"}) +// } +// } + +async function readTokenRange(req, res) { + if (development) setCorsHeaders(res); + try { + const {tokenStartId, tokenEndId} = req.params; + + if (tokenStartId <= 0 || tokenEndId < tokenStartId || (tokenEndId - tokenStartId) > 100) + return res.json({status: ResponseStatus.Error, error: "Invalid range for tokens"}) + + + const promise = makePromise(); + const args = `${nftIndexName} * filter id ${tokenStartId} ${tokenEndId} LIMIT 0 1000000`.split(' ').concat([(err, result) => { + if (!err) { + const items = parseRedisItems(result); + promise.accept({ + Items: items, + }); + } else { + promise.reject(err); + } + }]); + redisClient.ft_search.apply(redisClient, args); + const o = await promise; + + let tokens = o.Items + .filter(token => token !== null) + .sort((a, b) => a.id - b.id) + .filter((token, i, arr) => { // filter unique hashes + + if (token.properties.hash === "" && token.owner.address === zeroAddress) + return false; + + for (let j = 0; j < i; j++) + if (arr[j].properties.hash === token.properties.hash && token.properties.hash !== "") + return false; + + return true; + }); + + + return res.json({status: ResponseStatus.Success, tokens, error: null}) + } catch (error) { + return res.json({status: ResponseStatus.Error, tokens: [], error}) + } +} + +// TODO: Try to unpin from pinata if we are using pinata +async function deleteToken(req, res) { + try { + const {tokenId} = req.body; + + let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); + let token = o.Item; + + const address = token.owner.address; + + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + const randomHash = Math.random().toString(36); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, randomHash); + const result = await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'transferFrom', address,
burnAddress, tokenId); + + if (result) console.log("Result of delete transaction:", result); + return res.json({status: ResponseStatus.Success, error: null}) + } catch (error) { + return res.json({status: ResponseStatus.Error, error}) + } +} + +async function sendToken(req, res) { + try { + const {fromUserAddress, toUserAddress, tokenId} = req.body; + const quantity = req.body.quantity ?? 1; + + let status = true; + let error = null; + for (let i = 0; i < quantity; i++) { + try { + const isApproved = await contracts.NFT.methods.isApprovedForAll(fromUserAddress, contracts['Trade']._address).call(); + if (!isApproved) { + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'setApprovalForAll', contracts['Trade']._address, true); + } + + const result = await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'transferFrom', fromUserAddress, toUserAddress, tokenId); + status = status && result.status; + } catch (err) { + console.warn(err.stack); + status = false; + error = err; + break; + } + } + + if (status) { + return res.json({status: ResponseStatus.Success, message: 'Transferred ' + tokenId + ' to ' + toUserAddress, error: null}) + } else { + return res.json({status: ResponseStatus.Error, message: 'Transfer request could not be fulfilled: ' + status, error: error}) + } + } catch (error) { + return res.json({status: ResponseStatus.Error, message: 'Error sending token', error: error}) + } +} + +async function signTransfer(req, res, blockchain) { + console.warn("Method not implemented", req, res, blockchain); +} + +async function getPrivateData(req, res) { + const {signatures, id} = req.body; + const key = unlockableMetadataKey; + const addresses = []; + let unlockSuccessful = false; + for (const signature of signatures) { + try { + let address = await web3.mainnetsidechain.eth.accounts.recover(proofOfAddressMessage, signature); + address = address.toLowerCase(); + addresses.push(address); + unlockSuccessful = true; + } catch (err) { + console.warn(err.stack); + 
unlockSuccessful = false; + } + } + + if (!unlockSuccessful) + return res.json({status: ResponseStatus.Error, error: "Failed to unlock private token data"}); + + const hash = await contracts.mainnetsidechain.NFT.methods.getHash(id).call(); + const isCollaborator = await areAddressesCollaborator(addresses, hash, id); + if (isCollaborator) { + let value = await contracts.mainnetsidechain.NFT.methods.getMetadata(hash, key).call(); + value = jsonParse(value); + if (value !== null) { + let {ciphertext, tag} = value; + ciphertext = Buffer.from(ciphertext, 'base64'); + tag = Buffer.from(tag, 'base64'); + value = decodeSecret(ENCRYPTION_MNEMONIC, {ciphertext, tag}); + } + return res.json({status: ResponseStatus.Success, payload: value, error: null}); + } else { + return res.json({status: ResponseStatus.Error, payload: null, error: `Address is not a collaborator on ${hash}`}); + } +} + +// TODO: Try to unpin from pinata if we are using pinata and already have file +async function updatePublicAsset(req, res, {contracts}) { + const {mnemonic, tokenId, resourceHash} = req.body; + const file = req.files && req.files[0]; + try { + if (!bip39.validateMnemonic(mnemonic)) { + return res.json({status: ResponseStatus.Error, error: "Invalid mnemonic"}); + } + + if (!resourceHash && !file) { + return res.json({status: ResponseStatus.Error, error: "POST did not include a file or resourceHash"}); + } + + // Check if there are any files -- if there aren't, check if there's a hash + if (resourceHash && file) { + return res.json({status: ResponseStatus.Error, error: "POST should include a resourceHash *or* file but not both"}); + } + + if (file) { + const readableStream = new Readable({ + read() { + this.push(Buffer.from(file)); + this.push(null); + } + }); + + // Pinata API keys are valid, so this is probably what the user wants + if (pinata) { + // TODO: Try to unpin existing pinata hash + const {IpfsHash} = await pinata.pinFileToIPFS(readableStream, pinataOptions); + if (IpfsHash){ + const
currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, IpfsHash); + } + else res.json({status: ResponseStatus.Error, error: "Error pinning to Pinata service, hash was not returned"}); + } else { + // Upload to our own IPFS node + const req = http.request(IPFS_HOST, {method: 'POST'}, res => { + const bufferString = []; + res.on('data', data => { + bufferString.push(data); + }); + res.on('end', async () => { + const buffer = Buffer.concat(bufferString); + const string = buffer.toString('utf8'); + const {hash} = JSON.parse(string); + if (hash){ + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, hash); + } + else return res.json({status: ResponseStatus.Error, error: "Error getting hash back from IPFS node"}); + }); + res.on('error', err => { + console.warn(err.stack); + return res.json({status: ResponseStatus.Error, error: err.stack}); + }); + }); + req.on('error', err => { + console.warn(err.stack); + res.json({status: ResponseStatus.Error, error: err.stack}); + }); + file.pipe(req); + } + } else { + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, resourceHash); + } + } catch (error) { + console.warn(error.stack); + return res.json({status: ResponseStatus.Error, tokenIds: [], error}); + } +} + +// // TODO: Try to unpin from pinata if we are using pinata +// async function updatePrivateData(req, res, {contracts}) { +// async function updateHashForKeys(token, privateDataHash){ +// // TODO: +// // First, check if it already has this private data +// // if(token.privateData) +// // If yes, check if pinata is true -- if it is, unpin the hash +// // Else, unpin the hash for local node +// // Set the new 
metadata + +// // const encryptedData = encodeSecret(privateData); +// // await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', token.hash, unlockableMetadataKey, encryptedData); +// // await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', token.hash, encryptedMetadataKey, encryptedData); + +// } +// try { +// const {mnemonic, tokenId, resourceHash, privateData} = req.body; +// let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); +// let token = o.Item; +// const file = req.files && req.files[0]; +// if (!bip39.validateMnemonic(mnemonic)) { +// return res.json({status: ResponseStatus.Error, error: "Invalid mnemonic"}); +// } + +// if (!resourceHash && !file && !privateData) { +// return res.json({status: ResponseStatus.Error, error: "POST did not include a privateData field or a file or resourceHash"}); +// } + +// // Check if there are any files -- if there aren't, check if there's a hash +// if (resourceHash && file) { +// return res.json({status: ResponseStatus.Error, error: "POST should include a privateData field, resourceHash *or* file but not more than one"}); +// } + +// if (file) { +// const readableStream = new Readable({ +// read() { +// this.push(Buffer.from(file)); +// this.push(null); +// } +// }); + +// // Pinata API keys are valid, so this is probably what the user wants +// if (pinata) { +// // TODO: Try to unpin existing pinata hash +// const {IpfsHash} = pinata.pinFileToIPFS(readableStream, pinataOptions); +// if (IpfsHash){ +// updateHashForKeys(token, IpfsHash); +// } +// else res.json({status: ResponseStatus.Error, error: "Error pinning to Pinata service, hash was not returned"}); +// } else { +// // Upload to our own IPFS node +// const req = http.request(IPFS_HOST, {method: 'POST'}, res => { +// const bufferString = []; +// res.on('data', data => { +// bufferString.push(data); +// }); +// res.on('end', async () => { +// const buffer = Buffer.concat(bufferString); +// const string = buffer.toString('utf8'); 
+// const {hash} = JSON.parse(string);
+// if (hash){
+// updateHashForKeys(token, hash);
+// }
+// else return res.json({status: ResponseStatus.Error, error: "Error getting hash back from IPFS node"});
+// });
+// res.on('error', err => {
+// console.warn(err.stack);
+// return res.json({status: ResponseStatus.Error, error: err.stack});
+// });
+// });
+// req.on('error', err => {
+// console.warn(err.stack);
+// res.json({status: ResponseStatus.Error, error: err.stack});
+// });
+// file.pipe(req);
+// }
+// } else {
+// updateHashForKeys(token, resourceHash);
+// }
+// } catch (error) {
+// console.warn(error.stack);
+// return res.json({status: ResponseStatus.Error, tokenIds: [], error});
+// }
+// }
+
+module.exports = {
+  listTokens,
+  createToken,
+  updatePublicAsset,
+  readToken,
+  readTokenWithUnlockable,
+  readTokenRange,
+  deleteToken,
+  sendToken,
+  getPrivateData,
+  signTransfer,
+  // readEncryptedData
+}
diff --git a/api/v1/routes/wallet.js b/api/v1/routes/wallet.js
new file mode 100644
index 0000000..16883ac
--- /dev/null
+++ b/api/v1/routes/wallet.js
@@ -0,0 +1,29 @@
+const bip39 = require('bip39');
+const {hdkey} = require('ethereumjs-wallet');
+const {setCorsHeaders} = require("../../../utils.js");
+const {ResponseStatus} = require("../enums.js");
+const {development} = require("../environment.js");
+
+// Generates a fresh BIP39 mnemonic, derives the first Ethereum account from it
+// (standard derivation path m/44'/60'/0'/0/0), and returns the mnemonic plus
+// the derived address. The mnemonic is the wallet's master secret and is
+// handed back to the caller by design. NOTE(review): confirm this route is
+// only served over TLS so the secret never travels in cleartext.
+async function createWallet(req, res) {
+  // CORS is only relaxed for local development builds.
+  if (development) setCorsHeaders(res);
+  try {
+    const userMnemonic = bip39.generateMnemonic();
+    const wallet = hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(userMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet();
+    const userAddress = wallet.getAddressString();
+    return res.json({status: ResponseStatus.Success, userMnemonic, userAddress, error: null});
+  } catch (error) {
+    // Error instances serialize to {} through res.json, which hid the real
+    // failure from clients; return the message text instead.
+    return res.json({status: ResponseStatus.Error, userMnemonic: null, userAddress: null, error: error.message});
+  }
+}
+
+module.exports = {
+  createWallet
+}
diff --git a/config.js b/config.js
new file mode 100644
index 0000000..a934d00
--- /dev/null
+++ b/config.js
@@ -0,0 +1,69 @@
+// Central runtime configuration. Every value resolves in priority order:
+// environment variable, then SCREAMING_SNAKE key in the config file, then the
+// legacy camelCase key, then a hard-coded default where one exists.
+// NOTE(review): the existence check looks one directory ABOVE this file
+// (__dirname + '/../config.json') even though config.js sits at the repo root;
+// confirm that is intentional and not a leftover from a subdirectory move.
+let config = require('fs').existsSync(__dirname + '/../config.json') ? require('../config.json') : require('./config.default.json');
+
+// Chain mnemonics and third-party service credentials.
+const MAINNET_MNEMONIC = process.env.MAINNET_MNEMONIC || config.MAINNET_MNEMONIC || config.mainnetMnemonic;
+const TESTNET_MNEMONIC = process.env.TESTNET_MNEMONIC || config.TESTNET_MNEMONIC || config.testnetMnemonic;
+const POLYGON_MNEMONIC = process.env.POLYGON_MNEMONIC || config.POLYGON_MNEMONIC || config.polygonMnemonic;
+const TESTNET_POLYGON_MNEMONIC = process.env.TESTNET_POLYGON_MNEMONIC || config.TESTNET_POLYGON_MNEMONIC || config.testnetpolygonMnemonic;
+const INFURA_PROJECT_ID = process.env.INFURA_PROJECT_ID || config.INFURA_PROJECT_ID || config.infuraProjectId;
+const ENCRYPTION_MNEMONIC = process.env.ENCRYPTION_MNEMONIC || config.ENCRYPTION_MNEMONIC || config.encryptionMnemonic;
+const POLYGON_VIGIL_KEY = process.env.POLYGON_VIGIL_KEY || config.POLYGON_VIGIL_KEY || config.polygonVigilKey;
+const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID || config.GITHUB_CLIENT_ID || config.githubClientId;
+const GITHUB_CLIENT_SECRET = process.env.GITHUB_CLIENT_SECRET || config.GITHUB_CLIENT_SECRET || config.githubClientSecret;
+const DISCORD_CLIENT_ID = process.env.DISCORD_CLIENT_ID || config.DISCORD_CLIENT_ID || config.discordClientId;
+const DISCORD_CLIENT_SECRET = process.env.DISCORD_CLIENT_SECRET || config.DISCORD_CLIENT_SECRET || config.discordClientSecret;
+
+// Redis connection settings.
+const REDIS_HOST = process.env.REDIS_HOST || config.REDIS_HOST || config.redisHost;
+const REDIS_PORT = process.env.REDIS_PORT || config.REDIS_PORT || config.redisPort;
+const REDIS_KEY = process.env.REDIS_KEY || config.REDIS_KEY || config.redisKey;
+
+// Network / server settings. Ports are normalized with parseInt so downstream
+// code always sees a number, whether the value came from env (string) or JSON.
+const HTTP_PORT = parseInt(process.env.HTTP_PORT || config.HTTP_PORT, 10) || 80;
+const HTTPS_PORT = parseInt(process.env.HTTPS_PORT || config.HTTPS_PORT, 10) || 443;
+const PUBLIC_IP_ADDRESS = process.env.PUBLIC_IP_ADDRESS || config.PUBLIC_IP_ADDRESS || config.publicIp;
+const PRIVATE_IP_ADDRESS = process.env.PRIVATE_IP_ADDRESS || config.PRIVATE_IP_ADDRESS || config.privateIp;
+const IPFS_HOST = process.env.IPFS_HOST || config.IPFS_HOST || config.storageHost;
+const ETHEREUM_HOST = process.env.ETHEREUM_HOST || config.ETHEREUM_HOST || config.ethereumHost;
+const DEFAULT_TOKEN_DESCRIPTION = process.env.DEFAULT_TOKEN_DESCRIPTION || config.DEFAULT_TOKEN_DESCRIPTION || "";
+const AUTH_TOKEN_SECRET = process.env.AUTH_TOKEN_SECRET || config.AUTH_TOKEN_SECRET || "";
+const AUTH_SECRET_KEY = process.env.AUTH_SECRET_KEY || config.AUTH_SECRET_KEY || "";
+const IPFS_HOST_PORT = parseInt(process.env.IPFS_HOST_PORT || config.IPFS_HOST_PORT, 10) || 8081;
+
+const MINTING_FEE = parseInt(process.env.MINTING_FEE || config.MINTING_FEE, 10) || 10;
+
+module.exports = {
+  IPFS_HOST_PORT,
+  AUTH_SECRET_KEY,
+  AUTH_TOKEN_SECRET,
+  PUBLIC_IP_ADDRESS,
+  PRIVATE_IP_ADDRESS,
+  HTTP_PORT,
+  HTTPS_PORT,
+  REDIS_HOST,
+  REDIS_PORT,
+  REDIS_KEY,
+  GITHUB_CLIENT_ID,
+  GITHUB_CLIENT_SECRET,
+  DISCORD_CLIENT_ID,
+  DISCORD_CLIENT_SECRET,
+  MAINNET_MNEMONIC,
+  TESTNET_MNEMONIC,
+  POLYGON_MNEMONIC,
+  TESTNET_POLYGON_MNEMONIC,
+  INFURA_PROJECT_ID,
+  ENCRYPTION_MNEMONIC,
+  POLYGON_VIGIL_KEY,
+  DEFAULT_TOKEN_DESCRIPTION,
+  MINTING_FEE,
+  ETHEREUM_HOST,
+  IPFS_HOST,
+  config
+};
\ No newline at end of file

From 40d86fc90ecc39182414648f2fdcdca84400a81d Mon Sep 17 00:00:00 2001
From: shawticus
Date: Sun, 9 May 2021 17:31:10 -0700
Subject: [PATCH 3/3] Add eslint

---
 .eslintrc.json | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 .eslintrc.json

diff --git a/.eslintrc.json b/.eslintrc.json
new file mode 100644
index 0000000..da1eefd
--- /dev/null
+++ b/.eslintrc.json
@@ -0,0 +1,16 @@
+{
+  "env": {
+    "node": true,
+    "commonjs": true,
+    "es2021": true
+  },
+  "extends": "eslint:recommended",
+  "parserOptions": {
+    "ecmaVersion": 12
+  },
+  "rules": {
+    "object-curly-spacing": ["error", "never"],
"no-control-regex": "off", + "no-useless-escape": "off" + } +}