From 936d900e24b4e2fdab3a6b0b9d4e31221b3df799 Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 17:11:40 -0700 Subject: [PATCH 1/7] Add dockerfile --- Dockerfile | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..dfe847a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM node:14 +WORKDIR /usr/src/app +ENV PRODUCTION=true +EXPOSE 8080 +EXPOSE 80 +EXPOSE 443 +EXPOSE 8443 +COPY package*.json ./ +RUN npm install +COPY . . + +CMD [ "node", "index.js" ] \ No newline at end of file From 39f773e95cd7a63004b8ded5dd86773011a9023c Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 17:19:49 -0700 Subject: [PATCH 2/7] Add config.js and api --- api/index.js | 16 + api/v1/enums.js | 8 + api/v1/environment.js | 7 + api/v1/index.js | 231 ++++++++++++++ api/v1/routes/auth.js | 45 +++ api/v1/routes/tokens.js | 655 ++++++++++++++++++++++++++++++++++++++++ api/v1/routes/wallet.js | 22 ++ config.js | 59 ++++ 8 files changed, 1043 insertions(+) create mode 100644 api/index.js create mode 100644 api/v1/enums.js create mode 100644 api/v1/environment.js create mode 100644 api/v1/index.js create mode 100644 api/v1/routes/auth.js create mode 100644 api/v1/routes/tokens.js create mode 100644 api/v1/routes/wallet.js create mode 100644 config.js diff --git a/api/index.js b/api/index.js new file mode 100644 index 0000000..5b1fbc5 --- /dev/null +++ b/api/index.js @@ -0,0 +1,16 @@ +require('dotenv-flow').config(); + +const express = require('express'); +const fileUpload = require('express-fileupload'); + +const {addV1Routes} = require("./v1/index.js"); + +const {HTTP_PORT} = require('./config.js'); + +const app = express(); + +app.use(fileUpload()); + +addV1Routes(app); + +app.listen(HTTP_PORT, () => console.log(`App listening at http://localhost:${HTTP_PORT}`)); \ No newline at end of file diff --git a/api/v1/enums.js b/api/v1/enums.js new file mode 100644 index 0000000..daa021a --- /dev/null +++ b/api/v1/enums.js @@ -0,0 +1,8 @@ +const ResponseStatus = { + Success: "success", + Error: "error" +} + +module.exports = { + ResponseStatus +} \ No newline at end of file diff --git a/api/v1/environment.js b/api/v1/environment.js new file mode 100644 index 0000000..bbcbeb2 --- /dev/null +++ b/api/v1/environment.js @@ -0,0 +1,7 @@ +const development = !process.env.PRODUCTION +const production = process != undefined && process.env.PRODUCTION + +module.exports = { + development, + production +} \ No newline at end of file diff --git a/api/v1/index.js b/api/v1/index.js new file mode 100644 index 0000000..2f8f554 --- /dev/null +++ b/api/v1/index.js @@ -0,0 +1,231 @@ +const expressJSDocSwagger = require('express-jsdoc-swagger'); + +const {createWallet} = require("./routes/wallet.js"); +const {handleServerSideAuth, authenticateToken} = require("./routes/auth.js"); +const {listTokens, createToken, readToken, deleteToken, sendToken, readTokenRange, signTransfer} = require("./routes/tokens.js"); + +const {getBlockchain} = require('../../blockchain.js'); + +let blockchain; + +(async () => { + blockchain = await getBlockchain(); +})() + +function addV1Routes(app){ + const swaggerOptions = { + info: { + version: "v1", + title: "Webaverse API Documentation", + description: "Documentation for the Webaverse API server", + }, + components: { + securitySchemes: { + bearerAuth: { + type: "http", + scheme: "bearer", + bearerFormat: "JWT" + } + } + }, + filesPattern: '*.js', + swaggerUIPath: '/v1/api-docs', + baseDir: 
__dirname,
+    exposeSwaggerUI: true,
+    exposeApiDocs: true,
+    apiDocsPath: '/api/v1/api-docs'
+  };
+
+  expressJSDocSwagger(app)(swaggerOptions);
+
+/**
+ * Authentication payload
+ * @typedef {object} AuthPayload
+ * @property {string} authSecretKey.required - Auth Secret Key
+ */
+/**
+ * Authentication response
+ * @typedef {object} AuthResponse
+ * @property {string} status - The status of the authentication request (success/error)
+ * @property {string} accessToken - JWT token for authentication
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * POST /api/v1/authorizeServer
+ * @summary Get authentication token
+ * @param {AuthPayload} request.body.required - AuthPayload object for authentication
+ * @return {AuthResponse} 200 - success response
+ */
+app.post('/api/v1/authorizeServer', async (req, res) => {
+  return await handleServerSideAuth(req, res);
+});
+
+// WALLETS
+
+/**
+ * Response for wallet creation
+ * @typedef {object} WalletCreationResponse
+ * @property {string} status - The status of the creation request (success/error)
+ * @property {string} userMnemonic - The private key for the user (to be stored and NEVER shared)
+ * @property {string} userAddress - The public key for the user (to be stored)
+ * @property {string} error - If the status is error, the error can be read from here
+*/
+
+/**
+ * POST /api/v1/wallet
+ * @summary Create a wallet for a user
+ * @security bearerAuth
+ * @return {WalletCreationResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ */
+app.post('/api/v1/wallet', authenticateToken, async (req, res) => {
+  return await createWallet(req, res);
+});
+
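For reference, a minimal client-side sketch of the flow documented above (exchange the shared `authSecretKey` for a JWT via `POST /api/v1/authorizeServer`, then call the bearer-protected wallet route) could look like the following. The base URL and port are assumptions for illustration, and it presumes a JSON body parser (e.g. `express.json()`) is mounted on the app, which this patch does not add.

```js
// Illustrative only: client-side use of the auth + wallet routes added above.
// Assumes the API from api/index.js is reachable at localhost:8080 (adjust to
// match HTTP_PORT) and uses node-fetch, which is already a project dependency.
const fetch = require('node-fetch');

const API_BASE = 'http://localhost:8080/api/v1';

async function createUserWallet(authSecretKey) {
  // 1. Exchange the shared secret for a JWT access token.
  const authRes = await fetch(`${API_BASE}/authorizeServer`, {
    method: 'POST',
    headers: {'Content-Type': 'application/json'},
    body: JSON.stringify({authSecretKey}),
  });
  const {status, accessToken, error} = await authRes.json();
  if (status !== 'success') throw new Error(error);

  // 2. Call a bearer-protected route with the returned token.
  const walletRes = await fetch(`${API_BASE}/wallet`, {
    method: 'POST',
    headers: {Authorization: `Bearer ${accessToken}`},
  });
  // The mnemonic must be stored privately and never logged or shared.
  const {userMnemonic, userAddress} = await walletRes.json();
  return {userMnemonic, userAddress};
}

createUserWallet(process.env.AUTH_SECRET_KEY)
  .then(({userAddress}) => console.log('created wallet for', userAddress))
  .catch(console.error);
```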
+// TOKENS
+
+/**
+ * Response containing a single token
+ * @typedef {object} TokenResponse
+ * @property {string} status - The status of the request (success/error)
+ * @property {object} token - Token object returned
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * Response containing a token id
+ * @typedef {object} TokenIdResponse
+ * @property {string} status - The status of the request (success/error)
+ * @property {string} tokenId - Token id returned
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * Response containing a list of token ids
+ * @typedef {object} TokenIdListResponse
+ * @property {string} status - The status of the request (success/error)
+ * @property {object} tokenIds - Array of token ids returned
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * Response containing a list of tokens
+ * @typedef {object} TokenListResponse
+ * @property {string} status - The status of the request (success/error)
+ * @property {object} tokens - Array of token objects returned
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * Response reporting the status of a token operation
+ * @typedef {object} TokenStatusResponse
+ * @property {string} status - The status of the request (success/error)
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * Response containing a transfer signature
+ * @typedef {object} TokenSignatureResponse
+ * @property {string} status - The status of the request (success/error)
+ * @property {string} tokenId - The ID of the token being signed
+ * @property {string} signature - The signature authorizing the transfer
+ * @property {string} error - If the status is error, the error can be read from here
+ */
+
+/**
+ * GET /api/v1/tokens/:address/:mainnetAddress
+ * @summary List tokens for a user
+ * @security bearerAuth
+ * @return {TokenListResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ * @param {string} address.path.required - Address of the user to list tokens for
+ * @param {string} mainnetAddress.path.optional - Mainnet address of the user to list tokens for (optional)
+ */
+app.get('/api/v1/tokens/:address/:mainnetAddress?', authenticateToken, async (req, res) => {
+  return await listTokens(req, res, blockchain.web3);
+});
+
+/**
+ * GET /api/v1/token/:tokenId
+ * @summary Retrieve data for a non-fungible token
+ * @security bearerAuth
+ * @return {TokenResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ * @param {string} tokenId.path.required - Token to retrieve
+ */
+app.get('/api/v1/token/:tokenId', authenticateToken, async (req, res) => {
+  return await readToken(req, res);
+});
+
+/**
+ * GET /api/v1/token/:tokenStartId/:tokenEndId
+ * @summary Retrieve a range of tokens
+ * @security bearerAuth
+ * @return {TokenListResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ * @param {string} tokenStartId.path.required - First token to retrieve
+ * @param {string} tokenEndId.path.required - Last token in range to retrieve
+ */
+app.get('/api/v1/token/:tokenStartId/:tokenEndId', authenticateToken, async (req, res) => {
+  return await readTokenRange(req, res);
+});
+
+/**
+ * POST /api/v1/token
+ * @summary Create a non-fungible token with a file or IPFS hash
+ * @security bearerAuth
+ * @return {TokenIdListResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ * @param {string} userMnemonic.required - Mint the token using a user's private key
+ * @param {string} file.optional - File to upload to IPFS
+ * @param {string} resourceHash.optional - IPFS resource hash or other URI
+ * @param {number} quantity.optional - Number of tokens to mint
+*/
+app.post('/api/v1/token', authenticateToken, async (req, res) => {
+  return await createToken(req, res, blockchain);
+});
+
+/**
+ * DELETE /api/v1/token
+ * @summary Burn a token forever
+ * @security bearerAuth
+ * @param {string} tokenId.required - Token to delete
+ * @return {TokenStatusResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ */
+app.delete('/api/v1/token', authenticateToken, async (req, res) => {
+  return await deleteToken(req, res, blockchain);
+});
+
+/**
+ * POST /api/v1/token/send
+ * @summary Send a token from one user to another
+ * @security bearerAuth
+ * @return {TokenStatusResponse} 200 - success response
+ * @return {AuthResponse} 401 - authentication error response
+ * @param {string} tokenId.required - Token to be sent
+ * @param {string} fromUserAddress.required - Token sent by this user (public address)
+ * @param {string} toUserAddress.required - Token received by this user (public address)
+ */
+app.post('/api/v1/token/send', authenticateToken, async (req, res) => {
+  return await sendToken(req, res, blockchain);
+});
+
+/**
+ * POST /api/v1/token/signTransfer
+ * @summary Prepare a token to be transferred, either mainnet <-> sidechain or polygon <-> sidechain
+ * @return {TokenSignatureResponse}
200 - success response + * @return {object} 401 - forbidden request response + * @property {string} tokenId - Token to be sent + * @property {string} transferToChain - Transfer to this chain + */ + app.post('/api/v1/token/signTransfer', async (req, res) => { + return await signTransfer(req, res, blockchain); +}); + +} + +module.exports = { + addV1Routes +} \ No newline at end of file diff --git a/api/v1/routes/auth.js b/api/v1/routes/auth.js new file mode 100644 index 0000000..f3f8f21 --- /dev/null +++ b/api/v1/routes/auth.js @@ -0,0 +1,45 @@ +const {setCorsHeaders} = require("../../../utils.js"); +const {ResponseStatus} = require("../enums.js"); +const {development} = require("../environment.js"); +const {AUTH_SECRET_KEY, AUTH_TOKEN_SECRET} = require('../../config.js'); +const jwt = require('jsonwebtoken'); + +function authenticateToken(req, res, next) { + const authHeader = req.headers['authorization']; + const token = authHeader && authHeader.split(' ')[1]; + + if (!token) + return res.status(401).send() + + jwt.verify(token, AUTH_TOKEN_SECRET, (error, data) => { + if (error) + return res.sendStatus(403); + + const {authSecretKey} = data; + if (AUTH_SECRET_KEY !== authSecretKey) + return res.sendStatus(403); + + next() + }); +} + +// Compares a shared secret key and +async function handleServerSideAuth(req, res) { + if (development) setCorsHeaders(res); + const {authSecretKey} = req.body; + + if (!authSecretKey) + return res.json({status: ResponseStatus.Error, accessToken: null, error: "authSecretKey value was not found"}); + + if (authSecretKey != AUTH_SECRET_KEY) + return res.json({status: ResponseStatus.Error, accessToken: null, error: "authSecretKey value was invalid"}) + + const accessToken = jwt.sign({authSecretKey}, AUTH_TOKEN_SECRET); + + return res.json({status: ResponseStatus.Success, accessToken, error: null}) +} + +module.exports = { + handleServerSideAuth, + authenticateToken +} diff --git a/api/v1/routes/tokens.js b/api/v1/routes/tokens.js new file mode 100644 index 0000000..7201786 --- /dev/null +++ b/api/v1/routes/tokens.js @@ -0,0 +1,655 @@ +const path = require('path'); +const http = require('http'); +const bip39 = require('bip39'); +const {hdkey} = require('ethereumjs-wallet'); +const {getBlockchain, areAddressesCollaborator} = require('../../../blockchain.js'); +const {makePromise, setCorsHeaders} = require('../../../utils.js'); +const {getRedisItem, parseRedisItems, getRedisClient} = require('../../../redis.js'); +const { + proofOfAddressMessage, + unlockableMetadataKey, + encryptedMetadataKey, + redisPrefixes, + mainnetSignatureMessage, + nftIndexName, + burnAddress, + zeroAddress +} = require('../../../constants.js'); +const { + ENCRYPTION_MNEMONIC, + MINTING_FEE, + IPFS_HOST, + MAINNET_MNEMONIC, + PINATA_API_KEY, + PINATA_SECRET_API_KEY, + DEFAULT_TOKEN_DESCRIPTION +} = require('../../config.js'); +const {ResponseStatus} = require("../enums.js"); +const {runSidechainTransaction} = require("../../../tokens.js"); +const {production, development} = require("../environment.js"); + +const {jsonParse} = require('../../../utils.js'); + +const {encodeSecret, decodeSecret} = require('../../../encryption.js'); + +const pinataSDK = require('@pinata/sdk'); +const pinata = (PINATA_API_KEY && PINATA_API_KEY !== "") ? 
pinataSDK(PINATA_API_KEY, PINATA_SECRET_API_KEY) : null; + +const pinataOptions = { + pinataOptions: { + customPinPolicy: { + regions: [ + { + id: 'FRA1', + desiredReplicationCount: 1 + }, + { + id: 'NYC1', + desiredReplicationCount: 2 + } + ] + } + } +}; + +const redisClient = getRedisClient(); + +const network = production ? 'mainnet' : 'testnet'; + +const {Readable} = require('stream'); + +let web3, contracts; + +(async function () { + const blockchain = await getBlockchain(); + web3 = blockchain.web3; + contracts = blockchain.contracts; +})(); + + +// Takes an account as input +async function listTokens(req, res, web3) { + const {address, mainnetAddress} = req.params; + + if (development) setCorsHeaders(res); + try { + const [ + mainnetTokens, + sidechainTokens, + ] = await Promise.all([ + (async () => { + if (!mainnetAddress) return []; + const recoveredAddress = await web3[network].eth.accounts.recover(mainnetSignatureMessage, mainnetAddress); + if (!recoveredAddress) return []; + const p = makePromise(); + const args = `${nftIndexName} ${JSON.stringify(recoveredAddress)} INFIELDS 1 currentOwnerAddress LIMIT 0 1000000`.split(' ').concat([(err, result) => { + if (!err) { + const items = parseRedisItems(result); + p.accept({ + Items: items, + }); + } else { + p.reject(err); + } + }]); + redisClient.ft_search.apply(redisClient, args); + const o = await p; + + return (o && o.Items) || []; + })(), + (async () => { + const p = makePromise(); + const args = `${nftIndexName} ${JSON.stringify(address)} INFIELDS 1 currentOwnerAddress LIMIT 0 1000000`.split(' ').concat([(err, result) => { + if (!err) { + const items = parseRedisItems(result); + p.accept({ + Items: items, + }); + } else { + p.reject(err); + } + }]); + redisClient.ft_search.apply(redisClient, args); + const o = await p; + return (o && o.Items) || []; + })(), + ]); + const tokens = sidechainTokens + .concat(mainnetTokens) + .sort((a, b) => a.id - b.id) + .filter((token, i) => { // filter unique hashes + if (token === "0" || (token.properties.hash === "" && token.owner.address === zeroAddress)) + return false; + + for (let j = 0; j < i; j++) { + if (tokens[j].properties.hash === token.properties.hash && token.properties.hash !== "") + return false; + } + return true; + }); + return res.json({status: ResponseStatus.Success, tokens: JSON.stringify(tokens), error: null}); + } catch (error) { + return res.json({status: ResponseStatus.Error, tokens: null, error}); + } +} + +// Called by create token on successful resource upload +async function mintTokens(resHash, mnemonic, quantity, privateData, web3, contracts, res) { + let tokenIds, status; + const fullAmount = { + t: 'uint256', + v: new web3.utils.BN(1e9) + .mul(new web3.utils.BN(1e9)) + .mul(new web3.utils.BN(1e9)), + }; + + const fullAmountD2 = { + t: 'uint256', + v: fullAmount.v.div(new web3.utils.BN(2)), + }; + const wallet = hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(mnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(); + const address = wallet.getAddressString(); + + if (MINTING_FEE > 0) { + let allowance = await contracts['FT'].methods.allowance(address, contracts['NFT']._address).call(); + allowance = new web3.utils.BN(allowance, 0); + if (allowance.lt(fullAmountD2.v)) { + const result = await runSidechainTransaction(mnemonic)('FT', 'approve', contracts['NFT']._address, fullAmount.v); + status = result.status; + } else { + status = true; + } + } else status = true; + + if (status) { + const description = DEFAULT_TOKEN_DESCRIPTION; + + let fileName = 
resHash.split('/').pop(); + + let extName = path.extname(fileName).slice(1); + extName = extName === "" ? "png" : extName + extName = extName === "jpeg" ? "jpg" : extName + + fileName = extName ? fileName.slice(0, -(extName.length + 1)) : fileName; + + const {hash} = JSON.parse(Buffer.from(resHash, 'utf8').toString('utf8')); + + const result = await runSidechainTransaction(mnemonic)('NFT', 'mint', address, hash, fileName, extName, description, quantity); + status = result.status; + + if(privateData) + { + const encryptedData = encodeSecret(privateData); + await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', hash, unlockableMetadataKey, encryptedData); + await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', hash, encryptedMetadataKey, encryptedData); + } + + const tokenId = new web3.utils.BN(result.logs[0].topics[3].slice(2), 16).toNumber(); + tokenIds = [tokenId, tokenId + quantity - 1]; + } + return res.json({status: ResponseStatus.Success, tokenIds, error: null}); +} + +async function createToken(req, res, {web3, contracts}) { + const {mnemonic, quantity, privateData} = req.body; + + try { + let {resourceHash} = req.body; + + const file = req.files && req.files[0]; + + if (!bip39.validateMnemonic(mnemonic)) { + return res.json({status: ResponseStatus.Error, error: "Invalid mnemonic"}); + } + + if (!resourceHash && !file) { + return res.json({status: ResponseStatus.Error, error: "POST did not include a file or resourceHash"}); + } + + // Check if there are any files -- if there aren't, check if there's a hash + if (resourceHash && file) { + return res.json({status: ResponseStatus.Error, error: "POST should include a resourceHash *or* file but not both"}); + } + + if (file) { + const readableStream = new Readable({ + read() { + this.push(Buffer.from(file)); + this.push(null); + } + }); + + // Pinata API keys are valid, so this is probably what the user wants + if (pinata) { + const {IpfsHash} = pinata.pinFileToIPFS(readableStream, pinataOptions) + if (IpfsHash) mintTokens(IpfsHash, mnemonic, quantity, web3, contracts, res); + else res.json({status: ResponseStatus.Error, error: "Error pinning to Pinata service, hash was not returned"}); + } else { + // Upload to our own IPFS node + const req = http.request(IPFS_HOST, {method: 'POST'}, res => { + const bufferString = []; + res.on('data', data => { + bufferString.push(data); + }); + res.on('end', async () => { + const buffer = Buffer.concat(bufferString); + const string = buffer.toString('utf8'); + const {hash} = JSON.parse(string); + if (hash) mintTokens(hash, mnemonic, quantity, web3, contracts, res); + else return res.json({status: ResponseStatus.Error, error: "Error getting hash back from IPFS node"}); + }); + res.on('error', err => { + console.warn(err.stack); + return res.json({status: ResponseStatus.Error, error: err.stack}); + }); + }); + req.on('error', err => { + console.warn(err.stack); + res.json({status: ResponseStatus.Error, error: err.stack}); + }); + file.pipe(req); + } + } else { + mintTokens(resourceHash, mnemonic, quantity, privateData, web3, contracts, res); + } + + } catch (error) { + console.warn(error.stack); + return res.json({status: ResponseStatus.Error, tokenIds: [], error}); + } +} + +async function readToken(req, res) { + const {tokenId} = req.params; + let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); + let token = o.Item; + + if (development) setCorsHeaders(res); + if (token) { + return res.json({status: ResponseStatus.Success, token, error: null}) + } else { + return 
res.json({status: ResponseStatus.Error, token: null, error: "The token could not be found"}) + } +} + +// Same as read token, but return unlockable in plaintext +async function readTokenWithUnlockable(req, res) { + const {tokenId} = req.params; + let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); + let token = o.Item; + + if (development) setCorsHeaders(res); + if (token) { + if(token[unlockableMetadataKey] !== undefined && token[unlockableMetadataKey] !== ""){ + let value = token[unlockableMetadataKey]; + value = jsonParse(value); + if (value !== null) { + let {ciphertext, tag} = value; + ciphertext = Buffer.from(ciphertext, 'base64'); + tag = Buffer.from(tag, 'base64'); + value = decodeSecret(ENCRYPTION_MNEMONIC, {ciphertext, tag}); + } + token[unlockableMetadataKey] = value; + } + return res.json({status: ResponseStatus.Success, token, error: null}) + } else { + return res.json({status: ResponseStatus.Error, token: null, error: "The token could not be found"}) + } +} + +// async function readUnlockable(req, res) { +// const {tokenId} = req.params; +// let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); +// let token = o.Item; +// let value = ""; +// if (development) setCorsHeaders(res); +// if (token) { +// if(token[unlockableMetadataKey] !== undefined && token[unlockableMetadataKey] !== ""){ +// value = token[unlockableMetadataKey]; +// value = jsonParse(value); +// if (value !== null) { +// let {ciphertext, tag} = value; +// ciphertext = Buffer.from(ciphertext, 'base64'); +// tag = Buffer.from(tag, 'base64'); +// value = decodeSecret(ENCRYPTION_MNEMONIC, {ciphertext, tag}); +// } +// token[unlockableMetadataKey] = value; +// return res.json({status: ResponseStatus.Success, value, error: null}) +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token could not be unlocked"}) +// } +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token could not be found"}) +// } +// } + +// async function readEncryptedData(req, res) { +// const {tokenId} = req.params; +// let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); +// let token = o.Item; +// if (development) setCorsHeaders(res); +// if (token) { +// if(token[encryptedMetadataKey] !== undefined && token[encryptedMetadataKey] !== ""){ +// const url = token[encryptedMetadataKey]; +// await fetch(url).then(data => res.send(data)); +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token does not appear to have encrypted data"}) +// } +// } else { +// return res.json({status: ResponseStatus.Error, value: null, error: "The token could not be found"}) +// } +// } + +async function readTokenRange(req, res) { + if (development) setCorsHeaders(res); + try { + const {tokenStartId, tokenEndId} = req.params; + + if (tokenStartId <= 0 || tokenEndId < tokenStartId || (tokenEndId - tokenStartId) > 100) + return res.json({status: ResponseStatus.Error, error: "Invalid range for tokens"}) + + + const promise = makePromise(); + const args = `${nftIndexName} * filter id ${tokenStartId} ${tokenEndId} LIMIT 0 1000000`.split(' ').concat([(err, result) => { + if (!err) { + const items = parseRedisItems(result); + promise.accept({ + Items: items, + }); + } else { + promise.reject(err); + } + }]); + redisClient.ft_search.apply(redisClient, args); + const o = await promise; + + let tokens = o.Items + .filter(token => token !== null) + .sort((a, b) => a.id - b.id) + .filter((token, i) => { // 
filter unique hashes + + if (token.properties.hash === "" && token.owner.address === zeroAddress) + return false; + + for (let j = 0; j < i; j++) + if (tokens[j].properties.hash === token.properties.hash && token.properties.hash !== "") + return false; + + return true; + }); + + + return res.json({status: ResponseStatus.Success, tokens, error: null}) + } catch (error) { + return res.json({status: ResponseStatus.Error, tokens: [], error}) + } +} + +// TODO: Try to unpin from pinata if we are using pinata +async function deleteToken(req, res) { + try { + const {tokenId} = req.body; + + let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); + let token = o.Item; + + const address = token.owner.address; + + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + const randomHash = Math.random().toString(36); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, randomHash); + const result = await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'transferFrom', address, burnAddress, tokenId); + + if (result) console.log("Result of delete transaction:", result); + return res.json({status: ResponseStatus.Success, error: null}) + } catch (error) { + return res.json({status: ResponseStatus.Error, error}) + } +} + +async function sendToken(req, res) { + try { + const {fromUserAddress, toUserAddress, tokenId} = req.body; + const quantity = req.body.quantity ?? 1; + + let status = true; + let error = null; + for (let i = 0; i < quantity; i++) { + try { + const isApproved = await contracts.NFT.methods.isApprovedForAll(fromUserAddress, contracts['Trade']._address).call(); + if (!isApproved) { + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'setApprovalForAll', contracts['Trade']._address, true); + } + + const result = await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'transferFrom', fromUserAddress, toUserAddress, tokenId); + status = status && result.status; + } catch (err) { + console.warn(err.stack); + status = false; + error = err; + break; + } + } + + if (status) { + return res.json({status: ResponseStatus.Success, message: 'Transferred ' + tokenId + ' to ' + toUserAddress, error: null}) + } else { + return res.json({status: ResponseStatus.Error, message: 'Transfer request could not be fulfilled: ' + status, error: error}) + } + } catch (error) { + return res.json({status: ResponseStatus.Error, message: 'Error sending token', error: error}) + } +} + +async function signTransfer(req, res, blockchain) { + console.warn("Method not implemented", req, res, blockchain); +} + +async function getPrivateData(req, res) { + const {signatures, id} = req.body; + const key = unlockableMetadataKey; + const addresses = []; + let unlockSuccessful = false; + for (const signature of signatures) { + try { + let address = await web3.mainnetsidechain.eth.accounts.recover(proofOfAddressMessage, signature); + address = address.toLowerCase(); + addresses.push(address); + unlockSuccessful = true; + } catch (err) { + console.warn(err.stack); + unlockSuccessful = false; + } + } + + if (!unlockSuccessful) + return res.json({status: ResponseStatus.error, error: "Failed to unlock private token data"}); + + const hash = await contracts.mainnetsidechain.NFT.methods.getHash(id).call(); + const isCollaborator = await areAddressesCollaborator(addresses, hash, id); + if (isCollaborator) { + let value = await contracts.mainnetsidechain.NFT.methods.getMetadata(hash, key).call(); + value = jsonParse(value); + if (value !== null) { + let 
{ciphertext, tag} = value; + ciphertext = Buffer.from(ciphertext, 'base64'); + tag = Buffer.from(tag, 'base64'); + value = decodeSecret(ENCRYPTION_MNEMONIC, {ciphertext, tag}); + } + return res.json({status: ResponseStatus.success, payload: value, error: null}); + } else { + return res.json({status: ResponseStatus.error, payload: null, error: `Address is not a collaborator on ${hash}`}); + } +} + +// TODO: Try to unpin from pinata if we are using pinata and already have file +async function updatePublicAsset(req, res, {contracts}) { + const {mnemonic, tokenId, resourceHash} = req.body; + const file = req.files && req.files[0]; + try { + if (!bip39.validateMnemonic(mnemonic)) { + return res.json({status: ResponseStatus.Error, error: "Invalid mnemonic"}); + } + + if (!resourceHash && !file) { + return res.json({status: ResponseStatus.Error, error: "POST did not include a file or resourceHash"}); + } + + // Check if there are any files -- if there aren't, check if there's a hash + if (resourceHash && file) { + return res.json({status: ResponseStatus.Error, error: "POST should include a resourceHash *or* file but not both"}); + } + + if (file) { + const readableStream = new Readable({ + read() { + this.push(Buffer.from(file)); + this.push(null); + } + }); + + // Pinata API keys are valid, so this is probably what the user wants + if (pinata) { + // TODO: Try to unpin existing pinata hash + const {IpfsHash} = pinata.pinFileToIPFS(readableStream, pinataOptions); + if (IpfsHash){ + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, IpfsHash); + } + else res.json({status: ResponseStatus.Error, error: "Error pinning to Pinata service, hash was not returned"}); + } else { + // Upload to our own IPFS node + const req = http.request(IPFS_HOST, {method: 'POST'}, res => { + const bufferString = []; + res.on('data', data => { + bufferString.push(data); + }); + res.on('end', async () => { + const buffer = Buffer.concat(bufferString); + const string = buffer.toString('utf8'); + const {hash} = JSON.parse(string); + if (hash){ + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, hash); + } + else return res.json({status: ResponseStatus.Error, error: "Error getting hash back from IPFS node"}); + }); + res.on('error', err => { + console.warn(err.stack); + return res.json({status: ResponseStatus.Error, error: err.stack}); + }); + }); + req.on('error', err => { + console.warn(err.stack); + res.json({status: ResponseStatus.Error, error: err.stack}); + }); + file.pipe(req); + } + } else { + const currentHash = await contracts['mainnetsidechain'].NFT.methods.getHash(tokenId).call(); + await runSidechainTransaction(MAINNET_MNEMONIC)('NFT', 'updateHash', currentHash, resourceHash); + } + } catch (error) { + console.warn(error.stack); + return res.json({status: ResponseStatus.Error, tokenIds: [], error}); + } +} + +// // TODO: Try to unpin from pinata if we are using pinata +// async function updatePrivateData(req, res, {contracts}) { +// async function updateHashForKeys(token, privateDataHash){ +// // TODO: +// // First, check if it already has this private data +// // if(token.privateData) +// // If yes, check if pinata is true -- if it is, unpin the hash +// // Else, unpin the hash for local node +// // Set the new metadata + +// // const encryptedData = 
encodeSecret(privateData); +// // await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', token.hash, unlockableMetadataKey, encryptedData); +// // await runSidechainTransaction(mnemonic)('NFT', 'setMetadata', token.hash, encryptedMetadataKey, encryptedData); + +// } +// try { +// const {mnemonic, tokenId, resourceHash, privateData} = req.body; +// let o = await getRedisItem(tokenId, redisPrefixes.mainnetsidechainNft); +// let token = o.Item; +// const file = req.files && req.files[0]; +// if (!bip39.validateMnemonic(mnemonic)) { +// return res.json({status: ResponseStatus.Error, error: "Invalid mnemonic"}); +// } + +// if (!resourceHash && !file && !privateData) { +// return res.json({status: ResponseStatus.Error, error: "POST did not include a privateData field or a file or resourceHash"}); +// } + +// // Check if there are any files -- if there aren't, check if there's a hash +// if (resourceHash && file) { +// return res.json({status: ResponseStatus.Error, error: "POST should include a privateData field, resourceHash *or* file but not more than one"}); +// } + +// if (file) { +// const readableStream = new Readable({ +// read() { +// this.push(Buffer.from(file)); +// this.push(null); +// } +// }); + +// // Pinata API keys are valid, so this is probably what the user wants +// if (pinata) { +// // TODO: Try to unpin existing pinata hash +// const {IpfsHash} = pinata.pinFileToIPFS(readableStream, pinataOptions); +// if (IpfsHash){ +// updateHashForKeys(token, IpfsHash); +// } +// else res.json({status: ResponseStatus.Error, error: "Error pinning to Pinata service, hash was not returned"}); +// } else { +// // Upload to our own IPFS node +// const req = http.request(IPFS_HOST, {method: 'POST'}, res => { +// const bufferString = []; +// res.on('data', data => { +// bufferString.push(data); +// }); +// res.on('end', async () => { +// const buffer = Buffer.concat(bufferString); +// const string = buffer.toString('utf8'); +// const {hash} = JSON.parse(string); +// if (hash){ +// updateHashForKeys(token, hash); +// } +// else return res.json({status: ResponseStatus.Error, error: "Error getting hash back from IPFS node"}); +// }); +// res.on('error', err => { +// console.warn(err.stack); +// return res.json({status: ResponseStatus.Error, error: err.stack}); +// }); +// }); +// req.on('error', err => { +// console.warn(err.stack); +// res.json({status: ResponseStatus.Error, error: err.stack}); +// }); +// file.pipe(req); +// } +// } else { +// updateHashForKeys(token, resourceHash); +// } +// } catch (error) { +// console.warn(error.stack); +// return res.json({status: ResponseStatus.Error, tokenIds: [], error}); +// } +// } + +module.exports = { + listTokens, + createToken, + updatePublicAsset, + readToken, + readTokenWithUnlockable, + readTokenRange, + deleteToken, + sendToken, + getPrivateData, + signTransfer, + // readEncryptedData +} diff --git a/api/v1/routes/wallet.js b/api/v1/routes/wallet.js new file mode 100644 index 0000000..16883ac --- /dev/null +++ b/api/v1/routes/wallet.js @@ -0,0 +1,22 @@ +const bip39 = require('bip39'); +const {hdkey} = require('ethereumjs-wallet'); +const {setCorsHeaders} = require("../../../utils.js"); +const {ResponseStatus} = require("../enums.js"); +const {development} = require("../environment.js"); + +// Generates a new mnemonic, private key and public address and hands the mnemonic back +async function createWallet(req, res) { + if (development) setCorsHeaders(res); + try { + const userMnemonic = bip39.generateMnemonic(); + const wallet = 
hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(userMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(); + const userAddress = wallet.getAddressString(); + return res.json({status: ResponseStatus.Success, userMnemonic, userAddress, error: null}); + } catch (error) { + return res.json({status: ResponseStatus.Error, userMnemonic: null, userAddress: null, error}); + } +} + +module.exports = { + createWallet +} diff --git a/config.js b/config.js new file mode 100644 index 0000000..a934d00 --- /dev/null +++ b/config.js @@ -0,0 +1,59 @@ +let config = require('fs').existsSync(__dirname + '/../config.json') ? require('../config.json') : require('./config.default.json'); + +const MAINNET_MNEMONIC = process.env.MAINNET_MNEMONIC || config.MAINNET_MNEMONIC || config.mainnetMnemonic; +const TESTNET_MNEMONIC = process.env.TESTNET_MNEMONIC || config.TESTNET_MNEMONIC || config.testnetMnemonic; +const POLYGON_MNEMONIC = process.env.POLYGON_MNEMONIC || config.POLYGON_MNEMONIC || config.polygonMnemonic; +const TESTNET_POLYGON_MNEMONIC = process.env.TESTNET_POLYGON_MNEMONIC || config.TESTNET_POLYGON_MNEMONIC || config.testnetpolygonMnemonic; +const INFURA_PROJECT_ID = process.env.INFURA_PROJECT_ID || config.INFURA_PROJECT_ID || config.infuraProjectId; +const ENCRYPTION_MNEMONIC = process.env.ENCRYPTION_MNEMONIC || config.ENCRYPTION_MNEMONIC || config.encryptionMnemonic; +const POLYGON_VIGIL_KEY = process.env.POLYGON_VIGIL_KEY || config.POLYGON_VIGIL_KEY || config.polygonVigilKey; +const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID || config.GITHUB_CLIENT_ID || config.githubClientId; +const GITHUB_CLIENT_SECRET = process.env.GITHUB_CLIENT_SECRET || config.GITHUB_CLIENT_SECRET || config.githubClientSecret; +const DISCORD_CLIENT_ID = process.env.DISCORD_CLIENT_ID || config.DISCORD_CLIENT_ID || config.discordClientId; +const DISCORD_CLIENT_SECRET = process.env.DISCORD_CLIENT_SECRET || config.DISCORD_CLIENT_SECRET || config.discordClientSecret; + +const REDIS_HOST = process.env.REDIS_HOST || config.REDIS_HOST || config.redisHost; +const REDIS_PORT = process.env.REDIS_PORT || config.REDIS_PORT|| config.redisPort; +const REDIS_KEY = process.env.REDIS_KEY || config.REDIS_KEY || config.redisKey; + +const HTTP_PORT = parseInt(process.env.HTTP_PORT || config.HTTP_PORT, 10) || 80; +const HTTPS_PORT = parseInt(process.env.HTTPS_PORT || config.HTTPS_PORT, 10) || 443; +const PUBLIC_IP_ADDRESS = process.env.PUBLIC_IP_ADDRESS || config.PUBLIC_IP_ADDRESS || config.publicIp; +const PRIVATE_IP_ADDRESS = process.env.PRIVATE_IP_ADDRESS || config.PRIVATE_IP_ADDRESS || config.privateIp; +const IPFS_HOST = process.env.IPFS_HOST || config.IPFS_HOST || config.storageHost; +const ETHEREUM_HOST = process.env.ETHEREUM_HOST || config.ETHEREUM_HOST || config.ethereumHost; +const DEFAULT_TOKEN_DESCRIPTION = process.env.DEFAULT_TOKEN_DESCRIPTION || config.DEFAULT_TOKEN_DESCRIPTION || ""; +const AUTH_TOKEN_SECRET = process.env.AUTH_TOKEN_SECRET || config.AUTH_TOKEN_SECRET || ""; +const AUTH_SECRET_KEY = process.env.AUTH_SECRET_KEY || config.AUTH_SECRET_KEY || ""; +const IPFS_HOST_PORT = process.env.IPFS_HOST_PORT || config.IPFS_HOST_PORT || 8081; + +const MINTING_FEE = process.env.MINTING_FEE || config.MINTING_FEE || 10; + +module.exports = { + IPFS_HOST_PORT, + AUTH_SECRET_KEY, + AUTH_TOKEN_SECRET, + PUBLIC_IP_ADDRESS, + PRIVATE_IP_ADDRESS, + HTTP_PORT, + HTTPS_PORT, + REDIS_HOST, + REDIS_PORT, + REDIS_KEY, + GITHUB_CLIENT_ID, + GITHUB_CLIENT_SECRET, + DISCORD_CLIENT_ID, + DISCORD_CLIENT_SECRET, + MAINNET_MNEMONIC, + TESTNET_MNEMONIC, + 
POLYGON_MNEMONIC, + TESTNET_POLYGON_MNEMONIC, + INFURA_PROJECT_ID, + ENCRYPTION_MNEMONIC, + POLYGON_VIGIL_KEY, + DEFAULT_TOKEN_DESCRIPTION, + MINTING_FEE, + ETHEREUM_HOST, + IPFS_HOST, + config +}; \ No newline at end of file From 40d86fc90ecc39182414648f2fdcdca84400a81d Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 17:31:10 -0700 Subject: [PATCH 3/7] Add eslint --- .eslintrc.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .eslintrc.json diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..da1eefd --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,16 @@ +{ + "env": { + "node": true, + "commonjs": true, + "es2021": true + }, + "extends": "eslint:recommended", + "parserOptions": { + "ecmaVersion": 12 + }, + "rules": { + "object-curly-spacing": ["error", "never"], + "no-control-regex": "off", + "no-useless-escape": "off" + } +} From 60de2a1ff2e65ed401f2b37d3a1732d3ec18695f Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 18:06:41 -0700 Subject: [PATCH 4/7] Refactor environment vars out of constants, consolidate AWS init calls to aws.js --- .env.default | 14 ++++----- README.md | 4 +-- account-manager.js | 20 +++---------- api/index.js | 2 +- api/v1/routes/auth.js | 2 +- api/v1/routes/tokens.js | 2 +- aws.js | 11 +++---- blockchain.js | 26 +++++++---------- config.default.json | 16 +++++----- config.js | 28 +++++++++++++----- constants.js | 44 +++++++++++++++++++++------- index.js | 47 +++++++---------------------- package-lock.json | 5 +++- redis.js | 4 +-- routes/analytics.js | 15 +--------- routes/preview.js | 4 +-- routes/sign.js | 44 +++++++++++++--------------- routes/unlock.js | 65 +++++++++++++++++------------------------ routes/worlds.js | 20 ++----------- tokens.js | 10 +++---- 20 files changed, 164 insertions(+), 219 deletions(-) diff --git a/.env.default b/.env.default index c66198f..02b4e8b 100644 --- a/.env.default +++ b/.env.default @@ -1,16 +1,14 @@ -accessKeyId="" -secretAccessKey="" githubClientId="" githubClientSecret="" discordClientId="" discordClientSecret="" -infuraProjectId="" -polygonVigilKey="" +INFURA_PROJECT_ID="" +POLYGON_VIGIL_KEY="" infuraProjectSecret="" -mainnetMnemonic="" -polygonMnemonic="" -testnetMnemonic="" -testnetpolygonMnemonic="" +MAINNET_MNEMONIC="" +POLYGON_MNEMONIC="" +TESTNET_MNEMONIC="" +TESTNET_POLYGON_MNEMONIC="" network="kovan" infuraNetwork="rinkeby" githubPagesDomain="exokit.dev" diff --git a/README.md b/README.md index a15098e..3e4d4e4 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ Node server hosted on AWS, mainly used for REST endpoints. ``` { - "accessKeyId": "", - "secretAccessKey": "" + "AWS_ACCESS_KEY_ID": "", + "AWS_SECRET_ACCESS_KEY": "" } ``` diff --git a/account-manager.js b/account-manager.js index 7614b8c..618f067 100644 --- a/account-manager.js +++ b/account-manager.js @@ -1,19 +1,7 @@ -const AWS = require('aws-sdk'); const blockchain = require('./blockchain.js'); -let config = require('fs').existsSync('./config.json') ? 
require('./config.json') : null; +const {ddb} = require('./aws.js'); +const {tableNames} = require('./constants.js'); -const accessKeyId = process.env.accessKeyId || config.accessKeyId; -const secretAccessKey = process.env.secretAccessKey || config.secretAccessKey; - -const awsConfig = new AWS.Config({ - credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, - }), - region: 'us-west-1', -}); -const ddb = new AWS.DynamoDB(awsConfig); -const tableName = 'users'; const keyName = 'test-users.cache'; const _makePromise = () => { @@ -37,7 +25,7 @@ class AccountManager { } async load() { const tokenItem = await ddb.getItem({ - TableName: tableName, + TableName: tableNames.user, Key: { email: {S: keyName}, } @@ -47,7 +35,7 @@ class AccountManager { const _save = async () => { await ddb.putItem({ - TableName: tableName, + TableName: tableNames.user, Item: { email: {S: keyName}, users: {S: JSON.stringify(this.users)}, diff --git a/api/index.js b/api/index.js index 5b1fbc5..c5b0b57 100644 --- a/api/index.js +++ b/api/index.js @@ -5,7 +5,7 @@ const fileUpload = require('express-fileupload'); const {addV1Routes} = require("./v1/index.js"); -const {HTTP_PORT} = require('./config.js'); +const {HTTP_PORT} = require('../config.js'); const app = express(); diff --git a/api/v1/routes/auth.js b/api/v1/routes/auth.js index f3f8f21..4d6aec4 100644 --- a/api/v1/routes/auth.js +++ b/api/v1/routes/auth.js @@ -1,7 +1,7 @@ const {setCorsHeaders} = require("../../../utils.js"); const {ResponseStatus} = require("../enums.js"); const {development} = require("../environment.js"); -const {AUTH_SECRET_KEY, AUTH_TOKEN_SECRET} = require('../../config.js'); +const {AUTH_SECRET_KEY, AUTH_TOKEN_SECRET} = require('../../../config.js'); const jwt = require('jsonwebtoken'); function authenticateToken(req, res, next) { diff --git a/api/v1/routes/tokens.js b/api/v1/routes/tokens.js index 7201786..7798246 100644 --- a/api/v1/routes/tokens.js +++ b/api/v1/routes/tokens.js @@ -23,7 +23,7 @@ const { PINATA_API_KEY, PINATA_SECRET_API_KEY, DEFAULT_TOKEN_DESCRIPTION -} = require('../../config.js'); +} = require('../../../config.js'); const {ResponseStatus} = require("../enums.js"); const {runSidechainTransaction} = require("../../../tokens.js"); const {production, development} = require("../environment.js"); diff --git a/aws.js b/aws.js index 69c5cf7..0f9cca4 100644 --- a/aws.js +++ b/aws.js @@ -1,16 +1,13 @@ const stream = require('stream'); const AWS = require('aws-sdk'); -let config = require('fs').existsSync('./config.json') ? require('./config.json') : null; - -const accessKeyId = process.env.accessKeyId || config.accessKeyId; -const secretAccessKey = process.env.secretAccessKey || config.secretAccessKey; +const {AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION} = require('./config.js'); const awsConfig = new AWS.Config({ credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, + AWS_ACCESS_KEY_ID, + AWS_SECRET_ACCESS_KEY, }), - region: 'us-west-1', + region: AWS_REGION, }); const s3 = new AWS.S3(awsConfig); diff --git a/blockchain.js b/blockchain.js index d6c41ea..1f104a5 100644 --- a/blockchain.js +++ b/blockchain.js @@ -4,11 +4,7 @@ const dns = require('dns'); const https = require('https'); const fetch = require('node-fetch'); const Web3 = require('web3'); -const {polygonVigilKey, ethereumHost} = require('./constants.js'); - -let config = require('fs').existsSync('./config.json') ? 
require('./config.json') : null; - -const infuraProjectId = process.env.infuraProjectId || config.infuraProjectId; +const {INFURA_PROJECT_ID, POLYGON_VIGIL_KEY, ETHEREUM_HOST} = require('./config.js') let addresses, abis, @@ -40,12 +36,12 @@ const loadPromise = (async() => { fetch('https://contracts.webaverse.com/config/addresses.js').then(res => res.text()).then(s => JSON.parse(s.replace(/^\s*export\s*default\s*/, ''))), fetch('https://contracts.webaverse.com/config/abi.js').then(res => res.text()).then(s => JSON.parse(s.replace(/^\s*export\s*default\s*/, ''))), new Promise((accept, reject) => { - dns.resolve4(ethereumHost, (err, addresses) => { + dns.resolve4(ETHEREUM_HOST, (err, addresses) => { if (!err) { if (addresses.length > 0) { accept(addresses[0]); } else { - reject(new Error('no addresses resolved for ' + ethereumHost)); + reject(new Error('no addresses resolved for ' + ETHEREUM_HOST)); } } else { reject(err); @@ -82,36 +78,36 @@ const loadPromise = (async() => { web3 = { mainnet: new Web3(new Web3.providers.HttpProvider( - `https://mainnet.infura.io/v3/${infuraProjectId}` + `https://mainnet.infura.io/v3/${INFURA_PROJECT_ID}` )), mainnetsidechain: new Web3(new Web3.providers.HttpProvider( `${gethNodeUrl}:${ports.mainnetsidechain}` )), /* testnet: new Web3(new Web3.providers.HttpProvider( - `https://rinkeby.infura.io/v3/${infuraProjectId}` + `https://rinkeby.infura.io/v3/${INFURA_PROJECT_ID}` )), testnetsidechain: new Web3(new Web3.providers.HttpProvider( `${gethNodeUrl}:${ports.testnetsidechain}` )), */ polygon: new Web3(new Web3.providers.HttpProvider( - `https://rpc-mainnet.maticvigil.com/v1/${polygonVigilKey}` + `https://rpc-mainnet.maticvigil.com/v1/${POLYGON_VIGIL_KEY}` )), /* testnetpolygon: new Web3(new Web3.providers.HttpProvider( - `https://rpc-mumbai.maticvigil.com/v1/${polygonVigilKey}` + `https://rpc-mumbai.maticvigil.com/v1/${POLYGON_VIGIL_KEY}` )), */ }; web3socketProviderUrls = { - mainnet: `wss://mainnet.infura.io/ws/v3/${infuraProjectId}`, + mainnet: `wss://mainnet.infura.io/ws/v3/${INFURA_PROJECT_ID}`, mainnetsidechain: `${gethNodeWSUrl}:${ports.mainnetsidechainWs}`, - // testnet: `wss://rinkeby.infura.io/ws/v3/${infuraProjectId}`, + // testnet: `wss://rinkeby.infura.io/ws/v3/${INFURA_PROJECT_ID}`, // testnetsidechain: `${gethNodeWSUrl}:${ports.testnetsidechainWs}`, - polygon: `wss://rpc-webverse-mainnet.maticvigil.com/v1/${polygonVigilKey}`, - // testnetpolygon: `wss://rpc-mumbai.maticvigil.com/ws/v1/${polygonVigilKey}`, + polygon: `wss://rpc-webverse-mainnet.maticvigil.com/v1/${POLYGON_VIGIL_KEY}`, + // testnetpolygon: `wss://rpc-mumbai.maticvigil.com/ws/v1/${POLYGON_VIGIL_KEY}`, }; /* web3socketProviders = {}; diff --git a/config.default.json b/config.default.json index d5c085b..22810b5 100644 --- a/config.default.json +++ b/config.default.json @@ -1,17 +1,17 @@ { - "accessKeyId": "", - "secretAccessKey": "", + "AWS_ACCESS_KEY_ID": "", + "AWS_SECRET_ACCESS_KEY": "", "githubClientId": "", "githubClientSecret": "", "discordClientId": "", "discordClientSecret": "", - "infuraProjectId": "", - "polygonVigilKey": "", + "INFURA_PROJECT_ID": "", + "POLYGON_VIGIL_KEY": "", "infuraProjectSecret": "", - "mainnetMnemonic": "", - "polygonMnemonic": "", - "testnetMnemonic": "", - "testnetpolygonMnemonic": "", + "MAINNET_MNEMONIC": "", + "POLYGON_MNEMONIC": "", + "TESTNET_MNEMONIC": "", + "TESTNET_POLYGON_MNEMONIC": "", "network": "kovan", "infuraNetwork": "rinkeby", "githubPagesDomain": "exokit.dev", diff --git a/config.js b/config.js index a934d00..e36e876 100644 --- 
a/config.js +++ b/config.js @@ -1,4 +1,8 @@ -let config = require('fs').existsSync(__dirname + '/../config.json') ? require('../config.json') : require('./config.default.json'); +let config = require('fs').existsSync(__dirname + '/config.json') ? require('./config.json') : require('./config.default.json'); + +const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || config.AWS_ACCESS_KEY_ID || config.accessKeyId; +const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || config.AWS_SECRET_ACCESS_KEY || config.secretAccessKey; +const AWS_REGION = process.env.AWS_REGION || config.AWS_REGION || config.awsRegion || 'us-west-1'; const MAINNET_MNEMONIC = process.env.MAINNET_MNEMONIC || config.MAINNET_MNEMONIC || config.mainnetMnemonic; const TESTNET_MNEMONIC = process.env.TESTNET_MNEMONIC || config.TESTNET_MNEMONIC || config.testnetMnemonic; @@ -6,31 +10,40 @@ const POLYGON_MNEMONIC = process.env.POLYGON_MNEMONIC || config.POLYGON_MNEMONIC const TESTNET_POLYGON_MNEMONIC = process.env.TESTNET_POLYGON_MNEMONIC || config.TESTNET_POLYGON_MNEMONIC || config.testnetpolygonMnemonic; const INFURA_PROJECT_ID = process.env.INFURA_PROJECT_ID || config.INFURA_PROJECT_ID || config.infuraProjectId; const ENCRYPTION_MNEMONIC = process.env.ENCRYPTION_MNEMONIC || config.ENCRYPTION_MNEMONIC || config.encryptionMnemonic; -const POLYGON_VIGIL_KEY = process.env.POLYGON_VIGIL_KEY || config.POLYGON_VIGIL_KEY || config.polygonVigilKey; +const POLYGON_VIGIL_KEY = process.env.POLYGON_VIGIL_KEY || config.POLYGON_VIGIL_KEY || config.polygonVigilKey || `1bdde9289621d9d420488a9804254f4a958e128b`; const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID || config.GITHUB_CLIENT_ID || config.githubClientId; const GITHUB_CLIENT_SECRET = process.env.GITHUB_CLIENT_SECRET || config.GITHUB_CLIENT_SECRET || config.githubClientSecret; const DISCORD_CLIENT_ID = process.env.DISCORD_CLIENT_ID || config.DISCORD_CLIENT_ID || config.discordClientId; const DISCORD_CLIENT_SECRET = process.env.DISCORD_CLIENT_SECRET || config.DISCORD_CLIENT_SECRET || config.discordClientSecret; const REDIS_HOST = process.env.REDIS_HOST || config.REDIS_HOST || config.redisHost; -const REDIS_PORT = process.env.REDIS_PORT || config.REDIS_PORT|| config.redisPort; -const REDIS_KEY = process.env.REDIS_KEY || config.REDIS_KEY || config.redisKey; +const REDIS_PORT = process.env.REDIS_PORT || config.REDIS_PORT|| config.redisPort || 6369; +const REDIS_KEY = process.env.REDIS_KEY || config.REDIS_KEY || config.redisKey || "default"; const HTTP_PORT = parseInt(process.env.HTTP_PORT || config.HTTP_PORT, 10) || 80; const HTTPS_PORT = parseInt(process.env.HTTPS_PORT || config.HTTPS_PORT, 10) || 443; const PUBLIC_IP_ADDRESS = process.env.PUBLIC_IP_ADDRESS || config.PUBLIC_IP_ADDRESS || config.publicIp; const PRIVATE_IP_ADDRESS = process.env.PRIVATE_IP_ADDRESS || config.PRIVATE_IP_ADDRESS || config.privateIp; -const IPFS_HOST = process.env.IPFS_HOST || config.IPFS_HOST || config.storageHost; -const ETHEREUM_HOST = process.env.ETHEREUM_HOST || config.ETHEREUM_HOST || config.ethereumHost; +const IPFS_HOST = process.env.IPFS_HOST || config.IPFS_HOST || config.STORAGE_HOST || 'https://ipfs.exokit.org'; +const CACHE_HOST_URL = process.env.CACHE_HOST_URL || config.CACHE_HOST_URL || config.cacheHostUrl || 'cache.webaverse.com'; +const STORAGE_HOST = process.env.STORAGE_HOST || config.STORAGE_HOST || config.storageHost || 'https://storage.exokit.org'; + +const ETHEREUM_HOST = process.env.ETHEREUM_HOST || config.ETHEREUM_HOST || config.ethereumHost || config.ethereumHostAddress || 
'ethereum.exokit.org'; const DEFAULT_TOKEN_DESCRIPTION = process.env.DEFAULT_TOKEN_DESCRIPTION || config.DEFAULT_TOKEN_DESCRIPTION || ""; const AUTH_TOKEN_SECRET = process.env.AUTH_TOKEN_SECRET || config.AUTH_TOKEN_SECRET || ""; const AUTH_SECRET_KEY = process.env.AUTH_SECRET_KEY || config.AUTH_SECRET_KEY || ""; const IPFS_HOST_PORT = process.env.IPFS_HOST_PORT || config.IPFS_HOST_PORT || 8081; -const MINTING_FEE = process.env.MINTING_FEE || config.MINTING_FEE || 10; +const MINTING_FEE = process.env.MINTING_FEE || config.MINTING_FEE || config.mintingFee || 10; module.exports = { + AWS_ACCESS_KEY_ID, + AWS_SECRET_ACCESS_KEY, + AWS_REGION, + STORAGE_HOST, + IPFS_HOST, IPFS_HOST_PORT, + CACHE_HOST_URL, AUTH_SECRET_KEY, AUTH_TOKEN_SECRET, PUBLIC_IP_ADDRESS, @@ -54,6 +67,5 @@ module.exports = { DEFAULT_TOKEN_DESCRIPTION, MINTING_FEE, ETHEREUM_HOST, - IPFS_HOST, config }; \ No newline at end of file diff --git a/constants.js b/constants.js index cba3268..13dff03 100644 --- a/constants.js +++ b/constants.js @@ -1,4 +1,3 @@ -const MAX_SIZE = 50 * 1024 * 1024; const accountKeys = [ 'name', 'avatarId', @@ -11,14 +10,17 @@ const accountKeys = [ 'homeSpaceExt', 'homeSpacePreview', 'ftu', - // 'mainnetAddress', 'addressProofs', ]; + const ids = { lastCachedBlockAccount: 'lastCachedBlock', lastCachedBlockNft: -1, }; + const tableNames = { + user: 'users', + defaultCacheTable: 'sidechain-cache', mainnetAccount: 'mainnet-cache-account', mainnetNft: 'mainnet-cache-nft', mainnetsidechainAccount: 'sidechain-cache-account', @@ -32,6 +34,7 @@ const tableNames = { testnetpolygonAccount: 'testnetpolygon-cache-account', testnetpolygonNft: 'testnetpolygon-cache-nft', }; + const redisPrefixes = (() => { const result = {}; for (const k in tableNames) { @@ -39,22 +42,41 @@ const redisPrefixes = (() => { } return result; })(); + const nftIndexName = 'nftIdx'; -const polygonVigilKey = `1bdde9289621d9d420488a9804254f4a958e128b`; -const ethereumHost = 'ethereum.exokit.org'; -const storageHost = 'https://ipfs.exokit.org'; const mainnetSignatureMessage = `Connecting mainnet address.`; -const cacheHostUrl = 'cache.webaverse.com'; +const emailRegex = /(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])/; +const codeTestRegex = /^[0-9]{6}$/; +const discordIdTestRegex = /^[0-9]+$/; +const twitterIdTestRegex = /^@?(\w){1,15}$/; + +const zeroAddress = '0x0000000000000000000000000000000000000000'; +const burnAddress = "0x000000000000000000000000000000000000dEaD"; + +const proofOfAddressMessage = `Proof of address.`; +const unlockableMetadataKey = 'unlockable'; +const encryptedMetadataKey = 'encrypted'; + +const maxFileSize = 50 * 1024 * 1024; + +const defaultAvatarPreview = "https://preview.exokit.org/[https://raw.githubusercontent.com/avaer/vrm-samples/master/vroid/male.vrm]/preview.png"; + module.exports = { - MAX_SIZE, + maxFileSize, + burnAddress, + defaultAvatarPreview, + unlockableMetadataKey, + encryptedMetadataKey, + proofOfAddressMessage, accountKeys, ids, tableNames, redisPrefixes, nftIndexName, - polygonVigilKey, - ethereumHost, - storageHost, mainnetSignatureMessage, - cacheHostUrl, + emailRegex, + codeTestRegex, + discordIdTestRegex, + 
twitterIdTestRegex, + zeroAddress }; \ No newline at end of file diff --git a/index.js b/index.js index def58bd..c6a2572 100644 --- a/index.js +++ b/index.js @@ -29,43 +29,18 @@ const {SHA3} = require('sha3'); const {default: formurlencoded} = require('form-urlencoded'); const bip39 = require('bip39'); const {hdkey} = require('ethereumjs-wallet'); -const {getDynamoItem, getDynamoAllItems, putDynamoItem} = require('./aws.js'); +const {ddb, ses} = require('./aws.js'); const {getRedisItem, getRedisAllItems, parseRedisItems} = require('./redis.js'); const {getExt, makePromise} = require('./utils.js'); const Timer = require('./timer.js'); const {getStoreEntries, getChainNft, getAllWithdrawsDeposits} = require('./tokens.js'); const {getBlockchain} = require('./blockchain.js'); // const browserManager = require('./browser-manager.js'); -const {accountKeys, ids, nftIndexName, redisPrefixes, mainnetSignatureMessage, cacheHostUrl} = require('./constants.js'); +const {accountKeys, ids, nftIndexName, redisPrefixes, mainnetSignatureMessage} = require('./constants.js'); const {connect: redisConnect, getRedisClient} = require('./redis'); const ethereumJsUtil = require('./ethereumjs-util.js'); +const{GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, DISCORD_CLIENT_ID, DISCORD_CLIENT_SECRET, CACHE_HOST_URL} = require('./config.js'); -let config = require('fs').existsSync('./config.json') ? require('./config.json') : null; - -const accessKeyId = process.env.accessKeyId || config.accessKeyId; -const secretAccessKey = process.env.secretAccessKey || config.secretAccessKey; -const githubClientId = process.env.githubClientId || config.githubClientId; -const githubClientSecret = process.env.githubClientSecret || config.githubClientSecret; -const discordClientId = process.env.discordClientId || config.discordClientId; -const discordClientSecret = process.env.discordClientSecret || config.discordClientSecret; - -const awsConfig = new AWS.Config({ - credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, - }), - region: 'us-west-1', -}); -const ddb = new AWS.DynamoDB(awsConfig); -const ddbd = new AWS.DynamoDB.DocumentClient(awsConfig); -const s3 = new AWS.S3(awsConfig); -const ses = new AWS.SES(new AWS.Config({ - credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, - }), - region: 'us-west-2', -})); /* const apiKeyCache = new LRU({ max: 1024, maxAge: 60 * 1000, @@ -507,8 +482,8 @@ try { }); }); const s = formurlencoded({ - client_id: discordClientId, - client_secret: discordClientSecret, + client_id: DISCORD_CLIENT_ID, + client_secret: DISCORD_CLIENT_SECRET, code: discordcode, grant_type: 'authorization_code', scope: 'identify', @@ -1249,8 +1224,8 @@ try { _respond(500, err.stack); }); proxyReq.end(JSON.stringify({ - client_id: githubClientId, - client_secret: githubClientSecret, + client_id: GITHUB_CLIENT_ID, + client_secret: GITHUB_CLIENT_SECRET, code, state, })); @@ -1497,7 +1472,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) "image": "https://preview.exokit.org/" + hash.slice(2) + '.' + ext + '/preview.png', "external_url": "https://app.webaverse.com?h=" + p.slice(1), // "background_color": "000000", - "animation_url": `${storageHost}/${hash.slice(2)}/preview.${ext === 'vrm' ? 'glb' : ext}`, + "animation_url": `${STORAGE_HOST}/${hash.slice(2)}/preview.${ext === 'vrm' ? 
'glb' : ext}`, // "animation_url": "http://dl5.webmfiles.org/big-buck-bunny_trailer.webm", "properties": { "filename": filename, @@ -1737,7 +1712,7 @@ try { "image": "https://preview.exokit.org/" + hash.slice(2) + '.' + ext + '/preview.png', "external_url": "https://app.webaverse.com?h=" + p.slice(1), // "background_color": "000000", - "animation_url": `${storageHost}/${hash.slice(2)}/preview.${ext === 'vrm' ? 'glb' : ext}`, + "animation_url": `${STORAGE_HOST}/${hash.slice(2)}/preview.${ext === 'vrm' ? 'glb' : ext}`, // "animation_url": "http://dl5.webmfiles.org/big-buck-bunny_trailer.webm", "properties": { "filename": filename, @@ -1823,7 +1798,7 @@ try { "image": "https://preview.exokit.org/" + hash.slice(2) + '.' + ext + '/preview.png', "external_url": "https://app.webaverse.com?h=" + p.slice(1), // "background_color": "000000", - "animation_url": `${storageHost}/${hash.slice(2)}/preview.${ext === 'vrm' ? 'glb' : ext}`, + "animation_url": `${STORAGE_HOST}/${hash.slice(2)}/preview.${ext === 'vrm' ? 'glb' : ext}`, // "animation_url": "http://dl5.webmfiles.org/big-buck-bunny_trailer.webm", "properties": { "filename": filename, @@ -1994,7 +1969,7 @@ try { let redisClient = null; const _tryConnectRedis = () => { - redisConnect(undefined, cacheHostUrl) + redisConnect(undefined, CACHE_HOST_URL) .then(() => { redisClient = getRedisClient(); console.log('connected to redis'); diff --git a/package-lock.json b/package-lock.json index 3ba6183..9fa9de5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,7 +5,6 @@ "requires": true, "packages": { "": { - "name": "api-backend", "version": "0.0.1", "license": "ISC", "dependencies": { @@ -1259,6 +1258,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.1.tgz", "integrity": "sha512-xowrxvpxojqkagPcWRQVXZl0YXhRhAtBEIq3VoER1NH5Mw1n1o0ojdspp+GS2J//2gCVyrzQDApQ4unGF+QOoA==", + "hasInstallScript": true, "dependencies": { "node-gyp-build": "~3.7.0" } @@ -3900,6 +3900,7 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/keccak/-/keccak-3.0.1.tgz", "integrity": "sha512-epq90L9jlFWCW7+pQa6JOnKn2Xgl2mtI664seYR6MHskvI9agt7AnDqmAlp9TqU4/caMYbA08Hi5DMZAl5zdkA==", + "hasInstallScript": true, "dependencies": { "node-addon-api": "^2.0.0", "node-gyp-build": "^4.2.0" @@ -5751,6 +5752,7 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/secp256k1/-/secp256k1-4.0.2.tgz", "integrity": "sha512-UDar4sKvWAksIlfX3xIaQReADn+WFnHvbVujpcbr+9Sf/69odMwy2MUsz5CKLQgX9nsIyrjuxL2imVyoNHa3fg==", + "hasInstallScript": true, "dependencies": { "elliptic": "^6.5.2", "node-addon-api": "^2.0.0", @@ -7004,6 +7006,7 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.2.tgz", "integrity": "sha512-SwV++i2gTD5qh2XqaPzBnNX88N6HdyhQrNNRykvcS0QKvItV9u3vPEJr+X5Hhfb1JC0r0e1alL0iB09rY8+nmw==", + "hasInstallScript": true, "dependencies": { "node-gyp-build": "~3.7.0" } diff --git a/redis.js b/redis.js index fc64bfb..8bfb4ee 100644 --- a/redis.js +++ b/redis.js @@ -4,7 +4,7 @@ const redisearch = require('redis-redisearch'); redisearch(redis); const {makePromise} = require('./utils.js'); const {ids} = require('./constants.js'); -const {redisKey} = require('./config.json'); +const {REDIS_KEY} = require('./config.js'); // c = r.createClient(); c.auth('lol', err => {c.hset('cities', 'id', 'A Town Created from Grafting.', err => { c.hget('cities', 'id', console.log); }); c.on('error', console.warn); }); c.ft_create.apply(c, 'idx SCHEMA id TEXT SORTABLE'.split(' ').concat([console.warn])); 1 @@ 
-14,7 +14,7 @@ async function connect(port, host) { if (!loadPromise) { loadPromise = new Promise((accept, reject) => { redisClient = redis.createClient(port, host); - redisClient.auth(redisKey, err => { + redisClient.auth(REDIS_KEY, err => { if (!err) { accept(); } else { diff --git a/routes/analytics.js b/routes/analytics.js index 7812f83..b60b128 100644 --- a/routes/analytics.js +++ b/routes/analytics.js @@ -1,20 +1,7 @@ const url = require('url'); const uuid = require('uuid'); const { _setCorsHeaders } = require('../utils.js'); -const AWS = require('aws-sdk'); -let config = require('fs').existsSync('./config.json') ? require('../config.json') : null; - -const accessKeyId = process.env.accessKeyId || config.accessKeyId; -const secretAccessKey = process.env.secretAccessKey || config.secretAccessKey; - -const awsConfig = new AWS.Config({ - credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, - }), - region: 'us-west-1', -}); -const ddbd = new AWS.DynamoDB.DocumentClient(awsConfig); +const {ddbd} = require('../aws.js'); const _handleAnalyticsRequest = async (req, res) => { const request = url.parse(req.url); diff --git a/routes/preview.js b/routes/preview.js index dd8ea37..f49de28 100644 --- a/routes/preview.js +++ b/routes/preview.js @@ -13,6 +13,7 @@ const mime = require('mime'); const {getObject, putObject} = require('../aws.js'); const puppeteer = require('puppeteer'); const browserManager = require('../browser-manager.js'); +const {STORAGE_HOST} = require('../config.js'); const PREVIEW_HOST = '127.0.0.1'; const PREVIEW_PORT = 8999; @@ -20,7 +21,6 @@ const PREVIEW_PORT = 8999; const bucketNames = { preview: 'preview.exokit.org', }; -const storageHost = 'https://storage.exokit.org'; const _makePromise = () => { let accept, reject; @@ -107,7 +107,7 @@ const _handlePreviewRequest = async (req, res) => { const hash = match[1]; const ext = match[2].toLowerCase(); const type = match[4].toLowerCase(); - const url = `${storageHost}/${hash}`; + const url = `${STORAGE_HOST}/${hash}`; return { url, hash, diff --git a/routes/sign.js b/routes/sign.js index 746535f..bdb51b7 100644 --- a/routes/sign.js +++ b/routes/sign.js @@ -8,41 +8,39 @@ const Web3 = require('web3'); const bip39 = require('bip39'); const {hdkey} = require('ethereumjs-wallet'); const {_setCorsHeaders} = require('../utils.js'); -const {polygonVigilKey} = require('../constants.js'); - -const config = require('fs').existsSync('./config.json') ? 
require('../config.json') : null; -const mainnetMnemonic = process.env.mainnetMnemonic || config.mainnetMnemonic; -const testnetMnemonic = process.env.testnetMnemonic || config.testnetMnemonic; -const polygonMnemonic = process.env.polygonMnemonic || config.polygonMnemonic; -const testnetpolygonMnemonic = process.env.testnetpolygonMnemonic || config.testnetpolygonMnemonic; -const infuraProjectId = process.env.infuraProjectId || config.infuraProjectId; -const encryptionMnemonic = process.env.encryptionMnemonic || config.encryptionMnemonic; +const { + POLYGON_VIGIL_KEY, + MAINNET_MNEMONIC, + TESTNET_MNEMONIC, + POLYGON_MNEMONIC, + TESTNET_POLYGON_MNEMONIC, + INFURA_PROJECT_ID, + ETHEREUM_HOST +} = require('../config.js'); const loadPromise = (async () => { - const ethereumHost = 'ethereum.exokit.org'; - const ethereumHostAddress = await new Promise((accept, reject) => { - dns.resolve4(ethereumHost, (err, addresses) => { + dns.resolve4(ETHEREUM_HOST, (err, addresses) => { if (!err) { if (addresses.length > 0) { accept(addresses[0]); } else { - reject(new Error('no addresses resolved for ' + ethereumHostname)); + reject(new Error('no addresses resolved for ' + ETHEREUM_HOST)); } } else { reject(err); } }); }); - gethNodeUrl = `http://${ethereumHostAddress}`; + const gethNodeUrl = `http://${ethereumHostAddress}`; const web3 = { - mainnet: new Web3(new Web3.providers.HttpProvider(`https://mainnet.infura.io/v3/${infuraProjectId}`)), + mainnet: new Web3(new Web3.providers.HttpProvider(`https://mainnet.infura.io/v3/${INFURA_PROJECT_ID}`)), mainnetsidechain: new Web3(new Web3.providers.HttpProvider(gethNodeUrl + ':8545')), - testnet: new Web3(new Web3.providers.HttpProvider(`https://rinkeby.infura.io/v3/${infuraProjectId}`)), + testnet: new Web3(new Web3.providers.HttpProvider(`https://rinkeby.infura.io/v3/${INFURA_PROJECT_ID}`)), testnetsidechain: new Web3(new Web3.providers.HttpProvider(gethNodeUrl + ':8546')), - polygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mainnet.maticvigil.com/v1/${polygonVigilKey}`)), - testnetpolygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mumbai.maticvigil.com/v1/${polygonVigilKey}`)), + polygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mainnet.maticvigil.com/v1/${POLYGON_VIGIL_KEY}`)), + testnetpolygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mumbai.maticvigil.com/v1/${POLYGON_VIGIL_KEY}`)), }; const addresses = await fetch('https://contracts.webaverse.com/config/addresses.js').then(res => res.text()).then(s => JSON.parse(s.replace(/^\s*export\s*default\s*/, ''))); const abis = await fetch('https://contracts.webaverse.com/config/abi.js').then(res => res.text()).then(s => JSON.parse(s.replace(/^\s*export\s*default\s*/, ''))); @@ -76,11 +74,11 @@ const loadPromise = (async () => { return result; })(); const wallets = { - mainnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(mainnetMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - mainnetsidechain: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(mainnetMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - testnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(testnetMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - polygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(polygonMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - testnetpolygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(testnetpolygonMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + mainnet: 
hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(MAINNET_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + mainnetsidechain: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(MAINNET_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + testnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(TESTNET_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + polygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(POLYGON_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + testnetpolygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(TESTNET_POLYGON_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), }; return { diff --git a/routes/unlock.js b/routes/unlock.js index d1a833c..b668802 100644 --- a/routes/unlock.js +++ b/routes/unlock.js @@ -11,46 +11,33 @@ const bip39 = require('bip39'); const {hdkey} = require('ethereumjs-wallet'); const {jsonParse, _setCorsHeaders} = require('../utils.js'); const {encodeSecret, decodeSecret} = require('../encryption.js'); -const {MAX_SIZE, storageHost, polygonVigilKey} = require('../constants.js'); +const {MAX_SIZE} = require('../constants.js'); -let config = require('fs').existsSync('./config.json') ? require('../config.json') : null; +const { + MAINNET_MNEMONIC, + TESTNET_MNEMONIC, + POLYGON_MNEMONIC, + TESTNET_POLYGON_MNEMONIC, + INFURA_PROJECT_ID, + ENCRYPTION_MNEMONIC, + POLYGON_VIGIL_KEY, + STORAGE_HOST +} = require('../config.js'); -const accessKeyId = process.env.accessKeyId || config.accessKeyId; -const secretAccessKey = process.env.secretAccessKey || config.secretAccessKey; -const mainnetMnemonic = process.env.mainnetMnemonic || config.mainnetMnemonic; -const testnetMnemonic = process.env.testnetMnemonic || config.testnetMnemonic; -const polygonMnemonic = process.env.polygonMnemonic || config.polygonMnemonic; -const testnetpolygonMnemonic = process.env.testnetpolygonMnemonic || config.testnetpolygonMnemonic; -const infuraProjectId = process.env.infuraProjectId || config.infuraProjectId; -const encryptionMnemonic = process.env.encryptionMnemonic || config.encryptionMnemonic; - -const awsConfig = new AWS.Config({ - credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, - }), - region: 'us-west-1', -}); -const ddb = new AWS.DynamoDB(awsConfig); - -const {pipeline, PassThrough} = require('stream'); -const {randomBytes, createCipheriv, createDecipheriv} = require('crypto'); - -const tableName = 'users'; const unlockableKey = 'unlockable'; const encryptedKey = 'encrypted'; let contracts = null; const loadPromise = (async () => { - const ethereumHost = 'ethereum.exokit.org'; + const ETHEREUM_HOST = 'ethereum.exokit.org'; const ethereumHostAddress = await new Promise((accept, reject) => { - dns.resolve4(ethereumHost, (err, addresses) => { + dns.resolve4(ETHEREUM_HOST, (err, addresses) => { if (!err) { if (addresses.length > 0) { accept(addresses[0]); } else { - reject(new Error('no addresses resolved for ' + ethereumHostname)); + reject(new Error('no addresses resolved for ' + ETHEREUM_HOSTname)); } } else { reject(err); @@ -60,12 +47,12 @@ const loadPromise = (async () => { gethNodeUrl = `http://${ethereumHostAddress}`; const web3 = { - mainnet: new Web3(new Web3.providers.HttpProvider(`https://mainnet.infura.io/v3/${infuraProjectId}`)), + mainnet: new Web3(new Web3.providers.HttpProvider(`https://mainnet.infura.io/v3/${INFURA_PROJECT_ID}`)), mainnetsidechain: new Web3(new Web3.providers.HttpProvider(gethNodeUrl + ':8545')), - testnet: new Web3(new Web3.providers.HttpProvider(`https://rinkeby.infura.io/v3/${infuraProjectId}`)), + 
testnet: new Web3(new Web3.providers.HttpProvider(`https://rinkeby.infura.io/v3/${INFURA_PROJECT_ID}`)), testnetsidechain: new Web3(new Web3.providers.HttpProvider(gethNodeUrl + ':8546')), - polygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mainnet.maticvigil.com/v1/${polygonVigilKey}`)), - testnetpolygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mumbai.maticvigil.com/v1/${polygonVigilKey}`)), + polygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mainnet.maticvigil.com/v1/${POLYGON_VIGIL_KEY}`)), + testnetpolygon: new Web3(new Web3.providers.HttpProvider(`https://rpc-mumbai.maticvigil.com/v1/${POLYGON_VIGIL_KEY}`)), }; const addresses = await fetch('https://contracts.webaverse.com/config/addresses.js').then(res => res.text()).then(s => JSON.parse(s.replace(/^\s*export\s*default\s*/, ''))); const abis = await fetch('https://contracts.webaverse.com/config/abi.js').then(res => res.text()).then(s => JSON.parse(s.replace(/^\s*export\s*default\s*/, ''))); @@ -100,10 +87,10 @@ const loadPromise = (async () => { })(); const wallets = { - mainnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(mainnetMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - testnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(testnetMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - polygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(polygonMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), - testnetpolygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(testnetpolygonMnemonic)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + mainnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(MAINNET_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + testnet: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(TESTNET_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + polygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(POLYGON_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), + testnetpolygon: hdkey.fromMasterSeed(bip39.mnemonicToSeedSync(TESTNET_POLYGON_MNEMONIC)).derivePath(`m/44'/60'/0'/0/0`).getWallet(), }; return { @@ -293,7 +280,7 @@ const _handleUnlockRequest = async (req, res) => { ciphertext = Buffer.from(ciphertext, 'base64'); tag = Buffer.from(tag, 'base64'); // console.log('got ciphertext 1', {ciphertext, tag}); - value = decodeSecret(encryptionMnemonic, id, {ciphertext, tag}, 'utf8'); + value = decodeSecret(ENCRYPTION_MNEMONIC, id, {ciphertext, tag}, 'utf8'); // console.log('got ciphertext 2', {ciphertext, tag, value}); } @@ -368,7 +355,7 @@ const _handleLockRequest = async (req, res) => { req.on('error', reject); }); - let {ciphertext, tag} = encodeSecret(encryptionMnemonic, id, b); + let {ciphertext, tag} = encodeSecret(ENCRYPTION_MNEMONIC, id, b); // ciphertext = ciphertext.toString('base64'); tag = tag.toString('base64'); /* value = JSON.stringify({ @@ -464,14 +451,14 @@ const _handleDecryptRequest = async (req, res) => { let {cipherhash, tag} = value; const ciphertext = await (async () => { - const res = await fetch(`${storageHost}/ipfs/${cipherhash}`); + const res = await fetch(`${STORAGE_HOST}/ipfs/${cipherhash}`); const b = await res.buffer(); return b; })(); tag = Buffer.from(tag, 'base64'); // console.log('got ciphertext 1', {ciphertext, tag}); - const plaintext = decodeSecret(encryptionMnemonic, id, {ciphertext, tag}, null); + const plaintext = decodeSecret(ENCRYPTION_MNEMONIC, id, {ciphertext, tag}, null); // console.log('got ciphertext 2', {ciphertext, tag, value}); res.setHeader('Content-Type', 'application/octet-stream'); diff --git 
a/routes/worlds.js b/routes/worlds.js index 238388e..c8d6aa1 100644 --- a/routes/worlds.js +++ b/routes/worlds.js @@ -7,25 +7,9 @@ const fs = require('fs').promises; const child_process = require('child_process'); // const mime = require('mime'); const {_setCorsHeaders, getExt} = require('../utils.js'); -const AWS = require('aws-sdk'); const ps = require('ps-node'); -let config = require('fs').existsSync('./config.json') ? require('../config.json') : null; - -const accessKeyId = process.env.accessKeyId || config.accessKeyId; -const secretAccessKey = process.env.secretAccessKey || config.secretAccessKey; -const privateIp = process.env.privateIp || config.privateIp; -const publicIp = process.env.publicIp || config.publicIp; - -const awsConfig = new AWS.Config({ - credentials: new AWS.Credentials({ - accessKeyId, - secretAccessKey, - }), - region: 'us-west-1', -}); -// const ddb = new AWS.DynamoDB(awsConfig); -// const ddbd = new AWS.DynamoDB.DocumentClient(awsConfig); -const s3 = new AWS.S3(awsConfig); +const {s3} = require('../aws.js'); +const {privateIp, publicIp} = require('../config.js'); const jsPath = '../dialog/index.js'; const bucketName = 'worlds.exokit.org'; diff --git a/tokens.js b/tokens.js index 5bc230e..60b1802 100644 --- a/tokens.js +++ b/tokens.js @@ -1,8 +1,6 @@ -const {accountKeys, storageHost} = require('./constants.js'); +const {accountKeys, zeroAddress, defaultAvatarPreview} = require('./constants.js'); const {getBlockchain, getPastEvents} = require('./blockchain.js'); - -const zeroAddress = '0x0000000000000000000000000000000000000000'; -const defaultAvatarPreview = `https://preview.exokit.org/[https://raw.githubusercontent.com/avaer/vrm-samples/master/vroid/male.vrm]/preview.png`; +const {STORAGE_HOST} = require('./config.js'); const _log = async (text, p) => { // console.log('start pull', text); try { @@ -496,7 +494,7 @@ const formatToken = contractName => chainName => async (token, storeEntries, mai description, image: 'https://preview.exokit.org/' + hash + '.' + ext + '/preview.png', external_url: 'https://app.webaverse.com?h=' + hash, - animation_url: `${storageHost}/${hash}/preview.${ext === 'vrm' ? 'glb' : ext}`, + animation_url: `${STORAGE_HOST}/${hash}/preview.${ext === 'vrm' ? 'glb' : ext}`, properties: { name, hash, @@ -560,7 +558,7 @@ const formatLand = contractName => chainName => async (token, storeEntries) => { description, image: coord ? `https://land-preview.exokit.org/32/${coord[0]}/${coord[2]}?${extentsJson ? `e=${JSON.stringify(extentsJson)}` : ''}` : null, external_url: `https://app.webaverse.com?${coord ? `c=${JSON.stringify(coord)}` : ''}`, - // animation_url: `${storageHost}/${hash}/preview.${ext === 'vrm' ? 'glb' : ext}`, + // animation_url: `${STORAGE_HOST}/${hash}/preview.${ext === 'vrm' ? 
'glb' : ext}`, properties: { name, hash, From 5316fa4d41a788cfeb8f74483ec8bdac71a1c928 Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 18:12:18 -0700 Subject: [PATCH 5/7] Remove unused dependencies and eslint errors --- routes/accounts.js | 4 ++-- routes/analytics.js | 2 +- routes/preview.js | 12 +----------- routes/sign.js | 4 ++-- routes/unlock.js | 2 -- 5 files changed, 6 insertions(+), 18 deletions(-) diff --git a/routes/accounts.js b/routes/accounts.js index b044ecf..0c36035 100644 --- a/routes/accounts.js +++ b/routes/accounts.js @@ -1,5 +1,5 @@ const url = require('url'); -const { _setCorsHeaders } = require('../utils.js'); +const {_setCorsHeaders} = require('../utils.js'); const blockchain = require('../blockchain.js'); const accountManager = require('../account-manager.js'); @@ -25,7 +25,7 @@ const _handleAccountsRequest = async (req, res) => { const latestBlock = await blockchain.getLatestBlock(); res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify(latestBlock, null, 2)); - } else if (match = request.path.match(/^\/getEvents\/([^\/]+)\/([0-9]+)\/([0-9]+)$/)) { + } else if ((match = request.path.match(/^\/getEvents\/([^\/]+)\/([0-9]+)\/([0-9]+)$/))) { const eventTypes = match[1].split(','); const startBlock = parseInt(match[2], 10); const endBlock = parseInt(match[3], 10); diff --git a/routes/analytics.js b/routes/analytics.js index b60b128..87da7b1 100644 --- a/routes/analytics.js +++ b/routes/analytics.js @@ -1,6 +1,6 @@ const url = require('url'); const uuid = require('uuid'); -const { _setCorsHeaders } = require('../utils.js'); +const {_setCorsHeaders} = require('../utils.js'); const {ddbd} = require('../aws.js'); const _handleAnalyticsRequest = async (req, res) => { diff --git a/routes/preview.js b/routes/preview.js index f49de28..8363cbd 100644 --- a/routes/preview.js +++ b/routes/preview.js @@ -1,17 +1,7 @@ -const path = require('path'); -const stream = require('stream'); -const fs = require('fs'); const url = require('url'); -const querystring = require('querystring'); const http = require('http'); -const https = require('https'); -const crypto = require('crypto'); -const zlib = require('zlib'); -const child_process = require('child_process'); const mime = require('mime'); - const {getObject, putObject} = require('../aws.js'); -const puppeteer = require('puppeteer'); const browserManager = require('../browser-manager.js'); const {STORAGE_HOST} = require('../config.js'); @@ -174,7 +164,7 @@ const _handlePreviewRequest = async (req, res) => { proxyReq.on('data', d => { bs.push(d); }); - await new Promise((accept, reject) => { + await new Promise((accept) => { proxyReq.on('end', accept); }); proxyRes.end(); diff --git a/routes/sign.js b/routes/sign.js index bdb51b7..5625793 100644 --- a/routes/sign.js +++ b/routes/sign.js @@ -94,7 +94,7 @@ const loadPromise = (async () => { const _handleSignRequest = async (req, res) => { // console.log('sign request', req.url); - const {web3, addresses, abis, chainIds, contracts, wallets} = await loadPromise; + const {web3, addresses, chainIds, wallets} = await loadPromise; const request = url.parse(req.url); // const path = request.path.split('/')[1]; @@ -131,7 +131,7 @@ const _handleSignRequest = async (req, res) => { // console.log('got log', logs, log); if (log) { const wallet = wallets[destinationChainName]; - const proxyContractAddress = addresses[destinationChainName][proxyContractName]; + // const proxyContractAddress = addresses[destinationChainName][proxyContractName]; // const {returnValues} = 
log; // const {from, to: toInverse} = returnValues; diff --git a/routes/unlock.js b/routes/unlock.js index b668802..6b7f978 100644 --- a/routes/unlock.js +++ b/routes/unlock.js @@ -1,10 +1,8 @@ -const crypto = require('crypto'); const url = require('url'); const dns = require('dns'); // const util = require('util'); // const fs = require('fs'); // const {spawn} = require('child_process'); -const AWS = require('aws-sdk'); const fetch = require('node-fetch'); const Web3 = require('web3'); const bip39 = require('bip39'); From f5711818a19c0eddcef144cddd7f61800cee0fd8 Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 18:13:17 -0700 Subject: [PATCH 6/7] Fix constant error --- routes/unlock.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/routes/unlock.js b/routes/unlock.js index b668802..4d561dc 100644 --- a/routes/unlock.js +++ b/routes/unlock.js @@ -37,14 +37,14 @@ const loadPromise = (async () => { if (addresses.length > 0) { accept(addresses[0]); } else { - reject(new Error('no addresses resolved for ' + ETHEREUM_HOSTname)); + reject(new Error('no addresses resolved for ' + ETHEREUM_HOST)); } } else { reject(err); } }); }); - gethNodeUrl = `http://${ethereumHostAddress}`; + const gethNodeUrl = `http://${ethereumHostAddress}`; const web3 = { mainnet: new Web3(new Web3.providers.HttpProvider(`https://mainnet.infura.io/v3/${INFURA_PROJECT_ID}`)), From 9b9eee83dd56a8cb2c74a5a47dd495a5ed3ea470 Mon Sep 17 00:00:00 2001 From: shawticus Date: Sun, 9 May 2021 18:37:18 -0700 Subject: [PATCH 7/7] Lint fix, remove unnecessary imports and unused vars --- blockchain.js | 2 +- encoder.js | 4 -- events-manager.js | 2 - index.js | 97 ++++++++++++++--------------------------------- ipfsMigration.js | 2 +- namegen.js | 4 +- redis.js | 5 +-- routes/storage.js | 6 +-- routes/unlock.js | 17 ++------- routes/worlds.js | 9 +++-- tokens.js | 28 +++++++------- 11 files changed, 61 insertions(+), 115 deletions(-) diff --git a/blockchain.js b/blockchain.js index 1f104a5..98dcd16 100644 --- a/blockchain.js +++ b/blockchain.js @@ -9,7 +9,7 @@ const {INFURA_PROJECT_ID, POLYGON_VIGIL_KEY, ETHEREUM_HOST} = require('./config. 
let addresses, abis, web3, - web3socketProviders, + // web3socketProviders, web3sockets, contracts, // wsContracts, diff --git a/encoder.js b/encoder.js index c32ef6f..8583a91 100644 --- a/encoder.js +++ b/encoder.js @@ -8,10 +8,6 @@ const Stream = require('stream'); - -const internals = {}; - - exports.encode = function (buffer) { return Buffer.from(buffer.toString('base64')); diff --git a/events-manager.js b/events-manager.js index a68cae2..c254556 100644 --- a/events-manager.js +++ b/events-manager.js @@ -1,9 +1,7 @@ const events = require('events'); const {EventEmitter} = events; -const AWS = require('aws-sdk'); const blockchain = require('./blockchain.js'); const flowConstants = require('./flow-constants.js'); - class EventsManager extends EventEmitter { constructor() { super(); diff --git a/index.js b/index.js index c6a2572..69ba5ff 100644 --- a/index.js +++ b/index.js @@ -1,64 +1,30 @@ require('dotenv').config(); -const path = require('path'); -const stream = require('stream'); const fs = require('fs'); const url = require('url'); const querystring = require('querystring'); const http = require('http'); const https = require('https'); -const dns = require('dns'); const crypto = require('crypto'); -const zlib = require('zlib'); -const os = require('os'); -const child_process = require('child_process'); -const mkdirp = require('mkdirp'); -const FormData = require('form-data'); -// const express = require('express'); const httpProxy = require('http-proxy'); const ws = require('ws'); -// const LRU = require('lru'); -const mime = require('mime'); -const AWS = require('aws-sdk'); -const Stripe = require('stripe'); -// const puppeteer = require('puppeteer'); const namegen = require('./namegen.js'); -const Base64Encoder = require('./encoder.js').Encoder; -// const {JSONServer, CustomEvent} = require('./dist/sync-server.js'); -const fetch = require('node-fetch'); -const {SHA3} = require('sha3'); const {default: formurlencoded} = require('form-urlencoded'); const bip39 = require('bip39'); const {hdkey} = require('ethereumjs-wallet'); const {ddb, ses} = require('./aws.js'); const {getRedisItem, getRedisAllItems, parseRedisItems} = require('./redis.js'); -const {getExt, makePromise} = require('./utils.js'); -const Timer = require('./timer.js'); +const {makePromise} = require('./utils.js'); const {getStoreEntries, getChainNft, getAllWithdrawsDeposits} = require('./tokens.js'); -const {getBlockchain} = require('./blockchain.js'); -// const browserManager = require('./browser-manager.js'); +const {NetworkNames, getBlockchain} = require('./blockchain.js'); const {accountKeys, ids, nftIndexName, redisPrefixes, mainnetSignatureMessage} = require('./constants.js'); const {connect: redisConnect, getRedisClient} = require('./redis'); const ethereumJsUtil = require('./ethereumjs-util.js'); -const{GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, DISCORD_CLIENT_ID, DISCORD_CLIENT_SECRET, CACHE_HOST_URL} = require('./config.js'); - -/* const apiKeyCache = new LRU({ - max: 1024, - maxAge: 60 * 1000, -}); */ -// const stripe = Stripe(stripeClientSecret); -// const accountManager = require('./account-manager.js'); -// const eventsManager = require('./events-manager.js'); - -const Discord = require('discord.js'); - -// const api = require('./api.js'); -// const { _handleStorageRequest } = require('./routes/storage.js'); -// const { _handleAccountsRequest } = require('./routes/accounts.js'); -// const { _handlePreviewRequest } = require('./routes/preview.js') -const { worldManager, _handleWorldsRequest, _startWorldsRoute 
} = require('./routes/worlds.js'); -const { _handleSignRequest } = require('./routes/sign.js'); -const { _handleUnlockRequest, _handleLockRequest, _handleDecryptRequest, _isCollaborator, _isSingleCollaborator} = require('./routes/unlock.js'); -const { _handleAnalyticsRequest } = require('./routes/analytics.js'); +const {GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, DISCORD_CLIENT_ID, DISCORD_CLIENT_SECRET, CACHE_HOST_URL} = require('./config.js'); +const {getChainToken, getChainOwnerNft} = require("./tokens.js"); +const {worldManager, _handleWorldsRequest} = require('./routes/worlds.js'); +const {_handleSignRequest} = require('./routes/sign.js'); +const {_handleUnlockRequest, _handleLockRequest, _handleDecryptRequest, _isCollaborator, _isSingleCollaborator} = require('./routes/unlock.js'); +const {_handleAnalyticsRequest} = require('./routes/analytics.js'); let CERT = null; let PRIVKEY = null; @@ -793,16 +759,7 @@ const _handleEthereum = port => async (req, res) => { // XXX make this per-port res.setHeader('Access-Control-Allow-Origin', '*'); res.end(body); }; - const _setCorsHeaders = res => { - res.setHeader('Access-Control-Allow-Origin', '*'); - res.setHeader('Access-Control-Allow-Headers', '*'); - res.setHeader('Access-Control-Allow-Methods', '*'); - }; - try { - const {method} = req; - const {query, pathname: p} = url.parse(req.url, true); - const { gethNodeUrl, } = await getBlockchain(); @@ -830,6 +787,7 @@ try { } }; +// eslint-disable-next-line no-unused-vars const _handleAccounts = chainName => async (req, res) => { const _respond = (statusCode, body) => { res.statusCode = statusCode; @@ -1262,9 +1220,9 @@ try { const {pathname: p} = url.parse(req.url, true); // console.log('got p', p); let match; - if (match = p.match(/^\/(0x[a-f0-9]+)$/)) { + if ((match = p.match(/^\/(0x[a-f0-9]+)$/))) { const address = match[1]; - + const {contracts} = await getBlockchain(); const tokenIds = await contracts[chainName].NFT.methods.getTokenIdsOf(address).call(); let username = await contracts[chainName].Account.methods.getMetadata(address, 'name').call(); @@ -1408,7 +1366,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) if (method === 'GET') { const {pathname: p} = url.parse(req.url, true); let match; - if (match = p.match(/^\/([0-9]+)$/)) { + if ((match = p.match(/^\/([0-9]+)$/))) { const tokenId = parseInt(match[1], 10); @@ -1424,7 +1382,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) } else { _respond(404, JSON.stringify(null)); } - } else if (match = p.match(/^\/([0-9]+)-([0-9]+)$/)) { + } else if ((match = p.match(/^\/([0-9]+)-([0-9]+)$/))) { const startTokenId = parseInt(match[1], 10); const endTokenId = parseInt(match[2], 10); @@ -1499,7 +1457,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) } else { _respond(400, 'invalid range'); } - } else if (match = p.match(/^\/(0x[a-f0-9]+)$/i)) { + } else if ((match = p.match(/^\/(0x[a-f0-9]+)$/i))) { const address = match[1]; const [ @@ -1511,6 +1469,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) let mainnetAddress = null; const account = await getRedisItem(address, redisPrefixes.mainnetsidechainAccount); const signature = account?.metadata?.['mainnetAddress']; + const {web3} = await getBlockchain(); if (signature) { mainnetAddress = await web3.testnet.eth.accounts.recover(mainnetSignatureMessage, signature); } @@ -1575,7 +1534,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, 
res) tokens = tokens.filter(token => !!token.name); } _respond(200, JSON.stringify(tokens)); - } else if (match = p.match(/^\/isCollaborator\/([0-9]+)\/(0x[a-f0-9]+)$/i)) { + } else if ((match = p.match(/^\/isCollaborator\/([0-9]+)\/(0x[a-f0-9]+)$/i))) { const tokenId = parseInt(match[1], 10); const address = match[2]; @@ -1584,7 +1543,7 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) _setCorsHeaders(res); res.setHeader('Content-Type', 'application/json'); _respond(200, JSON.stringify(isCollaborator)); - } else if (match = p.match(/^\/isSingleCollaborator\/([0-9]+)\/(0x[a-f0-9]+)$/i)) { + } else if ((match = p.match(/^\/isSingleCollaborator\/([0-9]+)\/(0x[a-f0-9]+)$/i))) { const tokenId = parseInt(match[1], 10); const address = match[2]; @@ -1593,14 +1552,14 @@ const _handleCachedNft = contractName => (chainName, isAll) => async (req, res) _setCorsHeaders(res); res.setHeader('Content-Type', 'application/json'); _respond(200, JSON.stringify(isSingleCollaborator)); - } else if (match = req.url.match(/^\/search\?(.+)$/)) { + } else if ((match = req.url.match(/^\/search\?(.+)$/))) { const qs = querystring.parse(match[1]); - const {q = '*', ext, owner, minter} = qs; + const {q = '*', owner, minter} = qs; if (q) { const regex = /(\w+)/g; const words = []; let match; - while (match = regex.exec(q)) { + while ((match = regex.exec(q))) { words.push(`%${match[1]}%`); } @@ -1680,7 +1639,7 @@ try { if (method === 'GET') { const {pathname: p} = url.parse(req.url, true); let match; - if (match = p.match(/^\/([0-9]+)$/)) { + if ((match = p.match(/^\/([0-9]+)$/))) { const tokenId = parseInt(match[1], 10); const storeEntries = await _maybeGetStoreEntries(); @@ -1736,7 +1695,7 @@ try { } } })); */ - } else if (match = p.match(/^\/([0-9]+)-([0-9]+)$/)) { + } else if ((match = p.match(/^\/([0-9]+)-([0-9]+)$/))) { const startTokenId = parseInt(match[1], 10); const endTokenId = parseInt(match[2], 10); @@ -1825,9 +1784,9 @@ try { } else { _respond(400, 'invalid range'); } - } else if (match = p.match(/^\/(0x[a-f0-9]+)$/i)) { + } else if ((match = p.match(/^\/(0x[a-f0-9]+)$/i))) { const address = match[1]; - + const {contracts, web3} = await getBlockchain(); const signature = await contracts[NetworkNames.mainnetsidechain].Account.methods.getMetadata(address, "mainnetAddress").call(); let mainnetAddress = null; @@ -1859,6 +1818,7 @@ try { let tokens = await Promise.all(promises); if (isAll && mainnetAddress) { + // TODO: 'otherChainName' is not defined -- is this code not getting called? 
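// Illustrative sketch (not part of this commit): the TODO above notes that `otherChainName`
// is never defined in this scope, so this branch would throw a ReferenceError if it ran.
// One way it could plausibly be derived, assuming the intent is "the L1 counterpart of the
// current sidechain"; the mapping below is an assumption inferred from the chain names in
// constants.js, not an existing helper in this repo:
//
//   const SIDECHAIN_TO_MAINNET = {
//     mainnetsidechain: 'mainnet',
//     testnetsidechain: 'testnet',
//   };
//   const otherChainName = SIDECHAIN_TO_MAINNET[chainName] || chainName;
//
// Alternatively, getChainNames(chainName) in tokens.js already computes a mainnetChainName;
// exporting and reusing it here would avoid duplicating the mapping, if that matches intent.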
const nftMainnetBalance = await contracts[otherChainName][contractName].methods.balanceOf(mainnetAddress).call(); const mainnetPromises = Array(nftMainnetBalance); for (let i = 0; i < nftMainnetBalance; i++) { @@ -1950,7 +1910,7 @@ try { if (method === 'GET' & p === '/') { const booths = await _getBooths(); _respond(200, JSON.stringify(booths)); - } else if (match = p.match(/^\/(0x[a-f0-9]+)$/i)) { + } else if ((match = p.match(/^\/(0x[a-f0-9]+)$/i))) { const seller = match[1]; let booths = await _getBooths(); booths = booths.filter(booth => booth.seller === seller); @@ -2122,7 +2082,7 @@ try { return; } - if (match = o.host.match(/^(.+)\.proxy\.exokit.org$/)) { + if ((match = o.host.match(/^(.+)\.proxy\.exokit.org$/))) { const raw = match[1]; const match2 = raw.match(/^(https?-)(.+?)(-[0-9]+)?$/); if (match2) { @@ -2135,7 +2095,6 @@ try { } else { o.protocol = match2[1].replace(/-/g, ':'); o.host = match2[2].replace(/--/g, '=').replace(/-/g, '.').replace(/=/g, '-').replace(/\.\./g, '-') + (match2[3] ? match2[3].replace(/-/g, ':') : ''); - const oldUrl = req.url; req.url = url.format(o); // console.log(oldUrl, '->', req.url); @@ -2186,7 +2145,7 @@ const _ws = protocol => (req, socket, head) => { const o = url.parse(protocol + '//' + (req.headers['host'] || '') + req.url); console.log('got', protocol, req.headers['host'], req.url, o); let match; - if (match = o.host.match(/^(.+)\.proxy\.exokit.org$/)) { + if ((match = o.host.match(/^(.+)\.proxy\.exokit.org$/))) { const raw = match[1]; const match2 = raw.match(/^(https?-)(.+?)(-[0-9]+)?$/); console.log('match 2', raw, match2); diff --git a/ipfsMigration.js b/ipfsMigration.js index e8c17a5..2edb0b8 100644 --- a/ipfsMigration.js +++ b/ipfsMigration.js @@ -1,4 +1,4 @@ -const { putObject } = require('./aws.js') +const {putObject} = require('./aws.js') const fetch = require('node-fetch'); const mime = require('mime'); diff --git a/namegen.js b/namegen.js index 8579311..8ae003d 100644 --- a/namegen.js +++ b/namegen.js @@ -1,10 +1,10 @@ function namegen(count) { - var vowels = { '1': ["b", "c", "d", "f", "g", "h", "i", "j", "k", "l", "m", "n", "p", "q", "r", "s", "t", "v", "w", "x", "y", "z"], + var vowels = {'1': ["b", "c", "d", "f", "g", "h", "i", "j", "k", "l", "m", "n", "p", "q", "r", "s", "t", "v", "w", "x", "y", "z"], '2': ["a", "e", "o", "u"], '3': ["br", "cr", "dr", "fr", "gr", "pr", "str", "tr", "bl", "cl", "fl", "gl", "pl", "sl", "sc", "sk", "sm", "sn", "sp", "st", "sw", "ch", "sh", "th", "wh"], '4': ["ae", "ai", "ao", "au", "a", "ay", "ea", "ei", "eo", "eu", "e", "ey", "ua", "ue", "ui", "uo", "u", "uy", "ia", "ie", "iu", "io", "iy", "oa", "oe", "ou", "oi", "o", "oy"], '5': ["turn", "ter", "nus", "rus", "tania", "hiri", "hines", "gawa", "nides", "carro", "rilia", "stea", "lia", "lea", "ria", "nov", "phus", "mia", "nerth", "wei", "ruta", "tov", "zuno", "vis", "lara", "nia", "liv", "tera", "gantu", "yama", "tune", "ter", "nus", "cury", "bos", "pra", "thea", "nope", "tis", "clite"], - '6': ["una", "ion", "iea", "iri", "illes", "ides", "agua", "olla", "inda", "eshan", "oria", "ilia", "erth", "arth", "orth", "oth", "illon", "ichi", "ov", "arvis", "ara", "ars", "yke", "yria", "onoe", "ippe", "osie", "one", "ore", "ade", "adus", "urn", "ypso", "ora", "iuq", "orix", "apus", "ion", "eon", "eron", "ao", "omia"] }, + '6': ["una", "ion", "iea", "iri", "illes", "ides", "agua", "olla", "inda", "eshan", "oria", "ilia", "erth", "arth", "orth", "oth", "illon", "ichi", "ov", "arvis", "ara", "ars", "yke", "yria", "onoe", "ippe", "osie", "one", "ore", "ade", 
"adus", "urn", "ypso", "ora", "iuq", "orix", "apus", "ion", "eon", "eron", "ao", "omia"]}, mtx = [[1,1, 2,2, 5,5], [2,2, 3,3, 6,6], [3,3, 4,4, 5,5], diff --git a/redis.js b/redis.js index 8bfb4ee..d428cd9 100644 --- a/redis.js +++ b/redis.js @@ -1,4 +1,3 @@ -const stream = require('stream'); const redis = require('redis'); const redisearch = require('redis-redisearch'); redisearch(redis); @@ -69,7 +68,7 @@ async function putRedisItem(id, data, TableName) { await p; } -async function getRedisAllItems(TableName = defaultDynamoTable) { +async function getRedisAllItems(TableName) { // console.time('lol 1'); let keys = await new Promise((accept, reject) => { redisClient.keys(`${TableName}:*`, (err, result) => { @@ -86,7 +85,7 @@ async function getRedisAllItems(TableName = defaultDynamoTable) { // console.timeEnd('lol 1'); // console.time('lol 2'); - const _runJobs = jobs => new Promise((accept, reject) => { + const _runJobs = jobs => new Promise((accept) => { const maxTasksInFlight = 100; let tasksInFlight = 0; const _recurse = async () => { diff --git a/routes/storage.js b/routes/storage.js index 1d91e43..992006b 100644 --- a/routes/storage.js +++ b/routes/storage.js @@ -1,6 +1,6 @@ const url = require('url'); const https = require('https'); -const { putObject, uploadFromStream } = require('../aws.js'); +const {uploadFromStream} = require('../aws.js'); const crypto = require('crypto'); const mime = require('mime'); const {_setCorsHeaders, getExt} = require('../utils.js'); @@ -16,7 +16,7 @@ const _handleStorageRequest = async (req, res) => { const filename = match && match[2]; res = _setCorsHeaders(res); - const {method, headers} = req; + const {method} = req; if (method === 'OPTIONS') { res.end(); } else if (method === 'POST') { @@ -44,7 +44,7 @@ const _handleStorageRequest = async (req, res) => { res.status = 500; res.end(err.stack); }); - ws.on('done', data => { + ws.on('done', () => { // console.log('got done', data); res.end(JSON.stringify({ hash: hashHex, diff --git a/routes/unlock.js b/routes/unlock.js index a30a23a..0083463 100644 --- a/routes/unlock.js +++ b/routes/unlock.js @@ -1,4 +1,3 @@ -const url = require('url'); const dns = require('dns'); // const util = require('util'); // const fs = require('fs'); @@ -211,10 +210,8 @@ const _areAddressesSingleColaborator = async (addresses, id) => { const _handleUnlockRequest = async (req, res) => { // console.log('unlock request', req.url); - const {web3, addresses, abis, chainIds, contracts, wallets} = await loadPromise; + const {web3, contracts} = await loadPromise; - const request = url.parse(req.url); - // const path = request.path.split('/')[1]; try { res = _setCorsHeaders(res); const {method} = req; @@ -312,11 +309,7 @@ const _handleUnlockRequest = async (req, res) => { }; const _handleLockRequest = async (req, res) => { // console.log('unlock request', req.url); - - const {web3, addresses, abis, chainIds, contracts, wallets} = await loadPromise; - - const request = url.parse(req.url); - // const path = request.path.split('/')[1]; + try { res = _setCorsHeaders(res); const {method} = req; @@ -381,10 +374,8 @@ const _handleLockRequest = async (req, res) => { const _handleDecryptRequest = async (req, res) => { // console.log('unlock request', req.url); - const {web3, addresses, abis, chainIds, contracts, wallets} = await loadPromise; - - const request = url.parse(req.url); - // const path = request.path.split('/')[1]; + const {web3, contracts} = await loadPromise; + try { res = _setCorsHeaders(res); const {method} = req; diff --git 
a/routes/worlds.js b/routes/worlds.js index c8d6aa1..8e765f6 100644 --- a/routes/worlds.js +++ b/routes/worlds.js @@ -6,7 +6,7 @@ const fs = require('fs').promises; // const crypto = require('crypto'); const child_process = require('child_process'); // const mime = require('mime'); -const {_setCorsHeaders, getExt} = require('../utils.js'); +const {_setCorsHeaders} = require('../utils.js'); const ps = require('ps-node'); const {s3} = require('../aws.js'); const {privateIp, publicIp} = require('../config.js'); @@ -53,6 +53,7 @@ class WorldManager { .filter(w => w.arguments[0] === jsPath) .map(w => { const {pid} = w; + // eslint-disable-next-line no-unused-vars let [_, name, publicIp, privateIp, port] = w.arguments; port = parseInt(port, 10); return { @@ -188,7 +189,7 @@ class WorldManager { } } } else { - return await new Promise((accept, reject) => { + return await new Promise((accept) => { this.queues.push(async () => { const world = await this.createWorld(name); accept(world); @@ -208,7 +209,7 @@ class WorldManager { if (cp) { cp.kill(); - await new Promise((accept, reject) => { + await new Promise((accept) => { cp.on('exit', async () => { const b = await fs.readFile(cp.dataFilePath); await s3.putObject({ @@ -240,7 +241,7 @@ class WorldManager { } } } else { - return await new Promise((accept, reject) => { + return await new Promise((accept) => { this.queues.push(async () => { const result = await this.deleteWorld(name); accept(result); diff --git a/tokens.js b/tokens.js index 60b1802..d5ab825 100644 --- a/tokens.js +++ b/tokens.js @@ -391,7 +391,7 @@ const _cancelEntries = (mainnetDepositedEntries, mainnetWithdrewEntries, sidecha ]; }; -const formatToken = contractName => chainName => async (token, storeEntries, mainnetDepositedEntries, mainnetWithdrewEntries, sidechainDepositedEntries, sidechainWithdrewEntries, polygonDepositedEntries, polygonWithdrewEntries) => { +const formatToken = chainName => async (token, storeEntries, mainnetDepositedEntries, mainnetWithdrewEntries, sidechainDepositedEntries, sidechainWithdrewEntries, polygonDepositedEntries, polygonWithdrewEntries) => { // console.log('format token', {id: token.id}); const tokenId = parseInt(token.id, 10); @@ -402,9 +402,9 @@ const formatToken = contractName => chainName => async (token, storeEntries, mai } = await getBlockchain(); const { - mainnetChainName, + // mainnetChainName, sidechainChainName, - polygonChainName, + // polygonChainName, } = getChainNames(chainName); let [ @@ -515,15 +515,15 @@ const formatToken = contractName => chainName => async (token, storeEntries, mai console.log('got token', JSON.stringify(o, null, 2)); return o; }; -const formatLand = contractName => chainName => async (token, storeEntries) => { +const formatLand = chainName => async (token) => { const { contracts, } = await getBlockchain(); const { - mainnetChainName, + // mainnetChainName, sidechainChainName, - polygonChainName, + // polygonChainName, } = getChainNames(chainName); const owner = await _fetchAccount(token.owner, sidechainChainName); @@ -535,7 +535,7 @@ const formatLand = contractName => chainName => async (token, storeEntries) => { description, rarity, extents, - sidechainMinterAddress, + // sidechainMinterAddress, ] = await Promise.all([ contracts[chainName].LAND.methods.getSingleMetadata(tokenId, 'description').call(), contracts[chainName].LAND.methods.getMetadata(name, 'rarity').call(), @@ -644,7 +644,7 @@ const getChainNft = contractName => chainName => async (tokenId, storeEntries, m if (_isValidToken(token)) { if 
(contractName === 'NFT') { // console.log('start call'); - const r = await formatToken(contractName)(chainName)( + const r = await formatToken(chainName)( token, storeEntries, mainnetDepositedEntries, @@ -657,7 +657,7 @@ const getChainNft = contractName => chainName => async (tokenId, storeEntries, m // console.log('end call'); return r; } else if (contractName === 'LAND') { - return await formatLand(contractName)(chainName)( + return await formatLand(chainName)( token, storeEntries, mainnetDepositedEntries, @@ -679,7 +679,7 @@ const getChainNft = contractName => chainName => async (tokenId, storeEntries, m } }; const getChainToken = getChainNft('NFT'); -const getChainLand = getChainNft('LAND'); +// const getChainLand = getChainNft('LAND'); const getChainOwnerNft = contractName => chainName => async (address, i, storeEntries, mainnetDepositedEntries, mainnetWithdrewEntries, sidechainDepositedEntries, sidechainWithdrewEntries, polygonDepositedEntries, polygonWithdrewEntries) => { if (!storeEntries || !mainnetDepositedEntries || !mainnetWithdrewEntries || !sidechainDepositedEntries || !sidechainWithdrewEntries || !polygonDepositedEntries || !polygonWithdrewEntries) { console.warn('bad arguments were', { @@ -693,7 +693,8 @@ const getChainOwnerNft = contractName => chainName => async (address, i, storeEn }); throw new Error('invalid arguments'); } - + const {contracts} = await getBlockchain(); + const tokenSrc = await contracts[chainName][contractName].methods.tokenOfOwnerByIndexFull(address, i).call(); const token = _copy(tokenSrc); const {hash} = token; @@ -704,7 +705,7 @@ const getChainOwnerNft = contractName => chainName => async (address, i, storeEn try { if (contractName === 'NFT') { - return await formatToken(contractName)(chainName)( + return await formatToken(chainName)( token, storeEntries, mainnetDepositedEntries, @@ -715,7 +716,7 @@ const getChainOwnerNft = contractName => chainName => async (address, i, storeEn polygonWithdrewEntries, ); } else if (contractName === 'LAND') { - return await formatLand(contractName)(chainName)( + return await formatLand(chainName)( token, storeEntries, mainnetDepositedEntries, @@ -888,6 +889,7 @@ module.exports = { getChainNft, getChainAccount, getChainToken, + getChainOwnerNft, // formatToken, // formatLand, getStoreEntries,