From 42eb39f7e7a3bf6018d3e3bddd1c541b57624219 Mon Sep 17 00:00:00 2001
From: bitbeckers
Date: Wed, 19 Jun 2024 16:27:46 +0200
Subject: [PATCH] chore(cleanup): remove unused deps

---
 .dockerignore | 46 -
 .github/workflows/deploy-cors-proxy.yml | 33 -
 .github/workflows/deploy-graph.yml | 40 -
 .github/workflows/docker-deps.yml | 44 -
 .github/workflows/e2e-tests.yml | 73 -
 cors-proxy/.eslintrc.json | 24 -
 cors-proxy/package.json | 19 -
 cors-proxy/src/index.test.ts | 25 -
 cors-proxy/src/index.ts | 105 -
 cors-proxy/tsconfig.json | 106 -
 cors-proxy/wrangler.toml | 3 -
 defender/.env.example | 2 -
 defender/.eslintrc.yml | 26 -
 defender/README.md | 15 -
 defender/package.json | 43 -
 defender/src/HypercertMinterABI.ts | 1117 -
 .../batch-mint-claims-from-allowlists.ts | 122 -
 defender/src/auto-tasks/execute-taker-bid.ts | 168 -
 .../auto-tasks/mint-claim-from-allowlist.ts | 130 -
 .../src/auto-tasks/on-allowlist-created.ts | 151 -
 defender/src/config.ts | 34 -
 defender/src/create-autotask.ts | 26 -
 defender/src/create-sentinel.ts | 56 -
 defender/src/errors.ts | 19 -
 defender/src/networks.ts | 109 -
 defender/src/reset.ts | 24 -
 defender/src/rollout.ts | 125 -
 .../src/scripts/fix-allowlist-duplicates.ts | 129 -
 defender/src/setup.ts | 70 -
 defender/src/update.ts | 106 -
 defender/tsconfig.json | 25 -
 defender/webpack.config.cjs | 50 -
 docker/README.md | 15 -
 docker/after_graph.sh | 26 -
 docker/after_localchain.sh | 108 -
 docker/base.Dockerfile | 5 -
 docker/base.env | 4 -
 docker/compose.yaml | 249 -
 docker/dev.env | 26 -
 docker/e2e.env | 24 -
 docker/frontend.sh | 15 -
 docker/graph.Dockerfile | 9 -
 docker/install.sh | 4 -
 docker/nginx/e2e_proxy.conf | 71 -
 docker/nginx/graph_nginx.conf | 25 -
 docker/playwright.Dockerfile | 7 -
 docker/postgres.init.d/add_databases.sh | 8 -
 docker/run_tests.sh | 33 -
 docker/scripts/build-base.sh | 16 -
 docker/scripts/build-graph-dependencies.sh | 45 -
 docker/scripts/build-graph.sh | 41 -
 docker/scripts/build-playwright.sh | 16 -
 docker/tx_client.sh | 8 -
 e2e/fixtures/metamask.ts | 62 -
 e2e/mint-token.spec.ts | 75 -
 e2e/utils/constants.ts | 14 -
 frontend/.env.local.example | 24 -
 frontend/.eslintrc.json | 33 -
 frontend/.gitignore | 13 -
 frontend/README.md | 78 -
 frontend/_redirects | 2 -
 frontend/components/add-registry-dialog.tsx | 43 -
 frontend/components/burn-fraction-button.tsx | 68 -
 .../components/claim-all-fractions-button.tsx | 88 -
 frontend/components/client-grid.tsx | 50 -
 frontend/components/confetti.tsx | 30 -
 frontend/components/config.tsx | 40 -
 frontend/components/connect-wallet.tsx | 26 -
 frontend/components/contexts.tsx | 4 -
 .../contract-interaction-dialog-context.tsx | 94 -
 frontend/components/dapp-context.tsx | 131 -
 frontend/components/dapp-state.tsx | 84 -
 frontend/components/forms.tsx | 469 -
 frontend/components/ftc-purchase.tsx | 327 -
 .../components/generic-hypercert-treemap.tsx | 53 -
 frontend/components/hypercert-create.tsx | 532 -
 frontend/components/hypercert-fetcher.tsx | 100 -
 .../merge-all-claim-fractions-button.tsx | 110 -
 frontend/components/post.tsx | 12 -
 .../project-browser/css/browser.module.css | 16 -
 .../project-browser/css/data-table.module.css | 78 -
 .../project-browser/data-table/expandable.tsx | 52 -
 .../data-table/field-default.tsx | 13 -
 .../data-table/field-growth.tsx | 36 -
 .../data-table/field-label.tsx | 25 -
 .../data-table/field-project.tsx | 24 -
 .../data-table/field-status.tsx | 14 -
 .../data-table/generic-data-table.tsx | 181 -
 .../expanded-project-data-table.tsx | 188 -
 .../project-browser/project-browser.tsx | 158 -
 .../project-client-provider.tsx | 66 -
 .../project-browser/project-contexts.tsx | 21 -
 .../project-browser/project-data-table.tsx | 120 -
 frontend/components/split-fraction-button.tsx | 188 -
 frontend/components/supabase-query.tsx | 109 -
 frontend/components/supabase-to-chart.tsx | 107 -
 frontend/components/testnet-only.tsx | 10 -
 .../components/transfer-fraction-button.tsx | 249 -
 frontend/components/widgets.tsx | 77 -
 .../zuzalu-hypercert-treemap.compat.d.ts | 24 -
 .../components/zuzalu-hypercert-treemap.tsx | 36 -
 frontend/components/zuzalu-purchase.tsx | 244 -
 frontend/content/burn-hypercert-content.ts | 29 -
 frontend/content/chainInteractions.ts | 23 -
 frontend/content/claim-hypercert-content.ts | 89 -
 frontend/content/hypercert-detail-content.ts | 14 -
 frontend/content/layout.ts | 17 -
 frontend/content/merge-hypercert-content.ts | 16 -
 frontend/content/my-hypercerts-content.ts | 7 -
 frontend/content/readable-errors.ts | 5 -
 frontend/content/split-hypercert-content.ts | 29 -
 frontend/hooks/account.ts | 11 -
 frontend/hooks/burnFraction.ts | 91 -
 frontend/hooks/checkWriteable.ts | 95 -
 frontend/hooks/fractions.ts | 69 -
 frontend/hooks/hypercerts-client.ts | 63 -
 frontend/hooks/list-registries.ts | 23 -
 frontend/hooks/mergeFractionUnits.ts | 89 -
 frontend/hooks/mintClaim.ts | 98 -
 frontend/hooks/mintClaimAllowlist.ts | 204 -
 frontend/hooks/mintFractionAllowlistBatch.ts | 148 -
 frontend/hooks/readTransferRestriction.ts | 37 -
 frontend/hooks/splitClaimUnits.ts | 86 -
 frontend/hooks/transferFraction.ts | 125 -
 frontend/hooks/verifyFractionClaim.ts | 63 -
 frontend/jest.config.js | 16 -
 frontend/jest.setup.js | 5 -
 frontend/lib/client.ts | 17 -
 frontend/lib/common.ts | 43 -
 frontend/lib/config.ts | 36 -
 frontend/lib/data-table.tsx | 85 -
 frontend/lib/errors.ts | 34 -
 frontend/lib/formatting.ts | 91 -
 frontend/lib/hypercert.test.ts | 167 -
 frontend/lib/hypercert.ts | 186 -
 frontend/lib/parse-blockchain-error.ts | 32 -
 frontend/lib/parsing.test.ts | 177 -
 frontend/lib/parsing.ts | 138 -
 frontend/lib/postdata_api.ts | 17 -
 frontend/lib/projects.tsx | 883 -
 frontend/lib/supabase-client.ts | 18 -
 frontend/lib/test-utils.ts | 13 -
 frontend/next.config.mjs | 25 -
 frontend/package.json | 95 -
 frontend/pages/[[...catchall]].tsx | 108 -
 frontend/pages/_document.tsx | 34 -
 frontend/pages/_error.js | 39 -
 frontend/pages/index-old.tsx | 37 -
 frontend/pages/plasmic-host.tsx | 14 -
 frontend/pages/post/[id].tsx | 55 -
 frontend/pages/sentry_sample_error.js | 65 -
 frontend/plasmic-init.ts | 730 -
 frontend/public/favicon.ico | Bin 7406 -> 0 bytes
 frontend/sentry.client.config.js | 19 -
 frontend/sentry.edge.config.js | 19 -
 frontend/sentry.properties | 4 -
 frontend/sentry.server.config.js | 19 -
 frontend/tsconfig.json | 22 -
 frontend/types/postdata.ts | 14 -
 frontend/types/prizes.ts | 42 -
 frontend/types/web3.ts | 3 -
 graph/.env.example | 1 -
 graph/.eslintrc.yml | 11 -
 graph/.github/workflows/ci-graph.yml | 32 -
 graph/.gitignore | 29 -
 graph/README.md | 17 -
 graph/abis/HypercertMinter.json | 1130 -
 .../HypercertMinter/HypercertMinter.ts | 1559 -
 graph/generated/schema.ts | 334 -
 graph/networks.json | 32 -
 graph/package.json | 34 -
 graph/schema.graphql | 25 -
 graph/src/hypercert-minter.ts | 199 -
 graph/src/utils.ts | 119 -
 graph/subgraph.yaml | 48 -
 graph/tests/.latest.json | 4 -
 .../tests/hypercert-minter-allowlist.test.ts | 55 -
 graph/tests/hypercert-minter-burn.test.ts | 193 -
 graph/tests/hypercert-minter-claim.test.ts | 58 -
 graph/tests/hypercert-minter-fraction.test.ts | 303 -
 graph/tests/hypercert-minter-utils.ts | 362 -
 graph/tsconfig.json | 4 -
 package.json | 28 +-
 playwright.config.ts | 77 -
 pnpm-lock.yaml | 24662 ++--------------
 pnpm-workspace.yaml | 5 -
 sdk/lib/hypercerts-api | 2 +-
 sdk/package.json | 36 +-
 sdk/rollup.config.mjs | 28 +-
 sdk/src/__generated__/gql/fragment-masking.ts | 26 +-
 sdk/src/__generated__/gql/graphql.ts | 67 +
 sdk/src/client.ts | 4 +
 sdk/src/evaluations/index.ts | 3 +-
 sdk/src/index.ts | 5 -
 sdk/src/storage.ts | 9 +-
 sdk/src/types/storage.ts | 4 +-
 sdk/src/utils/allowlist.ts | 2 +-
 sdk/test/client/minting.test.ts | 6 +-
 sdk/test/indexer.test.ts | 5 +-
 sdk/test/storage.test.ts | 2 +-
 sdk/tsconfig.json | 8 +-
 vendor/README.md | 7 -
 vendor/observabletreemap/LICENSE.txt | 13 -
 vendor/observabletreemap/README.md | 33 -
 .../observabletreemap/c857fa5c110524ee@515.js | 318 -
 ...7b721e369dc2d68fdf1a59b788758bbaba79cd.png | Bin 159245 -> 0 bytes
 ...ed59d71feea50f2470359e1b3a0020c2593c20.png | Bin 13241679 -> 0 bytes
 ...acc2cf02ec1ab6d3d53014273688cfffaef6c.json | 498 -
 ...63b1a6d9d6583e8f41c6c3f6394d982a3cf47.webp | Bin 108696 -> 0 bytes
 vendor/observabletreemap/index.html | 14 -
 vendor/observabletreemap/index.js | 1 -
 vendor/observabletreemap/inspector.css | 1 -
 vendor/observabletreemap/package.json | 14 -
 vendor/observabletreemap/runtime.js | 3717 ---
 214 files changed, 2574 insertions(+), 44721 deletions(-)
 delete mode 100644 .dockerignore
 delete mode 100644 .github/workflows/deploy-cors-proxy.yml
 delete mode 100644 .github/workflows/deploy-graph.yml
 delete mode 100644 .github/workflows/docker-deps.yml
 delete mode 100644 .github/workflows/e2e-tests.yml
 delete mode 100644 cors-proxy/.eslintrc.json
 delete mode 100644 cors-proxy/package.json
 delete mode 100644 cors-proxy/src/index.test.ts
 delete mode 100644 cors-proxy/src/index.ts
 delete mode 100644 cors-proxy/tsconfig.json
 delete mode 100644 cors-proxy/wrangler.toml
 delete mode 100644 defender/.env.example
 delete mode 100644 defender/.eslintrc.yml
 delete mode 100644 defender/README.md
 delete mode 100644 defender/package.json
 delete mode 100644 defender/src/HypercertMinterABI.ts
 delete mode 100644 defender/src/auto-tasks/batch-mint-claims-from-allowlists.ts
 delete mode 100644 defender/src/auto-tasks/execute-taker-bid.ts
 delete mode 100644 defender/src/auto-tasks/mint-claim-from-allowlist.ts
 delete mode 100644 defender/src/auto-tasks/on-allowlist-created.ts
 delete mode 100644 defender/src/config.ts
 delete mode 100644 defender/src/create-autotask.ts
 delete mode 100644 defender/src/create-sentinel.ts
 delete mode 100644 defender/src/errors.ts
 delete mode 100644 defender/src/networks.ts
 delete mode 100644 defender/src/reset.ts
 delete mode 100644 defender/src/rollout.ts
 delete mode 100644 defender/src/scripts/fix-allowlist-duplicates.ts
 delete mode 100644 defender/src/setup.ts
 delete mode 100644 defender/src/update.ts
 delete mode 100644 defender/tsconfig.json
 delete mode 100644 defender/webpack.config.cjs
 delete mode 100644 docker/README.md
 delete mode 100644 docker/after_graph.sh
 delete mode 100644 docker/after_localchain.sh
 delete mode 100644 docker/base.Dockerfile
 delete mode 100644 docker/base.env
 delete mode 100644 docker/compose.yaml
 delete mode 100644 docker/dev.env
 delete mode 100644 docker/e2e.env
 delete mode 100644 docker/frontend.sh
 delete mode 100644 docker/graph.Dockerfile
 delete mode 100644 docker/install.sh
 delete mode 100644 docker/nginx/e2e_proxy.conf
 delete mode 100644 docker/nginx/graph_nginx.conf
 delete mode 100644 docker/playwright.Dockerfile
 delete mode 100755
docker/postgres.init.d/add_databases.sh delete mode 100755 docker/run_tests.sh delete mode 100644 docker/scripts/build-base.sh delete mode 100644 docker/scripts/build-graph-dependencies.sh delete mode 100644 docker/scripts/build-graph.sh delete mode 100644 docker/scripts/build-playwright.sh delete mode 100755 docker/tx_client.sh delete mode 100644 e2e/fixtures/metamask.ts delete mode 100644 e2e/mint-token.spec.ts delete mode 100644 e2e/utils/constants.ts delete mode 100644 frontend/.env.local.example delete mode 100644 frontend/.eslintrc.json delete mode 100644 frontend/.gitignore delete mode 100644 frontend/README.md delete mode 100644 frontend/_redirects delete mode 100644 frontend/components/add-registry-dialog.tsx delete mode 100644 frontend/components/burn-fraction-button.tsx delete mode 100644 frontend/components/claim-all-fractions-button.tsx delete mode 100644 frontend/components/client-grid.tsx delete mode 100644 frontend/components/confetti.tsx delete mode 100644 frontend/components/config.tsx delete mode 100644 frontend/components/connect-wallet.tsx delete mode 100644 frontend/components/contexts.tsx delete mode 100644 frontend/components/contract-interaction-dialog-context.tsx delete mode 100644 frontend/components/dapp-context.tsx delete mode 100644 frontend/components/dapp-state.tsx delete mode 100644 frontend/components/forms.tsx delete mode 100644 frontend/components/ftc-purchase.tsx delete mode 100644 frontend/components/generic-hypercert-treemap.tsx delete mode 100644 frontend/components/hypercert-create.tsx delete mode 100644 frontend/components/hypercert-fetcher.tsx delete mode 100644 frontend/components/merge-all-claim-fractions-button.tsx delete mode 100644 frontend/components/post.tsx delete mode 100644 frontend/components/project-browser/css/browser.module.css delete mode 100644 frontend/components/project-browser/css/data-table.module.css delete mode 100644 frontend/components/project-browser/data-table/expandable.tsx delete mode 100644 frontend/components/project-browser/data-table/field-default.tsx delete mode 100644 frontend/components/project-browser/data-table/field-growth.tsx delete mode 100644 frontend/components/project-browser/data-table/field-label.tsx delete mode 100644 frontend/components/project-browser/data-table/field-project.tsx delete mode 100644 frontend/components/project-browser/data-table/field-status.tsx delete mode 100644 frontend/components/project-browser/data-table/generic-data-table.tsx delete mode 100644 frontend/components/project-browser/expanded-project-data-table.tsx delete mode 100644 frontend/components/project-browser/project-browser.tsx delete mode 100644 frontend/components/project-browser/project-client-provider.tsx delete mode 100644 frontend/components/project-browser/project-contexts.tsx delete mode 100644 frontend/components/project-browser/project-data-table.tsx delete mode 100644 frontend/components/split-fraction-button.tsx delete mode 100644 frontend/components/supabase-query.tsx delete mode 100644 frontend/components/supabase-to-chart.tsx delete mode 100644 frontend/components/testnet-only.tsx delete mode 100644 frontend/components/transfer-fraction-button.tsx delete mode 100644 frontend/components/widgets.tsx delete mode 100644 frontend/components/zuzalu-hypercert-treemap.compat.d.ts delete mode 100644 frontend/components/zuzalu-hypercert-treemap.tsx delete mode 100644 frontend/components/zuzalu-purchase.tsx delete mode 100644 frontend/content/burn-hypercert-content.ts delete mode 100644 
frontend/content/chainInteractions.ts delete mode 100644 frontend/content/claim-hypercert-content.ts delete mode 100644 frontend/content/hypercert-detail-content.ts delete mode 100644 frontend/content/layout.ts delete mode 100644 frontend/content/merge-hypercert-content.ts delete mode 100644 frontend/content/my-hypercerts-content.ts delete mode 100644 frontend/content/readable-errors.ts delete mode 100644 frontend/content/split-hypercert-content.ts delete mode 100644 frontend/hooks/account.ts delete mode 100644 frontend/hooks/burnFraction.ts delete mode 100644 frontend/hooks/checkWriteable.ts delete mode 100644 frontend/hooks/fractions.ts delete mode 100644 frontend/hooks/hypercerts-client.ts delete mode 100644 frontend/hooks/list-registries.ts delete mode 100644 frontend/hooks/mergeFractionUnits.ts delete mode 100644 frontend/hooks/mintClaim.ts delete mode 100644 frontend/hooks/mintClaimAllowlist.ts delete mode 100644 frontend/hooks/mintFractionAllowlistBatch.ts delete mode 100644 frontend/hooks/readTransferRestriction.ts delete mode 100644 frontend/hooks/splitClaimUnits.ts delete mode 100644 frontend/hooks/transferFraction.ts delete mode 100644 frontend/hooks/verifyFractionClaim.ts delete mode 100644 frontend/jest.config.js delete mode 100644 frontend/jest.setup.js delete mode 100644 frontend/lib/client.ts delete mode 100644 frontend/lib/common.ts delete mode 100644 frontend/lib/config.ts delete mode 100644 frontend/lib/data-table.tsx delete mode 100644 frontend/lib/errors.ts delete mode 100644 frontend/lib/formatting.ts delete mode 100644 frontend/lib/hypercert.test.ts delete mode 100644 frontend/lib/hypercert.ts delete mode 100644 frontend/lib/parse-blockchain-error.ts delete mode 100644 frontend/lib/parsing.test.ts delete mode 100644 frontend/lib/parsing.ts delete mode 100644 frontend/lib/postdata_api.ts delete mode 100644 frontend/lib/projects.tsx delete mode 100644 frontend/lib/supabase-client.ts delete mode 100644 frontend/lib/test-utils.ts delete mode 100644 frontend/next.config.mjs delete mode 100644 frontend/package.json delete mode 100644 frontend/pages/[[...catchall]].tsx delete mode 100644 frontend/pages/_document.tsx delete mode 100644 frontend/pages/_error.js delete mode 100644 frontend/pages/index-old.tsx delete mode 100644 frontend/pages/plasmic-host.tsx delete mode 100644 frontend/pages/post/[id].tsx delete mode 100644 frontend/pages/sentry_sample_error.js delete mode 100644 frontend/plasmic-init.ts delete mode 100644 frontend/public/favicon.ico delete mode 100644 frontend/sentry.client.config.js delete mode 100644 frontend/sentry.edge.config.js delete mode 100644 frontend/sentry.properties delete mode 100644 frontend/sentry.server.config.js delete mode 100644 frontend/tsconfig.json delete mode 100644 frontend/types/postdata.ts delete mode 100644 frontend/types/prizes.ts delete mode 100644 frontend/types/web3.ts delete mode 100644 graph/.env.example delete mode 100644 graph/.eslintrc.yml delete mode 100644 graph/.github/workflows/ci-graph.yml delete mode 100644 graph/.gitignore delete mode 100644 graph/README.md delete mode 100644 graph/abis/HypercertMinter.json delete mode 100644 graph/generated/HypercertMinter/HypercertMinter.ts delete mode 100644 graph/generated/schema.ts delete mode 100644 graph/networks.json delete mode 100644 graph/package.json delete mode 100644 graph/schema.graphql delete mode 100644 graph/src/hypercert-minter.ts delete mode 100644 graph/src/utils.ts delete mode 100644 graph/subgraph.yaml delete mode 100644 graph/tests/.latest.json delete mode 
100644 graph/tests/hypercert-minter-allowlist.test.ts delete mode 100644 graph/tests/hypercert-minter-burn.test.ts delete mode 100644 graph/tests/hypercert-minter-claim.test.ts delete mode 100644 graph/tests/hypercert-minter-fraction.test.ts delete mode 100644 graph/tests/hypercert-minter-utils.ts delete mode 100644 graph/tsconfig.json delete mode 100644 playwright.config.ts delete mode 100644 vendor/README.md delete mode 100644 vendor/observabletreemap/LICENSE.txt delete mode 100644 vendor/observabletreemap/README.md delete mode 100644 vendor/observabletreemap/c857fa5c110524ee@515.js delete mode 100644 vendor/observabletreemap/files/5cdcd98cf613333135182fbd6591c0a752b6c1485dcbfc8e40a7b074189effa8bffa8a19cf72c53860f1ad48c77b721e369dc2d68fdf1a59b788758bbaba79cd.png delete mode 100644 vendor/observabletreemap/files/5d83e602e4898c3c575288f957d50411155b47335688b26c04c2c642492780b24fa06e16cabeb52fbf887d8dc4ed59d71feea50f2470359e1b3a0020c2593c20.png delete mode 100644 vendor/observabletreemap/files/af40c6fdb79581388488f364d8ef0bab6388ba6c45ccae8b8e691939048c2de72f421898ecbe772c1ed529a8c75acc2cf02ec1ab6d3d53014273688cfffaef6c.json delete mode 100644 vendor/observabletreemap/files/c7f073e058835956a33f5d3d8aebf3f3d8ca460ca4d488de46af448dea9756b62b2c8cd145e8dc207c9ba32adb463b1a6d9d6583e8f41c6c3f6394d982a3cf47.webp delete mode 100644 vendor/observabletreemap/index.html delete mode 100644 vendor/observabletreemap/index.js delete mode 100644 vendor/observabletreemap/inspector.css delete mode 100644 vendor/observabletreemap/package.json delete mode 100644 vendor/observabletreemap/runtime.js diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index ee07c756..00000000 --- a/.dockerignore +++ /dev/null @@ -1,46 +0,0 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. - -# dependencies -**/node_modules/ -.pnp/ -.pnp.* -npm-debug.log* -.pnpm-debug.log* -yarn-debug.log* -yarn-error.log* -.yarn/* -!.yarn/patches -!.yarn/releases -!.yarn/plugins -!.yarn/sdks -!.yarn/versions - -# testing -coverage/ -.eslintcache - -# builds -out/ -build/ -dist/ -.turbo/ -.next/ -.docusaurus/ - -# files -.DS_Store -*.pem -*.env -.env*.local -*.log -coverage.json - -# typescript -*.tsbuildinfo -next-env.d.ts -.idea/ -.vscode/ - -# Ignore the docker related files -docker/*.Dockerfile -docker/compose.yaml \ No newline at end of file diff --git a/.github/workflows/deploy-cors-proxy.yml b/.github/workflows/deploy-cors-proxy.yml deleted file mode 100644 index 32e1a9ad..00000000 --- a/.github/workflows/deploy-cors-proxy.yml +++ /dev/null @@ -1,33 +0,0 @@ -# NOTE: This name appears in GitHub's Checks API and in workflow's status badge. -name: deploy-cors-proxy -env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - -# Trigger the workflow when: -on: - # A push occurs to one of the matched branches. - push: - branches: - - main - paths: - - cors-proxy/** - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -jobs: - deploy-cors-proxy: - # NOTE: This name appears in GitHub's Checks API. 
- name: deploy-cors-proxy - environment: deploy - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Publish - uses: cloudflare/wrangler-action@2.0.0 - with: - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - workingDirectory: 'cors-proxy' - command: publish \ No newline at end of file diff --git a/.github/workflows/deploy-graph.yml b/.github/workflows/deploy-graph.yml deleted file mode 100644 index a282b660..00000000 --- a/.github/workflows/deploy-graph.yml +++ /dev/null @@ -1,40 +0,0 @@ -# NOTE: This name appears in GitHub's Checks API and in workflow's status badge. -name: deploy-graph -env: - SUBGRAPH_ACCESS_TOKEN: ${{ secrets.SUBGRAPH_ACCESS_TOKEN }} - -# Trigger the workflow when: -on: - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -jobs: - deploy-graph: - # NOTE: This name appears in GitHub's Checks API. - name: deploy-graph - environment: deploy - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Setup pnpm - uses: pnpm/action-setup@v4 - with: - version: 9.1.1 - - name: Set up Node.js 18.18.1 - uses: actions/setup-node@v4 - with: - node-version: "18.18.1" - cache: "pnpm" - - name: Install - run: pnpm install --frozen-lockfile - - name: Build the subgraph - run: pnpm run build:graph - - name: Deploy the subgraph to testnets - if: github.ref == 'refs/heads/develop' - run: pnpm run deploy:graph:test - - name: Deploy the subgraph to production - if: github.ref == 'refs/heads/main' - run: pnpm run deploy:graph:prod diff --git a/.github/workflows/docker-deps.yml b/.github/workflows/docker-deps.yml deleted file mode 100644 index d4a3c204..00000000 --- a/.github/workflows/docker-deps.yml +++ /dev/null @@ -1,44 +0,0 @@ -# Builds and pushes docker dependencies used in the hypercerts docker-compose. -name: docker-deps - -on: - workflow_dispatch: - inputs: - script_name: - description: name of the docker script to execute (without the .sh) - required: true - type: string - -env: - DOCKER_PLATFORM: amd64 - REGISTRY: ghcr.io - -jobs: - docker-build-and-push: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Log in to the Container registry - uses: docker/login-action@v2 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: run build base - run: | - bash "docker/scripts/build-base.sh" - - - name: run build script - run: | - bash "docker/scripts/build-playwright.sh" - - - name: run build graph-deps - run: | - bash "docker/scripts/build-graph-dependencies.sh" - - - name: run build graph - run: | - bash "docker/scripts/build-graph.sh" diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml deleted file mode 100644 index 8a81eda9..00000000 --- a/.github/workflows/e2e-tests.yml +++ /dev/null @@ -1,73 +0,0 @@ -# NOTE: This name appears in GitHub's Checks API and in workflow's status badge. 
-name: end-to-end tests -env: - PLASMIC_PROJECT_ID: ${{ vars.PLASMIC_PROJECT_ID }} - PLASMIC_PROJECT_API_TOKEN: ${{ vars.PLASMIC_PROJECT_API_TOKEN }} - NEXT_PUBLIC_NFT_STORAGE_TOKEN: ${{ secrets.NEXT_PUBLIC_NFT_STORAGE_TOKEN }} - NEXT_PUBLIC_WEB3_STORAGE_TOKEN: ${{ secrets.NEXT_PUBLIC_NFT_STORAGE_TOKEN }} - NEXT_PUBLIC_SUPABASE_URL: ${{ vars.NEXT_PUBLIC_SUPABASE_URL }} - NEXT_PUBLIC_SUPABASE_ANON_KEY: ${{ secrets.NEXT_PUBLIC_SUPABASE_ANON_KEY }} - NEXT_PUBLIC_SUPABASE_TABLE: ${{ vars.NEXT_PUBLIC_SUPABASE_TABLE }} - NEXT_PUBLIC_WALLETCONNECT_ID: ${{ secrets.NEXT_PUBLIC_WALLETCONNECT_ID }} - DOCKER_PLATFORM: amd64 - -# Trigger the workflow when: -on: - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# Cancel in progress jobs on new pushes. -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - e2e-tests: - name: e2e-tests - environment: testing - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v3 - with: - # Check out pull request's HEAD commit instead of the merge commit to - # prevent gitlint from failing due to too long commit message titles, - # e.g. "Merge 3e621938d65caaa67f8e35d145335d889d470fc8 into 19a39b2f66cd7a165082d1486b2f1eb36ec2354a". - ref: ${{ github.event.pull_request.head.sha }} - # Fetch all history so gitlint can check the relevant commits. - fetch-depth: "0" - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.18.1' - - - name: Set up Node.js 18 - uses: actions/setup-node@v3 - with: - node-version: "18.15.0" - cache: "yarn" - - - name: Run e2e tests - run: | - env && yarn e2e:ci-run-tests - - - name: Output logs - if: always() - run: | - yarn e2e:ci-logs > e2e.ci.log - - - name: Save logs - if: always() - uses: actions/upload-artifact@v3 - with: - name: e2e.ci.log - path: e2e.ci.log - retention-days: 3 - - - name: Save any test-results - if: always() - uses: actions/upload-artifact@v3 - with: - name: e2e-test-results - path: test-results/ - retention-days: 3 - \ No newline at end of file diff --git a/cors-proxy/.eslintrc.json b/cors-proxy/.eslintrc.json deleted file mode 100644 index c4d08aab..00000000 --- a/cors-proxy/.eslintrc.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "env": { - "es2021": true, - "node": true - }, - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/recommended", - "prettier" - ], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaFeatures": { - "jsx": true - }, - "ecmaVersion": "latest", - "sourceType": "module" - }, - "plugins": ["@typescript-eslint"], - "rules": { - "@typescript-eslint/no-explicit-any": "off", - "@typescript-eslint/no-unused-vars": ["warn", { "argsIgnorePattern": "^_" }] - } -} diff --git a/cors-proxy/package.json b/cors-proxy/package.json deleted file mode 100644 index dda80ade..00000000 --- a/cors-proxy/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "@hypercerts-org/cors-proxy", - "version": "0.0.0", - "devDependencies": { - "@cloudflare/workers-types": "^4.20230115.0", - "typescript": "^4.9.5", - "vitest": "^1.0.1", - "wrangler": "2.9.1" - }, - "private": true, - "scripts": { - "dev": "wrangler dev --port 3000", - "deploy": "wrangler publish", - "lint": "tsc --noEmit && pnpm lint:eslint && pnpm lint:prettier", - "lint:eslint": "eslint --ignore-path ../.gitignore --max-warnings 0 --cache .", - "lint:prettier": "prettier --ignore-path ../.gitignore --loglevel warn --check **/*.ts", - "test": "vitest run" - } -} diff --git a/cors-proxy/src/index.test.ts 
b/cors-proxy/src/index.test.ts deleted file mode 100644 index 2a84fe2b..00000000 --- a/cors-proxy/src/index.test.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { describe, expect, it, beforeAll, afterAll } from "vitest"; -import { unstable_dev } from "wrangler"; -import { UnstableDevWorker } from "wrangler"; - -describe("Worker", () => { - let worker: UnstableDevWorker; - - beforeAll(async () => { - worker = await unstable_dev("src/index.ts", { - experimental: { disableExperimentalWarning: true }, - }); - }); - - afterAll(async () => { - await worker.stop(); - }); - - it("should return Hello World", async () => { - const resp = await worker.fetch(); - if (resp) { - const text = await resp.text(); - expect(text).toMatchInlineSnapshot('"Missing GET parameter: url"'); - } - }); -}); diff --git a/cors-proxy/src/index.ts b/cors-proxy/src/index.ts deleted file mode 100644 index aaa9a400..00000000 --- a/cors-proxy/src/index.ts +++ /dev/null @@ -1,105 +0,0 @@ -/** - * Constants - */ -// The endpoint you want the CORS reverse proxy to be on -const CORS_HEADERS = { - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "GET,HEAD,POST,OPTIONS", - "Access-Control-Max-Age": "86400", -}; - -const QUERYSTRING_KEY = "url"; - -// eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface Env { - // Example binding to KV. Learn more at https://developers.cloudflare.com/workers/runtime-apis/kv/ - // MY_KV_NAMESPACE: KVNamespace; - // - // Example binding to Durable Object. Learn more at https://developers.cloudflare.com/workers/runtime-apis/durable-objects/ - // MY_DURABLE_OBJECT: DurableObjectNamespace; - // - // Example binding to R2. Learn more at https://developers.cloudflare.com/workers/runtime-apis/r2/ - // MY_BUCKET: R2Bucket; -} - -export default { - async fetch( - request: Request, - _env: Env, - _ctx: ExecutionContext, - ): Promise { - async function handleOptions(request: Request) { - if ( - request.headers.get("Origin") !== null && - request.headers.get("Access-Control-Request-Method") !== null && - request.headers.get("Access-Control-Request-Headers") !== null - ) { - // Handle CORS preflight requests. - const allowControlRequestHeaders = request.headers.get( - "Access-Control-Request-Headers", - ); - return new Response(null, { - headers: { - ...CORS_HEADERS, - ...(allowControlRequestHeaders - ? { - "Access-Control-Allow-Headers": allowControlRequestHeaders, - } - : {}), - }, - }); - } else { - // Handle standard OPTIONS request. - return new Response(null, { - headers: { - Allow: "GET, HEAD, POST, OPTIONS", - }, - }); - } - } - - async function handleRequest(request: Request) { - const url = new URL(request.url); - const apiUrl = url.searchParams.get(QUERYSTRING_KEY); - - if (apiUrl == null) { - return new Response(`Missing GET parameter: ${QUERYSTRING_KEY}`); - } - - // Rewrite request to point to API URL. This also makes the request mutable - // so you can add the correct Origin header to make the API server think - // that this request is not cross-site. 
- // TODO: Never use ts ignore - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - request = new Request(apiUrl, request); - request.headers.set("Origin", new URL(apiUrl).origin); - let response = await fetch(request); - // Recreate the response so you can modify the headers - response = new Response(response.body, response); - // Set CORS headers - //response.headers.set('Access-Control-Allow-Origin', url.origin); - response.headers.set("Access-Control-Allow-Origin", "*"); - // Append to/Add Vary header so browser will cache response correctly - response.headers.append("Vary", "Origin"); - return response; - } - - if (request.method === "OPTIONS") { - // Handle CORS preflight requests - return handleOptions(request); - } else if ( - request.method === "GET" || - request.method === "HEAD" || - request.method === "POST" - ) { - // Handle requests to the API server - return handleRequest(request); - } else { - return new Response(null, { - status: 405, - statusText: "Method Not Allowed", - }); - } - }, -}; diff --git a/cors-proxy/tsconfig.json b/cors-proxy/tsconfig.json deleted file mode 100644 index b45b2852..00000000 --- a/cors-proxy/tsconfig.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig.json to read more about this file */ - - /* Projects */ - // "incremental": true, /* Enable incremental compilation */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, - "lib": [ - "es2021" - ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, - "jsx": "react" /* Specify what JSX code is generated. */, - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ - // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - - /* Modules */ - "module": "es2022" /* Specify what module code is generated. */, - // "rootDir": "./", /* Specify the root folder within your source files. 
*/ - "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */, - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ - "types": [ - "@cloudflare/workers-types", - "vitest" - ] /* Specify type package names to be included without being referenced in a source file. */, - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - "resolveJsonModule": true /* Enable importing .json files */, - // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - "allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */, - "checkJs": false /* Enable error reporting in type-checked JavaScript files. */, - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ - // "outDir": "./", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - "noEmit": true /* Disable emitting files from a compilation. */, - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. 
*/ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ - - /* Interop Constraints */ - "isolatedModules": true /* Ensure that each file can be safely transpiled without relying on other imports. */, - "allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */, - // "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, - - /* Type Checking */ - "strict": true /* Enable all strict type-checking options. */, - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ - // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ - // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ - } -} diff --git a/cors-proxy/wrangler.toml b/cors-proxy/wrangler.toml deleted file mode 100644 index 877becc7..00000000 --- a/cors-proxy/wrangler.toml +++ /dev/null @@ -1,3 +0,0 @@ -name = "cors-proxy" -main = "src/index.ts" -compatibility_date = "2023-02-09" diff --git a/defender/.env.example b/defender/.env.example deleted file mode 100644 index 28d5ccb9..00000000 --- a/defender/.env.example +++ /dev/null @@ -1,2 +0,0 @@ -OPENZEPPELIN_DEFENDER_ADMIN_API_KEY=<> -OPENZEPPELIN_DEFENDER_ADMIN_API_SECRET=<> \ No newline at end of file diff --git a/defender/.eslintrc.yml b/defender/.eslintrc.yml deleted file mode 100644 index 07ab4df0..00000000 --- a/defender/.eslintrc.yml +++ /dev/null @@ -1,26 +0,0 @@ -extends: - - "eslint:recommended" - - "plugin:@typescript-eslint/eslint-recommended" - - "plugin:@typescript-eslint/recommended" - - "prettier" -parser: "@typescript-eslint/parser" -parserOptions: - project: "./defender/tsconfig.json" -plugins: - - "@typescript-eslint" -root: true -ignorePatterns: ["build/"] -rules: - "@typescript-eslint/semi": - - warn - "@typescript-eslint/switch-exhaustiveness-check": - - warn - "@typescript-eslint/no-floating-promises": - - error - - ignoreIIFE: true - ignoreVoid: true - "@typescript-eslint/no-inferrable-types": "off" - "@typescript-eslint/no-unused-vars": - - error - - argsIgnorePattern: "_" - varsIgnorePattern: "_" diff --git a/defender/README.md b/defender/README.md deleted file mode 100644 index b03429ae..00000000 --- a/defender/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# Hypercerts Openzeppelin Defender integration - -Integrates the HypercertMinter contract with Openzeppelin Defender. -This updates the supabase database, where we keep a cache of `(wallet,claimId)` pairs so users -can see which fractions they might be able to claim. - -Build new auto tasks and deploy them using `yarn deploy`. -This will create [Sentinels](https://docs.openzeppelin.com/defender/sentinel) on OpenZeppelin -Defender, which will monitor for specific function calls or emitted events. -When either is monitored, an [Autotask](https://docs.openzeppelin.com/defender/autotasks) is run. -These are defined inside `src/auto-tasks/`. 
- -## Setup - -Copy `.env.example` to `.env` and populate your keys and configuration \ No newline at end of file diff --git a/defender/package.json b/defender/package.json deleted file mode 100644 index 2ff45321..00000000 --- a/defender/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "@hypercerts-org/defender", - "description": "Manages OpenZeppelin Defender integrations for Hypercerts", - "version": "0.0.1", - "author": "Hypercerts Foundation", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "build": "rimraf build && webpack", - "deploy:test": "pnpm build && pnpm setup:test", - "deploy:prod": "pnpm build && pnpm setup:prod", - "setup:test": "npx tsx src/setup.ts TEST", - "setup:prod": "npx tsx src/setup.ts PROD", - "scripts:fix-allowlist-duplicates": "npx tsx src/scripts/fix-allowlist-duplicates.ts" - }, - "dependencies": { - "@graphql-mesh/cache-localforage": "^0.95.7", - "@hypercerts-org/contracts": "1.1.2", - "@openzeppelin/defender-autotask-client": "1.54.1", - "@openzeppelin/defender-autotask-utils": "1.54.1", - "@openzeppelin/defender-base-client": "1.54.1", - "@openzeppelin/defender-sentinel-client": "1.54.1", - "@openzeppelin/merkle-tree": "^1.0.2", - "@supabase/supabase-js": "^2.4.1", - "@types/lodash": "^4.14.199", - "axios": "^1.2.6", - "dotenv": "^16.0.3", - "ethers": "5.7.2", - "lodash": "^4.17.21", - "node-fetch": "^3.3.0" - }, - "devDependencies": { - "@types/node": "^18.11.18", - "rimraf": "^5.0.5", - "terser-webpack-plugin": "^5.3.9", - "ts-loader": "^9.4.2", - "ts-node": "^10.9.1", - "tsx": "^3.14.0", - "typescript": "^4.9.4", - "webpack": "^5.75.0", - "webpack-cli": "^5.0.1" - } -} diff --git a/defender/src/HypercertMinterABI.ts b/defender/src/HypercertMinterABI.ts deleted file mode 100644 index f78f4586..00000000 --- a/defender/src/HypercertMinterABI.ts +++ /dev/null @@ -1,1117 +0,0 @@ -/** - * This is used both with the setup scripts as well as the autotasks. 
- * The autotasks run within a restricted environment, so difficult to - * import directly from hypercerts-sdk - */ -export const abi = `[ - { - "inputs": [], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "inputs": [], - "name": "AlreadyClaimed", - "type": "error" - }, - { - "inputs": [], - "name": "ArraySize", - "type": "error" - }, - { - "inputs": [], - "name": "DoesNotExist", - "type": "error" - }, - { - "inputs": [], - "name": "DuplicateEntry", - "type": "error" - }, - { - "inputs": [], - "name": "Invalid", - "type": "error" - }, - { - "inputs": [], - "name": "NotAllowed", - "type": "error" - }, - { - "inputs": [], - "name": "NotApprovedOrOwner", - "type": "error" - }, - { - "inputs": [], - "name": "TransfersNotAllowed", - "type": "error" - }, - { - "inputs": [], - "name": "TypeMismatch", - "type": "error" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "address", - "name": "previousAdmin", - "type": "address" - }, - { - "indexed": false, - "internalType": "address", - "name": "newAdmin", - "type": "address" - } - ], - "name": "AdminChanged", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "bytes32", - "name": "root", - "type": "bytes32" - } - ], - "name": "AllowlistCreated", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "operator", - "type": "address" - }, - { - "indexed": false, - "internalType": "bool", - "name": "approved", - "type": "bool" - } - ], - "name": "ApprovalForAll", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256[]", - "name": "claimIDs", - "type": "uint256[]" - }, - { - "indexed": false, - "internalType": "uint256[]", - "name": "fromTokenIDs", - "type": "uint256[]" - }, - { - "indexed": false, - "internalType": "uint256[]", - "name": "toTokenIDs", - "type": "uint256[]" - }, - { - "indexed": false, - "internalType": "uint256[]", - "name": "values", - "type": "uint256[]" - } - ], - "name": "BatchValueTransfer", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "beacon", - "type": "address" - } - ], - "name": "BeaconUpgraded", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "uint256", - "name": "claimID", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "string", - "name": "uri", - "type": "string" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "totalUnits", - "type": "uint256" - } - ], - "name": "ClaimStored", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint8", - "name": "version", - "type": "uint8" - } - ], - "name": "Initialized", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "bytes32", - "name": "leaf", - "type": "bytes32" - } - ], - "name": "LeafClaimed", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "previousOwner", - "type": "address" - }, - { - 
"indexed": true, - "internalType": "address", - "name": "newOwner", - "type": "address" - } - ], - "name": "OwnershipTransferred", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "address", - "name": "account", - "type": "address" - } - ], - "name": "Paused", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "operator", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256[]", - "name": "ids", - "type": "uint256[]" - }, - { - "indexed": false, - "internalType": "uint256[]", - "name": "values", - "type": "uint256[]" - } - ], - "name": "TransferBatch", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "operator", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "id", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "TransferSingle", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "string", - "name": "value", - "type": "string" - }, - { - "indexed": true, - "internalType": "uint256", - "name": "id", - "type": "uint256" - } - ], - "name": "URI", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "address", - "name": "account", - "type": "address" - } - ], - "name": "Unpaused", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "implementation", - "type": "address" - } - ], - "name": "Upgraded", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "claimID", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "fromTokenID", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "toTokenID", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "ValueTransfer", - "type": "event" - }, - { - "inputs": [], - "name": "__SemiFungible1155_init", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "id", - "type": "uint256" - } - ], - "name": "balanceOf", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address[]", - "name": "accounts", - "type": "address[]" - }, - { - "internalType": "uint256[]", - "name": "ids", - "type": "uint256[]" - } - ], - "name": "balanceOfBatch", - "outputs": [ - { - "internalType": "uint256[]", - "name": "", - "type": "uint256[]" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": 
"account", - "type": "address" - }, - { - "internalType": "bytes32[][]", - "name": "proofs", - "type": "bytes32[][]" - }, - { - "internalType": "uint256[]", - "name": "claimIDs", - "type": "uint256[]" - }, - { - "internalType": "uint256[]", - "name": "units", - "type": "uint256[]" - } - ], - "name": "batchMintClaimsFromAllowlists", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "id", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "value", - "type": "uint256" - } - ], - "name": "burn", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256[]", - "name": "ids", - "type": "uint256[]" - }, - { - "internalType": "uint256[]", - "name": "values", - "type": "uint256[]" - } - ], - "name": "burnBatch", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "_tokenID", - "type": "uint256" - } - ], - "name": "burnFraction", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "units", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "merkleRoot", - "type": "bytes32" - }, - { - "internalType": "string", - "name": "_uri", - "type": "string" - }, - { - "internalType": "enum IHypercertToken.TransferRestrictions", - "name": "restrictions", - "type": "uint8" - } - ], - "name": "createAllowlist", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "name": "hasBeenClaimed", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "initialize", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes32[]", - "name": "proof", - "type": "bytes32[]" - }, - { - "internalType": "uint256", - "name": "claimID", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "leaf", - "type": "bytes32" - } - ], - "name": "isAllowedToClaim", - "outputs": [ - { - "internalType": "bool", - "name": "isAllowed", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "address", - "name": "operator", - "type": "address" - } - ], - "name": "isApprovedForAll", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_account", - "type": "address" - }, - { - "internalType": "uint256[]", - "name": "_fractionIDs", - "type": "uint256[]" - } - ], - "name": "mergeFractions", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": 
"address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "units", - "type": "uint256" - }, - { - "internalType": "string", - "name": "_uri", - "type": "string" - }, - { - "internalType": "enum IHypercertToken.TransferRestrictions", - "name": "restrictions", - "type": "uint8" - } - ], - "name": "mintClaim", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "bytes32[]", - "name": "proof", - "type": "bytes32[]" - }, - { - "internalType": "uint256", - "name": "claimID", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "units", - "type": "uint256" - } - ], - "name": "mintClaimFromAllowlist", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "units", - "type": "uint256" - }, - { - "internalType": "uint256[]", - "name": "fractions", - "type": "uint256[]" - }, - { - "internalType": "string", - "name": "_uri", - "type": "string" - }, - { - "internalType": "enum IHypercertToken.TransferRestrictions", - "name": "restrictions", - "type": "uint8" - } - ], - "name": "mintClaimWithFractions", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "name", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "owner", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - } - ], - "name": "ownerOf", - "outputs": [ - { - "internalType": "address", - "name": "_owner", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "pause", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "paused", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "proxiableUUID", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - } - ], - "name": "readTransferRestriction", - "outputs": [ - { - "internalType": "string", - "name": "", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "renounceOwnership", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "from", - "type": "address" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256[]", - "name": "ids", - "type": "uint256[]" - }, - { - "internalType": "uint256[]", - "name": "amounts", - "type": "uint256[]" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - } - ], - "name": "safeBatchTransferFrom", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": 
"address", - "name": "from", - "type": "address" - }, - { - "internalType": "address", - "name": "to", - "type": "address" - }, - { - "internalType": "uint256", - "name": "id", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "amount", - "type": "uint256" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - } - ], - "name": "safeTransferFrom", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "operator", - "type": "address" - }, - { - "internalType": "bool", - "name": "approved", - "type": "bool" - } - ], - "name": "setApprovalForAll", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "_tokenID", - "type": "uint256" - }, - { - "internalType": "uint256[]", - "name": "_newFractions", - "type": "uint256[]" - } - ], - "name": "splitFraction", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes4", - "name": "interfaceId", - "type": "bytes4" - } - ], - "name": "supportsInterface", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "newOwner", - "type": "address" - } - ], - "name": "transferOwnership", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "account", - "type": "address" - }, - { - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - } - ], - "name": "unitsOf", - "outputs": [ - { - "internalType": "uint256", - "name": "units", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - } - ], - "name": "unitsOf", - "outputs": [ - { - "internalType": "uint256", - "name": "units", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "unpause", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "newImplementation", - "type": "address" - } - ], - "name": "upgradeTo", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "newImplementation", - "type": "address" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - } - ], - "name": "upgradeToAndCall", - "outputs": [], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "tokenID", - "type": "uint256" - } - ], - "name": "uri", - "outputs": [ - { - "internalType": "string", - "name": "_uri", - "type": "string" - } - ], - "stateMutability": "view", - "type": "function" - } -]`; diff --git a/defender/src/auto-tasks/batch-mint-claims-from-allowlists.ts b/defender/src/auto-tasks/batch-mint-claims-from-allowlists.ts deleted file mode 100644 index 578f6aa8..00000000 --- a/defender/src/auto-tasks/batch-mint-claims-from-allowlists.ts +++ /dev/null @@ -1,122 +0,0 @@ -import { - AutotaskEvent, - BlockTriggerEvent, -} from "@openzeppelin/defender-autotask-utils"; -import { HypercertMinterAbi } from 
"@hypercerts-org/contracts"; -import { MissingDataError, NotImplementedError } from "../errors"; -import { - getNetworkConfigFromName, - SUPABASE_ALLOWLIST_TABLE_NAME, -} from "../networks"; -import { createClient } from "@supabase/supabase-js"; -import { BigNumber, ethers } from "ethers"; -import fetch from "node-fetch"; - -export async function handler(event: AutotaskEvent) { - const network = getNetworkConfigFromName(event.autotaskName); - const { SUPABASE_URL, SUPABASE_SECRET_API_KEY } = event.secrets; - const ALCHEMY_KEY = event.secrets[network.alchemyKeyEnvName]; - const client = createClient(SUPABASE_URL, SUPABASE_SECRET_API_KEY, { - global: { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - fetch: (...args) => fetch(...args), - }, - }); - - let provider; - - if (ALCHEMY_KEY) { - provider = new ethers.providers.AlchemyProvider( - network.networkKey, - ALCHEMY_KEY, - ); - } else if (network.rpc) { - provider = new ethers.providers.JsonRpcProvider(network.rpc); - } else { - throw new Error("No provider available"); - } - - // Check data availability - const body = event.request.body; - if (!("type" in body) || body.type !== "BLOCK") { - throw new NotImplementedError("Event body is not a BlockTriggerEvent"); - } - const blockTriggerEvent = body as BlockTriggerEvent; - const contractAddress = blockTriggerEvent.matchedAddresses[0]; - const fromAddress = blockTriggerEvent.transaction.from; - const txnLogs = blockTriggerEvent.transaction.logs; - const tx = await provider.getTransaction(blockTriggerEvent.hash); - - if (!contractAddress) { - throw new MissingDataError(`body.matchedAddresses is missing`); - } else if (!fromAddress) { - throw new MissingDataError(`body.transaction.from is missing`); - } else if (!txnLogs) { - throw new MissingDataError(`body.transaction.logs is missing`); - } else if (!tx) { - throw new MissingDataError(`tx is missing`); - } - - console.log("Contract address", contractAddress); - console.log("From address", fromAddress); - - const contractInterface = new ethers.utils.Interface(HypercertMinterAbi); - - // Parse events - // Parse events - const batchTransferEvents = txnLogs - .map((l) => { - //Ignore unknown events - try { - return contractInterface.parseLog(l); - } catch (e) { - console.log("Failed to parse log", l); - return null; - } - }) - .filter((e) => e !== null && e.name === "BatchValueTransfer"); - - console.log( - "BatchTransfer Events: ", - JSON.stringify(batchTransferEvents, null, 2), - ); - - if (batchTransferEvents.length !== 1) { - throw new MissingDataError( - `Unexpected saw ${batchTransferEvents.length} BatchValueTransfer events`, - ); - } - - // Get claimIDs - const claimIds = batchTransferEvents[0].args[0] as BigNumber[]; - console.log("ClaimIDs: ", claimIds.toString()); - - const formattedClaimIds = claimIds.map( - (claimId) => `${contractAddress}-${claimId.toString().toLowerCase()}`, - ); - console.log("Formatted claim ids", formattedClaimIds); - - const uniqueClaimdIds = [...new Set(formattedClaimIds)]; - - // Wait for transaction to be confirmed for 5 blocks - if (await tx.wait(5).then((receipt) => receipt.status === 1)) { - console.log("Transaction confirmed"); - const deleteResult = await client - .from(SUPABASE_ALLOWLIST_TABLE_NAME) - .delete() - .eq("address", fromAddress) - .in("claimId", uniqueClaimdIds) - .select(); - - console.log("delete result", deleteResult); - - if (!deleteResult) { - throw new Error( - `Could not remove from database. 
Delete result: ${JSON.stringify( - deleteResult, - )}`, - ); - } - } -} diff --git a/defender/src/auto-tasks/execute-taker-bid.ts b/defender/src/auto-tasks/execute-taker-bid.ts deleted file mode 100644 index ccc2c040..00000000 --- a/defender/src/auto-tasks/execute-taker-bid.ts +++ /dev/null @@ -1,168 +0,0 @@ -import { - AutotaskEvent, - BlockTriggerEvent, -} from "@openzeppelin/defender-autotask-utils"; -import { getNetworkConfigFromName } from "../networks"; -import { createClient } from "@supabase/supabase-js"; -import fetch from "node-fetch"; -import { BigNumber, ethers } from "ethers"; -import { MissingDataError, NotImplementedError } from "../errors"; -import { - HypercertExchangeAbi, - HypercertMinterAbi, -} from "@hypercerts-org/contracts"; - -export async function handler(event: AutotaskEvent) { - console.log( - "Event: ", - JSON.stringify( - { ...event, secrets: "HIDDEN", credentials: "HIDDEN" }, - null, - 2, - ), - ); - const network = getNetworkConfigFromName(event.autotaskName); - const { SUPABASE_URL, SUPABASE_SECRET_API_KEY } = event.secrets; - const ALCHEMY_KEY = event.secrets[network.alchemyKeyEnvName]; - - const client = createClient(SUPABASE_URL, SUPABASE_SECRET_API_KEY, { - global: { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - fetch: (...args) => fetch(...args), - }, - }); - - let provider; - - if (ALCHEMY_KEY) { - provider = new ethers.providers.AlchemyProvider( - network.networkKey, - ALCHEMY_KEY, - ); - } else if (network.rpc) { - provider = new ethers.providers.JsonRpcProvider(network.rpc); - } else { - throw new Error("No provider available"); - } - - // Check data availability - const body = event.request.body; - if (!("type" in body) || body.type !== "BLOCK") { - throw new NotImplementedError("Event body is not a BlockTriggerEvent"); - } - const blockTriggerEvent = body as BlockTriggerEvent; - const contractAddress = blockTriggerEvent.matchedAddresses[0]; - const fromAddress = blockTriggerEvent.transaction.from; - const txnLogs = blockTriggerEvent.transaction.logs; - const tx = await provider.getTransaction(blockTriggerEvent.hash); - - if (!contractAddress) { - throw new MissingDataError(`body.matchedAddresses is missing`); - } else if (!fromAddress) { - throw new MissingDataError(`body.transaction.from is missing`); - } else if (!txnLogs) { - throw new MissingDataError(`body.transaction.logs is missing`); - } else if (!tx) { - throw new MissingDataError(`tx is missing`); - } - - console.log("Contract address", contractAddress); - console.log("From address", fromAddress); - - // TODO: Update contracts so we can use ABI from the @hypercerts-org/contracts package - const hypercertsMinterContractInterface = new ethers.utils.Interface( - HypercertMinterAbi, - ); - - // Parse TransferSingle events - const parsedLogs = txnLogs.map((l) => { - //Ignore unknown events - try { - return hypercertsMinterContractInterface.parseLog(l); - } catch (e) { - console.log("Failed to parse log", l); - return null; - } - }); - console.log("Parsed logs: ", JSON.stringify(parsedLogs, null, 2)); - const transferSingleEvents = parsedLogs.filter( - (e) => e !== null && e.name === "TransferSingle", - ); - - console.log( - "TransferSingle Events: ", - JSON.stringify(transferSingleEvents, null, 2), - ); - - if (transferSingleEvents.length !== 1) { - throw new MissingDataError( - `Unexpected saw ${transferSingleEvents.length} TransferSingle events`, - ); - } - - // Get claimID - const signerAddress = transferSingleEvents[0].args["from"] as string; - const 
itemId = BigNumber.from(transferSingleEvents[0].args["id"]).toString(); - - const hypercertExchangeContractInterface = new ethers.utils.Interface( - HypercertExchangeAbi, - ); - // Parse TakerBid events - const takerBidEvents = txnLogs - .map((l) => { - //Ignore unknown events - try { - return hypercertExchangeContractInterface.parseLog(l); - } catch (e) { - console.log("Failed to parse log", l); - return null; - } - }) - .filter((e) => e !== null && e.name === "TakerBid"); - - console.log("TakerBid Events: ", JSON.stringify(takerBidEvents, null, 2)); - - if (takerBidEvents.length !== 1) { - throw new MissingDataError( - `Unexpected saw ${takerBidEvents.length} TakerBid events`, - ); - } - - // Get claimID - const orderNonce = BigNumber.from( - takerBidEvents[0].args["nonceInvalidationParameters"][1], - ).toString(); - console.log( - "Signer Address: ", - signerAddress, - "Order nonce: ", - orderNonce, - "Fraction ID: ", - itemId, - "Chain ID: ", - network.chainId, - ); - - // Remove from DB - if (await tx.wait(5).then((receipt) => receipt.status === 1)) { - const deleteResult = await client - .from("marketplace-orders") - .delete() - .eq("signer", signerAddress) - .eq("chainId", network.chainId) - .eq("orderNonce", orderNonce) - .containedBy("itemIds", [itemId]) - .select() - .throwOnError(); - console.log("Deleted", deleteResult); - - if (!deleteResult) { - throw new Error( - `Could not remove from database. Delete result: ${JSON.stringify( - deleteResult, - )}`, - ); - } - } -} diff --git a/defender/src/auto-tasks/mint-claim-from-allowlist.ts b/defender/src/auto-tasks/mint-claim-from-allowlist.ts deleted file mode 100644 index b53ca014..00000000 --- a/defender/src/auto-tasks/mint-claim-from-allowlist.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { HypercertMinterAbi } from "@hypercerts-org/contracts"; -import { MissingDataError, NotImplementedError } from "../errors"; -import { - getNetworkConfigFromName, - SUPABASE_ALLOWLIST_TABLE_NAME, -} from "../networks"; -import { - AutotaskEvent, - BlockTriggerEvent, -} from "@openzeppelin/defender-autotask-utils"; -import { createClient } from "@supabase/supabase-js"; -import { ethers } from "ethers"; -import fetch from "node-fetch"; - -export async function handler(event: AutotaskEvent) { - console.log( - "Event: ", - JSON.stringify( - { ...event, secrets: "HIDDEN", credentials: "HIDDEN" }, - null, - 2, - ), - ); - const network = getNetworkConfigFromName(event.autotaskName); - const { SUPABASE_URL, SUPABASE_SECRET_API_KEY } = event.secrets; - const ALCHEMY_KEY = event.secrets[network.alchemyKeyEnvName]; - - const client = createClient(SUPABASE_URL, SUPABASE_SECRET_API_KEY, { - global: { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - fetch: (...args) => fetch(...args), - }, - }); - - let provider; - - if (ALCHEMY_KEY) { - provider = new ethers.providers.AlchemyProvider( - network.networkKey, - ALCHEMY_KEY, - ); - } else if (network.rpc) { - provider = new ethers.providers.JsonRpcProvider(network.rpc); - } else { - throw new Error("No provider available"); - } - - // Check data availability - const body = event.request.body; - if (!("type" in body) || body.type !== "BLOCK") { - throw new NotImplementedError("Event body is not a BlockTriggerEvent"); - } - const blockTriggerEvent = body as BlockTriggerEvent; - const contractAddress = blockTriggerEvent.matchedAddresses[0]; - const fromAddress = blockTriggerEvent.transaction.from; - const txnLogs = blockTriggerEvent.transaction.logs; - const tx = await 
provider.getTransaction(blockTriggerEvent.hash); - - if (!contractAddress) { - throw new MissingDataError(`body.matchedAddresses is missing`); - } else if (!fromAddress) { - throw new MissingDataError(`body.transaction.from is missing`); - } else if (!txnLogs) { - throw new MissingDataError(`body.transaction.logs is missing`); - } else if (!tx) { - throw new MissingDataError(`tx is missing`); - } - - console.log("Contract address", contractAddress); - console.log("From address", fromAddress); - - const contractInterface = new ethers.utils.Interface(HypercertMinterAbi); - - // Parse events - const batchTransferEvents = txnLogs - .map((l) => { - //Ignore unknown events - try { - return contractInterface.parseLog(l); - } catch (e) { - console.log("Failed to parse log", l); - return null; - } - }) - .filter((e) => e !== null && e.name === "BatchValueTransfer"); - - console.log( - "BatchTransfer Events: ", - JSON.stringify(batchTransferEvents, null, 2), - ); - - if (batchTransferEvents.length !== 1) { - throw new MissingDataError( - `Unexpected saw ${batchTransferEvents.length} BatchValueTransfer events`, - ); - } - - // Get claimID - const claimId = batchTransferEvents[0].args["claimIDs"][0] as string; - console.log( - "ClaimID: ", - batchTransferEvents[0].args["claimIDs"][0].toString(), - ); - - const formattedClaimId = `${contractAddress}-${claimId - .toString() - .toLowerCase()}`; - console.log("Formatted claim id", formattedClaimId); - - // Remove from DB - if (await tx.wait(5).then((receipt) => receipt.status === 1)) { - const deleteResult = await client - .from(SUPABASE_ALLOWLIST_TABLE_NAME) - .delete() - .eq("address", fromAddress) - .eq("claimId", formattedClaimId) - .eq("chainId", network.chainId) - .select(); - console.log("Deleted", deleteResult); - - if (!deleteResult) { - throw new Error( - `Could not remove from database. 
Delete result: ${JSON.stringify( - deleteResult, - )}`, - ); - } - } -} diff --git a/defender/src/auto-tasks/on-allowlist-created.ts b/defender/src/auto-tasks/on-allowlist-created.ts deleted file mode 100644 index 0e73472b..00000000 --- a/defender/src/auto-tasks/on-allowlist-created.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { MissingDataError, NotImplementedError } from "../errors"; -import { - AutotaskEvent, - BlockTriggerEvent, -} from "@openzeppelin/defender-autotask-utils"; -import { - getNetworkConfigFromName, - SUPABASE_ALLOWLIST_TABLE_NAME, -} from "../networks"; -import { StandardMerkleTree } from "@openzeppelin/merkle-tree"; -import { createClient } from "@supabase/supabase-js"; -import { ethers } from "ethers"; -import fetch from "node-fetch"; -import axios from "axios"; -import { HypercertMinterAbi } from "@hypercerts-org/contracts"; - -const getIpfsGatewayUri = (cidOrIpfsUri: string) => { - const NFT_STORAGE_IPFS_GATEWAY = "https://nftstorage.link/ipfs/{cid}"; - const cid = cidOrIpfsUri.replace("ipfs://", ""); - return NFT_STORAGE_IPFS_GATEWAY.replace("{cid}", cid); -}; - -export const getData = async (cidOrIpfsUri: string) => { - const ipfsGatewayLink = getIpfsGatewayUri(cidOrIpfsUri); - console.log(`Getting metadata ${cidOrIpfsUri} at ${ipfsGatewayLink}`); - return axios.get(ipfsGatewayLink).then((result) => result.data); -}; - -export async function handler(event: AutotaskEvent) { - console.log( - "Event: ", - JSON.stringify( - { ...event, secrets: "HIDDEN", credentials: "HIDDEN" }, - null, - 2, - ), - ); - const network = getNetworkConfigFromName(event.autotaskName); - const { SUPABASE_URL, SUPABASE_SECRET_API_KEY } = event.secrets; - const ALCHEMY_KEY = event.secrets[network.alchemyKeyEnvName]; - const client = createClient(SUPABASE_URL, SUPABASE_SECRET_API_KEY, { - global: { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - fetch: (...args) => fetch(...args), - }, - }); - const body = event.request.body; - if (!("type" in body) || body.type !== "BLOCK") { - throw new NotImplementedError("Event body is not a BlockTriggerEvent"); - } - const blockTriggerEvent = body as BlockTriggerEvent; - const contractAddress = blockTriggerEvent.matchedAddresses[0]; - const txnLogs = blockTriggerEvent.transaction.logs; - if (!contractAddress) { - throw new MissingDataError(`body.matchedAddresses is missing`); - } else if (!txnLogs) { - throw new MissingDataError(`body.transaction.logs is missing`); - } - console.log("Contract address", contractAddress); - - let provider; - - if (ALCHEMY_KEY) { - provider = new ethers.providers.AlchemyProvider( - network.networkKey, - ALCHEMY_KEY, - ); - } else if (network.rpc) { - provider = new ethers.providers.JsonRpcProvider(network.rpc); - } else { - throw new Error("No provider available"); - } - - const contract = new ethers.Contract( - contractAddress, - HypercertMinterAbi, - provider, - ); - - //Ignore unknown events - const allowlistCreatedEvents = txnLogs - .map((l) => { - try { - return contract.interface.parseLog(l); - } catch (e) { - console.log("Failed to parse log", l); - return null; - } - }) - .filter((e) => e !== null && e.name === "AllowlistCreated"); - - console.log( - "AllowlistCreated Events: ", - JSON.stringify(allowlistCreatedEvents, null, 2), - ); - - if (allowlistCreatedEvents.length !== 1) { - throw new MissingDataError( - `Unexpected saw ${allowlistCreatedEvents.length} AllowlistCreated events`, - ); - } - - const tokenId = allowlistCreatedEvents[0].args[0].toString(); - console.log("TokenId: ", 
tokenId); - - const metadataUri = await contract.functions.uri(tokenId); - console.log("metadataUri: ", metadataUri); - - const metadata = await getData(metadataUri[0]); - if (!metadata?.allowList) { - throw new Error(`No allowlist found`); - } - - console.log("allowlist: ", metadata.allowList); - - // Get allowlist - const treeResponse = await getData(metadata.allowList); - if (!treeResponse) { - throw new Error("Could not fetch json tree dump for allowlist"); - } - const tree = StandardMerkleTree.load(JSON.parse(treeResponse)); - - // Find the proof - const addresses: string[] = []; - for (const [, v] of tree.entries()) { - addresses.push(v[0]); - } - console.log("addresses", addresses); - const data = addresses.map((address, index) => ({ - address: address.toLowerCase(), - claimId: `${contractAddress}-${tokenId}`, - fractionCounter: index, - chainId: network.chainId, - })); - console.log("data", data); - - const addResult = await client - .from(SUPABASE_ALLOWLIST_TABLE_NAME) - .insert(data) - .select() - .then((data) => data.data); - - console.log("add result", addResult); - - if (!addResult) { - throw new Error( - `Could not add to database. Add result: ${JSON.stringify(addResult)}`, - ); - } -} diff --git a/defender/src/config.ts b/defender/src/config.ts deleted file mode 100644 index 09daf322..00000000 --- a/defender/src/config.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { NETWORKS, SupportedNetworks } from "./networks"; -import * as dotenv from "dotenv"; - -dotenv.config(); - -const requireEnv = (value: string | undefined, identifier: string) => { - if (!value) { - throw new Error(`Required env var ${identifier} does not exist`); - } - return value; -}; - -interface Config { - networks: SupportedNetworks; - credentials: { - apiKey: string; - apiSecret: string; - }; -} - -const config: Config = { - networks: NETWORKS, - credentials: { - apiKey: requireEnv( - process.env.OPENZEPPELIN_DEFENDER_ADMIN_API_KEY, - "OPENZEPPELIN_DEFENDER_ADMIN_API_KEY", - ), - apiSecret: requireEnv( - process.env.OPENZEPPELIN_DEFENDER_ADMIN_API_SECRET, - "OPENZEPPELIN_DEFENDER_ADMIN_API_SECRET", - ), - }, -}; -export default config; diff --git a/defender/src/create-autotask.ts b/defender/src/create-autotask.ts deleted file mode 100644 index 6cecaddd..00000000 --- a/defender/src/create-autotask.ts +++ /dev/null @@ -1,26 +0,0 @@ -import config from "./config"; -import { AutotaskClient } from "@openzeppelin/defender-autotask-client"; -import { SentinelTrigger } from "@openzeppelin/defender-autotask-client/lib/models/autotask.js"; - -export const createTask = async (name: string, file: string) => { - const client = new AutotaskClient(config.credentials); - const taskConfig = { - name, - encodedZippedCode: await client.getEncodedZippedCodeFromFolder( - `./build/relay/${file}`, - ), - paused: false, - trigger: { type: "sentinel" } as SentinelTrigger, - }; - - return await client - .create(taskConfig) - .then((res) => { - console.log("Created autotask", name, "with id", res.autotaskId); - return res; - }) - .catch((error) => { - console.error(error); - return null; - }); -}; diff --git a/defender/src/create-sentinel.ts b/defender/src/create-sentinel.ts deleted file mode 100644 index 55274f0d..00000000 --- a/defender/src/create-sentinel.ts +++ /dev/null @@ -1,56 +0,0 @@ -import config from "./config.js"; -import { NetworkConfig } from "./networks"; -import { SentinelClient } from "@openzeppelin/defender-sentinel-client"; -import { - EventCondition, - FunctionCondition, -} from 
"@openzeppelin/defender-sentinel-client/lib/models/subscriber.js"; - -export const createSentinel = async ({ - name, - network, - autotaskID, - functionConditions = [], - eventConditions = [], - contractAddress, - abi, -}: { - name: string; - network: NetworkConfig; - autotaskID: string; - eventConditions?: EventCondition[]; - functionConditions?: FunctionCondition[]; - contractAddress: string; - abi: any; -}) => { - const client = new SentinelClient(config.credentials); - await client - .create({ - type: "BLOCK", - network: network.networkKey, - confirmLevel: 1, // if not set, we pick the blockwatcher for the chosen network with the lowest offset - name, - addresses: [contractAddress], - abi, - paused: false, - eventConditions, - functionConditions, - alertTimeoutMs: 0, - notificationChannels: [], - autotaskTrigger: autotaskID, - }) - .then((res) => { - console.log( - `Created sentinel`, - res.name, - "- monitoring address", - contractAddress, - "- linked to autotask", - autotaskID, - ); - return res; - }) - .catch((error) => { - console.error(error); - }); -}; diff --git a/defender/src/errors.ts b/defender/src/errors.ts deleted file mode 100644 index 66df3372..00000000 --- a/defender/src/errors.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Error interfacing with OpenZeppelin API - */ -export class ApiError extends Error {} - -/** - * Misconfigured. Check your environment variables or `src/config.ts` - */ -export class ConfigError extends Error {} - -/** - * This is a pathway that hasn't been implemented yet - */ -export class NotImplementedError extends Error {} - -/** - * This is a pathway that hasn't been implemented yet - */ -export class MissingDataError extends Error {} diff --git a/defender/src/networks.ts b/defender/src/networks.ts deleted file mode 100644 index 1321b812..00000000 --- a/defender/src/networks.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { Network } from "@openzeppelin/defender-base-client"; -import { deployments } from "@hypercerts-org/contracts"; - -export interface NetworkConfig { - // Used to identify the network for both Alchemy and OpenZeppelin Sentinel - networkKey: Network; - // Minter contract address on the network - hypercertMinterContractAddress: string; - // Exchange contract address on the network - hypercertExchangeContractAddress?: string; - // the selector to retrieve the key from event.secrets in OpenZeppelin - alchemyKeyEnvName?: string; - // Chain ID for the network - chainId: number; - rpc?: string; -} - -export const SUPABASE_ALLOWLIST_TABLE_NAME = "allowlistCache-chainId"; - -export interface SupportedNetworks { - TEST: NetworkConfig[]; - PROD: NetworkConfig[]; -} - -export const NETWORKS: SupportedNetworks = { - TEST: [ - { - networkKey: "sepolia", - hypercertMinterContractAddress: - deployments["11155111"].HypercertMinterUUPS, - hypercertExchangeContractAddress: - deployments["11155111"].HypercertExchange, - chainId: 11155111, - rpc: "https://rpc.sepolia.org", - }, - { - networkKey: "base-sepolia", - hypercertMinterContractAddress: deployments["84532"].HypercertMinterUUPS, - hypercertExchangeContractAddress: deployments["84532"].HypercertExchange, - chainId: 84532, - rpc: "https://base-sepolia-rpc.publicnode.com", - }, - ], - PROD: [ - { - networkKey: "optimism", - hypercertMinterContractAddress: deployments["10"].HypercertMinterUUPS, - alchemyKeyEnvName: "ALCHEMY_OPTIMISM_KEY", - chainId: 10, - }, - { - networkKey: "celo", - hypercertMinterContractAddress: deployments["42220"].HypercertMinterUUPS, - chainId: 42220, - rpc: 
"https://forno.celo.org", - }, - { - networkKey: "base", - hypercertMinterContractAddress: deployments["8453"].HypercertMinterUUPS, - chainId: 8453, - rpc: "https://mainnet.base.org", - }, - ], -}; - -/** - * We'll use this to encode the network name into the Sentinel/Autotask name - * We'll then subsequently use `getNetworkConfigFromName` - * to extract the network name from within the Autotask - * @param network - * @param contract - * @param name - name pre-encoding - * @returns - */ -export const encodeName = ( - network: NetworkConfig, - contract: "minter" | "exchange", - name: string, -) => `[${network.networkKey}][${contract}] ${name}`; - -export const decodeName = ( - encodedName: string, -): { networkKey: string; contract: string; name: string } => { - const regex = /^\[(.+)\]\[(.+)\]\s(.+)$/; - const match = encodedName.match(regex); - if (!match) { - throw new Error(`Invalid encoded name: ${encodedName}`); - } - const networkKey = match[1]; - const contract = match[2]; - const name = match[3]; - return { networkKey, contract, name }; -}; - -/** - * From an Autotask name, deduce which NetworkConfig we're using - * @param name - name post-encoding - */ -export const getNetworkConfigFromName = ( - name: string, -): NetworkConfig | undefined => { - const allNetworks = [...NETWORKS.TEST, ...NETWORKS.PROD]; - for (let i = 0; i < allNetworks.length; i++) { - const network = allNetworks[i]; - if (name.includes(`[${network.networkKey}]`)) { - return network; - } - } -}; diff --git a/defender/src/reset.ts b/defender/src/reset.ts deleted file mode 100644 index 010d86e1..00000000 --- a/defender/src/reset.ts +++ /dev/null @@ -1,24 +0,0 @@ -import config from "./config"; -import { AutotaskClient } from "@openzeppelin/defender-autotask-client"; -import { SentinelClient } from "@openzeppelin/defender-sentinel-client"; - -export const reset = async () => { - const autotaskClient = new AutotaskClient(config.credentials); - const sentinelClient = new SentinelClient(config.credentials); - - // Remove all old auto tasks and sentinels - const oldAutoTasks = await autotaskClient.list(); - const oldSentinels = await sentinelClient.list(); - return await Promise.all([ - ...oldAutoTasks.items.map((x) => - autotaskClient.delete(x.autotaskId).then((res) => { - console.log(res.message); - }), - ), - ...oldSentinels.items.map((x) => - sentinelClient.delete(x.subscriberId).then((res) => { - console.log(res.message); - }), - ), - ]); -}; diff --git a/defender/src/rollout.ts b/defender/src/rollout.ts deleted file mode 100644 index 3c68de16..00000000 --- a/defender/src/rollout.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { createTask } from "./create-autotask"; -import { createSentinel } from "./create-sentinel"; -import { ApiError } from "./errors"; -import { NetworkConfig, encodeName } from "./networks"; -import { - HypercertExchangeAbi, - HypercertMinterAbi, -} from "@hypercerts-org/contracts"; - -export const rollOut = async (networks: NetworkConfig[]) => { - return await Promise.all( - networks.map(async (network) => { - // On allowlist created - const autoTaskOnAllowlistCreated = await createTask( - encodeName(network, "minter", "on-allowlist-created"), - "on-allowlist-created", - ); - if (!autoTaskOnAllowlistCreated) { - throw new ApiError( - encodeName( - network, - "minter", - "Could not create autoTask for on-allowlist-created", - ), - ); - } - await createSentinel({ - name: encodeName(network, "minter", "AllowlistCreated"), - network: network, - contractAddress: network.hypercertMinterContractAddress, - 
abi: HypercertMinterAbi, - eventConditions: [ - { eventSignature: "AllowlistCreated(uint256,bytes32)" }, - ], - autotaskID: autoTaskOnAllowlistCreated.autotaskId, - }); - - // On batch minted - const autoTaskOnBatchMintClaimsFromAllowlists = await createTask( - encodeName(network, "minter", "batch-mint-claims-from-allowlists"), - "batch-mint-claims-from-allowlists", - ); - if (!autoTaskOnBatchMintClaimsFromAllowlists) { - throw new ApiError( - encodeName( - network, - "minter", - "Could not create autoTask for batch-mint-claims-from-allowlists", - ), - ); - } - await createSentinel({ - name: encodeName(network, "minter", "batchMintClaimsFromAllowlists"), - network: network, - contractAddress: network.hypercertMinterContractAddress, - abi: HypercertMinterAbi, - autotaskID: autoTaskOnBatchMintClaimsFromAllowlists.autotaskId, - functionConditions: [ - { - functionSignature: - "batchMintClaimsFromAllowlists(address,bytes32[][],uint256[],uint256[])", - }, - ], - }); - - // On single minted from allowlist - const autoTaskOnMintClaimFromAllowlist = await createTask( - encodeName(network, "minter", "mint-claim-from-allowlist"), - "mint-claim-from-allowlist", - ); - if (!autoTaskOnMintClaimFromAllowlist) { - throw new ApiError( - encodeName( - network, - "minter", - "Could not create autoTask for mint-claim-from-allowlist", - ), - ); - } - await createSentinel({ - name: encodeName(network, "minter", "mintClaimFromAllowlist"), - network: network, - contractAddress: network.hypercertMinterContractAddress, - abi: HypercertMinterAbi, - autotaskID: autoTaskOnMintClaimFromAllowlist.autotaskId, - functionConditions: [ - { - functionSignature: - "mintClaimFromAllowlist(address,bytes32[],uint256,uint256)", - }, - ], - }); - - if (network.hypercertExchangeContractAddress) { - // On execute taker bid - const autoTaskExecuteTakerBid = await createTask( - encodeName(network, "exchange", "execute-taker-bid"), - "execute-taker-bid", - ); - if (!autoTaskExecuteTakerBid) { - throw new ApiError( - encodeName( - network, - "exchange", - "Could not create autoTask for execute-taker-bid", - ), - ); - } - await createSentinel({ - name: encodeName(network, "exchange", "executeTakerBid"), - network: network, - autotaskID: autoTaskExecuteTakerBid.autotaskId, - contractAddress: network.hypercertExchangeContractAddress, - abi: HypercertExchangeAbi, - functionConditions: [ - { - functionSignature: - "executeTakerBid((address,bytes),(uint8,uint256,uint256,uint256,uint256,uint8,address,address,address,uint256,uint256,uint256,uint256[],uint256[],bytes),bytes,(bytes32,(bytes32,uint8)[]))", - }, - ], - }); - } - }), - ); -}; diff --git a/defender/src/scripts/fix-allowlist-duplicates.ts b/defender/src/scripts/fix-allowlist-duplicates.ts deleted file mode 100644 index e22b1b3b..00000000 --- a/defender/src/scripts/fix-allowlist-duplicates.ts +++ /dev/null @@ -1,129 +0,0 @@ -// const supabaseLib = require("@supabase/supabase-js"); -// const dotenv = require("dotenv"); -// const _ = require("lodash"); -// import * as fetch from "node-fetch"; -// const hypercertsSDK = require("@hypercerts-org/hypercerts-sdk"); - -import { createClient } from "@supabase/supabase-js"; -import dotenv from "dotenv"; -import _ from "lodash"; -import fetch from "node-fetch"; - -const pageSize = 1000; - -dotenv.config(); -const supabase = createClient( - process.env.NEXT_PUBLIC_SUPABASE_HYPERCERTS_URL as string, - process.env.NEXT_PUBLIC_SUPABASE_HYPERCERTS_SERVICE_ROLE_KEY as string, -); - -const fetchAllowlistPage = async (lastId: number) => { - 
console.log("fetching page with id >", lastId); - return supabase - .from("allowlistCache-chainId") - .select("*") - .order("id", { ascending: true }) - .gt("id", lastId) - .eq("chainId", 10) - .limit(pageSize); -}; - -const deleteEntries = async (ids: number[]) => { - console.log("deleting entries", ids); - return supabase.from("allowlistCache-chainId").delete().in("id", ids); -}; - -const query = ` -query ClaimTokensByClaim($claimId: String!, $orderDirection: OrderDirection, $first: Int, $skip: Int) { - claimTokens(where: { claim: $claimId }, skip: $skip, first: $first, orderDirection: $orderDirection) { - id - owner - tokenID - units - } -} -`; - -const fetchClaimTokenForClaimId = async (claimId: string) => { - return ( - fetch( - "https://api.thegraph.com/subgraphs/name/hypercerts-admin/hypercerts-optimism-mainnet", - { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - variables: { - claimId, - first: 1000, - }, - query, - }), - }, - ) - .then((res) => res.json()) - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - .then((res) => res.data?.claimTokens) - ); -}; - -const main = async () => { - const totalNumberOfResults = await supabase - .from("allowlistCache-chainId") - .select("id", { count: "exact" }); - - console.log("totalNumberOfResults", totalNumberOfResults.count); - - let lastId = 1; - - // Iterate over all pages - // eslint-disable-next-line no-constant-condition - while (true) { - const { data } = await fetchAllowlistPage(lastId); - if (data.length === 0) { - break; - } - lastId = data[data.length - 1].id; - - const allowlistEntriesByClaimId = _.groupBy(data, "claimId"); - // console.log("fetched page", i); - - for (const claimId in allowlistEntriesByClaimId) { - // console.log("checking duplicates for", claimId); - const entries = allowlistEntriesByClaimId[claimId]; - // console.log(entries.length, "entries found"); - - const tokensForClaim = await fetchClaimTokenForClaimId(claimId); - // console.log("tokensForClaim", tokensForClaim); - - const addressesForClaimTokens = tokensForClaim.map( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (token: any) => token.owner, - ); - const addressesForEntry = entries.map((x) => x.address); - // console.log("Addresses for claim tokens", addressesForClaimTokens); - // console.log("Addresses for entries", addressesForEntry); - - const duplicates = _.intersectionBy( - addressesForClaimTokens, - addressesForEntry, - ); - - if (duplicates.length > 0) { - const supabaseEntries = entries.filter((entry) => - duplicates.includes(entry.address), - ); - // console.log("duplicates found for claimId", claimId, duplicates.length); - // console.log("duplicates", duplicates); - // console.log("duplicate supabaseEntries", supabaseEntries); - const idsToDelete = supabaseEntries.map((x) => x.id); - await deleteEntries(idsToDelete); - } - } - } -}; - -// eslint-disable-next-line @typescript-eslint/no-floating-promises -main(); diff --git a/defender/src/setup.ts b/defender/src/setup.ts deleted file mode 100644 index 61fb2f9e..00000000 --- a/defender/src/setup.ts +++ /dev/null @@ -1,70 +0,0 @@ -import config from "./config"; -import { ApiError, ConfigError } from "./errors"; -import { NETWORKS, getNetworkConfigFromName } from "./networks"; -import { reset } from "./reset"; -import { rollOut } from "./rollout"; -import { updateAutotask, updateSentinel } from "./update"; -import { AutotaskClient } from "@openzeppelin/defender-autotask-client"; -import { 
SentinelClient } from "@openzeppelin/defender-sentinel-client"; - -const setup = async () => { - const args = process.argv.slice(2); - if (args.length < 1) { - throw new ApiError("Missing argument: "); - } - - const environment = args[0]; - const supportedEnv = Object.keys(NETWORKS); - - if (!supportedEnv.includes(environment)) { - throw new ApiError("Invalid environment: "); - } - - const networks = config.networks[environment as keyof typeof NETWORKS]; - - const autotaskClient = new AutotaskClient(config.credentials); - const sentinelClient = new SentinelClient(config.credentials); - - // Remove all old auto tasks and sentinels - const oldAutoTasks = await autotaskClient.list(); - const oldSentinels = await sentinelClient.list(); - - const networksDeployed = oldAutoTasks.items - .map((task) => getNetworkConfigFromName(task.name).chainId) - .filter((value, index, array) => array.indexOf(value) === index); - - const missingNetworks = networks.filter( - (network) => !networksDeployed.includes(network.chainId), - ); - - if (missingNetworks.length > 0) { - await rollOut(missingNetworks); - } - - let updates = false; - - if (oldAutoTasks.items.length > 0) { - updates = true; - await updateAutotask(networks); - } - - if (oldSentinels.items.length > 0) { - updates = true; - await updateSentinel(networks); - } - - if (!updates) { - // Delete all sentinels and tasks first - await reset(); - - // Error out if no networks configured. - if (networks.length < 1) { - throw new ConfigError("No networks specified"); - } - - await rollOut(networks); - } -}; - -//eslint-disable-next-line @typescript-eslint/no-floating-promises -setup(); diff --git a/defender/src/update.ts b/defender/src/update.ts deleted file mode 100644 index 1a06d6e7..00000000 --- a/defender/src/update.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { abi as HypercertMinterAbi } from "./HypercertMinterABI"; -import config from "./config"; -import { NetworkConfig, decodeName } from "./networks"; -import { AutotaskClient } from "@openzeppelin/defender-autotask-client"; -import { SentinelClient } from "@openzeppelin/defender-sentinel-client"; -import { HypercertExchangeAbi } from "@hypercerts-org/contracts"; - -export const updateAutotask = async (networks: NetworkConfig[]) => { - const autotaskClient = new AutotaskClient(config.credentials); - const targetNetworks = networks.map((network) => network.networkKey); - - const oldAutoTasks = await autotaskClient.list(); - - return await Promise.all([ - ...oldAutoTasks.items.map((autoTask) => { - // Get name and network - const { name, networkKey } = decodeName(autoTask.name); - - // Validate if in target networks - if (!targetNetworks.includes(networkKey as NetworkConfig["networkKey"])) { - return; - } - - // Update autotask - console.log( - `Updating ${autoTask.autotaskId} from ./build/relay/${name} on ${networkKey}`, - ); - - autotaskClient - .updateCodeFromFolder(autoTask.autotaskId, `./build/relay/${name}`) - .then((res) => { - console.log(`Updated ${autoTask.autotaskId}`); - console.log(res); - }) - .catch((err) => { - console.error(`Failed to update ${autoTask.autotaskId}`); - console.error(err); - }); - }), - ]); -}; - -export const updateSentinel = async (networks: NetworkConfig[]) => { - const sentinelClient = new SentinelClient(config.credentials); - const targetNetworks = networks.map((network) => network.networkKey); - - const oldSentinels = await sentinelClient.list(); - - return await Promise.all([ - ...oldSentinels.items.map((sentinel) => { - // Get name and network - const { name, 
networkKey, contract } = decodeName(sentinel.name); - - // Validate if in target networks - - let address: string | undefined; - let abi: any; - - if (!targetNetworks.includes(networkKey as NetworkConfig["networkKey"])) { - return; - } - const network = networks.find( - (network) => network.networkKey === networkKey, - ); - - if (contract === "minter") { - address = network?.hypercertMinterContractAddress; - abi = HypercertMinterAbi; - } - - if (contract === "exchange") { - address = network?.hypercertExchangeContractAddress; - abi = HypercertExchangeAbi; - } - - if (!address) { - console.error(`No address found for ${sentinel.subscriberId}`); - return; - } - if (!abi) { - console.error(`No abi found for ${sentinel.subscriberId}`); - return; - } - - // Update sentinel - console.log( - `Updating ${sentinel.subscriberId} from ./build/relay/${name} on ${networkKey}`, - ); - - sentinelClient - .update(sentinel.subscriberId, { - ...sentinel, - addresses: [address], - abi, - }) - .then((res) => { - console.log(`Updated: ", ${sentinel.subscriberId}`); - console.log(res); - }) - .catch((err) => { - console.error(`Failed to update ${sentinel.subscriberId}`); - console.error(err); - }); - }), - ]); -}; diff --git a/defender/tsconfig.json b/defender/tsconfig.json deleted file mode 100644 index 7a1fd6c3..00000000 --- a/defender/tsconfig.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "compilerOptions": { - "allowJs": true, - "allowSyntheticDefaultImports": true, - "declaration": false, - "declarationMap": false, - "downlevelIteration": true, - "emitDecoratorMetadata": true, - "esModuleInterop": true, - "experimentalDecorators": true, - "forceConsistentCasingInFileNames": true, - "lib": ["es6"], - "module": "ESNext", - "moduleResolution": "node", - "noImplicitAny": true, - "removeComments": true, - "resolveJsonModule": true, - "skipLibCheck": true, - "sourceMap": false, - "strict": false, - "target": "es6" - }, - "exclude": ["node_modules"], - "include": ["src/**/*"] -} diff --git a/defender/webpack.config.cjs b/defender/webpack.config.cjs deleted file mode 100644 index 749abaf2..00000000 --- a/defender/webpack.config.cjs +++ /dev/null @@ -1,50 +0,0 @@ -const path = require("path"); -const webpack = require("webpack"); -const TerserPlugin = require("terser-webpack-plugin"); - -module.exports = { - entry: { - "batch-mint-claims-from-allowlists": - "./src/auto-tasks/batch-mint-claims-from-allowlists.ts", - "on-allowlist-created": "./src/auto-tasks/on-allowlist-created.ts", - "mint-claim-from-allowlist": - "./src/auto-tasks/mint-claim-from-allowlist.ts", - "execute-taker-bid": - "./src/auto-tasks/execute-taker-bid.ts", - }, - target: "node", - mode: "development", - devtool: "cheap-module-source-map", - module: { - rules: [{ test: /\.tsx?$/, use: "ts-loader", exclude: /node_modules/ }], - }, - resolve: { - extensions: [".ts", ".js"], - }, - externals: [ - // List here all dependencies available on the Autotask environment - /axios/, - /apollo-client/, - /defender-[^\-]+-client/, - /ethers/, - /web3/, - /@ethersproject\/.*/, - /aws-sdk/, - /aws-sdk\/.*/, - ], - externalsType: "commonjs2", - plugins: [ - // List here all dependencies that are not run in the Autotask environment - new webpack.IgnorePlugin({ resourceRegExp: /dotenv/ }), - ], - optimization: { - minimize: true, - minimizer: [new TerserPlugin()], - }, - output: { - filename: "[name]/index.js", - path: path.resolve(__dirname, "build", "relay"), - sourceMapFilename: "[file].map", - library: { type: "commonjs2" }, - }, -}; diff --git a/docker/README.md 
b/docker/README.md deleted file mode 100644 index 8366923c..00000000 --- a/docker/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# Docker Setup for E2E Suite - -This docker setup is intended to be used both as a test harness and as -local development infrastructure to enable fast-as-possible iteration. - -For more docs on usage, see the root README in this monorepo. - -## Multi-arch building - -Eventually this will be put into our GitHub Actions, but for now here are the -instructions for a multi-arch build (some of us have ARM chips, after all). - -``` -docker buildx create --name dual_arm64_amd64 --platform linux/amd64,linux/arm/v8 -``` diff --git a/docker/after_graph.sh b/docker/after_graph.sh deleted file mode 100644 index 625085bd..00000000 --- a/docker/after_graph.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# Script to run after the graph has started running - -set -euxo pipefail - -# Load application environment variables -source /usr/src/app/node_modules/app.env.sh - -export GRAPH_RPC_URL=$DOCKER_INTERNAL_GRAPH_RPC_URL -export IPFS_URL=$DOCKER_INTERNAL_IPFS_URL -export SUBGRAPH_NAME=hypercerts-admin/hypercerts-hardhat -export SUBGRAPH_MANIFEST=.test.subgraph.yaml -export VERSION_LABEL=v0.0.1 - -echo "Deploying the subgraph" - -cd "$REPO_DIR/graph" - -cat subgraph.yaml | sed 's/network: .*/network: hardhat/;s/address: ".*"/address: "'"${NEXT_PUBLIC_CONTRACT_ADDRESS}"'"/;s/startBlock: .*/startBlock: '"${CONTRACT_DEPLOYED_BLOCK_NUMBER}"'/' > $SUBGRAPH_MANIFEST -prepend_text="# This file is generated for local testing. It should not be committed" - -printf '%s\n%s\n' "${prepend_text}" "$(cat $SUBGRAPH_MANIFEST)" > $SUBGRAPH_MANIFEST - -yarn graph create --node $GRAPH_RPC_URL $SUBGRAPH_NAME -yarn graph deploy --node $GRAPH_RPC_URL --ipfs $IPFS_URL --version-label=$VERSION_LABEL $SUBGRAPH_NAME $SUBGRAPH_MANIFEST \ No newline at end of file diff --git a/docker/after_localchain.sh b/docker/after_localchain.sh deleted file mode 100644 index dc7acc75..00000000 --- a/docker/after_localchain.sh +++ /dev/null @@ -1,108 +0,0 @@ -#!/bin/bash -set -euxo pipefail - -# Reinitializes a test harness that can be used for local development or end to -# end testing with playwright - -REPO_DIR=${REPO_DIR:-} -LOCAL_TESTING_ADDRESS=${LOCAL_TESTING_ADDRESS:-} -deploy_json=/deploy.json - -export LOCALHOST_NETWORK_URL=http://localchain:8545 -export NEXT_PUBLIC_DEFAULT_CHAIN_ID=31337 - -function hardhat_local() { - yarn hardhat --network localhost $@ -} - -# Clean up stateful data related to any previous invocation of this -# docker-compose setup -rm -rf /postgres/* -rm -rf /ipfs_staging/* -rm -rf /ipfs_data/* - -# Allow passing in the repo directory. Otherwise automagically get the correct -# directory based on this script's path -if [[ -z "${REPO_DIR}" ]]; then - # Ensure we're working from the script's directory. This is a bit brittle but - # it's intended to be bespoke to this repo - script_dir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - cd "$script_dir"/.. - REPO_DIR=$( pwd ) -fi - -# Rebuild the project if necessary -# TODO this is failing in docker. Need to figure out why. Probably due to shared VMs -cd "${REPO_DIR}" -yarn build:contracts -yarn build:defender -yarn build:graph -yarn build:sdk - -cd "${REPO_DIR}/contracts" - -# Deploy the contract -echo "Deploy the contract" -hardhat_local deploy --output "$deploy_json" - -# Transfer token to a specific account if that account has been specified -if [[ ! 
-z "${LOCAL_TESTING_ADDRESS}" ]]; then - echo "Funding ${LOCAL_TESTING_ADDRESS}" - hardhat_local transfer-from-test-account --dest "$LOCAL_TESTING_ADDRESS" --amount 5000 -fi - -contract_address=$(jq '.address' -r "$deploy_json") -contract_deployed_block_number=$(jq '.blockNumber' -r "$deploy_json") -echo "Contract address to be loaded: $contract_address" - -# Have these here so we can do some interpolating -GRAPH_BASE_URL=http://${FRONTEND_GRAPH_HOST}:${FRONTEND_GRAPH_HTTP_PORT}/subgraphs/name -GRAPH_NAME=hypercerts-hardhat -GRAPH_NAMESPACE=hypercerts-admin -NEXT_PUBLIC_GRAPH_URL=${GRAPH_BASE_URL}/${GRAPH_NAMESPACE}/${GRAPH_NAME} - -cat < /usr/src/app/node_modules/app.env.sh -# Generate an environment file from the contract deployment -export REPO_DIR=${REPO_DIR} -export NEXT_PUBLIC_DEFAULT_CHAIN_ID=31337 -export NEXT_PUBLIC_CHAIN_NAME=hardhat -export NEXT_PUBLIC_CONTRACT_ADDRESS="${contract_address}" -export CONTRACT_DEPLOYED_BLOCK_NUMBER="${contract_deployed_block_number}" -export NEXT_PUBLIC_UNSAFE_FORCE_OVERRIDE_CONFIG=1 -export NEXT_PUBLIC_RPC_URL=http://${FRONTEND_RPC_HOST}:${FRONTEND_RPC_PORT} -export FRONTEND_RPC_PORT=${FRONTEND_RPC_PORT} -export FRONTEND_RPC_HOST=${FRONTEND_RPC_HOST} -export FRONTEND_GRAPH_HOST=${FRONTEND_GRAPH_HOST} -export FRONTEND_GRAPH_HTTP_PORT=${FRONTEND_GRAPH_HTTP_PORT} -export FRONTEND_GRAPH_WS_PORT=${FRONTEND_GRAPH_WS_PORT} -export FRONTEND_GRAPH_JSON_RPC_PORT=${FRONTEND_GRAPH_JSON_RPC_PORT} -export FRONTEND_GRAPH_INDEX_STATUS_PORT=${FRONTEND_GRAPH_INDEX_STATUS_PORT} -export FRONTEND_IPFS_HOST=${FRONTEND_IPFS_HOST} -export FRONTEND_IPFS_LIBP2P_PORT=${FRONTEND_IPFS_LIBP2P_PORT} -export FRONTEND_IPFS_API_PORT=${FRONTEND_IPFS_API_PORT} -export FRONTEND_IPFS_GATEWAY_PORT=${FRONTEND_IPFS_GATEWAY_PORT} -export FRONTEND_PORT=${FRONTEND_PORT} -export FRONTEND_HOST=${FRONTEND_HOST} - -export NEXT_PUBLIC_GRAPH_URL=${NEXT_PUBLIC_GRAPH_URL} - -export DOCKER_INTERNAL_GRAPH_RPC_URL=http://graph:8020 -export DOCKER_INTERNAL_GRAPH_HTTP_URL=http://graph:8000 -export DOCKER_INTERNAL_IPFS_URL=http://ipfs:5001 -export PLASMIC_PROJECT_ID="$PLASMIC_PROJECT_ID" -export PLASMIC_PROJECT_API_TOKEN="$PLASMIC_PROJECT_API_TOKEN" -export LOCALHOST_NETWORK_URL=${LOCALHOST_NETWORK_URL} -export NEXT_PUBLIC_NFT_STORAGE_TOKEN=${NEXT_PUBLIC_NFT_STORAGE_TOKEN} -export NEXT_PUBLIC_WEB3_STORAGE_TOKEN=${NEXT_PUBLIC_WEB3_STORAGE_TOKEN} -export NEXT_PUBLIC_WALLETCONNECT_ID=${NEXT_PUBLIC_WALLETCONNECT_ID} -export NEXT_PUBLIC_DOMAIN=${NEXT_PUBLIC_DOMAIN} -export NEXT_PUBLIC_SUPABASE_URL=${NEXT_PUBLIC_SUPABASE_URL} -export NEXT_PUBLIC_SUPABASE_ANON_KEY=${NEXT_PUBLIC_SUPABASE_ANON_KEY} -export NEXT_PUBLIC_SUPABASE_TABLE=${NEXT_PUBLIC_SUPABASE_TABLE} -EOF - -source /usr/src/app/node_modules/app.env.sh - -cd $REPO_DIR -# Run a full build (this seems to be necessary) -yarn build \ No newline at end of file diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile deleted file mode 100644 index f178663a..00000000 --- a/docker/base.Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM node:18 - -RUN apt-get update \ - && apt-get install -y jq \ - && npm install -g @graphprotocol/graph-cli diff --git a/docker/base.env b/docker/base.env deleted file mode 100644 index 078e078f..00000000 --- a/docker/base.env +++ /dev/null @@ -1,4 +0,0 @@ -# VERSIONS FOR DOCKER IMAGES -GRAPH_NODE_DEV_VERSION=6f907fcb75f23a3b3ed27dde0cb1eb3758a09ec3 -NODE_DEV_VERSION=1.0 -PLAYWRIGHT_VERSION=v1.35.0 \ No newline at end of file diff --git a/docker/compose.yaml b/docker/compose.yaml deleted file mode 100644 index ef7b15f4..00000000 
--- a/docker/compose.yaml +++ /dev/null @@ -1,249 +0,0 @@ -version: '3' - -volumes: - node_modules: - graph_modules: - sdk_modules: - contracts_modules: - postgres_storage: - ipfs_staging: - ipfs_data: - subgraph.yaml: - - -services: - install: - image: ghcr.io/hypercerts-org/node-dev-18:1.0-${DOCKER_PLATFORM:-amd64} - working_dir: /usr/src/app - command: bash docker/install.sh - volumes: - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules - localchain: - image: ghcr.io/hypercerts-org/node-dev-18:1.0-${DOCKER_PLATFORM:-amd64} - ports: - - "${FRONTEND_RPC_PORT}:8545" - working_dir: /usr/src/app/contracts - command: yarn hardhat node - depends_on: - install: - condition: service_completed_successfully - healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:8545" ] - interval: 1s - timeout: 15s - retries: 5 - start_period: 30s - volumes: - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules - after_localchain: - image: ghcr.io/hypercerts-org/node-dev-18:1.0-${DOCKER_PLATFORM:-amd64} - working_dir: /usr/src/app - command: bash docker/after_localchain.sh - depends_on: - localchain: - condition: service_healthy - environment: - - PLASMIC_PROJECT_ID=${PLASMIC_PROJECT_ID} - - PLASMIC_PROJECT_API_TOKEN=${PLASMIC_PROJECT_API_TOKEN} - - LOCAL_TESTING_ADDRESS=${LOCAL_TESTING_ADDRESS} - - FRONTEND_RPC_PORT=${FRONTEND_RPC_PORT} - - FRONTEND_RPC_HOST=${FRONTEND_RPC_HOST} - - FRONTEND_GRAPH_HOST=${FRONTEND_GRAPH_HOST} - - FRONTEND_GRAPH_HTTP_PORT=${FRONTEND_GRAPH_HTTP_PORT} - - FRONTEND_GRAPH_WS_PORT=${FRONTEND_GRAPH_WS_PORT} - - FRONTEND_GRAPH_JSON_RPC_PORT=${FRONTEND_GRAPH_JSON_RPC_PORT} - - FRONTEND_GRAPH_INDEX_STATUS_PORT=${FRONTEND_GRAPH_INDEX_STATUS_PORT} - - FRONTEND_IPFS_HOST=${FRONTEND_IPFS_HOST} - - FRONTEND_IPFS_LIBP2P_PORT=${FRONTEND_IPFS_LIBP2P_PORT} - - FRONTEND_IPFS_API_PORT=${FRONTEND_IPFS_API_PORT} - - FRONTEND_IPFS_GATEWAY_PORT=${FRONTEND_IPFS_GATEWAY_PORT} - - FRONTEND_PORT=${FRONTEND_PORT} - - FRONTEND_HOST=${FRONTEND_HOST} - - NEXT_PUBLIC_NFT_STORAGE_TOKEN=${NEXT_PUBLIC_NFT_STORAGE_TOKEN} - - NEXT_PUBLIC_WEB3_STORAGE_TOKEN=${NEXT_PUBLIC_WEB3_STORAGE_TOKEN} - - NEXT_PUBLIC_WALLETCONNECT_ID=${NEXT_PUBLIC_WALLETCONNECT_ID} - - NEXT_PUBLIC_DOMAIN=${NEXT_PUBLIC_DOMAIN} - - NEXT_PUBLIC_SUPABASE_URL=${NEXT_PUBLIC_SUPABASE_URL} - - NEXT_PUBLIC_SUPABASE_ANON_KEY=${NEXT_PUBLIC_SUPABASE_ANON_KEY} - - NEXT_PUBLIC_SUPABASE_TABLE=${NEXT_PUBLIC_SUPABASE_TABLE} - volumes: - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules - - postgres_storage:/postgres - - ipfs_staging:/ipfs_staging - - ipfs_data:/ipfs_data - tx_client: - image: ghcr.io/hypercerts-org/node-dev-18:${NODE_DEV_VERSION}-${DOCKER_PLATFORM:-amd64} - working_dir: /usr/src/app - command: docker/tx_client.sh - depends_on: - after_graph: - condition: service_completed_successfully - volumes: - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules - restart: on-failure - frontend: - 
profiles: - - testing - image: ghcr.io/hypercerts-org/node-dev-18:${NODE_DEV_VERSION}-${DOCKER_PLATFORM:-amd64} - privileged: true - working_dir: /usr/src/app - ports: - - "${FRONTEND_PORT}:3000" - command: bash docker/frontend.sh - depends_on: - after_localchain: - condition: service_completed_successfully - after_graph: - condition: service_completed_successfully - volumes: - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules - environment: - - ENVIRONMENT=${ENVIRONMENT} - healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:3000" ] - interval: 1s - timeout: 15s - retries: 30 - start_period: 100s - ipfs: - image: ipfs/kubo - command: daemon --offline --migrate=true --agent-version-suffix=docker - depends_on: - after_localchain: - condition: service_completed_successfully - ports: - - "${FRONTEND_IPFS_GATEWAY_PORT}:8080" - - "${FRONTEND_IPFS_API_PORT}:5001" - # Don't need to expose the libp2p port at this time. - # - "${IPFS_LIBP2P_PORT}:4001" - volumes: - - ipfs_staging:/export - - ipfs_data:/data/ipfs - postgres: - image: postgres:15 - restart: always - user: postgres - depends_on: - after_localchain: - condition: service_completed_successfully - # Required command for the graph - command: - [ - "postgres", - "-cshared_preload_libraries=pg_stat_statements" - ] - volumes: - - postgres_storage:/var/lib/postgresql/data - - ./postgres.init.d:/docker-entrypoint-initdb.d - environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: graph-node - POSTGRES_INITDB_ARGS: "-E UTF8 --locale=C" - healthcheck: - test: [ "CMD-SHELL", "pg_isready" ] - interval: 10s - timeout: 5s - retries: 5 - graph: - image: ghcr.io/hypercerts-org/graph-node-dev:${GRAPH_NODE_DEV_VERSION}-${DOCKER_PLATFORM:-amd64} - depends_on: - postgres: - condition: service_healthy - localchain: - condition: service_healthy - ipfs: - condition: service_started - privileged: true - environment: - postgres_host: postgres - postgres_port: 5432 - postgres_user: postgres - postgres_pass: postgres - postgres_db: graph - ipfs: ipfs:5001 - ethereum: hardhat:http://localchain:8545 - ports: - # WS - - "${FRONTEND_GRAPH_WS_PORT}:8001" - # JSON-RPC - - "${FRONTEND_GRAPH_JSON_RPC_PORT}:8020" - # Indexing status - - "${FRONTEND_GRAPH_INDEX_STATUS_PORT}:8030" - healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:8000" ] - interval: 5s - timeout: 5s - retries: 10 - start_period: 30s - graph_proxy: - # This proxy is required to fix some CORS issues with the graph - image: nginx - volumes: - - ./nginx/graph_nginx.conf:/etc/nginx/nginx.conf - depends_on: - graph: - condition: service_healthy - ports: - - "${FRONTEND_GRAPH_HTTP_PORT}:80" - after_graph: - image: ghcr.io/hypercerts-org/node-dev-18:${NODE_DEV_VERSION}-${DOCKER_PLATFORM:-amd64} - depends_on: - graph: - condition: service_healthy - working_dir: /usr/src/app - privileged: true - command: bash docker/after_graph.sh - volumes: - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules - playwright: - image: ghcr.io/hypercerts-org/playwright:v1.35.0-amd64 - working_dir: /usr/src/app - command: docker/run_tests.sh - privileged: true - depends_on: - after_graph: - condition: service_completed_successfully - tx_client: - condition: 
service_started - frontend: - condition: service_healthy - environment: - - CI=${CI} - - DEBIAN_FRONTEND=noninteractive - - ENABLE_VNC=${ENABLE_VNC} - profiles: - - testing - ports: - - 5900:5900 - volumes: - - ./nginx/e2e_proxy.conf:/etc/nginx/e2e_proxy.conf - - ../:/usr/src/app - - node_modules:/usr/src/app/node_modules - - sdk_modules:/usr/src/app/sdk/node_modules - - contracts_modules:/usr/src/app/contracts/node_modules - - graph_modules:/usr/src/app/graph/node_modules diff --git a/docker/dev.env b/docker/dev.env deleted file mode 100644 index 6f8efacb..00000000 --- a/docker/dev.env +++ /dev/null @@ -1,26 +0,0 @@ -# Environment vars in the style `FRONTEND_*` are specifically for use when # -# when accessing a service from the frontend application (browserland). - -# Graph related ports -FRONTEND_GRAPH_HOST=localhost -FRONTEND_GRAPH_HTTP_PORT=8000 -FRONTEND_GRAPH_WS_PORT=8001 -FRONTEND_GRAPH_JSON_RPC_PORT=8020 -FRONTEND_GRAPH_INDEX_STATUS_PORT=8030 - -# IPFS Related ports -FRONTEND_IPFS_HOST=localhost -FRONTEND_IPFS_LIBP2P_PORT=4001 -FRONTEND_IPFS_API_PORT=5001 -FRONTEND_IPFS_GATEWAY_PORT=8080 - -# ETH JSON RPC Port -FRONTEND_RPC_HOST=localhost -FRONTEND_RPC_PORT=8545 - -# Hypercerts Dapp Frontend Port -FRONTEND_PORT=3000 -FRONTEND_HOST=127.0.0.1 -NEXT_PUBLIC_DOMAIN=localhost - -ENVIRONMENT=development diff --git a/docker/e2e.env b/docker/e2e.env deleted file mode 100644 index 11219dfc..00000000 --- a/docker/e2e.env +++ /dev/null @@ -1,24 +0,0 @@ -# Graph related ports -FRONTEND_GRAPH_HOST=graph -FRONTEND_GRAPH_HTTP_PORT=8000 -FRONTEND_GRAPH_WS_PORT=8001 -FRONTEND_GRAPH_JSON_RPC_PORT=8020 -FRONTEND_GRAPH_INDEX_STATUS_PORT=8030 - -# IPFS Related ports -FRONTEND_IPFS_HOST=ipfs -FRONTEND_IPFS_LIBP2P_PORT=4001 -FRONTEND_IPFS_API_PORT=5001 -FRONTEND_IPFS_GATEWAY_PORT=8080 - -# ETH JSON RPC Port -FRONTEND_RPC_HOST=127.0.0.1 -FRONTEND_RPC_PORT=8545 - -# Hypercerts Dapp Frontend Port -FRONTEND_PORT=3000 -FRONTEND_HOST=127.0.0.1 -NEXT_PUBLIC_DEFAULT_CHAIN_ID=31337 -NEXT_PUBLIC_DOMAIN=localhost - -ENVIRONMENT=tests diff --git a/docker/frontend.sh b/docker/frontend.sh deleted file mode 100644 index de130cc2..00000000 --- a/docker/frontend.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -set -euo pipefail - -source /usr/src/app/node_modules/app.env.sh - -cd "${REPO_DIR}/frontend" - -if [[ "$ENVIRONMENT" == "tests" ]]; then - echo "Building a production-like environment for testing" - yarn build - yarn start -else - echo "Running the dev environment" - yarn dev -fi diff --git a/docker/graph.Dockerfile b/docker/graph.Dockerfile deleted file mode 100644 index c441a26c..00000000 --- a/docker/graph.Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -ARG DOCKER_PLATFORM=amd64 -ARG GRAPH_COMMIT_SHA - -# In order to support multiple development environments we use a custom base -# node built from the graph node repo -# See: https://github.com/graphprotocol/graph-node -FROM ghcr.io/hypercerts-org/graph-node:${GRAPH_COMMIT_SHA}-${DOCKER_PLATFORM} - -RUN apt-get update && apt-get install -y curl diff --git a/docker/install.sh b/docker/install.sh deleted file mode 100644 index eb2cfce4..00000000 --- a/docker/install.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -git config --global --add safe.directory /usr/src/app -yarn install --non-interactive --frozen-lockfile \ No newline at end of file diff --git a/docker/nginx/e2e_proxy.conf b/docker/nginx/e2e_proxy.conf deleted file mode 100644 index 44aa87ce..00000000 --- a/docker/nginx/e2e_proxy.conf +++ /dev/null @@ -1,71 +0,0 @@ -worker_processes 2; -daemon on; - -events { 
- use epoll; - worker_connections 128; -} - -http { - # Hardhat needs to be "127.0.0.1" for it to work properly - server { - listen 8545; - server_name localhost; - location / { - if ($request_method = 'OPTIONS') { - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Methods' '*'; - add_header 'Access-Control-Allow-Headers' '*'; - add_header 'Access-Control-Max-Age' 1728000; - add_header 'Content-Type' 'text/plain; charset=utf-8'; - add_header 'Content-Length' 0; - return 204; - } - proxy_pass http://localchain:8545/; - } - } - - # Proxy the graph - server { - listen 8000; - server_name localhost; - location / { - if ($request_method = 'OPTIONS') { - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Methods' '*'; - add_header 'Access-Control-Allow-Headers' '*'; - add_header 'Access-Control-Max-Age' 1728000; - add_header 'Content-Type' 'text/plain; charset=utf-8'; - add_header 'Content-Length' 0; - return 204; - } - proxy_pass http://graph:8000/; - } - } - - # Proxy ipfs (don't think this is necessary) - server { - listen 8080; - server_name localhost; - location / { - if ($request_method = 'OPTIONS') { - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Methods' '*'; - add_header 'Access-Control-Allow-Headers' '*'; - add_header 'Access-Control-Max-Age' 1728000; - add_header 'Content-Type' 'text/plain; charset=utf-8'; - add_header 'Content-Length' 0; - return 204; - } - proxy_pass http://ipfs:8080/; - } - } - - server { - listen 3000; - server_name localhost; - location / { - proxy_pass http://frontend:3000/; - } - } -} diff --git a/docker/nginx/graph_nginx.conf b/docker/nginx/graph_nginx.conf deleted file mode 100644 index 3b6400fd..00000000 --- a/docker/nginx/graph_nginx.conf +++ /dev/null @@ -1,25 +0,0 @@ -worker_processes 2; - -events { - use epoll; - worker_connections 128; -} - -http { - server { - listen 80; - server_name localhost; - location / { - if ($request_method = 'OPTIONS') { - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Methods' '*'; - add_header 'Access-Control-Allow-Headers' '*'; - add_header 'Access-Control-Max-Age' 1728000; - add_header 'Content-Type' 'text/plain; charset=utf-8'; - add_header 'Content-Length' 0; - return 204; - } - proxy_pass http://graph:8000/; - } - } -} diff --git a/docker/playwright.Dockerfile b/docker/playwright.Dockerfile deleted file mode 100644 index e9fdc7a6..00000000 --- a/docker/playwright.Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM mcr.microsoft.com/playwright:v1.35.0-jammy - -ARG DEBIAN_FRONTEND=noninteractive -ENV TZ=UTC - -RUN apt-get update && \ - apt-get install -y xvfb fluxbox x11vnc nginx \ No newline at end of file diff --git a/docker/postgres.init.d/add_databases.sh b/docker/postgres.init.d/add_databases.sh deleted file mode 100755 index 32f63b5d..00000000 --- a/docker/postgres.init.d/add_databases.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -set -e - -psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL - CREATE DATABASE graph; -EOSQL - -echo "Added graph database" diff --git a/docker/run_tests.sh b/docker/run_tests.sh deleted file mode 100755 index 6c1768ac..00000000 --- a/docker/run_tests.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -set -euo pipefail - -source /usr/src/app/node_modules/app.env.sh - -cd "${REPO_DIR}" - -disp=:99 -screen=0 -export DISPLAY="${disp}.${screen}" - -apt-get install -y nginx - -echo "starting nginx" -nginx -c /etc/nginx/e2e_proxy.conf - 
-echo "starting xvfb" -Xvfb "${disp}" -ac -listen tcp -screen "${screen}" 1200x800x24 & - -echo "starting fluxbox" -fluxbox -display "${disp}" -screen "${screen}" & - -if [[ -z "$ENABLE_VNC" ]]; then - echo "vnc disabled" -else - echo "starting vnc with password 'test'" - x11vnc -display "${DISPLAY}" -forever -bg -passwd password -fi - -yarn playwright install-deps -yarn playwright install - -yarn playwright test diff --git a/docker/scripts/build-base.sh b/docker/scripts/build-base.sh deleted file mode 100644 index 6ce31d2e..00000000 --- a/docker/scripts/build-base.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -set -euxo pipefail - -script_dir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -# This needs to be set because of dealing with M1 macs and github actions. -# Github actions cannot seem to build _some_ arm64 images. This gets around that -# by forcing us to tag with the platform. -DOCKER_PLATFORM=${DOCKER_PLATFORM:-amd64} - -cd "$script_dir/.." - -docker build \ - -t "ghcr.io/hypercerts-org/node-dev-18:1.0-${DOCKER_PLATFORM}" \ - -f base.Dockerfile . -docker push "ghcr.io/hypercerts-org/node-dev-18:1.0-${DOCKER_PLATFORM}" \ No newline at end of file diff --git a/docker/scripts/build-graph-dependencies.sh b/docker/scripts/build-graph-dependencies.sh deleted file mode 100644 index 242c45a0..00000000 --- a/docker/scripts/build-graph-dependencies.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -set -euxo pipefail - -# Save the script's directory for later -script_dir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -echo "Creating a temporary directory to checkout the graph" -temp_dir=$(mktemp -d) - -cd "$temp_dir" - -# checkout the graph -git clone https://github.com/graphprotocol/graph-node.git -cd graph-node/ - -clean_up() { - rm -rf "$temp_dir" - echo "Cleaning up temp directory" -} -trap clean_up EXIT - -if [ -d .git ] -then - COMMIT_SHA=$(git rev-parse HEAD) - TAG_NAME=$(git tag --points-at HEAD) - REPO_NAME="Checkout of $(git remote get-url origin) at $(git describe --dirty)" - BRANCH_NAME=$(git rev-parse --abbrev-ref HEAD) -fi - -# This needs to be set because of dealing with M1 macs and github actions. -# Github actions cannot seem to build _some_ arm64 images. This gets around that -# by forcing us to tag with the platform. -DOCKER_PLATFORM=${DOCKER_PLATFORM:-amd64} - -for stage in graph-node-build graph-node graph-node-debug -do - docker build --target $stage \ - --build-arg "COMMIT_SHA=$COMMIT_SHA" \ - --build-arg "REPO_NAME=$REPO_NAME" \ - --build-arg "BRANCH_NAME=$BRANCH_NAME" \ - --build-arg "TAG_NAME=$TAG_NAME" \ - -t ghcr.io/hypercerts-org/$stage:${COMMIT_SHA}-${DOCKER_PLATFORM} \ - --push \ - -f docker/Dockerfile . 
-done \ No newline at end of file diff --git a/docker/scripts/build-graph.sh b/docker/scripts/build-graph.sh deleted file mode 100644 index 45d9fcfc..00000000 --- a/docker/scripts/build-graph.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash -set -euxo pipefail - -# Save the script's directory for later -script_dir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -echo "Creating a temporary directory to checkout the graph" -temp_dir=$(mktemp -d) - -cd "$temp_dir" - -# checkout the graph -git clone https://github.com/graphprotocol/graph-node.git -cd graph-node/ - -clean_up() { - rm -rf "$temp_dir" - echo "Cleaning up temp directory" -} -trap clean_up EXIT - -if [ -d .git ] -then - COMMIT_SHA=$(git rev-parse HEAD) - TAG_NAME=$(git tag --points-at HEAD) - REPO_NAME="Checkout of $(git remote get-url origin) at $(git describe --dirty)" - BRANCH_NAME=$(git rev-parse --abbrev-ref HEAD) -fi - -# This needs to be set because of dealing with M1 macs and github actions. -# Github actions cannot seem to build _some_ arm64 images. This gets around that -# by forcing us to tag with the platform. -DOCKER_PLATFORM=${DOCKER_PLATFORM:-amd64} - -cd "${script_dir}/.." -docker build \ - -t "ghcr.io/hypercerts-org/graph-node-dev:${COMMIT_SHA}-${DOCKER_PLATFORM}" \ - --build-arg "DOCKER_PLATFORM=${DOCKER_PLATFORM}" \ - --build-arg "GRAPH_COMMIT_SHA=${COMMIT_SHA}" \ - --push \ - -f graph.Dockerfile . \ No newline at end of file diff --git a/docker/scripts/build-playwright.sh b/docker/scripts/build-playwright.sh deleted file mode 100644 index 192a94a8..00000000 --- a/docker/scripts/build-playwright.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -set -euxo pipefail - -script_dir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -# This needs to be set because of dealing with M1 macs and github actions. -# Github actions cannot seem to build _some_ arm64 images. This gets around that -# by forcing us to tag with the platform. -DOCKER_PLATFORM=${DOCKER_PLATFORM:-amd64} - -cd "$script_dir/.." - -docker build \ - -t "ghcr.io/hypercerts-org/playwright:v1.35.0-${DOCKER_PLATFORM}" \ - --push \ - -f playwright.Dockerfile . 
\ No newline at end of file diff --git a/docker/tx_client.sh b/docker/tx_client.sh deleted file mode 100755 index 35579834..00000000 --- a/docker/tx_client.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -set -euo pipefail - -source /usr/src/app/node_modules/app.env.sh - -cd "${REPO_DIR}/contracts" - -yarn hardhat --network localhost test-tx-client \ No newline at end of file diff --git a/e2e/fixtures/metamask.ts b/e2e/fixtures/metamask.ts deleted file mode 100644 index e2bcfc7e..00000000 --- a/e2e/fixtures/metamask.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { test as base, chromium, type BrowserContext } from "@playwright/test"; -import { initialSetup } from "@synthetixio/synpress/commands/metamask"; -import { prepareMetamask } from "@synthetixio/synpress/helpers"; -import { - FRONTEND_HOST, - FRONTEND_PORT, - FRONTEND_RPC_HOST, - FRONTEND_RPC_PORT, -} from "../utils/constants"; - -export const test = base.extend<{ - context: BrowserContext; -}>({ - // eslint-disable-next-line - context: async ({}, use) => { - // required for synpress - global.expect = expect; - // download metamask - const metamaskPath = await prepareMetamask( - process.env.METAMASK_VERSION || "10.25.0", - ); - // prepare browser args - const browserArgs = [ - `--disable-extensions-except=${metamaskPath}`, - `--load-extension=${metamaskPath}`, - "--remote-debugging-port=9222", - ]; - if (process.env.CI) { - browserArgs.push("--disable-gpu"); - } - if (process.env.HEADLESS_MODE) { - browserArgs.push("--headless=new"); - } - // launch browser - const context = await chromium.launchPersistentContext("", { - headless: false, - args: browserArgs, - baseURL: `http://${FRONTEND_HOST}:${FRONTEND_PORT}`, - }); - // wait for metamask - await context.pages()[0].waitForTimeout(10000); - - // setup metamask - await initialSetup(chromium, { - secretWordsOrPrivateKey: - "test test test test test test test test test test test junk", - network: { - name: "hardhat", - chainId: 31337, - rpcUrl: `http://${FRONTEND_RPC_HOST}:${FRONTEND_RPC_PORT}`, - symbol: "TEST", - isTestnet: true, - }, - password: "Tester@1234", - enableAdvancedSettings: true, - enableExperimentalSettings: false, - }); - await use(context); - await context.close(); - }, -}); -export const expect = test.expect; diff --git a/e2e/mint-token.spec.ts b/e2e/mint-token.spec.ts deleted file mode 100644 index fc37bcd6..00000000 --- a/e2e/mint-token.spec.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { test, expect } from "./fixtures/metamask"; -import { Page } from "@playwright/test"; -import * as metamask from "@synthetixio/synpress/commands/metamask"; -import { randomUUID } from "crypto"; - -async function navigateAndEnsureWallet(goto: string, page: Page) { - await page.goto(goto); - await expect( - page.locator('button[data-testid="rk-account-button"]'), - ).toBeAttached({ timeout: 5000 }); -} - -test.beforeEach(async ({ page }) => { - // These are very large tests we should have a long timeout this time out is - // for specific actions on the page. It can probably tweaked to be faster but - // our github runners aren't so fast. 
- page.setDefaultTimeout(60000); - - await page.goto("/"); - await page.reload(); - await page.locator('button[data-testid="rk-connect-button"]').click(); - - await page.locator('button[data-testid="rk-wallet-option-metaMask"]').click(); - await metamask.acceptAccess(); -}); - -test("should succeed to mint a token", async ({ page }) => { - const testUUID = randomUUID(); - const name = `Test:${testUUID}`; - const description = "This is a description of the hypercert is referencing"; - const workScope = "Scope1, Scope2"; - const contributors = "Contrib1, Contrib2"; - - // Clicking to navigate to the "/app/create" path caused problems. For now, - // ignore clicking on the links with the prefilled fields - await navigateAndEnsureWallet("/app/create", page); - - // Fill in required fields - await page.locator('input[name="name"]').fill(name); - await page.locator('textarea[name="description"]').fill(description); - await page.locator('textarea[name="workScopes"]').fill(workScope); - await page.locator('textarea[name="contributors"]').fill(contributors); - - // Check boxes - await page.locator('input[name="agreeContributorsConsent"]').check(); - await page.locator('input[name="agreeTermsConditions"]').check(); - await page.locator('button[class*="HypercertsCreate__createButton"]').click(); - await metamask.confirmTransaction(); - - await page.waitForURL("/app/dashboard"); - await expect(page.getByText(testUUID)).toBeAttached({ timeout: 60000 }); -}); - -test("should fail to mint a token - lacking description", async ({ page }) => { - const testUUID = randomUUID(); - const name = `Test:${testUUID}`; - const description = "This is a description of the hypercert is referencing"; - const workScope = "Scope1, Scope2"; - const contributors = ""; - - await navigateAndEnsureWallet("/app/create", page); - await page.locator('input[name="name"]').fill(name); - await page.locator('textarea[name="description"]').fill(description); - await page.locator('textarea[name="workScopes"]').fill(workScope); - - await page.locator('textarea[name="contributors"]').fill(contributors); - await page.locator('input[name="agreeContributorsConsent"]').check(); - await page.locator('input[name="agreeTermsConditions"]').check(); - await page.locator('button[class*="HypercertsCreate__createButton"]').click(); - - await expect(page.locator('textarea[name="contributors"]')).toHaveAttribute( - "aria-invalid", - "true", - ); -}); diff --git a/e2e/utils/constants.ts b/e2e/utils/constants.ts deleted file mode 100644 index 52317dbc..00000000 --- a/e2e/utils/constants.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Page } from "playwright-core"; - -export const FRONTEND_HOST = process.env.FRONTEND_HOST || "127.0.0.1"; -export const FRONTEND_PORT = process.env.FRONTEND_PORT || "3000"; -export const FRONTEND_RPC_HOST = process.env.FRONTEND_RPC_HOST || "127.0.0.1"; -export const FRONTEND_RPC_PORT = process.env.FRONTEND_RPC_PORT || "8545"; - -export async function gotoPage(page: Page, path: string) { - return await page.goto(fullUrl(path)); -} - -export function fullUrl(path: string) { - return `http://${FRONTEND_HOST}:${FRONTEND_PORT}${path}`; -} diff --git a/frontend/.env.local.example b/frontend/.env.local.example deleted file mode 100644 index 6db8ed46..00000000 --- a/frontend/.env.local.example +++ /dev/null @@ -1,24 +0,0 @@ -############# -## App config -############# -### The domain that the application is hosted on -NEXT_PUBLIC_DOMAIN=testnet.hypercerts.org - -####### -## Web3 -####### -### UUPS proxy contract address 
-NEXT_PUBLIC_CONTRACT_ADDRESS=0xa16DFb32Eb140a6f3F2AC68f41dAd8c7e83C4941 -### Subgraph URL - currently using hosted service -NEXT_PUBLIC_GRAPH_URL=https://api.thegraph.com/subgraphs/name/hypercerts-admin/hypercerts-sepolia -### Wallet connect ID -NEXT_PUBLIC_WALLETCONNECT_ID=GET_FROM_https://cloud.walletconnect.com/app - -########## -## Storage -########## -NEXT_PUBLIC_WEB3_STORAGE_TOKEN=YOUR_API_KEY -NEXT_PUBLIC_NFT_STORAGE_TOKEN=YOUR_API_KEY -NEXT_PUBLIC_SUPABASE_URL=https://YOUR_SUPABASE_URL -NEXT_PUBLIC_SUPABASE_ANON_KEY=YOUR_SUPABASE_ANON_KEY -NEXT_PUBLIC_SUPABASE_TABLE=YOUR_SUPABASE_TABLE \ No newline at end of file diff --git a/frontend/.eslintrc.json b/frontend/.eslintrc.json deleted file mode 100644 index fdcf77c2..00000000 --- a/frontend/.eslintrc.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "env": { - "browser": true, - "es2021": true, - "node": true - }, - "settings": { - "next": { - "rootDir": "./" - } - }, - "extends": [ - "eslint:recommended", - "plugin:react/recommended", - "plugin:@typescript-eslint/recommended", - "prettier", - "next" - ], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaFeatures": { - "jsx": true - }, - "ecmaVersion": "latest", - "sourceType": "module" - }, - "plugins": ["react", "@typescript-eslint"], - "rules": { - "react-hooks/exhaustive-deps": "off", - "@typescript-eslint/no-explicit-any": "off", - "@typescript-eslint/no-unused-vars": ["warn", { "argsIgnorePattern": "^_" }] - } -} diff --git a/frontend/.gitignore b/frontend/.gitignore deleted file mode 100644 index 44f9bd5b..00000000 --- a/frontend/.gitignore +++ /dev/null @@ -1,13 +0,0 @@ -# next.js -/.next/ -/out/ - -# vercel -.vercel - -# Sentry -.sentryclirc - -/**/node_modules/* -node_modules/ -../node_modules/ diff --git a/frontend/README.md b/frontend/README.md deleted file mode 100644 index 4c8bac0a..00000000 --- a/frontend/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# Frontend application - -This frontend application is currently configured to use Next.js as a static site generator so that we can easily port the site hosting to any CDN. If we need server-side features (e.g. image optimization, SSR, etc), we can easily add those features later. - -## Set up - -All configurations are currently stored in environment variables. -See `.env.local.example` to see which variables need to be set. -We have pre-populated the file with the current testnet deployment on Sepolia. - -The easiest way to get started is to copy this into `.env.local` and modify the file directly, which `next` will automatically load when running the dev server below. - -Note to developers: if you add or remove environment variables, make sure you update - -- `.env.local.example` -- `./lib/config.ts` -- `../.github/workflows/ci-default.yml` -- Any CI/CD system (e.g. GitHub Actions, Pages) -- In your organization's secrets manager - -### Plasmic - -We use a no-code visual builder for React called [Plasmic](https://www.plasmic.app?ref=ryscheng). You can sign up for an account [here](https://www.plasmic.app?ref=ryscheng). - -After signing up, you can check out the frontend [here](https://studio.plasmic.app/projects/bRx6ZFJBJ4PzQ8sSaLn1xW?ref=ryscheng). You will have read-only access to this project. - -If you need to make edits, you can duplicate the project and update your project ID and API key in `.env.local`. For more information on setting up Plasmic, check out their [docs](https://docs.plasmic.app/learn/nextjs-quickstart). 
- -### Web3 providers - -Set up an account with a web3 provider like [Alchemy](https://alchemy.com/?r=17b797341eddfeda). Create a new application on Alchemy and set your `NEXT_PUBLIC_RPC_URL` environment variable. - -### IPFS - -We use [web3.storage](https://web3.storage/) for general blob storage and [nft.storage](https://nft.storage/) for storing token metadata. - -Sign up for accounts and populate the `NEXT_PUBLIC_WEB3_STORAGE_TOKEN` and `NEXT_PUBLIC_NFT_STORAGE_TOKEN` environment variables with your API keys. For more information, you can check out their docs -([web3.storage](https://web3.storage/docs/), [nft.storage](https://nft.storage/docs/)). - -### Supabase - -We use [Supabase](https://supabase.com/) only as a non-essential cache. -In the future, we will either remove this dependency or add instructions on how to setup a local instance for development. -In the meantime, the app should still build with the placeholder values. - -## Run development server - -``` -yarn dev -``` - -Visit on `http://localhost:3000/` - -## Testing - -To run linters: - -``` -yarn lint -``` - -To run unit tests: - -``` -yarn test -``` - -## Build and export - -This repository is currently set up to export to a static site: - -``` -yarn build -``` - -This will place the static site in `/frontend/out`, which can be uploaded to any CDN or IPFS for hosting. - -Note: This means that we do not currently use any server-side or edge functionality (e.g. middleware, SSR, image optimization etc) diff --git a/frontend/_redirects b/frontend/_redirects deleted file mode 100644 index 4d85d2bd..00000000 --- a/frontend/_redirects +++ /dev/null @@ -1,2 +0,0 @@ -/discord https://discord.gg/UZt8cBnP4w -/telegram https://t.me/+YF9AYb6zCv1mNDJi \ No newline at end of file diff --git a/frontend/components/add-registry-dialog.tsx b/frontend/components/add-registry-dialog.tsx deleted file mode 100644 index 27830d93..00000000 --- a/frontend/components/add-registry-dialog.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import { - Button, - Dialog, - DialogContent, - DialogProps, - DialogTitle, -} from "@mui/material"; -import { Formik } from "formik"; - -export const AddRegistryDialog = (props: DialogProps) => { - return ( - - Add registry - - { - console.log(data); - }} - > - {({ handleSubmit }) => ( -
- - - -