Bugfix/allowlist cleanup script (#1184)
* add cleanup-allowlist.ts script

* fix(update script)

* chore: update lockfile
Jipperism authored Dec 14, 2023
1 parent d7f5fee commit 2556b5a
Showing 4 changed files with 280 additions and 9 deletions.
6 changes: 5 additions & 1 deletion defender/package.json
@@ -10,19 +10,23 @@
     "deploy:test": "pnpm build && pnpm setup:test",
     "deploy:prod": "pnpm build && pnpm setup:prod",
     "setup:test": "npx tsx src/setup.ts TEST",
-    "setup:prod": "npx tsx src/setup.ts PROD"
+    "setup:prod": "npx tsx src/setup.ts PROD",
+    "scripts:fix-allowlist-duplicates": "npx tsx src/scripts/fix-allowlist-duplicates.ts"
   },
   "dependencies": {
     "@graphql-mesh/cache-localforage": "^0.95.7",
     "@hypercerts-org/contracts": "0.8.11",
     "@openzeppelin/defender-autotask-client": "1.50.0",
     "@openzeppelin/defender-autotask-utils": "1.50.0",
     "@openzeppelin/defender-base-client": "1.49.0",
     "@openzeppelin/defender-sentinel-client": "1.49.0",
     "@openzeppelin/merkle-tree": "^1.0.2",
     "@supabase/supabase-js": "^2.4.1",
+    "@types/lodash": "^4.14.199",
     "axios": "^1.2.6",
     "dotenv": "^16.0.3",
     "ethers": "5.7.2",
+    "lodash": "^4.17.21",
     "node-fetch": "^3.3.0"
   },
   "devDependencies": {
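With the new entry, the cleanup can be run from the defender package as pnpm scripts:fix-allowlist-duplicates. The script reads the two NEXT_PUBLIC_SUPABASE_HYPERCERTS_* variables shown below from the environment, so those must be set first.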
122 changes: 122 additions & 0 deletions defender/src/scripts/fix-allowlist-duplicates.ts
@@ -0,0 +1,122 @@
import { createClient } from "@supabase/supabase-js";
import dotenv from "dotenv";
import _ from "lodash";
import fetch from "node-fetch";

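// Rows fetched per page when paginating over the allowlist cache table.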
const pageSize = 1000;

dotenv.config();
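// Supabase client using the service-role key, so the script can delete rows.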
const supabase = createClient(
  process.env.NEXT_PUBLIC_SUPABASE_HYPERCERTS_URL as string,
  process.env.NEXT_PUBLIC_SUPABASE_HYPERCERTS_SERVICE_ROLE_KEY as string,
);

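// Fetch the next page of Optimism (chainId 10) allowlist entries,
// keyset-paginated on the id column.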
const fetchAllowlistPage = async (lastId: number) => {
console.log("fetching page with id >", lastId);
return supabase
.from("allowlistCache-chainId")
.select("*")
.order("id", { ascending: true })
.gt("id", lastId)
.eq("chainId", 10)
.limit(pageSize);
};

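// Delete allowlist cache entries by primary key.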
const deleteEntries = async (ids: number[]) => {
console.log("deleting entries", ids);
return supabase.from("allowlistCache-chainId").delete().in("id", ids);
};

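// Subgraph query: the claim tokens (minted fractions) that exist for a claim.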
const query = `
  query ClaimTokensByClaim($claimId: String!, $orderDirection: OrderDirection, $first: Int, $skip: Int) {
    claimTokens(where: { claim: $claimId }, skip: $skip, first: $first, orderDirection: $orderDirection) {
      id
      owner
      tokenID
      units
    }
  }
`;

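// POST the query to the hypercerts subgraph. Only the first 1000 claim tokens
// are requested; the $skip and $orderDirection variables are declared in the
// query but never supplied.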
const fetchClaimTokenForClaimId = async (claimId: string) => {
  return fetch(
    "https://api.thegraph.com/subgraphs/name/hypercerts-admin/hypercerts-optimism-mainnet",
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        variables: {
          claimId,
          first: 1000,
        },
        query,
      }),
    },
  )
    .then((res) => res.json() as Promise<any>) // node-fetch v3 types json() as unknown
    .then((res) => res.data?.claimTokens);
};

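// Page through the allowlist cache; for each claim, delete cached entries
// whose address already owns a claim token (i.e. has already minted).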
const main = async () => {
  const totalNumberOfResults = await supabase
    .from("allowlistCache-chainId")
    .select("id", { count: "exact", head: true }); // head: true returns only the count

  console.log("totalNumberOfResults", totalNumberOfResults.count);

  // Start below the first id so the row with id 1 is included (.gt is exclusive).
  let lastId = 0;

  // Iterate over all pages
  while (true) {
    const { data } = await fetchAllowlistPage(lastId);
    if (!data || data.length === 0) {
      break;
    }
    lastId = data[data.length - 1].id;

    const allowlistEntriesByClaimId = _.groupBy(data, "claimId");

    for (const claimId in allowlistEntriesByClaimId) {
      const entries = allowlistEntriesByClaimId[claimId];

      const tokensForClaim = await fetchClaimTokenForClaimId(claimId);
      if (!tokensForClaim) {
        // Subgraph returned no data for this claim; skip it.
        continue;
      }

      const addressesForClaimTokens = tokensForClaim.map(
        (token: { owner: string }) => token.owner,
      );
      const addressesForEntry = entries.map((x) => x.address);

      // Addresses in both lists already own a claim token, so their cached
      // allowlist entries are stale. (Assumes both sources store addresses
      // with the same casing.)
      const duplicates = _.intersection(
        addressesForClaimTokens,
        addressesForEntry,
      );

      if (duplicates.length > 0) {
        const supabaseEntries = entries.filter((entry) =>
          duplicates.includes(entry.address),
        );
        const idsToDelete = supabaseEntries.map((x) => x.id);
        await deleteEntries(idsToDelete);
      }
    }
  }
};

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
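The core of the cleanup is a per-claim set intersection: group the cached allowlist rows by claimId, then intersect the cached addresses with the addresses that already own claim tokens. A minimal, self-contained sketch of that logic (the findDeletableIds helper, the CacheRow shape, and the sample data are illustrative, not part of the commit):

import _ from "lodash";

interface CacheRow {
  id: number;
  claimId: string;
  address: string;
}

// Return the cache-row ids whose address already owns a claim token.
const findDeletableIds = (
  rows: CacheRow[],
  ownersByClaim: Record<string, string[]>,
): number[] =>
  Object.entries(_.groupBy(rows, "claimId")).flatMap(([claimId, entries]) => {
    const minted = ownersByClaim[claimId] ?? [];
    const duplicates = _.intersection(
      minted,
      entries.map((e) => e.address),
    );
    return entries
      .filter((e) => duplicates.includes(e.address))
      .map((e) => e.id);
  });

// Row 1's address already minted for claim-1, so only id 1 is returned.
console.log(
  findDeletableIds(
    [
      { id: 1, claimId: "claim-1", address: "0xaaa" },
      { id: 2, claimId: "claim-1", address: "0xbbb" },
    ],
    { "claim-1": ["0xaaa"] },
  ),
); // -> [ 1 ]

Running it prints [ 1 ]: the entry for 0xaaa on claim-1 is the only one whose address already owns a claim token, which mirrors what the script deletes from the cache.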