Commit

Fix path for data repository to solve dry-run error
dgparmar14 committed Jun 27, 2024
1 parent 386448d commit eaadb2a
Showing 5 changed files with 5 additions and 6 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/scraper-dry-run.yaml
@@ -37,7 +37,7 @@ jobs:
working-directory: scraper

- name: Scrape data from GitHub
- run: pnpm start ${{ github.repository_owner }} data/github
+ run: pnpm start ${{ github.repository_owner }} ../data/github
working-directory: scraper
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -56,7 +56,7 @@ jobs:
path: |
data
contributors
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
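Note on the fix above: the scrape step runs with working-directory: scraper, so the old data/github argument resolved inside the scraper package instead of the data checkout at the workspace root; ../data/github walks back up one level. A minimal TypeScript sketch of that resolution (the workspace path below is illustrative, not taken from the workflow):

```ts
import * as path from "node:path";

// Illustrative runner layout (an assumption, not from the workflow): the repo is
// checked out at /workspace/repo and the scrape step runs in its scraper folder.
const stepCwd = "/workspace/repo/scraper";

// Old argument: resolves inside the scraper package, where no data checkout exists.
console.log(path.resolve(stepCwd, "data/github"));
// -> /workspace/repo/scraper/data/github

// New argument: resolves back up to the data directory at the workspace root.
console.log(path.resolve(stepCwd, "../data/github"));
// -> /workspace/repo/data/github
```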
2 changes: 0 additions & 2 deletions env.mjs
@@ -24,7 +24,6 @@ export const env = createEnv({
NEXT_PUBLIC_PAGE_TITLE: z.string(),
NEXT_PUBLIC_CONTRIBUTORS_INFO: z.string().optional(),
NEXT_PUBLIC_LEADERBOARD_DEFAULT_ROLES: z.string().optional(),
- BLACKLISTED_USERS: z.string().array(),

NEXT_PUBLIC_FEATURES: z.string(),
},
@@ -48,6 +47,5 @@ export const env = createEnv({
? ""
: process.env.GITHUB_PAT,
NEXT_PUBLIC_FEATURES: process.env.NEXT_PUBLIC_FEATURES,
- BLACKLISTED_USERS: process.env.BLACKLISTED_USERS,
},
});
Binary file added public/logo.png
4 changes: 2 additions & 2 deletions scraper/src/github-scraper/discussion.ts
@@ -75,7 +75,7 @@ async function parseDiscussionData(allDiscussions: Discussion[]) {
)
.flat();
authorList.push(...allDiscussions.map((d) => d.node.author.login));
- const uniqueAuthors = [...new Set(authorList)];
+ const uniqueAuthors = Array.from(new Set(authorList));
const authorDiscussionList = uniqueAuthors.map((author) => {
const discussions = allDiscussions.filter(
(d) =>
@@ -93,7 +93,7 @@ async function parseDiscussionData(allDiscussions: Discussion[]) {
isAnswered: d.node.isAnswered,
upvoteCount: d.node.upvoteCount,
participants: [
- ...new Map(
+ new Map(
d.node.comments.edges.map((c) => [
c.node.author.login,
{
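For reference, the old spread form and the new Array.from form dedupe the author list identically at runtime; the difference is that spreading a Set (or a Map, as in the participants block) is an iterator spread, which TypeScript only compiles for pre-ES2015 targets when downlevelIteration is enabled — presumably the reason that flag is also added to scraper/tsconfig.json in this commit. A small sketch with hypothetical author logins:

```ts
// Hypothetical author logins, only to show that the two dedupe forms agree.
const authorList = ["alice", "bob", "alice", "carol", "bob"];

// Before: iterator spread of the Set into an array literal.
const viaSpread = [...new Set(authorList)];

// After: Array.from builds the array from the same Set.
const uniqueAuthors = Array.from(new Set(authorList));

console.log(viaSpread);     // ["alice", "bob", "carol"]
console.log(uniqueAuthors); // ["alice", "bob", "carol"]
```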
1 change: 1 addition & 0 deletions scraper/tsconfig.json
@@ -5,6 +5,7 @@
"moduleResolution": "node",
"outDir": "./dist",
"esModuleInterop": true,
"downlevelIteration": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true
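The new downlevelIteration flag makes the compiler emit full iterator-protocol helpers when spread or for...of is applied to Maps, Sets, and other non-array iterables while targeting ES5/ES3; without it, TypeScript rejects such code on those targets. A short sketch of the constructs the flag covers (the values are made up):

```ts
// With an ES5 target and downlevelIteration off, TypeScript refuses to compile
// iterator spread and for...of over a Map or Set; with the flag on, it emits the
// helper code that drives the iterator protocol correctly.
const counts = new Map<string, number>([
  ["alice", 2],
  ["bob", 1],
]);

// Iterator spread over a Map yields [key, value] pairs.
const entries = [...counts]; // [["alice", 2], ["bob", 1]]

// for...of over a Map falls in the same category.
for (const [login, n] of counts) {
  console.log(login, n);
}
```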
