From c53d57875644a6e9aad1658f3b896448cb1e7918 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Mon, 29 Apr 2024 16:42:20 +0100 Subject: [PATCH 01/15] Inital add --- bbot/modules/github_workflows.py | 144 +++++ .../test_module_github_workflows.py | 573 ++++++++++++++++++ 2 files changed, 717 insertions(+) create mode 100644 bbot/modules/github_workflows.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_github_workflows.py diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py new file mode 100644 index 0000000000..5719153c25 --- /dev/null +++ b/bbot/modules/github_workflows.py @@ -0,0 +1,144 @@ +from bbot.modules.templates.github import github + + +class github_workflows(github): + watched_events = ["CODE_REPOSITORY"] + produced_events = ["HTTP_RESPONSE"] + flags = ["passive", "safe"] + meta = {"description": "Query Github's API for the repositories workflow logs"} + options = {"api_key": ""} + options_desc = { + "api_key": "Github token", + } + + # scope_distance_modifier = 2 + + async def setup(self): + return await super().setup() + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if "git" not in event.tags: + return False, "event is not a git repository" + return True + + async def handle_event(self, event): + # repo_url = event.data.get("url") + owner = "blacklanternsecurity" + repo = "bbot" + for workflow in await self.get_workflows(owner, repo): + workflow_name = workflow.get("name") + workflow_id = workflow.get("id") + self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}") + for run in await self.get_workflow_runs(owner, repo, workflow_id): + run_id = run.get("id") + self.log.debug(f"Looking up jobs for {workflow_name}/{run_id} in {owner}/{repo}") + for job in await self.get_run_jobs(owner, repo, run_id): + job_id = job.get("id") + commit_id = job.get("head_sha") + steps = job.get("steps", []) + for step in steps: + if step.get("conclusion") == "success": + step_name = step.get("name") + number = step.get("number") + self.log.debug( + f"Requesting {workflow_name}/run {run_id}/job {job_id}/{step_name} log for {owner}/{repo}" + ) + # Request log step from the html_url as that bypasses the admin restrictions from using the API + response = await self.helpers.request( + f"https://github.com/{owner}/{repo}/commit/{commit_id}/checks/{job_id}/logs/{number}", + follow_redirects=True, + ) + if response: + blob_url = response.headers.get("Location", "") + if blob_url: + url_event = self.make_event( + blob_url, "URL_UNVERIFIED", source=event, tags=["httpx-safe"] + ) + if not url_event: + continue + url_event.scope_distance = event.scope_distance + await self.emit_event(url_event) + + async def get_workflows(self, owner, repo): + workflows = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows?per_page=100&page=" + "{page}" + agen = self.helpers.api_page_iter(url, headers=self.headers, json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j.get("workflows", []): + workflows.append(item) + finally: + agen.aclose() + return workflows + + async def get_workflow_runs(self, owner, repo, workflow_id): + runs = [] + url = ( + 
f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?per_page=100&page=" + "{page}" + ) + agen = self.helpers.api_page_iter(url, headers=self.headers, json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j.get("workflow_runs", []): + runs.append(item) + finally: + agen.aclose() + return runs + + async def get_run_jobs(self, owner, repo, run_id): + jobs = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/jobs?per_page=100&page=" + "{page}" + agen = self.helpers.api_page_iter(url, headers=self.headers, json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json() + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j.get("jobs", []): + jobs.append(item) + finally: + agen.aclose() + return jobs diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py new file mode 100644 index 0000000000..ee2f93a49b --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -0,0 +1,573 @@ +from .base import ModuleTestBase + + +class TestGithub_Workflows(ModuleTestBase): + config_overrides = {"modules": {"github_org": {"api_key": "asdf"}}} + modules_overrides = ["github_workflows", "github_org", "speculate"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response(url="https://api.github.com/zen") + module_test.httpx_mock.add_response( + url="https://api.github.com/orgs/blacklanternsecurity", + json={ + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "url": "https://api.github.com/orgs/blacklanternsecurity", + "repos_url": "https://api.github.com/orgs/blacklanternsecurity/repos", + "events_url": "https://api.github.com/orgs/blacklanternsecurity/events", + "hooks_url": "https://api.github.com/orgs/blacklanternsecurity/hooks", + "issues_url": "https://api.github.com/orgs/blacklanternsecurity/issues", + "members_url": "https://api.github.com/orgs/blacklanternsecurity/members{/member}", + "public_members_url": "https://api.github.com/orgs/blacklanternsecurity/public_members{/member}", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "description": "Security Organization", + "name": "Black Lantern Security", + "company": None, + "blog": "www.blacklanternsecurity.com", + "location": "Charleston, SC", + "email": None, + "twitter_username": None, + "is_verified": False, + "has_organization_projects": True, + "has_repository_projects": True, + "public_repos": 70, + "public_gists": 0, + "followers": 415, + "following": 0, + "html_url": "https://github.com/blacklanternsecurity", + "created_at": "2017-01-24T00:14:46Z", + "updated_at": "2022-03-28T11:39:03Z", + "archived_at": None, + "type": "Organization", + }, + ) + module_test.httpx_mock.add_response( + 
url="https://api.github.com/orgs/blacklanternsecurity/repos?per_page=100&page=1", + json=[ + { + "id": 459780477, + "node_id": "R_kgDOG2exfQ", + "name": "test_keys", + "full_name": "blacklanternsecurity/test_keys", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 79229934, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjc5MjI5OTM0", + "avatar_url": "https://avatars.githubusercontent.com/u/79229934?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/test_keys", + "description": None, + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/test_keys", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/commits{/sha}", 
+ "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/test_keys/deployments", + "created_at": "2022-02-15T23:10:51Z", + "updated_at": "2023-09-02T12:20:13Z", + "pushed_at": "2023-10-19T02:56:46Z", + "git_url": "git://github.com/blacklanternsecurity/test_keys.git", + "ssh_url": "git@github.com:blacklanternsecurity/test_keys.git", + "clone_url": "https://github.com/blacklanternsecurity/test_keys.git", + "svn_url": "https://github.com/blacklanternsecurity/test_keys", + "homepage": None, + "size": 2, + "stargazers_count": 2, + "watchers_count": 2, + "language": None, + "has_issues": True, + "has_projects": True, + "has_downloads": True, + "has_wiki": True, + "has_pages": False, + "has_discussions": False, + "forks_count": 32, + "mirror_url": None, + "archived": False, + "disabled": False, + "open_issues_count": 2, + "license": None, + "allow_forking": True, + "is_template": False, + "web_commit_signoff_required": False, + "topics": [], + "visibility": "public", + "forks": 32, + "open_issues": 2, + "watchers": 2, + "default_branch": "main", + "permissions": {"admin": False, "maintain": False, "push": False, "triage": False, "pull": True}, + } + ], + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows?per_page=100&page=1", + json={ + "total_count": 3, + "workflows": [ + { + "id": 22452226, + "node_id": "W_kwDOG_O3ns4BVpgC", + "name": "tests", + "path": ".github/workflows/tests.yml", + "state": "active", + "created_at": "2022-03-23T15:09:22.000Z", + "updated_at": "2022-09-27T17:49:34.000Z", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226", + "html_url": "https://github.com/blacklanternsecurity/bbot/blob/stable/.github/workflows/tests.yml", + "badge_url": "https://github.com/blacklanternsecurity/bbot/workflows/tests/badge.svg", + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?per_page=100&page=1", + json={ + "total_count": 2993, + "workflow_runs": [ + { + "id": 8839360698, + "name": "tests", + 
"node_id": "WFR_kwLOG_O3ns8AAAACDt3wug", + "head_branch": "dnsbrute-helperify", + "head_sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "path": ".github/workflows/tests.yml", + "display_title": "Helperify Massdns", + "run_number": 4520, + "event": "pull_request", + "status": "completed", + "conclusion": "failure", + "workflow_id": 22452226, + "check_suite_id": 23162098295, + "check_suite_node_id": "CS_kwDOG_O3ns8AAAAFZJGSdw", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698", + "html_url": "https://github.com/blacklanternsecurity/bbot/actions/runs/8839360698", + "pull_requests": [ + { + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls/1303", + "id": 1839332952, + "number": 1303, + "head": { + "ref": "dnsbrute-helperify", + "sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "repo": { + "id": 468957086, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "name": "bbot", + }, + }, + "base": { + "ref": "faster-regexes", + "sha": "7baf219c7f3a4ba165639c5ddb62322453a8aea8", + "repo": { + "id": 468957086, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "name": "bbot", + }, + }, + } + ], + "created_at": "2024-04-25T21:04:32Z", + "updated_at": "2024-04-25T21:19:43Z", + "actor": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + "run_attempt": 1, + "referenced_workflows": [], + "run_started_at": "2024-04-25T21:04:32Z", + "triggering_actor": { + "login": "TheTechromancer", + "id": 20261699, + "node_id": "MDQ6VXNlcjIwMjYxNjk5", + "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/TheTechromancer", + "html_url": "https://github.com/TheTechromancer", + "followers_url": "https://api.github.com/users/TheTechromancer/followers", + "following_url": "https://api.github.com/users/TheTechromancer/following{/other_user}", + "gists_url": "https://api.github.com/users/TheTechromancer/gists{/gist_id}", + "starred_url": "https://api.github.com/users/TheTechromancer/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/TheTechromancer/subscriptions", + "organizations_url": "https://api.github.com/users/TheTechromancer/orgs", + "repos_url": "https://api.github.com/users/TheTechromancer/repos", + "events_url": "https://api.github.com/users/TheTechromancer/events{/privacy}", + "received_events_url": "https://api.github.com/users/TheTechromancer/received_events", + "type": "User", + "site_admin": False, + }, + 
"jobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/jobs", + "logs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/logs", + "check_suite_url": "https://api.github.com/repos/blacklanternsecurity/bbot/check-suites/23162098295", + "artifacts_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts", + "cancel_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/cancel", + "rerun_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/rerun", + "previous_attempt_url": None, + "workflow_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226", + "head_commit": { + "id": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "tree_id": "fe9b345c0745a5bbacb806225e92e1c48fccf35c", + "message": "remove debug message", + "timestamp": "2024-04-25T21:02:37Z", + "author": {"name": "TheTechromancer", "email": "thetechromancer@protonmail.com"}, + "committer": {"name": "TheTechromancer", "email": "thetechromancer@protonmail.com"}, + }, + "repository": { + "id": 468957086, + "node_id": "R_kgDOG_O3ng", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": "https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": "A recursive internet scanner for hackers.", + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": 
"https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + "releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + }, + "head_repository": { + "id": 468957086, + "node_id": "R_kgDOG_O3ng", + "name": "bbot", + "full_name": "blacklanternsecurity/bbot", + "private": False, + "owner": { + "login": "blacklanternsecurity", + "id": 25311592, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjI1MzExNTky", + "avatar_url": "https://avatars.githubusercontent.com/u/25311592?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/blacklanternsecurity", + "html_url": "https://github.com/blacklanternsecurity", + "followers_url": "https://api.github.com/users/blacklanternsecurity/followers", + "following_url": "https://api.github.com/users/blacklanternsecurity/following{/other_user}", + "gists_url": "https://api.github.com/users/blacklanternsecurity/gists{/gist_id}", + "starred_url": "https://api.github.com/users/blacklanternsecurity/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/blacklanternsecurity/subscriptions", + "organizations_url": "https://api.github.com/users/blacklanternsecurity/orgs", + "repos_url": "https://api.github.com/users/blacklanternsecurity/repos", + "events_url": 
"https://api.github.com/users/blacklanternsecurity/events{/privacy}", + "received_events_url": "https://api.github.com/users/blacklanternsecurity/received_events", + "type": "Organization", + "site_admin": False, + }, + "html_url": "https://github.com/blacklanternsecurity/bbot", + "description": "A recursive internet scanner for hackers.", + "fork": False, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot", + "forks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/forks", + "keys_url": "https://api.github.com/repos/blacklanternsecurity/bbot/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/blacklanternsecurity/bbot/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/blacklanternsecurity/bbot/teams", + "hooks_url": "https://api.github.com/repos/blacklanternsecurity/bbot/hooks", + "issue_events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/events{/number}", + "events_url": "https://api.github.com/repos/blacklanternsecurity/bbot/events", + "assignees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/assignees{/user}", + "branches_url": "https://api.github.com/repos/blacklanternsecurity/bbot/branches{/branch}", + "tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/tags", + "blobs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/blacklanternsecurity/bbot/statuses/{sha}", + "languages_url": "https://api.github.com/repos/blacklanternsecurity/bbot/languages", + "stargazers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/stargazers", + "contributors_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contributors", + "subscribers_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscribers", + "subscription_url": "https://api.github.com/repos/blacklanternsecurity/bbot/subscription", + "commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/blacklanternsecurity/bbot/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/blacklanternsecurity/bbot/contents/{+path}", + "compare_url": "https://api.github.com/repos/blacklanternsecurity/bbot/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/blacklanternsecurity/bbot/merges", + "archive_url": "https://api.github.com/repos/blacklanternsecurity/bbot/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/blacklanternsecurity/bbot/downloads", + "issues_url": "https://api.github.com/repos/blacklanternsecurity/bbot/issues{/number}", + "pulls_url": "https://api.github.com/repos/blacklanternsecurity/bbot/pulls{/number}", + "milestones_url": "https://api.github.com/repos/blacklanternsecurity/bbot/milestones{/number}", + "notifications_url": "https://api.github.com/repos/blacklanternsecurity/bbot/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/blacklanternsecurity/bbot/labels{/name}", + 
"releases_url": "https://api.github.com/repos/blacklanternsecurity/bbot/releases{/id}", + "deployments_url": "https://api.github.com/repos/blacklanternsecurity/bbot/deployments", + }, + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/jobs?per_page=100&page=1", + json={ + "total_count": 8, + "jobs": [ + { + "id": 24272553740, + "run_id": 8839360698, + "workflow_name": "tests", + "head_branch": "dnsbrute-helperify", + "run_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698", + "run_attempt": 1, + "node_id": "CR_kwDOG_O3ns8AAAAFpsHHDA", + "head_sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/jobs/24272553740", + "html_url": "https://github.com/blacklanternsecurity/bbot/actions/runs/8839360698/job/24272553740", + "status": "completed", + "conclusion": "success", + "created_at": "2024-04-25T21:04:54Z", + "started_at": "2024-04-25T21:05:01Z", + "completed_at": "2024-04-25T21:05:18Z", + "name": "lint", + "steps": [ + { + "name": "Set up job", + "status": "completed", + "conclusion": "success", + "number": 1, + "started_at": "2024-04-25T21:05:00.000Z", + "completed_at": "2024-04-25T21:05:02.000Z", + }, + { + "name": "Run actions/checkout@v3", + "status": "completed", + "conclusion": "success", + "number": 2, + "started_at": "2024-04-25T21:05:02.000Z", + "completed_at": "2024-04-25T21:05:04.000Z", + }, + { + "name": "Run psf/black@stable", + "status": "completed", + "conclusion": "success", + "number": 3, + "started_at": "2024-04-25T21:05:04.000Z", + "completed_at": "2024-04-25T21:05:10.000Z", + }, + { + "name": "Install Python 3", + "status": "completed", + "conclusion": "success", + "number": 4, + "started_at": "2024-04-25T21:05:10.000Z", + "completed_at": "2024-04-25T21:05:11.000Z", + }, + { + "name": "Install dependencies", + "status": "completed", + "conclusion": "success", + "number": 5, + "started_at": "2024-04-25T21:05:11.000Z", + "completed_at": "2024-04-25T21:05:12.000Z", + }, + { + "name": "flake8", + "status": "completed", + "conclusion": "success", + "number": 6, + "started_at": "2024-04-25T21:05:12.000Z", + "completed_at": "2024-04-25T21:05:14.000Z", + }, + { + "name": "Post Install Python 3", + "status": "completed", + "conclusion": "success", + "number": 11, + "started_at": "2024-04-25T21:05:15.000Z", + "completed_at": "2024-04-25T21:05:15.000Z", + }, + { + "name": "Post Run actions/checkout@v3", + "status": "completed", + "conclusion": "success", + "number": 12, + "started_at": "2024-04-25T21:05:15.000Z", + "completed_at": "2024-04-25T21:05:15.000Z", + }, + { + "name": "Complete job", + "status": "completed", + "conclusion": "success", + "number": 13, + "started_at": "2024-04-25T21:05:15.000Z", + "completed_at": "2024-04-25T21:05:15.000Z", + }, + ], + "check_run_url": "https://api.github.com/repos/blacklanternsecurity/bbot/check-runs/24272553740", + "labels": ["ubuntu-latest"], + "runner_id": 60, + "runner_name": "GitHub Actions 60", + "runner_group_id": 2, + "runner_group_name": "GitHub Actions", + }, + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://github.com/blacklanternsecurity/bbot/commit/c5de1360e8e5ccba04b23035f675a529282b7dc2/checks/24272553740/logs/1", + headers={ + "location": 
"https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" + }, + status_code=302, + ) + + def check(self, module_test, events): + assert len(events) == 5 + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com" and e.scope_distance == 0 + ] + ), "Failed to emit target DNS_NAME" + assert 1 == len( + [e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity" and e.scope_distance == 1] + ), "Failed to find ORG_STUB" + assert 1 == len( + [ + e + for e in events + if e.type == "SOCIAL" + and e.data["platform"] == "github" + and e.data["profile_name"] == "blacklanternsecurity" + and "github-org" in e.tags + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github" + assert 1 == len( + [ + e + for e in events + if e.type == "CODE_REPOSITORY" + and "git" in e.tags + and e.data["url"] == "https://github.com/blacklanternsecurity/test_keys" + and e.scope_distance == 1 + ] + ), "Failed to find blacklanternsecurity github repo" + assert 1 == len( + [ + e + for e in events + if e.type == "URL_UNVERIFIED" + and e.data + == "https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" + and e.scope_distance == 1 + ] + ), "Failed to obtain redirect to the blob" From 9b944542a7a001f9b549e40a5201f8d81d5025d6 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Mon, 29 Apr 2024 18:03:32 +0100 Subject: [PATCH 02/15] Changed produced event to `FILESYSTEM` --- bbot/modules/github_workflows.py | 90 +++++-------- .../test_module_github_workflows.py | 118 ++---------------- 2 files changed, 41 insertions(+), 167 deletions(-) diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index 5719153c25..08ca162c53 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -1,19 +1,25 @@ +from datetime import date, timedelta + from bbot.modules.templates.github import github class github_workflows(github): watched_events = ["CODE_REPOSITORY"] - produced_events = ["HTTP_RESPONSE"] + produced_events = ["FILESYSTEM"] flags = ["passive", "safe"] - meta = {"description": "Query Github's API for the repositories workflow logs"} - options = {"api_key": ""} + meta = {"description": "Download a github repositories workflow logs"} + options = {"api_key": "", "historical_logs": 7} options_desc = { "api_key": "Github token", + "historical_logs": "Fetch logs that are at most this many days old (default: 7)", } # scope_distance_modifier = 2 async def setup(self): + self.historical_logs = int(self.options.get("historical_logs", 7)) + self.output_dir = self.scan.home / "workflow_logs" + self.helpers.mkdir(self.output_dir) return await super().setup() async def filter_event(self, event): @@ -32,33 +38,15 @@ async def handle_event(self, event): self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}") for run in await 
self.get_workflow_runs(owner, repo, workflow_id): run_id = run.get("id") - self.log.debug(f"Looking up jobs for {workflow_name}/{run_id} in {owner}/{repo}") - for job in await self.get_run_jobs(owner, repo, run_id): - job_id = job.get("id") - commit_id = job.get("head_sha") - steps = job.get("steps", []) - for step in steps: - if step.get("conclusion") == "success": - step_name = step.get("name") - number = step.get("number") - self.log.debug( - f"Requesting {workflow_name}/run {run_id}/job {job_id}/{step_name} log for {owner}/{repo}" - ) - # Request log step from the html_url as that bypasses the admin restrictions from using the API - response = await self.helpers.request( - f"https://github.com/{owner}/{repo}/commit/{commit_id}/checks/{job_id}/logs/{number}", - follow_redirects=True, - ) - if response: - blob_url = response.headers.get("Location", "") - if blob_url: - url_event = self.make_event( - blob_url, "URL_UNVERIFIED", source=event, tags=["httpx-safe"] - ) - if not url_event: - continue - url_event.scope_distance = event.scope_distance - await self.emit_event(url_event) + self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}") + log_path = await self.download_run_logs(owner, repo, run_id) + if log_path: + self.verbose(f"Downloaded repository workflow logs to {log_path}") + logfile_event = self.make_event( + {"path": str(log_path)}, "FILESYSTEM", tags=["zipfile"], source=event + ) + logfile_event.scope_distance = event.scope_distance + await self.emit_event(logfile_event) async def get_workflows(self, owner, repo): workflows = [] @@ -89,8 +77,11 @@ async def get_workflows(self, owner, repo): async def get_workflow_runs(self, owner, repo, workflow_id): runs = [] + created_date = date.today() - timedelta(days=self.historical_logs) + formated_date = created_date.strftime("%Y-%m-%d") url = ( - f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?per_page=100&page=" + "{page}" + f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?created=>{formated_date}&per_page=100&page=" + + "{page}" ) agen = self.helpers.api_page_iter(url, headers=self.headers, json=False) try: @@ -116,29 +107,14 @@ async def get_workflow_runs(self, owner, repo, workflow_id): agen.aclose() return runs - async def get_run_jobs(self, owner, repo, run_id): - jobs = [] - url = f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/jobs?per_page=100&page=" + "{page}" - agen = self.helpers.api_page_iter(url, headers=self.headers, json=False) - try: - async for r in agen: - if r is None: - break - status_code = getattr(r, "status_code", 0) - if status_code == 403: - self.warning("Github is rate-limiting us (HTTP status: 403)") - break - if status_code != 200: - break - try: - j = r.json() - except Exception as e: - self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") - break - if not j: - break - for item in j.get("jobs", []): - jobs.append(item) - finally: - agen.aclose() - return jobs + async def download_run_logs(self, owner, repo, run_id): + file_destination = self.output_dir / f"{owner}_{repo}_run_{run_id}.zip" + result = await self.helpers.download( + f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", filename=file_destination + ) + if result: + self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}") + return file_destination + else: + self.warning(f"Failed to download logs for {owner}/{repo}/{run_id}") + return None diff --git 
a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py index ee2f93a49b..ed4ce921b9 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -415,118 +415,16 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/jobs?per_page=100&page=1", - json={ - "total_count": 8, - "jobs": [ - { - "id": 24272553740, - "run_id": 8839360698, - "workflow_name": "tests", - "head_branch": "dnsbrute-helperify", - "run_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698", - "run_attempt": 1, - "node_id": "CR_kwDOG_O3ns8AAAAFpsHHDA", - "head_sha": "c5de1360e8e5ccba04b23035f675a529282b7dc2", - "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/jobs/24272553740", - "html_url": "https://github.com/blacklanternsecurity/bbot/actions/runs/8839360698/job/24272553740", - "status": "completed", - "conclusion": "success", - "created_at": "2024-04-25T21:04:54Z", - "started_at": "2024-04-25T21:05:01Z", - "completed_at": "2024-04-25T21:05:18Z", - "name": "lint", - "steps": [ - { - "name": "Set up job", - "status": "completed", - "conclusion": "success", - "number": 1, - "started_at": "2024-04-25T21:05:00.000Z", - "completed_at": "2024-04-25T21:05:02.000Z", - }, - { - "name": "Run actions/checkout@v3", - "status": "completed", - "conclusion": "success", - "number": 2, - "started_at": "2024-04-25T21:05:02.000Z", - "completed_at": "2024-04-25T21:05:04.000Z", - }, - { - "name": "Run psf/black@stable", - "status": "completed", - "conclusion": "success", - "number": 3, - "started_at": "2024-04-25T21:05:04.000Z", - "completed_at": "2024-04-25T21:05:10.000Z", - }, - { - "name": "Install Python 3", - "status": "completed", - "conclusion": "success", - "number": 4, - "started_at": "2024-04-25T21:05:10.000Z", - "completed_at": "2024-04-25T21:05:11.000Z", - }, - { - "name": "Install dependencies", - "status": "completed", - "conclusion": "success", - "number": 5, - "started_at": "2024-04-25T21:05:11.000Z", - "completed_at": "2024-04-25T21:05:12.000Z", - }, - { - "name": "flake8", - "status": "completed", - "conclusion": "success", - "number": 6, - "started_at": "2024-04-25T21:05:12.000Z", - "completed_at": "2024-04-25T21:05:14.000Z", - }, - { - "name": "Post Install Python 3", - "status": "completed", - "conclusion": "success", - "number": 11, - "started_at": "2024-04-25T21:05:15.000Z", - "completed_at": "2024-04-25T21:05:15.000Z", - }, - { - "name": "Post Run actions/checkout@v3", - "status": "completed", - "conclusion": "success", - "number": 12, - "started_at": "2024-04-25T21:05:15.000Z", - "completed_at": "2024-04-25T21:05:15.000Z", - }, - { - "name": "Complete job", - "status": "completed", - "conclusion": "success", - "number": 13, - "started_at": "2024-04-25T21:05:15.000Z", - "completed_at": "2024-04-25T21:05:15.000Z", - }, - ], - "check_run_url": "https://api.github.com/repos/blacklanternsecurity/bbot/check-runs/24272553740", - "labels": ["ubuntu-latest"], - "runner_id": 60, - "runner_name": "GitHub Actions 60", - "runner_group_id": 2, - "runner_group_name": "GitHub Actions", - }, - ], - }, - ) - module_test.httpx_mock.add_response( - url="https://github.com/blacklanternsecurity/bbot/commit/c5de1360e8e5ccba04b23035f675a529282b7dc2/checks/24272553740/logs/1", + 
url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/logs", headers={ "location": "https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" }, status_code=302, ) + module_test.httpx_mock.add_response( + url="https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02", + content=self.zip_content, + ) def check(self, module_test, events): assert len(events) == 5 @@ -565,9 +463,9 @@ def check(self, module_test, events): [ e for e in events - if e.type == "URL_UNVERIFIED" - and e.data - == "https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" + if e.type == "FILESYSTEM" + and "blacklanternsecurity/bbot/run_8839360698.zip" in e.data + and "zipfile" in e.tags and e.scope_distance == 1 ] ), "Failed to obtain redirect to the blob" From 7078612a3b2a2193fcc3821ba96c7270f58b4f5f Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Mon, 29 Apr 2024 18:14:26 +0100 Subject: [PATCH 03/15] Changed to grab owner and repo from repo_url --- bbot/modules/github_workflows.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index 08ca162c53..a018adfa1e 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -24,14 +24,14 @@ async def setup(self): async def filter_event(self, event): if event.type == "CODE_REPOSITORY": - if "git" not in event.tags: + if "git" not in event.tags and "github" not in event.data.get("url", ""): return False, "event is not a git repository" return True async def handle_event(self, event): - # repo_url = event.data.get("url") - owner = "blacklanternsecurity" - repo = "bbot" + repo_url = event.data.get("url") + owner = repo_url.split("/")[-2] + repo = repo_url.split("/")[-1] for workflow in await self.get_workflows(owner, repo): workflow_name = workflow.get("name") workflow_id = workflow.get("id") From c7a78aeed4fbeeec50448234e4b1025f49d5b723 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Mon, 29 Apr 2024 21:14:55 +0100 Subject: [PATCH 04/15] Break out of api_page_iter properly --- bbot/modules/github_workflows.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index a018adfa1e..a151217e94 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -14,7 +14,7 @@ class github_workflows(github): "historical_logs": "Fetch logs that are at most this many days old (default: 7)", } - # scope_distance_modifier = 2 + scope_distance_modifier = 2 async def setup(self): 
self.historical_logs = int(self.options.get("historical_logs", 7)) @@ -63,13 +63,13 @@ async def get_workflows(self, owner, repo): if status_code != 200: break try: - j = r.json() + j = r.json().get("workflows", []) except Exception as e: self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") break if not j: break - for item in j.get("workflows", []): + for item in j: workflows.append(item) finally: agen.aclose() @@ -95,13 +95,13 @@ async def get_workflow_runs(self, owner, repo, workflow_id): if status_code != 200: break try: - j = r.json() + j = r.json().get("workflow_runs", []) except Exception as e: self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") break if not j: break - for item in j.get("workflow_runs", []): + for item in j: runs.append(item) finally: agen.aclose() From 46f1ddd3ee7937d83bb4d5dac8c434bc6afb6a53 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 30 Apr 2024 08:55:29 +0100 Subject: [PATCH 05/15] Add workflow logs into their own folder --- bbot/modules/github_workflows.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index a151217e94..edfd4b403c 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -108,9 +108,12 @@ async def get_workflow_runs(self, owner, repo, workflow_id): return runs async def download_run_logs(self, owner, repo, run_id): - file_destination = self.output_dir / f"{owner}_{repo}_run_{run_id}.zip" + folder = self.output_dir / owner / repo + self.helpers.mkdir(folder) + filename = f"run_{run_id}.zip" + file_destination = folder / filename result = await self.helpers.download( - f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", filename=file_destination + f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", filename=file_destination, headers=self.headers ) if result: self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}") From 40f99ddeaac075b698e71bc95810a26a422d871d Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 30 Apr 2024 10:19:36 +0100 Subject: [PATCH 06/15] Corrected tests and formatted --- bbot/modules/github_workflows.py | 4 +- .../test_module_github_workflows.py | 50 ++++++++++++------- 2 files changed, 36 insertions(+), 18 deletions(-) diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index edfd4b403c..170db11900 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -113,7 +113,9 @@ async def download_run_logs(self, owner, repo, run_id): filename = f"run_{run_id}.zip" file_destination = folder / filename result = await self.helpers.download( - f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", filename=file_destination, headers=self.headers + f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", + filename=file_destination, + headers=self.headers, ) if result: self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}") diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py index ed4ce921b9..18997b9102 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -1,3 +1,7 @@ +import io +import zipfile +from pathlib import Path + from .base import ModuleTestBase @@ -5,6 +9,13 @@ class 
TestGithub_Workflows(ModuleTestBase): config_overrides = {"modules": {"github_org": {"api_key": "asdf"}}} modules_overrides = ["github_workflows", "github_org", "speculate"] + data = io.BytesIO() + with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as zipfile: + zipfile.writestr("test.txt", "This is some test data") + data.seek(0) + + zip_content = data.getvalue() + async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response(url="https://api.github.com/zen") module_test.httpx_mock.add_response( @@ -71,7 +82,7 @@ async def setup_before_prep(self, module_test): "type": "Organization", "site_admin": False, }, - "html_url": "https://github.com/blacklanternsecurity/test_keys", + "html_url": "https://github.com/blacklanternsecurity/bbot", "description": None, "fork": False, "url": "https://api.github.com/repos/blacklanternsecurity/test_keys", @@ -116,8 +127,8 @@ async def setup_before_prep(self, module_test): "pushed_at": "2023-10-19T02:56:46Z", "git_url": "git://github.com/blacklanternsecurity/test_keys.git", "ssh_url": "git@github.com:blacklanternsecurity/test_keys.git", - "clone_url": "https://github.com/blacklanternsecurity/test_keys.git", - "svn_url": "https://github.com/blacklanternsecurity/test_keys", + "clone_url": "https://github.com/blacklanternsecurity/bbot.git", + "svn_url": "https://github.com/blacklanternsecurity/bbot", "homepage": None, "size": 2, "stargazers_count": 2, @@ -169,7 +180,7 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?per_page=100&page=1", + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?created=>2024-04-23&per_page=100&page=1", json={ "total_count": 2993, "workflow_runs": [ @@ -415,7 +426,7 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/logs", + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/logs", headers={ "location": "https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02" }, @@ -427,7 +438,7 @@ async def setup_before_prep(self, module_test): ) def check(self, module_test, events): - assert len(events) == 5 + assert len(events) == 6 assert 1 == len( [ e @@ -455,17 +466,22 @@ def check(self, module_test, events): for e in events if e.type == "CODE_REPOSITORY" and "git" in e.tags - and e.data["url"] == "https://github.com/blacklanternsecurity/test_keys" + and e.data["url"] == "https://github.com/blacklanternsecurity/bbot" and e.scope_distance == 1 ] ), "Failed to find blacklanternsecurity github repo" - assert 1 == len( - [ - e - for e in events - if e.type == "FILESYSTEM" - and "blacklanternsecurity/bbot/run_8839360698.zip" in e.data - and "zipfile" in e.tags - and e.scope_distance == 1 - ] - ), "Failed to obtain redirect to the blob" + filesystem_events = [ + e + for e in events + if e.type == "FILESYSTEM" + and "workflow_logs/blacklanternsecurity/bbot/run_8839360698.zip" in e.data["path"] + and "zipfile" in e.tags + and e.scope_distance == 1 + ] + assert 1 == 
len(filesystem_events), "Failed to download workflow logs" + filesystem_event = filesystem_events[0] + file = Path(filesystem_event.data["path"]) + assert file.is_file(), "Destination zip does not exist" + with zipfile.ZipFile(file, "r") as zip_ref: + assert "test.txt" in zip_ref.namelist(), "test.txt not in zip" + assert zip_ref.read("test.txt") == b"This is some test data", "test.txt contents incorrect" From ca56eb33aa3cef418640e858e12ea1b6ee3685f5 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 30 Apr 2024 10:50:54 +0100 Subject: [PATCH 07/15] Match URL using regex --- .../module_tests/test_module_github_workflows.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py index 18997b9102..8961b63c04 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -1,4 +1,5 @@ import io +import re import zipfile from pathlib import Path @@ -13,7 +14,6 @@ class TestGithub_Workflows(ModuleTestBase): with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as zipfile: zipfile.writestr("test.txt", "This is some test data") data.seek(0) - zip_content = data.getvalue() async def setup_before_prep(self, module_test): @@ -180,7 +180,9 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?created=>2024-04-23&per_page=100&page=1", + url=re.compile( + r"https://api\.github\.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs\?created=.*&per_page=100&page=1" + ), json={ "total_count": 2993, "workflow_runs": [ From 98c3281f3a5e642e397aa65a5f83e8baf4ac082e Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Tue, 30 Apr 2024 11:47:30 +0100 Subject: [PATCH 08/15] Change the workflow logs obtained to be configurable --- bbot/modules/github_workflows.py | 56 ++++++++----------- .../test_module_github_workflows.py | 7 +-- 2 files changed, 26 insertions(+), 37 deletions(-) diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index 170db11900..dc0f63ed7b 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -1,5 +1,3 @@ -from datetime import date, timedelta - from bbot.modules.templates.github import github @@ -8,16 +6,19 @@ class github_workflows(github): produced_events = ["FILESYSTEM"] flags = ["passive", "safe"] meta = {"description": "Download a github repositories workflow logs"} - options = {"api_key": "", "historical_logs": 7} + options = {"api_key": "", "num_logs": 1} options_desc = { "api_key": "Github token", - "historical_logs": "Fetch logs that are at most this many days old (default: 7)", + "num_logs": "For each workflow fetch the last N successful runs logs (max 100)", } scope_distance_modifier = 2 async def setup(self): - self.historical_logs = int(self.options.get("historical_logs", 7)) + self.num_logs = int(self.options.get("num_logs", 1)) + if self.num_logs > 100: + self.log.error("num_logs option is capped at 100") + return False self.output_dir = self.scan.home / "workflow_logs" self.helpers.mkdir(self.output_dir) return await super().setup() @@ -77,34 +78,25 @@ async def get_workflows(self, owner, repo): async def get_workflow_runs(self, owner, repo, workflow_id): runs = [] - created_date = date.today() - 
timedelta(days=self.historical_logs)
-        formated_date = created_date.strftime("%Y-%m-%d")
-        url = (
-            f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?created=>{formated_date}&per_page=100&page="
-            + "{page}"
-        )
-        agen = self.helpers.api_page_iter(url, headers=self.headers, json=False)
+        url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?status=success&per_page={self.num_logs}"
+        r = await self.helpers.request(url, headers=self.headers)
+        if r is None:
+            return runs
+        status_code = getattr(r, "status_code", 0)
+        if status_code == 403:
+            self.warning("Github is rate-limiting us (HTTP status: 403)")
+            return runs
+        if status_code != 200:
+            return runs
         try:
-            async for r in agen:
-                if r is None:
-                    break
-                status_code = getattr(r, "status_code", 0)
-                if status_code == 403:
-                    self.warning("Github is rate-limiting us (HTTP status: 403)")
-                    break
-                if status_code != 200:
-                    break
-                try:
-                    j = r.json().get("workflow_runs", [])
-                except Exception as e:
-                    self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
-                    break
-                if not j:
-                    break
-                for item in j:
-                    runs.append(item)
-        finally:
-            agen.aclose()
+            j = r.json().get("workflow_runs", [])
+        except Exception as e:
+            self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
+            return runs
+        if not j:
+            return runs
+        for item in j:
+            runs.append(item)
         return runs
 
     async def download_run_logs(self, owner, repo, run_id):
diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py
index 8961b63c04..76bd55814c 100644
--- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py
+++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py
@@ -1,5 +1,4 @@
 import io
-import re
 import zipfile
 
 from pathlib import Path
@@ -180,9 +179,7 @@ async def setup_before_prep(self, module_test):
             },
         )
         module_test.httpx_mock.add_response(
-            url=re.compile(
-                r"https://api\.github\.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs\?created=.*&per_page=100&page=1"
-            ),
+            url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/workflows/22452226/runs?status=success&per_page=1",
             json={
                 "total_count": 2993,
                 "workflow_runs": [
@@ -197,7 +194,7 @@ async def setup_before_prep(self, module_test):
                         "run_number": 4520,
                         "event": "pull_request",
                         "status": "completed",
-                        "conclusion": "failure",
+                        "conclusion": "success",
                         "workflow_id": 22452226,
                         "check_suite_id": 23162098295,
                         "check_suite_node_id": "CS_kwDOG_O3ns8AAAAFZJGSdw",

From 795a4c9faeb407fe9ed006cd3c748af66e4020fb Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Tue, 30 Apr 2024 12:09:32 +0100
Subject: [PATCH 09/15] Change config to download a set number of logs

---
 bbot/modules/github_workflows.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index dc0f63ed7b..b4762ccbdb 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -15,7 +15,7 @@ class github_workflows(github):
     scope_distance_modifier = 2
 
     async def setup(self):
-        self.num_logs = int(self.options.get("num_logs", 1))
+        self.num_logs = int(self.config.get("num_logs", 1))
         if self.num_logs > 100:
             self.log.error("num_logs option is capped at 100")
             return False

From 0fb1c30bab7ed30c8322701df2f9b678d3ad101e Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Tue, 30 Apr 2024 12:25:28 +0100
Subject: [PATCH 10/15] Changed error message

---
 bbot/modules/github_workflows.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index b4762ccbdb..35609232cb 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -113,5 +113,5 @@ async def download_run_logs(self, owner, repo, run_id):
             self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}")
             return file_destination
         else:
-            self.warning(f"Failed to download logs for {owner}/{repo}/{run_id}")
+            self.warning(f"The logs for {owner}/{repo}/{run_id} have expired and are no longer available.")
             return None

From 1d0ccf46d5d9014e13727418ae789c0a1b65342d Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Wed, 1 May 2024 13:20:38 +0100
Subject: [PATCH 11/15] Added description of the `FILESYSTEM` path

---
 bbot/modules/github_workflows.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index 35609232cb..8c8c2c35e9 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -44,7 +44,7 @@ async def handle_event(self, event):
                 if log_path:
                     self.verbose(f"Downloaded repository workflow logs to {log_path}")
                     logfile_event = self.make_event(
-                        {"path": str(log_path)}, "FILESYSTEM", tags=["zipfile"], source=event
+                        {"path": str(log_path), "description": f"Workflow run logs from https://github.com/{owner}/{repo}/actions/runs/{run_id}"}, "FILESYSTEM", tags=["zipfile"], source=event
                     )
                     logfile_event.scope_distance = event.scope_distance
                     await self.emit_event(logfile_event)

From f4cec98faf89030c146741004bf2b59f486b1304 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Wed, 1 May 2024 13:21:44 +0100
Subject: [PATCH 12/15] Lint

---
 bbot/modules/github_workflows.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index 8c8c2c35e9..7c3150f68a 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -44,7 +44,13 @@ async def handle_event(self, event):
                 if log_path:
                     self.verbose(f"Downloaded repository workflow logs to {log_path}")
                     logfile_event = self.make_event(
-                        {"path": str(log_path), "description": f"Workflow run logs from https://github.com/{owner}/{repo}/actions/runs/{run_id}"}, "FILESYSTEM", tags=["zipfile"], source=event
+                        {
+                            "path": str(log_path),
+                            "description": f"Workflow run logs from https://github.com/{owner}/{repo}/actions/runs/{run_id}",
+                        },
+                        "FILESYSTEM",
+                        tags=["zipfile"],
+                        source=event,
                     )
                     logfile_event.scope_distance = event.scope_distance
                     await self.emit_event(logfile_event)

From 856c23657b36370e2f938594098d7f268290c7b0 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 1 May 2024 09:33:21 -0400
Subject: [PATCH 13/15] small tweak to error handling

---
 bbot/modules/github_workflows.py | 29 ++++++++++++++++++++---------
 1 file changed, 20 insertions(+), 9 deletions(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index 7c3150f68a..fb39a02839 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -110,14 +110,25 @@ async def download_run_logs(self, owner, repo, run_id):
         self.helpers.mkdir(folder)
         filename = f"run_{run_id}.zip"
         file_destination = folder / filename
-        result = await self.helpers.download(
-            f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs",
-            filename=file_destination,
-            headers=self.headers,
-        )
-        if result:
+        try:
+            result = await self.helpers.download(
+                f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs",
+                filename=file_destination,
+                headers=self.headers,
+                raise_error=True,
+                warn=False,
+            )
             self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}")
             return file_destination
-        else:
-            self.warning(f"The logs for {owner}/{repo}/{run_id} have expired and are no longer available.")
-            return None
+        except Exception as e:
+            response = getattr(e, "response", None)
+            status_code = getattr(response, "status_code", 0)
+            if status_code == 403:
+                self.warning(
+                    f"The current access key does not have access to workflow {owner}/{repo}/{run_id} (status: {status_code})"
+                )
+            else:
+                self.info(
+                    f"The logs for {owner}/{repo}/{run_id} have expired and are no longer available (status: {status_code})"
+                )
+            return None

From 9e56987e678024eccdd99abd05bbddb8555282d3 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 1 May 2024 09:36:55 -0400
Subject: [PATCH 14/15] flaked

---
 bbot/modules/github_workflows.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index fb39a02839..8999c41a33 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -111,7 +111,7 @@ async def download_run_logs(self, owner, repo, run_id):
         filename = f"run_{run_id}.zip"
         file_destination = folder / filename
         try:
-            result = await self.helpers.download(
+            await self.helpers.download(
                 f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs",
                 filename=file_destination,
                 headers=self.headers,

From 4c6362064f2e7bcec8762f1443b1f5569e162781 Mon Sep 17 00:00:00 2001
From: Dom Whewell
Date: Fri, 3 May 2024 18:21:25 +0100
Subject: [PATCH 15/15] emit the full event logs as `FILESYSTEM` events to be scanned to avoid duplication

---
 bbot/modules/github_workflows.py              | 25 +++++++++++++------
 .../test_module_github_workflows.py           | 24 ++++++------------
 2 files changed, 26 insertions(+), 23 deletions(-)

diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index 8999c41a33..03d8e55f01 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -1,3 +1,6 @@
+import zipfile
+import fnmatch
+
 from bbot.modules.templates.github import github
 
 
@@ -40,16 +43,14 @@ async def handle_event(self, event):
             for run in await self.get_workflow_runs(owner, repo, workflow_id):
                 run_id = run.get("id")
                 self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}")
-                log_path = await self.download_run_logs(owner, repo, run_id)
-                if log_path:
-                    self.verbose(f"Downloaded repository workflow logs to {log_path}")
+                for log in await self.download_run_logs(owner, repo, run_id):
                     logfile_event = self.make_event(
                         {
-                            "path": str(log_path),
+                            "path": str(log),
                             "description": f"Workflow run logs from https://github.com/{owner}/{repo}/actions/runs/{run_id}",
                         },
                         "FILESYSTEM",
-                        tags=["zipfile"],
+                        tags=["textfile"],
                         source=event,
                     )
                     logfile_event.scope_distance = event.scope_distance
                     await self.emit_event(logfile_event)
@@ -119,8 +120,8 @@ async def download_run_logs(self, owner, repo, run_id):
                 warn=False,
             )
             self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}")
-            return file_destination
         except Exception as e:
+            file_destination = None
             response = getattr(e, "response", None)
             status_code = getattr(response, "status_code", 0)
             if status_code == 403:
@@ -131,4 +132,14 @@ async def download_run_logs(self, owner, repo, run_id):
                 self.info(
                     f"The logs for {owner}/{repo}/{run_id} have expired and are no longer available (status: {status_code})"
                 )
-            return None
+        # Secrets are duplicated in the individual workflow steps so just extract the main log files from the top folder
+        if file_destination:
+            main_logs = []
+            with zipfile.ZipFile(file_destination, "r") as logzip:
+                for name in logzip.namelist():
+                    if fnmatch.fnmatch(name, "*.txt") and not "/" in name:
+                        logzip.extract(name, folder)
+                        main_logs.append(folder / name)
+            return main_logs
+        else:
+            return []
diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py
index 76bd55814c..c5bdf6d07d 100644
--- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py
+++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py
@@ -12,6 +12,8 @@ class TestGithub_Workflows(ModuleTestBase):
     data = io.BytesIO()
     with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as zipfile:
         zipfile.writestr("test.txt", "This is some test data")
+        zipfile.writestr("test2.txt", "This is some more test data")
+        zipfile.writestr("folder/test3.txt", "This is yet more test data")
     data.seek(0)
     zip_content = data.getvalue()
 
@@ -437,7 +439,7 @@ async def setup_before_prep(self, module_test):
         )
 
     def check(self, module_test, events):
-        assert len(events) == 6
+        assert len(events) == 7
         assert 1 == len(
             [
                 e
@@ -469,18 +471,8 @@ def check(self, module_test, events):
                 and e.scope_distance == 1
             ]
         ), "Failed to find blacklanternsecurity github repo"
-        filesystem_events = [
-            e
-            for e in events
-            if e.type == "FILESYSTEM"
-            and "workflow_logs/blacklanternsecurity/bbot/run_8839360698.zip" in e.data["path"]
-            and "zipfile" in e.tags
-            and e.scope_distance == 1
-        ]
-        assert 1 == len(filesystem_events), "Failed to download workflow logs"
-        filesystem_event = filesystem_events[0]
-        file = Path(filesystem_event.data["path"])
-        assert file.is_file(), "Destination zip does not exist"
-        with zipfile.ZipFile(file, "r") as zip_ref:
-            assert "test.txt" in zip_ref.namelist(), "test.txt not in zip"
-            assert zip_ref.read("test.txt") == b"This is some test data", "test.txt contents incorrect"
+        filesystem_events = [e for e in events if e.type == "FILESYSTEM"]
+        assert 2 == len(filesystem_events), filesystem_events
+        for filesystem_event in filesystem_events:
+            file = Path(filesystem_event.data["path"])
+            assert file.is_file(), "Destination file does not exist"