Skip to content

Commit

Permalink
Merge branch 'dev' into dependabot/pip/dev/mkdocstrings-python-1.10.8
Browse files Browse the repository at this point in the history
  • Loading branch information
TheTechromancer authored Aug 23, 2024
2 parents 7520e93 + 0e1c426 commit 82a0bea
Show file tree
Hide file tree
Showing 7 changed files with 148 additions and 23 deletions.
7 changes: 5 additions & 2 deletions bbot/modules/git_clone.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,16 +46,19 @@ async def handle_event(self, event):
)

async def clone_git_repository(self, repository_url):
    """Clone a git repository into a per-owner folder under the module's output directory.

    The owner name is taken from the second-to-last path segment of the URL
    (e.g. "blacklanternsecurity" for https://github.com/blacklanternsecurity/bbot),
    so same-named repositories from different owners do not collide on disk.

    Parameters:
        repository_url (str): HTTPS URL of the repository to clone.

    Returns:
        Path to the cloned repository folder, or None if the clone failed.
    """
    # Tolerate a trailing slash; otherwise the owner segment would be off by one.
    repo_url = repository_url.rstrip("/")
    owner = repo_url.split("/")[-2]
    folder = self.output_dir / owner
    self.helpers.mkdir(folder)
    if self.api_key:
        # Embed the token so private repositories can be cloned non-interactively.
        url = repo_url.replace("https://github.com", f"https://user:{self.api_key}@github.com")
    else:
        url = repo_url
    command = ["git", "-C", folder, "clone", url]
    try:
        # GIT_TERMINAL_PROMPT=0 prevents git from hanging on a credential prompt.
        output = await self.run_process(command, env={"GIT_TERMINAL_PROMPT": "0"}, check=True)
    except CalledProcessError as e:
        self.debug(f"Error cloning {url}. STDERR: {repr(e.stderr)}")
        return
    # git prints "Cloning into '<folder>'..." on stderr; recover the folder name
    # from it, but don't crash if the output format is unexpected.
    marker = "Cloning into '"
    stderr = output.stderr or ""
    if marker not in stderr:
        self.debug(f"Could not determine clone destination from git output: {repr(stderr)}")
        return
    folder_name = stderr.split(marker)[1].split("'")[0]
    return folder / folder_name
74 changes: 72 additions & 2 deletions bbot/modules/github_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ class github_workflows(github):
produced_events = ["FILESYSTEM"]
flags = ["passive", "safe", "code-enum"]
meta = {
"description": "Download a github repositories workflow logs",
"description": "Download a github repositories workflow logs and workflow artifacts",
"created_date": "2024-04-29",
"author": "@domwhewell-sage",
}
Expand Down Expand Up @@ -46,9 +46,9 @@ async def handle_event(self, event):
self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}")
for run in await self.get_workflow_runs(owner, repo, workflow_id):
run_id = run.get("id")
workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}"
self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}")
for log in await self.download_run_logs(owner, repo, run_id):
workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}"
logfile_event = self.make_event(
{
"path": str(log),
Expand All @@ -62,6 +62,28 @@ async def handle_event(self, event):
logfile_event,
context=f"{{module}} downloaded workflow run logs from {workflow_url} to {{event.type}}: {log}",
)
artifacts = await self.get_run_artifacts(owner, repo, run_id)
if artifacts:
for artifact in artifacts:
artifact_id = artifact.get("id")
artifact_name = artifact.get("name")
expired = artifact.get("expired")
if not expired:
filepath = await self.download_run_artifacts(owner, repo, artifact_id, artifact_name)
if filepath:
artifact_event = self.make_event(
{
"path": str(filepath),
"description": f"Workflow run artifact from {workflow_url}",
},
"FILESYSTEM",
tags=["zipfile"],
parent=event,
)
await self.emit_event(
artifact_event,
context=f"{{module}} downloaded workflow run artifact from {workflow_url} to {{event.type}}: {filepath}",
)

async def get_workflows(self, owner, repo):
workflows = []
Expand Down Expand Up @@ -150,3 +172,51 @@ async def download_run_logs(self, owner, repo, run_id):
return main_logs
else:
return []

async def get_run_artifacts(self, owner, repo, run_id):
    """Fetch the artifact metadata for a single workflow run from the GitHub API.

    Parameters:
        owner (str): Repository owner/organization name.
        repo (str): Repository name.
        run_id (int): Workflow run ID whose artifacts should be listed.

    Returns:
        list: Artifact dicts as returned by the GitHub API; empty on any failure
        (no response, rate limit, non-200 status, or undecodable JSON).
    """
    url = f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
    r = await self.helpers.request(url, headers=self.headers)
    if r is None:
        return []
    status_code = getattr(r, "status_code", 0)
    if status_code == 403:
        # A 403 from the GitHub API almost always means rate limiting.
        self.warning("Github is rate-limiting us (HTTP status: 403)")
        return []
    if status_code != 200:
        return []
    try:
        artifacts = r.json().get("artifacts", [])
    except Exception as e:
        self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
        return []
    # list() replaces the original element-by-element copy loop and still yields
    # [] for a falsy/malformed "artifacts" value.
    return list(artifacts)

async def download_run_artifacts(self, owner, repo, artifact_id, artifact_name):
    """Download a workflow run artifact zip into <output_dir>/<owner>/<repo>/.

    Parameters:
        owner (str): Repository owner/organization name.
        repo (str): Repository name.
        artifact_id (int): GitHub artifact ID to download.
        artifact_name (str): Artifact name, used as the destination filename.

    Returns:
        Path to the downloaded file, or None if the download failed.
    """
    folder = self.output_dir / owner / repo
    self.helpers.mkdir(folder)
    # The artifact name is attacker-controllable (it comes from the target repo's
    # own workflow config); neutralize path separators so a name like
    # "../../evil" cannot escape the destination folder.
    safe_name = artifact_name.replace("/", "_").replace("\\", "_")
    file_destination = folder / safe_name
    try:
        await self.helpers.download(
            f"{self.base_url}/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip",
            filename=file_destination,
            headers=self.headers,
            raise_error=True,
            warn=False,
        )
        self.info(
            f"Downloaded workflow artifact {owner}/{repo}/{artifact_id}/{artifact_name} to {file_destination}"
        )
    except Exception as e:
        file_destination = None
        response = getattr(e, "response", None)
        status_code = getattr(response, "status_code", 0)
        if status_code == 403:
            self.warning(
                f"The current access key does not have access to workflow artifacts {owner}/{repo}/{artifact_id} (status: {status_code})"
            )
        else:
            # Surface non-403 failures at debug level rather than swallowing them.
            self.debug(f"Failed to download artifact {owner}/{repo}/{artifact_id}: {e}")
    return file_destination
25 changes: 19 additions & 6 deletions bbot/modules/trufflehog.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,17 +90,25 @@ async def handle_event(self, event):
host = event.host
else:
host = str(event.parent.host)
async for decoder_name, detector_name, raw_result, verified, source_metadata in self.execute_trufflehog(
module, path
):
async for (
decoder_name,
detector_name,
raw_result,
rawv2_result,
verified,
source_metadata,
) in self.execute_trufflehog(module, path):
if verified:
data = {
"severity": "High",
"description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]",
"description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]",
"host": host,
}
if description:
data["description"] += f" Description: [{description}]"
data["description"] += f" Raw result: [{raw_result}]"
if rawv2_result:
data["description"] += f" RawV2 result: [{rawv2_result}]"
await self.emit_event(
data,
"VULNERABILITY",
Expand All @@ -109,11 +117,14 @@ async def handle_event(self, event):
)
else:
data = {
"description": f"Potential Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]",
"description": f"Potential Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]",
"host": host,
}
if description:
data["description"] += f" Description: [{description}]"
data["description"] += f" Raw result: [{raw_result}]"
if rawv2_result:
data["description"] += f" RawV2 result: [{rawv2_result}]"
await self.emit_event(
data,
"FINDING",
Expand Down Expand Up @@ -162,11 +173,13 @@ async def execute_trufflehog(self, module, path):

raw_result = j.get("Raw", "")

rawv2_result = j.get("RawV2", "")

verified = j.get("Verified", False)

source_metadata = j.get("SourceMetadata", {})

yield (decoder_name, detector_name, raw_result, verified, source_metadata)
yield (decoder_name, detector_name, raw_result, rawv2_result, verified, source_metadata)
finally:
stats_file.unlink()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def check(self, module_test, events):
e
for e in events
if e.type == "FILESYSTEM"
and "git_repos/test_keys" in e.data["path"]
and "git_repos/.bbot_test/test_keys" in e.data["path"]
and "git" in e.tags
and e.scope_distance == 1
]
Expand Down
42 changes: 40 additions & 2 deletions bbot/test/test_step_2/module_tests/test_module_github_workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -437,9 +437,47 @@ async def setup_before_prep(self, module_test):
url="https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02",
content=self.zip_content,
)
module_test.httpx_mock.add_response(
url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts",
json={
"total_count": 1,
"artifacts": [
{
"id": 1829832535,
"node_id": "MDg6QXJ0aWZhY3QxODI5ODMyNTM1",
"name": "build.tar.gz",
"size_in_bytes": 245770648,
"url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535",
"archive_download_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip",
"expired": False,
"created_at": "2024-08-19T22:32:17Z",
"updated_at": "2024-08-19T22:32:18Z",
"expires_at": "2024-09-02T22:21:59Z",
"workflow_run": {
"id": 10461468466,
"repository_id": 89290483,
"head_repository_id": 799444840,
"head_branch": "not-a-real-branch",
"head_sha": "1eeb5354ab7b1e4141b8a6473846e2a5ea0dd2c6",
},
}
],
},
)
module_test.httpx_mock.add_response(
url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip",
headers={
"location": "https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D"
},
status_code=302,
)
module_test.httpx_mock.add_response(
url="https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D",
content=self.zip_content,
)

def check(self, module_test, events):
assert len(events) == 7
assert len(events) == 8
assert 1 == len(
[
e
Expand Down Expand Up @@ -473,7 +511,7 @@ def check(self, module_test, events):
]
), "Failed to find blacklanternsecurity github repo"
filesystem_events = [e for e in events if e.type == "FILESYSTEM"]
assert 2 == len(filesystem_events), filesystem_events
assert 3 == len(filesystem_events), filesystem_events
for filesystem_event in filesystem_events:
file = Path(filesystem_event.data["path"])
assert file.is_file(), "Destination file does not exist"
9 changes: 5 additions & 4 deletions bbot/test/test_step_2/module_tests/test_module_trufflehog.py
Original file line number Diff line number Diff line change
Expand Up @@ -851,7 +851,8 @@ def check(self, module_test, events):
if e.type == "VULNERABILITY"
and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com")
and "Verified Secret Found." in e.data["description"]
and "Secret: [https://admin:[email protected]]" in e.data["description"]
and "Raw result: [https://admin:[email protected]]" in e.data["description"]
and "RawV2 result: [https://admin:[email protected]/basic_auth]" in e.data["description"]
]
assert 3 == len(vuln_events), "Failed to find secret in events"
github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent
Expand All @@ -867,7 +868,7 @@ def check(self, module_test, events):
[
e
for e in filesystem_events
if e.data["path"].endswith("/git_repos/test_keys") and Path(e.data["path"]).is_dir()
if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir()
]
), "Test keys repo dir does not exist"
assert 1 == len(
Expand Down Expand Up @@ -898,7 +899,7 @@ def check(self, module_test, events):
if e.type == e.type == "FINDING"
and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com")
and "Potential Secret Found." in e.data["description"]
and "Secret: [https://admin:[email protected]]" in e.data["description"]
and "Raw result: [https://admin:[email protected]]" in e.data["description"]
]
assert 3 == len(finding_events), "Failed to find secret in events"
github_repo_event = [e for e in finding_events if "test_keys" in e.data["description"]][0].parent
Expand All @@ -914,7 +915,7 @@ def check(self, module_test, events):
[
e
for e in filesystem_events
if e.data["path"].endswith("/git_repos/test_keys") and Path(e.data["path"]).is_dir()
if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir()
]
), "Test keys repo dir does not exist"
assert 1 == len(
Expand Down
12 changes: 6 additions & 6 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 82a0bea

Please sign in to comment.