Ensure duplicated secret in zip folder does not get detected
domwhewell-sage committed May 12, 2024
1 parent 694b5f6 commit 3bce836
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions bbot/test/test_step_2/module_tests/test_module_trufflehog.py
@@ -541,6 +541,7 @@ async def setup_before_prep(self, module_test):
 data = io.BytesIO()
 with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as z:
     z.writestr("test.txt", self.file_content)
+    z.writestr("folder/test2.txt", self.file_content)
 data.seek(0)
 zip_content = data.getvalue()
 module_test.httpx_mock.add_response(
@@ -861,7 +862,7 @@ def check(self, module_test, events):
 assert content == self.file_content, "File content doesn't match"
 github_workflow_event = [e for e in vuln_events if "bbot" in e.data["description"]][0].source
 file = Path(github_workflow_event.data["path"])
-assert file.is_file(), "Destination zip does not exist"
+assert file.is_file(), "Destination file does not exist"
 docker_source_event = [e for e in vuln_events if e.data["host"] == "hub.docker.com"][0].source
 file = Path(docker_source_event.data["path"])
 assert file.is_file(), "Destination image does not exist"
@@ -888,7 +889,7 @@ def check(self, module_test, events):
 assert content == self.file_content, "File content doesn't match"
 github_workflow_event = [e for e in finding_events if "bbot" in e.data["description"]][0].source
 file = Path(github_workflow_event.data["path"])
-assert file.is_file(), "Destination zip does not exist"
+assert file.is_file(), "Destination file does not exist"
 docker_source_event = [e for e in finding_events if e.data["host"] == "hub.docker.com"][0].source
 file = Path(docker_source_event.data["path"])
 assert file.is_file(), "Destination image does not exist"
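
For context, here is a minimal standalone sketch of the fixture the first hunk builds: an in-memory zip archive carrying the same secret-bearing content twice, once at the archive root and once inside a subfolder, which (per the commit message) lets the test verify that the duplicated copy does not produce a second detection. The file_content value is a hypothetical stand-in for the test class's self.file_content; the mocked HTTP response from the bbot test harness is omitted here.

import io
import zipfile

# Hypothetical stand-in for self.file_content, the secret-bearing
# text this test suite plants for trufflehog to find.
file_content = "some text containing a fake secret"

# Build the zip entirely in memory, mirroring the diff above:
# the same content is written twice, at the root and in a subfolder.
data = io.BytesIO()
with zipfile.ZipFile(data, mode="w", compression=zipfile.ZIP_DEFLATED) as z:
    z.writestr("test.txt", file_content)
    z.writestr("folder/test2.txt", file_content)  # duplicated secret
data.seek(0)
zip_content = data.getvalue()  # raw bytes a mocked HTTP endpoint would serve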
