From 00afbfdd78868693f953ff295e498793c44ee659 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 16 Jan 2024 13:54:46 -0500 Subject: [PATCH 01/52] fix event stats tracking bug --- bbot/scanner/manager.py | 6 +-- bbot/test/test_step_1/test_modules_basic.py | 54 +++++++++++++++++++++ 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 7f320dbe2..731787926 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -168,7 +168,6 @@ async def _emit_event(self, event, **kwargs): - Updating scan statistics. """ log.debug(f"Emitting {event}") - event_distributed = False try: on_success_callback = kwargs.pop("on_success_callback", None) abort_if = kwargs.pop("abort_if", None) @@ -269,7 +268,6 @@ async def _emit_event(self, event, **kwargs): await self.scan.helpers.execute_sync_or_async(on_success_callback, event) await self.distribute_event(event) - event_distributed = True # speculate DNS_NAMES and IP_ADDRESSes from other event types source_event = event @@ -328,8 +326,6 @@ async def _emit_event(self, event, **kwargs): finally: event._resolved.set() - if event_distributed: - self.scan.stats.event_distributed(event) log.debug(f"{event.module}.emit_event() finished for {event}") def hash_event_graph(self, event): @@ -415,6 +411,8 @@ async def distribute_event(self, event): if acceptable_dup or graph_important: await mod.queue_event(event) + self.scan.stats.event_distributed(event) + async def _worker_loop(self): try: while not self.scan.stopped: diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index e21a43aa0..4503582d8 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -289,3 +289,57 @@ async def test_modules_basic_perdomainonly(scan, helpers, events, bbot_config, b else: assert valid_1 == True assert valid_2 == True + + +@pytest.mark.asyncio +async def 
test_modules_basic_stats(helpers, events, bbot_config, bbot_scanner, httpx_mock, monkeypatch): + from bbot.modules.base import BaseModule + + class dummy(BaseModule): + _name = "dummy" + watched_events = ["*"] + + async def handle_event(self, event): + self.emit_event( + {"host": "www.evilcorp.com", "url": "http://www.evilcorp.com", "description": "asdf"}, "FINDING", event + ) + + scan = bbot_scanner( + "evilcorp.com", + config=bbot_config, + force_start=True, + ) + scan.helpers.dns.mock_dns({("evilcorp.com", "A"): "127.0.254.1", ("www.evilcorp.com", "A"): "127.0.254.2"}) + + scan.modules["dummy"] = dummy(scan) + events = [e async for e in scan.async_start()] + + assert len(events) == 3 + + assert set(scan.stats.module_stats) == {"dummy", "python", "TARGET"} + + target_stats = scan.stats.module_stats["TARGET"] + assert target_stats.emitted == {"SCAN": 1, "DNS_NAME": 1} + assert target_stats.emitted_total == 2 + assert target_stats.produced == {"SCAN": 1, "DNS_NAME": 1} + assert target_stats.produced_total == 2 + assert target_stats.consumed == {} + assert target_stats.consumed_total == 0 + + dummy_stats = scan.stats.module_stats["dummy"] + assert dummy_stats.emitted == {"FINDING": 1} + assert dummy_stats.emitted_total == 1 + assert dummy_stats.produced == {"FINDING": 1} + assert dummy_stats.produced_total == 1 + assert dummy_stats.consumed == {"SCAN": 1, "DNS_NAME": 1} + assert dummy_stats.consumed_total == 2 + + python_stats = scan.stats.module_stats["python"] + assert python_stats.emitted == {} + assert python_stats.emitted_total == 0 + assert python_stats.produced == {} + assert python_stats.produced_total == 0 + assert python_stats.consumed == {"SCAN": 1, "FINDING": 1, "DNS_NAME": 1} + assert python_stats.consumed_total == 3 + + assert scan.stats.events_emitted_by_type == {"SCAN": 1, "FINDING": 1, "DNS_NAME": 1} From ff6939eecf45f417fcf2383b82a405a4099d6846 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 16 Jan 2024 17:32:01 -0500 Subject: [PATCH 
02/52] added http status code, http title, waf to asset inventory --- bbot/core/event/base.py | 20 ++++++++++- bbot/core/helpers/misc.py | 44 +++++++++++++++++++++++ bbot/modules/output/asset_inventory.py | 49 +++++++++++++++++++++++--- bbot/modules/output/base.py | 2 +- bbot/modules/wafw00f.py | 6 ++++ bbot/scanner/manager.py | 2 ++ bbot/test/test_step_1/test_events.py | 4 ++- bbot/test/test_step_1/test_helpers.py | 6 ++++ 8 files changed, 126 insertions(+), 7 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb16fda7b..142878f58 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1,3 +1,4 @@ +import re import json import asyncio import logging @@ -866,6 +867,8 @@ def _words(self): class URL_UNVERIFIED(BaseEvent): + _status_code_regex = re.compile(r"^status-(\d{1,3})$") + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.web_spider_distance = getattr(self.source, "web_spider_distance", 0) @@ -921,6 +924,14 @@ def _data_id(self): data = "spider-danger" + data return data + @property + def status_code(self): + for t in self.tags: + match = self._status_code_regex.match(t) + if match: + return int(match.groups()[0]) + return 0 + class URL(URL_UNVERIFIED): def sanitize_data(self, data): @@ -973,7 +984,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # count number of consecutive redirects self.num_redirects = getattr(self.source, "num_redirects", 0) - if str(self.data.get("status_code", 0)).startswith("3"): + if str(self.status_code).startswith("3"): self.num_redirects += 1 def sanitize_data(self, data): @@ -1001,6 +1012,13 @@ def _words(self): def _pretty_string(self): return f'{self.data["hash"]["header_mmh3"]}:{self.data["hash"]["body_mmh3"]}' + @property + def status_code(self): + try: + return int(self.data.get("status_code", 0)) + except (ValueError, TypeError): + return 0 + class VULNERABILITY(DictHostEvent): _always_emit = True diff --git 
a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index f62560704..8f788aae6 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -389,6 +389,50 @@ def url_parents(u): u = parent +def best_http_status(code1, code2): + """ + Determine the better HTTP status code between two given codes. + + The 'better' status code is considered based on typical usage and priority in HTTP communication. + Lower codes are generally better than higher codes. Within the same class (e.g., 2xx), a lower code is better. + Between different classes, the order of preference is 2xx > 3xx > 1xx > 4xx > 5xx. + + Args: + code1 (int): The first HTTP status code. + code2 (int): The second HTTP status code. + + Returns: + int: The better HTTP status code between the two provided codes. + + Examples: + >>> best_http_status(200, 404) + 200 + >>> best_http_status(500, 400) + 400 + >>> best_http_status(301, 302) + 301 + """ + + # Classify the codes into their respective categories (1xx, 2xx, 3xx, 4xx, 5xx) + def classify_code(code): + return int(code) // 100 + + class1 = classify_code(code1) + class2 = classify_code(code2) + + # Priority order for classes + priority_order = {2: 1, 3: 2, 1: 3, 4: 4, 5: 5} + + # Compare based on class priority + p1 = priority_order.get(class1, 10) + p2 = priority_order.get(class2, 10) + if p1 != p2: + return code1 if p1 < p2 else code2 + + # If in the same class, the lower code is better + return min(code1, code2) + + def tldextract(data): """ Extracts the subdomain, domain, and suffix from a URL string. 
diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index db9fcd946..c899d9723 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -3,7 +3,7 @@ from contextlib import suppress from .csv import CSV -from bbot.core.helpers.misc import make_ip_type, is_ip, is_port +from bbot.core.helpers.misc import make_ip_type, is_ip, is_port, best_http_status severity_map = { "INFO": 0, @@ -21,7 +21,17 @@ class asset_inventory(CSV): - watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "URL", "FINDING", "VULNERABILITY", "TECHNOLOGY", "IP_ADDRESS"] + watched_events = [ + "OPEN_TCP_PORT", + "DNS_NAME", + "URL", + "FINDING", + "VULNERABILITY", + "TECHNOLOGY", + "IP_ADDRESS", + "WAF", + "HTTP_RESPONSE", + ] produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] meta = {"description": "Output to an asset inventory style flattened CSV file"} options = {"output_file": "", "use_previous": False, "summary_netmask": 16} @@ -31,7 +41,18 @@ class asset_inventory(CSV): "summary_netmask": "Subnet mask to use when summarizing IP addresses at end of scan", } - header_row = ["Host", "Provider", "IP(s)", "Status", "Open Ports", "Risk Rating", "Findings", "Description"] + header_row = [ + "Host", + "Provider", + "IP(s)", + "HTTP Status", + "HTTP Title", + "Open Ports", + "Risk Rating", + "Findings", + "Description", + "WAF", + ] filename = "asset-inventory.csv" async def setup(self): @@ -103,11 +124,13 @@ def sort_key(asset): "Host": host, "Provider": getattr(asset, "provider", ""), "IP(s)": ",".join(ips), - "Status": "Active" if asset.ports else "N/A", + "HTTP Status": str(getattr(asset, "http_status", 0)), + "HTTP Title": str(getattr(asset, "http_title", "")), "Open Ports": ",".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), + "WAF": getattr(asset, "waf", ""), } 
row.update(asset.custom_fields) self.writerow(row) @@ -191,8 +214,11 @@ def __init__(self, host): self.status = "UNKNOWN" self.risk_rating = 0 self.provider = "" + self.waf = "" self.technologies = set() self.custom_fields = {} + self.http_status = 0 + self.http_title = "" def absorb_csv_row(self, row): # host @@ -230,6 +256,11 @@ def absorb_event(self, event): if not is_ip(event.host): self.host = event.host + http_status = getattr(event, "status_code", 0) + update_http_status = best_http_status(http_status, self.http_status) == http_status + if update_http_status: + self.http_status = http_status + self.ip_addresses = set(_make_ip_list(event.resolved_hosts)) if event.port: @@ -251,6 +282,16 @@ def absorb_event(self, event): if event.type == "TECHNOLOGY": self.technologies.add(event.data["technology"]) + if event.type == "WAF": + if waf := event.data.get("WAF", ""): + if update_http_status or not self.waf: + self.waf = waf + + if event.type == "HTTP_RESPONSE": + if title := event.data.get("title", ""): + if update_http_status or not self.http_title: + self.http_title = title + for tag in event.tags: if tag.startswith("cdn-") or tag.startswith("cloud-"): self.provider = tag diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index c82d96304..a294c23bb 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -27,7 +27,7 @@ def _event_precheck(self, event): # output module specific stuff # omitted events such as HTTP_RESPONSE etc. 
- if event._omit: + if event._omit and not event.type in self.get_watched_events(): return False, "_omit is True" # force-output certain events to the graph diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index 836d14ea2..f80979619 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py @@ -1,6 +1,12 @@ from bbot.modules.base import BaseModule from wafw00f import main as wafw00f_main +# disable wafw00f logging +import logging + +wafw00f_logger = logging.getLogger("wafw00f") +wafw00f_logger.setLevel(logging.CRITICAL + 100) + class wafw00f(BaseModule): """ diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 731787926..efeddeb82 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -195,6 +195,8 @@ async def _emit_event(self, event, **kwargs): for ip in ips: resolved_hosts.add(ip) + event._dns_children = dns_children + # kill runaway DNS chains dns_resolve_distance = getattr(event, "dns_resolve_distance", 0) if dns_resolve_distance >= self.scan.helpers.dns.max_dns_resolve_distance: diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 675385784..d5c4a1b65 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -91,7 +91,9 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("https://evilcorp.com.:666", dummy=True) == "https://evilcorp.com:666/" assert scan.make_event("https://[bad::c0de]", dummy=True).with_port().geturl() == "https://[bad::c0de]:443/" assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" - assert "status-200" in scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]).tags + url_event = scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]) + assert "status-200" in url_event.tags + assert url_event.status_code == 200 with pytest.raises(ValidationError, match=".*status 
tag.*"): scan.make_event("https://evilcorp.com", "URL", events.ipv4_url) diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 0c50f65eb..ee62aa8d8 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -159,6 +159,12 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "]:22/my-file.csv", ) + assert helpers.best_http_status(200, 404) == 200 + assert helpers.best_http_status(500, 400) == 400 + assert helpers.best_http_status(301, 302) == 301 + assert helpers.best_http_status(0, 302) == 302 + assert helpers.best_http_status(500, 0) == 500 + assert helpers.split_domain("www.evilcorp.co.uk") == ("www", "evilcorp.co.uk") assert helpers.split_domain("asdf.www.test.notreal") == ("asdf.www", "test.notreal") assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") From c389563b1aae908708a9d26589def19f6d5f1dba Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:14:09 -0500 Subject: [PATCH 03/52] include DNS records in asset inventory CSV --- bbot/modules/output/asset_inventory.py | 11 ++++++++++- bbot/scanner/manager.py | 3 +-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index c899d9723..962609a92 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -33,7 +33,7 @@ class asset_inventory(CSV): "HTTP_RESPONSE", ] produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] - meta = {"description": "Output to an asset inventory style flattened CSV file"} + meta = {"description": "Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV"} options = {"output_file": "", "use_previous": False, "summary_netmask": 16} options_desc = { "output_file": "Set a custom output file", @@ -52,6 +52,7 @@ class asset_inventory(CSV): "Findings", "Description", "WAF", + "DNS Records", ] filename = "asset-inventory.csv" @@ -131,6 +132,7 @@ def sort_key(asset): "Findings": "\n".join(findings_and_vulns), "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), + "DNS Records": ",".join(getattr(asset, "dns_records", [])), } row.update(asset.custom_fields) self.writerow(row) @@ -208,6 +210,7 @@ class Asset: def __init__(self, host): self.host = host self.ip_addresses = set() + self.dns_records = [] self.ports = set() self.findings = set() self.vulnerabilities = set() @@ -256,6 +259,12 @@ def absorb_event(self, event): if not is_ip(event.host): self.host = event.host + dns_children = getattr(event, "_dns_children", {}) + if dns_children and not self.dns_records: + for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): + for record in sorted(records): + self.dns_records.append(f"{rdtype}:{record}") + http_status = getattr(event, "status_code", 0) update_http_status = best_http_status(http_status, self.http_status) == http_status if update_http_status: diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index efeddeb82..1fa47fef4 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -195,8 +195,6 @@ async def _emit_event(self, event, **kwargs): for ip in ips: resolved_hosts.add(ip) - event._dns_children = dns_children - # kill runaway DNS chains dns_resolve_distance = getattr(event, "dns_resolve_distance", 0) if dns_resolve_distance >= self.scan.helpers.dns.max_dns_resolve_distance: @@ -206,6 +204,7 @@ async def _emit_event(self, event, **kwargs): dns_children = {} if event.type in ("DNS_NAME", "IP_ADDRESS"): + event._dns_children = dns_children for tag in dns_tags: event.add_tag(tag) 
From 31ae4d603e97aeb95473f247ff5db74a4e264f66 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:16:11 -0500 Subject: [PATCH 04/52] Description --> Technologies --- bbot/modules/output/asset_inventory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 962609a92..6328334c5 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -50,7 +50,7 @@ class asset_inventory(CSV): "Open Ports", "Risk Rating", "Findings", - "Description", + "Technologies", "WAF", "DNS Records", ] @@ -130,7 +130,7 @@ def sort_key(asset): "Open Ports": ",".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), - "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), + "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), "DNS Records": ",".join(getattr(asset, "dns_records", [])), } @@ -237,7 +237,7 @@ def absorb_csv_row(self, row): findings = [i.strip() for i in row.get("Findings", "").splitlines()] self.findings.update(set(i for i in findings if i)) # technologies - technologies = [i.strip() for i in row.get("Description", "").splitlines()] + technologies = [i.strip() for i in row.get("Technologies", "").splitlines()] self.technologies.update(set(i for i in technologies if i)) # risk rating risk_rating = row.get("Risk Rating", "").strip() From 9c96cb4eb68c38b8d9f3d6a7a482956415052996 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:35:29 -0500 Subject: [PATCH 05/52] fix tests --- bbot/test/test_step_1/test_modules_basic.py | 23 +++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index 4503582d8..b943144ac 100644 --- 
a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -18,16 +18,27 @@ async def test_modules_basic(scan, helpers, events, bbot_config, bbot_scanner, h httpx_mock.add_response(method=http_method, url=re.compile(r".*"), json={"test": "test"}) # output module specific event filtering tests - base_output_module = BaseOutputModule(scan) - base_output_module.watched_events = ["IP_ADDRESS"] + base_output_module_1 = BaseOutputModule(scan) + base_output_module_1.watched_events = ["IP_ADDRESS"] + localhost = scan.make_event("127.0.0.1", source=scan.root_event) + assert base_output_module_1._event_precheck(localhost)[0] == True + localhost._internal = True + assert base_output_module_1._event_precheck(localhost)[0] == False + localhost._internal = False + assert base_output_module_1._event_precheck(localhost)[0] == True + localhost._omit = True + assert base_output_module_1._event_precheck(localhost)[0] == True + + base_output_module_2 = BaseOutputModule(scan) + base_output_module_2.watched_events = ["*"] localhost = scan.make_event("127.0.0.1", source=scan.root_event) - assert base_output_module._event_precheck(localhost)[0] == True + assert base_output_module_2._event_precheck(localhost)[0] == True localhost._internal = True - assert base_output_module._event_precheck(localhost)[0] == False + assert base_output_module_2._event_precheck(localhost)[0] == False localhost._internal = False - assert base_output_module._event_precheck(localhost)[0] == True + assert base_output_module_2._event_precheck(localhost)[0] == True localhost._omit = True - assert base_output_module._event_precheck(localhost)[0] == False + assert base_output_module_2._event_precheck(localhost)[0] == False # common event filtering tests for module_class in (BaseModule, BaseOutputModule, BaseReportModule, BaseInternalModule): From 1a48f7537854996c77dffa42cacd895736102ef9 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:38:07 -0500 Subject: 
[PATCH 06/52] fix visual issue with ports in libreoffice --- bbot/modules/output/asset_inventory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 6328334c5..acb833daa 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -124,15 +124,15 @@ def sort_key(asset): row = { "Host": host, "Provider": getattr(asset, "provider", ""), - "IP(s)": ",".join(ips), + "IP(s)": ", ".join(ips), "HTTP Status": str(getattr(asset, "http_status", 0)), "HTTP Title": str(getattr(asset, "http_title", "")), - "Open Ports": ",".join(ports), + "Open Ports": ", ".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), - "DNS Records": ",".join(getattr(asset, "dns_records", [])), + "DNS Records": ", ".join(getattr(asset, "dns_records", [])), } row.update(asset.custom_fields) self.writerow(row) From 491753062cecd82ceaca4a9ac3bf8fede13fbb0b Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Thu, 18 Jan 2024 15:55:06 +0000 Subject: [PATCH 07/52] Refresh module docs --- docs/modules/list_of_modules.md | 2 +- docs/scanning/events.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 60e0c7b62..89bb6d24e 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -96,7 +96,7 @@ | virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | | zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | -| asset_inventory | output | No | Output to an asset inventory style flattened CSV file | | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY | IP_ADDRESS, OPEN_TCP_PORT | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | | csv | output | No | Output to CSV | | * | | | discord | output | No | Message a Discord channel when certain events are encountered | | * | | | emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index d5ce37b64..defcf46a8 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -61,7 +61,7 @@ Below is a full list of event types along with which modules produce/consume the | FINDING | 2 | 24 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | | GEOLOCATION | 0 | 2 | | ip2location, ipstack | | HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 15 | 1 | ajaxpro, badsecrets, dastardly, excavate, filedownload, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | +| HTTP_RESPONSE | 16 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, excavate, filedownload, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | | IP_ADDRESS | 9 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, 
ipstack, masscan, nmap, speculate | asset_inventory, ipneighbor, speculate | | IP_RANGE | 3 | 0 | masscan, nmap, speculate | | | OPEN_TCP_PORT | 4 | 5 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, masscan, nmap, speculate | @@ -77,7 +77,7 @@ Below is a full list of event types along with which modules produce/consume the | USERNAME | 0 | 2 | | credshed, dehashed | | VHOST | 1 | 1 | web_report | vhost | | VULNERABILITY | 2 | 7 | asset_inventory, web_report | ajaxpro, badsecrets, dastardly, generic_ssrf, internetdb, nuclei, telerik | -| WAF | 0 | 1 | | wafw00f | +| WAF | 1 | 1 | asset_inventory | wafw00f | | WEBSCREENSHOT | 0 | 1 | | gowitness | From 2227c89cd2167581ee63ef53267a4136e7c0c3a7 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 11:36:00 -0500 Subject: [PATCH 08/52] fix filedownload options --- bbot/modules/filedownload.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py index 2f0c26c59..05db2b9df 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -79,8 +79,8 @@ class filedownload(BaseModule): scope_distance_modifier = 3 async def setup(self): - self.extensions = list(set([e.lower().strip(".") for e in self.options.get("extensions", [])])) - self.max_filesize = self.options.get("max_filesize", "10MB") + self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) + self.max_filesize = self.config.get("max_filesize", "10MB") self.download_dir = self.scan.home / "filedownload" self.helpers.mkdir(self.download_dir) self.urls_downloaded = set() From 66a28f2ee7b9154bb6472127e7c3aa5a3f95c7ef Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 11:50:38 -0500 Subject: [PATCH 09/52] improve chain_lists helper --- bbot/core/helpers/misc.py | 4 ++++ bbot/test/test_step_1/test_helpers.py | 1 + 2 files changed, 5 insertions(+) diff --git 
a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index f62560704..10c2dc8a6 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -1121,6 +1121,8 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): This function takes a list `l` and flattens it by splitting its entries on commas. It also allows you to optionally open entries as files and add their contents to the list. + The order of entries is preserved, and deduplication is performed automatically. + Args: l (list): The list of strings to chain together. try_files (bool, optional): Whether to try to open entries as files. Defaults to False. @@ -1137,6 +1139,8 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): >>> chain_lists(["a,file.txt", "c,d"], try_files=True) ['a', 'f_line1', 'f_line2', 'f_line3', 'c', 'd'] """ + if isinstance(l, str): + l = [l] final_list = dict() for entry in l: for s in entry.split(","): diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 0c50f65eb..1f7fdb9be 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -338,6 +338,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert "asdf" in helpers.str_or_file(str(test_file)) assert "nope" in helpers.str_or_file("nope") assert tuple(helpers.chain_lists([str(test_file), "nope"], try_files=True)) == ("asdf", "fdsa", "nope") + assert tuple(helpers.chain_lists("one, two", try_files=True)) == ("one", "two") assert test_file.is_file() with pytest.raises(DirectoryCreationError, match="Failed to create.*"): From 0143b53c67a7a1a32d2f8e1812959d9ec9cc4c63 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 16:12:44 -0500 Subject: [PATCH 10/52] asset inventory bugfixes and improvements --- bbot/core/event/base.py | 21 ++++++++++++++++++--- bbot/modules/internal/excavate.py | 7 +------ bbot/modules/output/asset_inventory.py | 23 +++++++++++++++++++---- 
bbot/test/test_step_1/test_events.py | 16 +++++++++++++++- 4 files changed, 53 insertions(+), 14 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 142878f58..616c7ecf4 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -7,6 +7,7 @@ from typing import Optional from datetime import datetime from contextlib import suppress +from urllib.parse import urljoin from pydantic import BaseModel, field_validator from .helpers import * @@ -19,6 +20,7 @@ is_subdomain, is_ip, is_ptr, + is_uri, domain_stem, make_netloc, make_ip_type, @@ -925,7 +927,7 @@ def _data_id(self): return data @property - def status_code(self): + def http_status(self): for t in self.tags: match = self._status_code_regex.match(t) if match: @@ -984,7 +986,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # count number of consecutive redirects self.num_redirects = getattr(self.source, "num_redirects", 0) - if str(self.status_code).startswith("3"): + if str(self.http_status).startswith("3"): self.num_redirects += 1 def sanitize_data(self, data): @@ -1013,12 +1015,25 @@ def _pretty_string(self): return f'{self.data["hash"]["header_mmh3"]}:{self.data["hash"]["body_mmh3"]}' @property - def status_code(self): + def http_status(self): try: return int(self.data.get("status_code", 0)) except (ValueError, TypeError): return 0 + @property + def redirect_location(self): + location = self.data.get("location", "") + # if it's a redirect + if location: + # get the url scheme + scheme = is_uri(location, return_scheme=True) + # if there's no scheme (i.e. 
it's a relative redirect) + if not scheme: + # then join the location with the current url + location = urljoin(self.parsed.geturl(), location) + return location + class VULNERABILITY(DictHostEvent): _always_emit = True diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f3f15f83f..cf67bf7f6 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -366,16 +366,11 @@ async def handle_event(self, event): # handle redirects web_spider_distance = getattr(event, "web_spider_distance", 0) num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) - location = event.data.get("location", "") + location = getattr(event, "redirect_location", "") # if it's a redirect if location: # get the url scheme scheme = self.helpers.is_uri(location, return_scheme=True) - # if there's no scheme (i.e. it's a relative redirect) - if not scheme: - # then join the location with the current url - location = urljoin(event.parsed.geturl(), location) - scheme = self.helpers.is_uri(location, return_scheme=True) if scheme in ("http", "https"): if num_redirects <= self.max_redirects: # tag redirects to out-of-scope hosts as affiliates diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index acb833daa..79eae5f71 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -125,7 +125,7 @@ def sort_key(asset): "Host": host, "Provider": getattr(asset, "provider", ""), "IP(s)": ", ".join(ips), - "HTTP Status": str(getattr(asset, "http_status", 0)), + "HTTP Status": asset.http_status_full, "HTTP Title": str(getattr(asset, "http_title", "")), "Open Ports": ", ".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], @@ -222,6 +222,7 @@ def __init__(self, host): self.custom_fields = {} self.http_status = 0 self.http_title = "" + self.redirect_location = "" def absorb_csv_row(self, row): # host @@ -265,12 +266,22 @@ def 
absorb_event(self, event): for record in sorted(records): self.dns_records.append(f"{rdtype}:{record}") - http_status = getattr(event, "status_code", 0) - update_http_status = best_http_status(http_status, self.http_status) == http_status + http_status = getattr(event, "http_status", 0) + # log.hugewarning(event) + # log.hugewarning(f"http_status: {http_status}") + update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status if update_http_status: self.http_status = http_status + if str(http_status).startswith("3"): + if event.type == "HTTP_RESPONSE": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + self.redirect_location = redirect_location + else: + self.redirect_location = "" - self.ip_addresses = set(_make_ip_list(event.resolved_hosts)) + if event.resolved_hosts: + self.ip_addresses.update(set(_make_ip_list(event.resolved_hosts))) if event.port: self.ports.add(str(event.port)) @@ -310,6 +321,10 @@ def absorb_event(self, event): def hostkey(self): return _make_hostkey(self.host, self.ip_addresses) + @property + def http_status_full(self): + return str(self.http_status) + (f" -> {self.redirect_location}" if self.redirect_location else "") + def _make_hostkey(host, ips): """ diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index d5c4a1b65..3181a1688 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -93,7 +93,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" url_event = scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]) assert "status-200" in url_event.tags - assert url_event.status_code == 200 + assert url_event.http_status == 200 with pytest.raises(ValidationError, match=".*status tag.*"): scan.make_event("https://evilcorp.com", "URL", 
events.ipv4_url) @@ -103,6 +103,20 @@ async def test_events(events, scan, helpers, bbot_config): assert events.http_response.parsed.scheme == "http" assert events.http_response.with_port().geturl() == "http://example.com:80/" + http_response = scan.make_event( + { + "port": "80", + "url": "http://www.evilcorp.com:80", + "input": "http://www.evilcorp.com:80", + "location": "/asdf", + "status_code": 301, + }, + "HTTP_RESPONSE", + dummy=True, + ) + assert http_response.http_status == 301 + assert http_response.redirect_location == "http://www.evilcorp.com/asdf" + # open port tests assert events.open_port in events.domain assert "api.publicapis.org:443" in events.open_port From 4bbb92832e042d498f679665e1fede2bcac876fc Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 17:09:41 -0500 Subject: [PATCH 11/52] removed debugging messages --- bbot/modules/output/asset_inventory.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 79eae5f71..df11e6f34 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -267,8 +267,6 @@ def absorb_event(self, event): self.dns_records.append(f"{rdtype}:{record}") http_status = getattr(event, "http_status", 0) - # log.hugewarning(event) - # log.hugewarning(f"http_status: {http_status}") update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status if update_http_status: self.http_status = http_status From ed93357ac18267200229584fabfc28ec5e154eae Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 17:21:45 -0500 Subject: [PATCH 12/52] url-decode http title --- bbot/core/event/base.py | 14 +++++++++++--- bbot/test/test_step_1/test_events.py | 2 ++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 616c7ecf4..1990fba68 100644 --- a/bbot/core/event/base.py +++ 
b/bbot/core/event/base.py @@ -14,7 +14,7 @@ from bbot.core.errors import * from bbot.core.helpers import ( extract_words, - split_host_port, + get_file_extension, host_in_host, is_domain, is_subdomain, @@ -24,10 +24,11 @@ domain_stem, make_netloc, make_ip_type, + recursive_decode, smart_decode, - get_file_extension, - validators, + split_host_port, tagify, + validators, ) @@ -1021,6 +1022,13 @@ def http_status(self): except (ValueError, TypeError): return 0 + @property + def http_title(self): + try: + return recursive_decode(self.data.get("title", "")) + except Exception: + return "" + @property def redirect_location(self): location = self.data.get("location", "") diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 3181a1688..1fc01a046 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -106,6 +106,7 @@ async def test_events(events, scan, helpers, bbot_config): http_response = scan.make_event( { "port": "80", + "title": "HTTP%20RESPONSE", "url": "http://www.evilcorp.com:80", "input": "http://www.evilcorp.com:80", "location": "/asdf", @@ -115,6 +116,7 @@ async def test_events(events, scan, helpers, bbot_config): dummy=True, ) assert http_response.http_status == 301 + assert http_response.http_title == "HTTP RESPONSE" assert http_response.redirect_location == "http://www.evilcorp.com/asdf" # open port tests From e342a1e099167be2f0080b0c02a3939ead4c4287 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 17:29:59 -0500 Subject: [PATCH 13/52] http title parsing --- bbot/core/event/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 1990fba68..54b9dae02 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1024,10 +1024,11 @@ def http_status(self): @property def http_title(self): + http_title = self.data.get("title", "") try: - return 
recursive_decode(self.data.get("title", "")) + return recursive_decode(http_title) except Exception: - return "" + return http_title @property def redirect_location(self): From 7d6fcc7dc8c30f5c357ceb3bfca48a11fd28ae1f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 09:39:11 -0500 Subject: [PATCH 14/52] bugfix, add internal ip address column --- bbot/modules/output/asset_inventory.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index df11e6f34..7092a6461 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -44,10 +44,11 @@ class asset_inventory(CSV): header_row = [ "Host", "Provider", - "IP(s)", + "IP (External)", + "IP (Internal)", + "Open Ports", "HTTP Status", "HTTP Title", - "Open Ports", "Risk Rating", "Findings", "Technologies", @@ -110,13 +111,15 @@ def sort_key(asset): findings_and_vulns = asset.findings.union(asset.vulnerabilities) ports = getattr(asset, "ports", set()) ports = [str(p) for p in sorted([int(p) for p in asset.ports])] - ips = sorted([str(i) for i in getattr(asset, "ip_addresses", [])]) + ips_all = getattr(asset, "ip_addresses", []) + ips_external = sorted([str(ip) for ip in [i for i in ips_all if not i.is_private]]) + ips_internal = sorted([str(ip) for ip in [i for i in ips_all if i.is_private]]) host = self.helpers.make_ip_type(getattr(asset, "host", "")) if host and isinstance(host, str): _, domain = self.helpers.split_domain(host) if domain: increment_stat("Domains", domain) - for ip in ips: + for ip in ips_all: net = ipaddress.ip_network(f"{ip}/{self.summary_netmask}", strict=False) increment_stat("IP Addresses", str(net)) for port in ports: @@ -124,10 +127,11 @@ def sort_key(asset): row = { "Host": host, "Provider": getattr(asset, "provider", ""), - "IP(s)": ", ".join(ips), + "IP (External)": ", ".join(ips_external), + "IP (Internal)": ", 
".join(ips_internal), + "Open Ports": ", ".join(ports), "HTTP Status": asset.http_status_full, "HTTP Title": str(getattr(asset, "http_title", "")), - "Open Ports": ", ".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), @@ -161,7 +165,7 @@ async def finish(self): # yield to event loop to make sure we don't hold up the scan await self.helpers.sleep(0) host = row.get("Host", "").strip() - ips = row.get("IP(s)", "") + ips = row.get("IP (External)", "") + "," + row.get("IP (Internal)", "") if not host or not ips: continue hostkey = _make_hostkey(host, ips) @@ -263,7 +267,7 @@ def absorb_event(self, event): dns_children = getattr(event, "_dns_children", {}) if dns_children and not self.dns_records: for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): - for record in sorted(records): + for record in sorted([str(r) for r in records]): self.dns_records.append(f"{rdtype}:{record}") http_status = getattr(event, "http_status", 0) From d9a8074394b2f19fc84e30a9c6f81f95f35f2dd2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 10:19:44 -0500 Subject: [PATCH 15/52] fix tests --- bbot/modules/output/asset_inventory.py | 14 ++++++------- .../test_module_asset_inventory.py | 20 ++++++++----------- 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 7092a6461..c89d7e60f 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -136,7 +136,7 @@ def sort_key(asset): "Findings": "\n".join(findings_and_vulns), "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), - "DNS Records": ", ".join(getattr(asset, "dns_records", [])), + "DNS Records": ", ".join(sorted([str(r) for r in getattr(asset, 
"dns_records", [])])), } row.update(asset.custom_fields) self.writerow(row) @@ -214,7 +214,7 @@ class Asset: def __init__(self, host): self.host = host self.ip_addresses = set() - self.dns_records = [] + self.dns_records = set() self.ports = set() self.findings = set() self.vulnerabilities = set() @@ -234,7 +234,8 @@ def absorb_csv_row(self, row): if host and not is_ip(host): self.host = host # ips - self.ip_addresses = set(_make_ip_list(row.get("IP(s)", ""))) + self.ip_addresses = set(_make_ip_list(row.get("IP (External)", ""))) + self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", "")))) # ports ports = [i.strip() for i in row.get("Open Ports", "").split(",")] self.ports.update(set(i for i in ports if i and is_port(i))) @@ -265,10 +266,9 @@ def absorb_event(self, event): self.host = event.host dns_children = getattr(event, "_dns_children", {}) - if dns_children and not self.dns_records: - for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): - for record in sorted([str(r) for r in records]): - self.dns_records.append(f"{rdtype}:{record}") + for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): + for record in sorted([str(r) for r in records]): + self.dns_records.add(f"{rdtype}:{record}") http_status = getattr(event, "http_status", 0) update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status diff --git a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py index 2f4303013..cc4399266 100644 --- a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +++ b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py @@ -8,16 +8,12 @@ class TestAsset_Inventory(ModuleTestBase): modules_overrides = ["asset_inventory", "nmap", "sslcert"] async def setup_before_prep(self, module_test): - old_resolve_fn = module_test.scan.helpers.dns.resolve_event - - async def 
resolve_event(event, minimal=False): - if event.data == "www.bbottest.notreal": - return ["a-record"], True, False, {"A": {"127.0.0.1"}} - elif event.data == "127.0.0.1": - return ["ptr-record"], False, False, {"PTR": {"asdf.bbottest.notreal"}} - return await old_resolve_fn(event, minimal) - - module_test.monkeypatch.setattr(module_test.scan.helpers.dns, "resolve_event", resolve_event) + module_test.scan.helpers.dns.mock_dns( + { + ("127.0.0.1", "PTR"): "www.bbottest.notreal", + ("www.bbottest.notreal", "A"): "127.0.0.1", + } + ) def check(self, module_test, events): assert any(e.data == "127.0.0.1:9999" for e in events), "No open port found" @@ -25,7 +21,7 @@ def check(self, module_test, events): filename = next(module_test.scan.home.glob("asset-inventory.csv")) with open(filename) as f: content = f.read() - assert "www.bbottest.notreal,,127.0.0.1" in content + assert "www.bbottest.notreal,,,127.0.0.1" in content filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) with open(filename) as f: assert "127.0.0.0/16" in f.read() @@ -45,7 +41,7 @@ def check(self, module_test, events): filename = next(module_test.scan.home.glob("asset-inventory.csv")) with open(filename) as f: content = f.read() - assert "www.bbottest.notreal,,127.0.0.1" in content + assert "www.bbottest.notreal,,,127.0.0.1" in content filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) with open(filename) as f: assert "127.0.0.0/16" in f.read() From bbbe7c6ed86421486c8a390ba283c5dc2ecf9e5c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 11:26:05 -0500 Subject: [PATCH 16/52] Increase subdomaincenter timeout --- bbot/modules/subdomaincenter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py index 6d1825b8b..c5c69293c 100644 --- a/bbot/modules/subdomaincenter.py +++ b/bbot/modules/subdomaincenter.py @@ -21,7 +21,7 @@ async def 
request_url(self, query): for i, _ in enumerate(range(self.retries + 1)): if i > 0: self.verbose(f"Retry #{i} for {query} after response code {status_code}") - response = await self.helpers.request(url) + response = await self.helpers.request(url, timeout=self.http_timeout + 30) status_code = getattr(response, "status_code", 0) if status_code == 429: await self.sleep(20) From e7c716c74efc32614df61cdb07cc77e4a984c5bd Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 11:31:39 -0500 Subject: [PATCH 17/52] asyncify emit_event, default qsize=100 --- bbot/modules/ajaxpro.py | 4 +- bbot/modules/azure_realm.py | 2 +- bbot/modules/azure_tenant.py | 4 +- bbot/modules/badsecrets.py | 6 +-- bbot/modules/base.py | 38 ++++---------- bbot/modules/bevigil.py | 4 +- bbot/modules/bucket_file_enum.py | 2 +- bbot/modules/builtwith.py | 4 +- bbot/modules/bypass403.py | 4 +- bbot/modules/credshed.py | 8 +-- bbot/modules/deadly/dastardly.py | 4 +- bbot/modules/deadly/ffuf.py | 2 +- bbot/modules/deadly/nuclei.py | 4 +- bbot/modules/deadly/vhost.py | 4 +- bbot/modules/dehashed.py | 8 +-- bbot/modules/dnscommonsrv.py | 2 +- bbot/modules/dnszonetransfer.py | 8 +-- bbot/modules/emailformat.py | 2 +- bbot/modules/ffuf_shortnames.py | 16 +++--- bbot/modules/fingerprintx.py | 2 +- bbot/modules/generic_ssrf.py | 2 +- bbot/modules/git.py | 2 +- bbot/modules/github_codesearch.py | 4 +- bbot/modules/github_org.py | 6 +-- bbot/modules/gowitness.py | 6 +-- bbot/modules/host_header.py | 8 +-- bbot/modules/httpx.py | 4 +- bbot/modules/hunt.py | 2 +- bbot/modules/hunterio.py | 6 +-- bbot/modules/iis_shortnames.py | 4 +- bbot/modules/internal/excavate.py | 50 +++++++++---------- bbot/modules/internal/speculate.py | 12 ++--- bbot/modules/internetdb.py | 12 ++--- bbot/modules/ip2location.py | 2 +- bbot/modules/ipneighbor.py | 2 +- bbot/modules/ipstack.py | 2 +- bbot/modules/masscan.py | 12 ++--- bbot/modules/massdns.py | 10 ++-- bbot/modules/nmap.py | 4 +- bbot/modules/nsec.py | 4 +- 
bbot/modules/ntlm.py | 4 +- bbot/modules/oauth.py | 8 +-- bbot/modules/output/asset_inventory.py | 8 +-- bbot/modules/paramminer_headers.py | 2 +- bbot/modules/pgp.py | 2 +- bbot/modules/postman.py | 2 +- bbot/modules/report/asn.py | 4 +- bbot/modules/robots.py | 2 +- bbot/modules/secretsdb.py | 2 +- bbot/modules/sitedossier.py | 2 +- bbot/modules/skymem.py | 4 +- bbot/modules/smuggler.py | 2 +- bbot/modules/social.py | 2 +- bbot/modules/sslcert.py | 2 +- bbot/modules/subdomain_hijack.py | 4 +- bbot/modules/telerik.py | 14 +++--- bbot/modules/templates/bucket.py | 6 +-- bbot/modules/templates/subdomain_enum.py | 2 +- bbot/modules/url_manipulation.py | 2 +- bbot/modules/urlscan.py | 8 +-- bbot/modules/viewdns.py | 2 +- bbot/modules/wafw00f.py | 4 +- bbot/modules/wappalyzer.py | 2 +- bbot/modules/wayback.py | 2 +- bbot/modules/zoomeye.py | 2 +- .../test_step_1/test_manager_deduplication.py | 4 +- .../test_manager_scope_accuracy.py | 2 +- bbot/test/test_step_1/test_modules_basic.py | 2 +- docs/contribution.md | 2 +- 69 files changed, 189 insertions(+), 197 deletions(-) diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index 924f88835..3a0061315 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -24,7 +24,7 @@ async def handle_event(self, event): probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx") if probe_confirm: if probe_confirm.status_code != 200: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data, @@ -40,7 +40,7 @@ async def handle_event(self, event): ajaxpro_regex_result = self.ajaxpro_regex.search(resp_body) if ajaxpro_regex_result: ajax_pro_path = ajaxpro_regex_result.group(0) - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index f299ca3dc..a3d6ad6ba 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -22,7 +22,7 @@ async def 
handle_event(self, event): auth_url, "URL_UNVERIFIED", source=event, tags=["affiliate", "ms-auth-url"] ) url_event.source_domain = domain - self.emit_event(url_event) + await self.emit_event(url_event) async def getuserrealm(self, domain): url = f"https://login.microsoftonline.com/getuserrealm.srf?login=test@{domain}" diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index a15fbecf4..909acbe20 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -34,7 +34,7 @@ async def handle_event(self, event): self.verbose(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}') for domain in domains: if domain != query: - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) + await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) # tenant names if domain.lower().endswith(".onmicrosoft.com"): tenantname = domain.split(".")[0].lower() @@ -44,7 +44,7 @@ async def handle_event(self, event): event_data = {"tenant-names": sorted(tenant_names), "domains": sorted(domains)} if tenant_id is not None: event_data["tenant-id"] = tenant_id - self.emit_event(event_data, "AZURE_TENANT", source=event) + await self.emit_event(event_data, "AZURE_TENANT", source=event) async def query(self, domain): url = f"{self.base_url}/autodiscover/autodiscover.svc" diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py index 0e188bced..a7a31a48c 100644 --- a/bbot/modules/badsecrets.py +++ b/bbot/modules/badsecrets.py @@ -52,11 +52,11 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - self.emit_event(data, "VULNERABILITY", event) + await self.emit_event(data, "VULNERABILITY", event) elif r["type"] == "IdentifyOnly": # There is little value to presenting a non-vulnerable asp.net viewstate, as it is not crackable without a Matrioshka brain. Just emit a technology instead. 
if r["detecting_module"] == "ASPNET_Viewstate": - self.emit_event( + await self.emit_event( {"technology": "microsoft asp.net", "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event, @@ -67,4 +67,4 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 7a4a78342..6ae93c7ea 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -104,7 +104,7 @@ class BaseModule: _preserve_graph = False _stats_exclude = False - _qsize = 0 + _qsize = 100 _priority = 3 _name = "base" _type = "scan" @@ -381,7 +381,7 @@ def make_event(self, *args, **kwargs): Examples: >>> new_event = self.make_event("1.2.3.4", source=event) - >>> self.emit_event(new_event) + >>> await self.emit_event(new_event) Returns: Event or None: The created event, or None if a validation error occurred and raise_error was False. @@ -401,7 +401,7 @@ def make_event(self, *args, **kwargs): event.module = self return event - def emit_event(self, *args, **kwargs): + async def emit_event(self, *args, **kwargs): """Emit an event to the event queue and distribute it to interested modules. This is how modules "return" data. 
@@ -419,10 +419,10 @@ def emit_event(self, *args, **kwargs): ``` Examples: - >>> self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) >>> new_event = self.make_event("1.2.3.4", source=event) - >>> self.emit_event(new_event) + >>> await self.emit_event(new_event) Returns: None @@ -438,27 +438,7 @@ def emit_event(self, *args, **kwargs): emit_kwargs[o] = v event = self.make_event(*args, **event_kwargs) if event: - self.queue_outgoing_event(event, **emit_kwargs) - - async def emit_event_wait(self, *args, **kwargs): - """Emit an event to the event queue and await until there is space in the outgoing queue. - - This method is similar to `emit_event`, but it waits until there's sufficient space in the outgoing - event queue before emitting the event. It utilizes the queue size threshold defined in `self._qsize`. - - Args: - *args: Positional arguments to be passed to `emit_event()` for event creation. - **kwargs: Keyword arguments to be passed to `emit_event()` for event creation or configuration. - - Returns: - None - - See Also: - emit_event: For emitting an event without waiting on the queue size. - """ - while self.outgoing_event_queue.qsize() > self._qsize: - await self.helpers.sleep(0.2) - return self.emit_event(*args, **kwargs) + await self.queue_outgoing_event(event, **emit_kwargs) async def _events_waiting(self): """ @@ -808,7 +788,7 @@ async def queue_event(self, event, precheck=True): except AttributeError: self.debug(f"Not in an acceptable state to queue incoming event") - def queue_outgoing_event(self, event, **kwargs): + async def queue_outgoing_event(self, event, **kwargs): """ Queues an outgoing event to the module's outgoing event queue for further processing. @@ -829,7 +809,7 @@ def queue_outgoing_event(self, event, **kwargs): AttributeError: If the module is not in an acceptable state to queue outgoing events. 
""" try: - self.outgoing_event_queue.put_nowait((event, kwargs)) + await self.outgoing_event_queue.put((event, kwargs)) except AttributeError: self.debug(f"Not in an acceptable state to queue outgoing event") @@ -1076,7 +1056,7 @@ def incoming_event_queue(self): @property def outgoing_event_queue(self): if self._outgoing_event_queue is None: - self._outgoing_event_queue = ShuffleQueue() + self._outgoing_event_queue = ShuffleQueue(self._qsize) return self._outgoing_event_queue @property diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index e6c9990dd..ff868e969 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -29,13 +29,13 @@ async def handle_event(self, event): subdomains = await self.query(query, request_fn=self.request_subdomains, parse_fn=self.parse_subdomains) if subdomains: for subdomain in subdomains: - self.emit_event(subdomain, "DNS_NAME", source=event) + await self.emit_event(subdomain, "DNS_NAME", source=event) if self.urls: urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls) if urls: for parsed_url in await self.scan.run_in_executor(self.helpers.validators.collapse_urls, urls): - self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) + await self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) async def request_subdomains(self, query): url = f"{self.base_url}/{self.helpers.quote(query)}/subdomains/" diff --git a/bbot/modules/bucket_file_enum.py b/bbot/modules/bucket_file_enum.py index 7eb6926c0..facaa021e 100644 --- a/bbot/modules/bucket_file_enum.py +++ b/bbot/modules/bucket_file_enum.py @@ -42,7 +42,7 @@ async def handle_aws(self, event): bucket_file = url + "/" + key file_extension = self.helpers.get_file_extension(key) if file_extension not in self.scan.url_extension_blacklist: - self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, tags="filedownload") + await self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, tags="filedownload") 
urls_emitted += 1 if urls_emitted >= self.file_limit: return diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py index 25a46ddf5..0b5793657 100644 --- a/bbot/modules/builtwith.py +++ b/bbot/modules/builtwith.py @@ -33,14 +33,14 @@ async def handle_event(self, event): if subdomains: for s in subdomains: if s != event: - self.emit_event(s, "DNS_NAME", source=event) + await self.emit_event(s, "DNS_NAME", source=event) # redirects if self.config.get("redirects", True): redirects = await self.query(query, parse_fn=self.parse_redirects, request_fn=self.request_redirects) if redirects: for r in redirects: if r != event: - self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) async def request_domains(self, query): url = f"{self.base_url}/v20/api.json?KEY={self.api_key}&LOOKUP={query}&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes" diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 182f4b4db..e4d8ed3f1 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -132,7 +132,7 @@ async def handle_event(self, event): if results is None: return if len(results) > collapse_threshold: - self.emit_event( + await self.emit_event( { "description": f"403 Bypass MULTIPLE SIGNATURES (exceeded threshold {str(collapse_threshold)})", "host": str(event.host), @@ -143,7 +143,7 @@ async def handle_event(self, event): ) else: for description in results: - self.emit_event( + await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", source=event, diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py index c493199d7..09ed57377 100644 --- a/bbot/modules/credshed.py +++ b/bbot/modules/credshed.py @@ -76,11 +76,11 @@ async def handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: - self.emit_event(email_event) + await 
self.emit_event(email_event) if user and not self.already_seen(f"{email}:{user}"): - self.emit_event(user, "USERNAME", source=email_event, tags=tags) + await self.emit_event(user, "USERNAME", source=email_event, tags=tags) if pw and not self.already_seen(f"{email}:{pw}"): - self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) for h_pw in hashes: if h_pw and not self.already_seen(f"{email}:{h_pw}"): - self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) diff --git a/bbot/modules/deadly/dastardly.py b/bbot/modules/deadly/dastardly.py index 403e9511e..47fd834fd 100644 --- a/bbot/modules/deadly/dastardly.py +++ b/bbot/modules/deadly/dastardly.py @@ -60,7 +60,7 @@ async def handle_event(self, event): for testcase in testsuite.testcases: for failure in testcase.failures: if failure.severity == "Info": - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": url, @@ -70,7 +70,7 @@ async def handle_event(self, event): event, ) else: - self.emit_event( + await self.emit_event( { "severity": failure.severity, "host": str(event.host), diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index cadaf4990..f7ba3d96e 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -83,7 +83,7 @@ async def handle_event(self, event): filters = await self.baseline_ffuf(fixed_url, exts=exts) async for r in self.execute_ffuf(self.tempfile, fixed_url, exts=exts, filters=filters): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) async def filter_event(self, event): if "endpoint" in event.tags: diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 33bcbe7e0..f3ef4ad67 100644 --- 
a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -150,7 +150,7 @@ async def handle_batch(self, *events): description_string += f" Extracted Data: [{','.join(extracted_results)}]" if severity in ["INFO", "UNKNOWN"]: - self.emit_event( + await self.emit_event( { "host": str(source_event.host), "url": url, @@ -160,7 +160,7 @@ async def handle_batch(self, *events): source_event, ) else: - self.emit_event( + await self.emit_event( { "severity": severity, "host": str(source_event.host), diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index f4675e10f..e2908dbbe 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -92,9 +92,9 @@ async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=F found_vhost_b64 = r["input"]["FUZZ"] vhost_dict = {"host": str(event.host), "url": host, "vhost": base64.b64decode(found_vhost_b64).decode()} if f"{vhost_dict['vhost']}{basehost}" != event.parsed.netloc: - self.emit_event(vhost_dict, "VHOST", source=event) + await self.emit_event(vhost_dict, "VHOST", source=event) if skip_dns_host == False: - self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) + await self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) yield vhost_dict["vhost"] diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py index a09de454e..4b3546712 100644 --- a/bbot/modules/dehashed.py +++ b/bbot/modules/dehashed.py @@ -49,13 +49,13 @@ async def handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: - self.emit_event(email_event) + await self.emit_event(email_event) if user and not self.already_seen(f"{email}:{user}"): - self.emit_event(user, "USERNAME", source=email_event, tags=tags) + await self.emit_event(user, "USERNAME", source=email_event, tags=tags) if pw and not self.already_seen(f"{email}:{pw}"): - 
self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) if h_pw and not self.already_seen(f"{email}:{h_pw}"): - self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) async def query(self, event): query = f"domain:{self.make_query(event)}" diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 538f51621..958b6b612 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -106,4 +106,4 @@ async def handle_event(self, event): queries = [event.data] + [f"{srv}.{event.data}" for srv in common_srvs] async for query, results in self.helpers.resolve_batch(queries, type="srv"): if results: - self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) + await self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) diff --git a/bbot/modules/dnszonetransfer.py b/bbot/modules/dnszonetransfer.py index 1ec5bf5eb..0a48526dc 100644 --- a/bbot/modules/dnszonetransfer.py +++ b/bbot/modules/dnszonetransfer.py @@ -42,7 +42,9 @@ async def handle_event(self, event): continue self.hugesuccess(f"Successful zone transfer against {nameserver} for domain {domain}!") finding_description = f"Successful DNS zone transfer against {nameserver} for {domain}" - self.emit_event({"host": str(event.host), "description": finding_description}, "FINDING", source=event) + await self.emit_event( + {"host": str(event.host), "description": finding_description}, "FINDING", source=event + ) for name, ttl, rdata in zone.iterate_rdatas(): if str(name) == "@": parent_data = domain @@ -52,13 +54,13 @@ async def handle_event(self, event): if not parent_event or parent_event == event: parent_event = event else: - self.emit_event(parent_event) + await self.emit_event(parent_event) for rdtype, t in self.helpers.dns.extract_targets(rdata): if not self.helpers.is_ip(t): t = 
f"{t}.{domain}" module = self.helpers.dns._get_dummy_module(rdtype) child_event = self.scan.make_event(t, "DNS_NAME", parent_event, module=module) - self.emit_event(child_event) + await self.emit_event(child_event) else: self.debug(f"No data returned by {nameserver} for domain {domain}") diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 3fd47ee2d..000c3d5cf 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -19,4 +19,4 @@ async def handle_event(self, event): return for email in self.helpers.extract_emails(r.text): if email.endswith(query): - self.emit_event(email, "EMAIL_ADDRESS", source=event) + await self.emit_event(email, "EMAIL_ADDRESS", source=event) diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py index ca45402e6..c062f09df 100644 --- a/bbot/modules/ffuf_shortnames.py +++ b/bbot/modules/ffuf_shortnames.py @@ -158,12 +158,14 @@ async def handle_event(self, event): if "shortname-file" in event.tags: for ext in used_extensions: async for r in self.execute_ffuf(tempfile, root_url, suffix=f".{ext}"): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + ) elif "shortname-directory" in event.tags: async for r in self.execute_ffuf(tempfile, root_url, exts=["/"]): r_url = f"{r['url'].rstrip('/')}/" - self.emit_event(r_url, "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event(r_url, "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) if self.config.get("find_delimiters"): if "shortname-directory" in event.tags: @@ -175,7 +177,9 @@ async def handle_event(self, event): async for r in self.execute_ffuf( tempfile, root_url, prefix=f"{prefix}{delimeter}", exts=["/"] ): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], 
"URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + ) elif "shortname-file" in event.tags: for ext in used_extensions: @@ -187,7 +191,7 @@ async def handle_event(self, event): async for r in self.execute_ffuf( tempfile, root_url, prefix=f"{prefix}{delimeter}", suffix=f".{ext}" ): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] ) @@ -217,7 +221,7 @@ async def finish(self): ) async for r in self.execute_ffuf(tempfile, url, prefix=prefix, exts=["/"]): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=self.shortname_to_event[hint], @@ -233,7 +237,7 @@ async def finish(self): async for r in self.execute_ffuf( tempfile, url, prefix=prefix, suffix=f".{ext}" ): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=self.shortname_to_event[hint], diff --git a/bbot/modules/fingerprintx.py b/bbot/modules/fingerprintx.py index be5695541..a7d8f2ea0 100644 --- a/bbot/modules/fingerprintx.py +++ b/bbot/modules/fingerprintx.py @@ -52,4 +52,4 @@ async def handle_batch(self, *events): protocol_data["port"] = port if banner: protocol_data["banner"] = banner - self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) + await self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index bf2c37097..d4045993b 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -200,7 +200,7 @@ def interactsh_callback(self, r): matched_severity = match[2] matched_read_response = str(match[3]) - self.emit_event( + await self.emit_event( { "severity": matched_severity, "host": str(matched_event.host), diff --git a/bbot/modules/git.py b/bbot/modules/git.py index dafe151d1..fc61402de 100644 --- a/bbot/modules/git.py +++ b/bbot/modules/git.py @@ -29,7 +29,7 @@ async def handle_event(self, event): text = "" if text: if getattr(result, 
"status_code", 0) == 200 and "[core]" in text and not self.fp_regex.match(text): - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": url, "description": f"Exposed .git config at {url}"}, "FINDING", event, diff --git a/bbot/modules/github_codesearch.py b/bbot/modules/github_codesearch.py index c98335722..a138b4399 100644 --- a/bbot/modules/github_codesearch.py +++ b/bbot/modules/github_codesearch.py @@ -21,13 +21,13 @@ async def handle_event(self, event): repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", source=event) if repo_event is None: continue - self.emit_event(repo_event) + await self.emit_event(repo_event) for raw_url in raw_urls: url_event = self.make_event(raw_url, "URL_UNVERIFIED", source=repo_event, tags=["httpx-safe"]) if not url_event: continue url_event.scope_distance = repo_event.scope_distance - self.emit_event(url_event) + await self.emit_event(url_event) async def query(self, query): repos = {} diff --git a/bbot/modules/github_org.py b/bbot/modules/github_org.py index 66182a2a6..66fa038a7 100644 --- a/bbot/modules/github_org.py +++ b/bbot/modules/github_org.py @@ -57,7 +57,7 @@ async def handle_event(self, event): for repo_url in repos: repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", source=event) repo_event.scope_distance = event.scope_distance - self.emit_event(repo_event) + await self.emit_event(repo_event) # find members from org (SOCIAL --> SOCIAL) if is_org and self.include_members: @@ -66,7 +66,7 @@ async def handle_event(self, event): for member in org_members: event_data = {"platform": "github", "profile_name": member, "url": f"https://github.com/{member}"} member_event = self.make_event(event_data, "SOCIAL", tags="github-org-member", source=event) - self.emit_event(member_event) + await self.emit_event(member_event) # find valid orgs from stub (ORG_STUB --> SOCIAL) elif event.type == "ORG_STUB": @@ -80,7 +80,7 @@ async def handle_event(self, event): event_data = {"platform": 
"github", "profile_name": user, "url": f"https://github.com/{user}"} github_org_event = self.make_event(event_data, "SOCIAL", tags="github-org", source=event) github_org_event.scope_distance = event.scope_distance - self.emit_event(github_org_event) + await self.emit_event(github_org_event) async def query_org_repos(self, query): repos = [] diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 5f4c4a5e8..5592eaa65 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -137,7 +137,7 @@ async def handle_batch(self, *events): filename = screenshot["filename"] webscreenshot_data = {"filename": filename, "url": final_url} source_event = events[url] - self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) + await self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) # emit URLs for url, row in self.new_network_logs.items(): @@ -151,7 +151,7 @@ async def handle_batch(self, *events): if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): - self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) + await self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) # emit technologies for _, row in self.new_technologies.items(): @@ -160,7 +160,7 @@ async def handle_batch(self, *events): source_event = events[source_url] technology = row["value"] tech_data = {"technology": technology, "url": source_url, "host": str(source_event.host)} - self.emit_event(tech_data, "TECHNOLOGY", source=source_event) + await self.emit_event(tech_data, "TECHNOLOGY", source=source_event) def construct_command(self): # base executable diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index bae721990..bec77c15a 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -30,7 +30,7 @@ async def setup(self): def rand_string(self, *args, **kwargs): return self.helpers.rand_string(*args, **kwargs) - def 
interactsh_callback(self, r): + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." in full_id: @@ -40,7 +40,7 @@ def interactsh_callback(self, r): matched_event = match[0] matched_technique = match[1] - self.emit_event( + await self.emit_event( { "host": str(matched_event.host), "url": matched_event.data["url"], @@ -128,7 +128,7 @@ async def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], @@ -168,7 +168,7 @@ async def handle_event(self, event): # emit all the domain reflections we found for dr in domain_reflections: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0c12ad740..393d2a402 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -153,11 +153,11 @@ async def handle_batch(self, *events): url_event = self.make_event(url, "URL", source_event, tags=tags) if url_event: if url_event != source_event: - self.emit_event(url_event) + await self.emit_event(url_event) else: url_event._resolved.set() # HTTP response - self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) + await self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) for tempdir in Path(tempfile.gettempdir()).iterdir(): if tempdir.is_dir() and self.httpx_tempdir_regex.match(tempdir.name): diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index 0ccf0391b..add45b665 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -289,4 +289,4 @@ async def handle_event(self, event): url = event.data.get("url", "") if url: data["url"] = url - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 1e65c6e4c..792ca6d98 100644 --- a/bbot/modules/hunterio.py +++ 
b/bbot/modules/hunterio.py @@ -26,14 +26,14 @@ async def handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", event) if email_event: - self.emit_event(email_event) + await self.emit_event(email_event) for source in sources: domain = source.get("domain", "") if domain: - self.emit_event(domain, "DNS_NAME", email_event) + await self.emit_event(domain, "DNS_NAME", email_event) url = source.get("uri", "") if url: - self.emit_event(url, "URL_UNVERIFIED", email_event) + await self.emit_event(url, "URL_UNVERIFIED", email_event) async def query(self, query): emails = [] diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index ff7941dc2..7d558a23a 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -221,7 +221,7 @@ class safety_counter_obj: technique_strings.append(f"{method} ({technique})") description = f"IIS Shortname Vulnerability Detected. Potentially Vulnerable Method/Techniques: [{','.join(technique_strings)}]" - self.emit_event( + await self.emit_event( {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, @@ -314,7 +314,7 @@ class safety_counter_obj: hint_type = "shortname-file" else: hint_type = "shortname-directory" - self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) + await self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) async def filter_event(self, event): if "dir" in event.tags: diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f3f15f83f..f5c028c95 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -23,7 +23,7 @@ async def search(self, content, event, **kwargs): async for result, name in self._search(content, event, **kwargs): results.add(result) for result in results: - self.report(result, name, event, **kwargs) + await self.report(result, name, event, 
**kwargs) async def _search(self, content, event, **kwargs): for name, regex in self.compiled_regexes.items(): @@ -32,7 +32,7 @@ async def _search(self, content, event, **kwargs): for result in regex.findall(content): yield result, name - def report(self, result, name, event): + async def report(self, result, name, event): pass @@ -48,10 +48,10 @@ async def search(self, content, event, **kwargs): async for csp, name in self._search(content, event, **kwargs): extracted_domains = self.extract_domains(csp) for domain in extracted_domains: - self.report(domain, event, **kwargs) + await self.report(domain, event, **kwargs) - def report(self, domain, event, **kwargs): - self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + async def report(self, domain, event, **kwargs): + await self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) class HostnameExtractor(BaseExtractor): @@ -62,8 +62,8 @@ def __init__(self, excavate): self.regexes[f"dns_name_{i+1}"] = r.pattern super().__init__(excavate) - def report(self, result, name, event, **kwargs): - self.excavate.emit_event(result, "DNS_NAME", source=event) + async def report(self, result, name, event, **kwargs): + await self.excavate.emit_event(result, "DNS_NAME", source=event) class URLExtractor(BaseExtractor): @@ -95,7 +95,7 @@ async def search(self, content, event, **kwargs): urls_found = 0 for result, name in results: - url_event = self.report(result, name, event, **kwargs) + url_event = await self.report(result, name, event, **kwargs) if url_event is not None: url_in_scope = self.excavate.scan.in_scope(url_event) is_spider_danger = self.excavate.helpers.is_spider_danger(event, result) @@ -115,7 +115,7 @@ async def search(self, content, event, **kwargs): url_event.add_tag("spider-danger") self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") - self.excavate.emit_event(url_event) + await self.excavate.emit_event(url_event) if 
url_in_scope: urls_found += 1 @@ -150,7 +150,7 @@ async def _search(self, content, event, **kwargs): yield result, name - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): parsed_uri = None try: parsed_uri = self.excavate.helpers.urlparse(result) @@ -168,7 +168,7 @@ def report(self, result, name, event, **kwargs): parsed_url = getattr(event, "parsed", None) if parsed_url: event_data["url"] = parsed_url.geturl() - self.excavate.emit_event( + await self.excavate.emit_event( event_data, "FINDING", source=event, @@ -177,7 +177,7 @@ def report(self, result, name, event, **kwargs): protocol_data = {"protocol": parsed_uri.scheme, "host": str(host)} if port: protocol_data["port"] = port - self.excavate.emit_event( + await self.excavate.emit_event( protocol_data, "PROTOCOL", source=event, @@ -192,12 +192,12 @@ class EmailExtractor(BaseExtractor): regexes = {"email": _email_regex} tld_blacklist = ["png", "jpg", "jpeg", "bmp", "ico", "gif", "svg", "css", "ttf", "woff", "woff2"] - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): result = result.lower() tld = result.split(".")[-1] if tld not in self.tld_blacklist: self.excavate.debug(f"Found email address [{result}] from parsing [{event.data.get('url')}]") - self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) + await self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) class ErrorExtractor(BaseExtractor): @@ -218,10 +218,10 @@ class ErrorExtractor(BaseExtractor): "ASP.NET:4": r"Error ([\d-]+) \([\dA-F]+\)", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found error message from parsing [{event.data.get('url')}] with regex [{name}]") description = f"Error message Detected at Error Type: {name}" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", 
""), "description": description}, "FINDING", source=event, @@ -231,7 +231,7 @@ def report(self, result, name, event, **kwargs): class JWTExtractor(BaseExtractor): regexes = {"JWT": r"eyJ(?:[\w-]*\.)(?:[\w-]*\.)[\w-]*"} - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found JWT candidate [{result}]") try: j.decode(result, options={"verify_signature": False}) @@ -240,7 +240,7 @@ def report(self, result, name, event, **kwargs): if jwt_headers["alg"].upper()[0:2] == "HS": tags = ["crackable"] description = f"JWT Identified [{result}]" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event, @@ -259,9 +259,9 @@ class SerializationExtractor(BaseExtractor): "Possible Compressed": r"H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2}", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): description = f"{name} serialized object found" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) @@ -272,9 +272,9 @@ class FunctionalityExtractor(BaseExtractor): "Web Service WSDL": r"(?i)((?:http|https)://[^\s]*?.(?:wsdl))", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): description = f"{name} found" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) @@ -314,7 +314,7 @@ class JavascriptExtractor(BaseExtractor): "possible_creds_var": r"(?:password|passwd|pwd|pass)\s*=+\s*['\"][^\s'\"]{1,60}['\"]", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): # ensure that basic auth matches aren't false positives if name == 
"authorization_basic": try: @@ -326,7 +326,7 @@ def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found Possible Secret in Javascript [{result}]") description = f"Possible secret in JS [{result}] Signature [{name}]" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event ) @@ -384,7 +384,7 @@ async def handle_event(self, event): # inherit web spider distance from parent (don't increment) source_web_spider_distance = getattr(event, "web_spider_distance", 0) url_event.web_spider_distance = source_web_spider_distance - self.emit_event(url_event) + await self.emit_event(url_event) else: self.verbose(f"Exceeded max HTTP redirects ({self.max_redirects}): {location}") diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 9b57b0e81..8f51d5d95 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -73,13 +73,13 @@ async def handle_event(self, event): ips = list(net) random.shuffle(ips) for ip in ips: - self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) + await self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) # parent domains if event.type == "DNS_NAME": parent = self.helpers.parent_domain(event.data) if parent != event.data: - self.emit_event(parent, "DNS_NAME", source=event, internal=True) + await self.emit_event(parent, "DNS_NAME", source=event, internal=True) # generate open ports @@ -91,7 +91,7 @@ async def handle_event(self, event): if event.type == "URL" or (event.type == "URL_UNVERIFIED" and self.open_port_consumers): # only speculate port from a URL if it wouldn't be speculated naturally from the host if event.host and (event.port not in self.ports or not speculate_open_ports): - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.host, event.port), "OPEN_TCP_PORT", source=event, @@ -108,7 +108,7 @@ async def 
handle_event(self, event): # inherit web spider distance from parent (don't increment) source_web_spider_distance = getattr(event, "web_spider_distance", 0) url_event.web_spider_distance = source_web_spider_distance - self.emit_event(url_event) + await self.emit_event(url_event) # from hosts if speculate_open_ports: @@ -120,7 +120,7 @@ async def handle_event(self, event): if event.type == "IP_ADDRESS" or usable_dns: for port in self.ports: - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, @@ -154,7 +154,7 @@ async def handle_event(self, event): stub_event = self.make_event(stub, "ORG_STUB", source=event) if event.scope_distance > 0: stub_event.scope_distance = event.scope_distance - self.emit_event(stub_event) + await self.emit_event(stub_event) async def filter_event(self, event): # don't accept errored DNS_NAMEs diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index 0384a4d4d..b3e98b9fc 100644 --- a/bbot/modules/internetdb.py +++ b/bbot/modules/internetdb.py @@ -76,7 +76,7 @@ async def handle_event(self, event): return if data: if r.status_code == 200: - self._parse_response(data=data, event=event) + await self._parse_response(data=data, event=event) elif r.status_code == 404: detail = data.get("detail", "") if detail: @@ -86,22 +86,22 @@ async def handle_event(self, event): err_msg = data.get("msg", "") self.verbose(f"Shodan error for {ip}: {err_data}: {err_msg}") - def _parse_response(self, data: dict, event): + async def _parse_response(self, data: dict, event): """Handles emitting events from returned JSON""" data: dict # has keys: cpes, hostnames, ip, ports, tags, vulns # ip is a string, ports is a list of ports, the rest is a list of strings for hostname in data.get("hostnames", []): - self.emit_event(hostname, "DNS_NAME", source=event) + await self.emit_event(hostname, "DNS_NAME", source=event) for cpe in data.get("cpes", []): - self.emit_event({"technology": cpe, 
"host": str(event.host)}, "TECHNOLOGY", source=event) + await self.emit_event({"technology": cpe, "host": str(event.host)}, "TECHNOLOGY", source=event) for port in data.get("ports", []): - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, internal=True, quick=True ) vulns = data.get("vulns", []) if vulns: vulns_str = ", ".join([str(v) for v in vulns]) - self.emit_event( + await self.emit_event( {"description": f"Shodan reported verified vulnerabilities: {vulns_str}", "host": str(event.host)}, "FINDING", source=event, diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index d0a66ce4c..e192b2abb 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -57,4 +57,4 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - self.emit_event(geo_data, "GEOLOCATION", event) + await self.emit_event(geo_data, "GEOLOCATION", event) diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index b6688abee..2b139f807 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -34,4 +34,4 @@ async def handle_event(self, event): ip_event = self.make_event(str(ip), "IP_ADDRESS", event, internal=True) # keep the scope distance low to give it one more hop for DNS resolution # ip_event.scope_distance = max(1, event.scope_distance) - self.emit_event(ip_event) + await self.emit_event(ip_event) diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index a8a143fdc..98f139505 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -47,4 +47,4 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - self.emit_event(geo_data, "GEOLOCATION", event) + await self.emit_event(geo_data, "GEOLOCATION", event) diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py index 4d9c9db68..02598a215 100644 --- a/bbot/modules/masscan.py +++ b/bbot/modules/masscan.py @@ 
-193,18 +193,18 @@ def process_output(self, line, result_callback): result = self.helpers.make_netloc(result, port_number) if source is None: source = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) - self.emit_event(source) + await self.emit_event(source) result_callback(result, source=source) def append_alive_host(self, host, source): host_event = self.make_event(host, "IP_ADDRESS", source=self.get_source_event(host)) self.alive_hosts[host] = host_event self._write_ping_result(host) - self.emit_event(host_event) + await self.emit_event(host_event) def emit_open_tcp_port(self, data, source): self._write_syn_result(data) - self.emit_event(data, "OPEN_TCP_PORT", source=source) + await self.emit_event(data, "OPEN_TCP_PORT", source=source) def emit_from_cache(self): ip_events = {} @@ -220,7 +220,7 @@ def emit_from_cache(self): break ip_event = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) ip_events[ip] = ip_event - self.emit_event(ip_event) + await self.emit_event(ip_event) # syn scan if self.syn_cache.is_file(): cached_syns = list(self.helpers.read_file(self.syn_cache)) @@ -237,8 +237,8 @@ def emit_from_cache(self): if source_event is None: self.verbose(f"Source event not found for {line}") source_event = self.make_event(line, "IP_ADDRESS", source=self.get_source_event(line)) - self.emit_event(source_event) - self.emit_event(line, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(source_event) + await self.emit_event(line, "OPEN_TCP_PORT", source=source_event) def get_source_event(self, host): source_event = self.scan.whitelist.get(host) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 5a0865476..a345b79f6 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -119,7 +119,7 @@ async def handle_event(self, event): self.info(f"Brute-forcing subdomains for {query} (source: {event.data})") for hostname in await self.massdns(query, self.subdomain_list): - self.emit_result(hostname, 
event, query) + await self.emit_result(hostname, event, query) def abort_if(self, event): if not event.scope_distance == 0: @@ -127,12 +127,12 @@ def abort_if(self, event): if "wildcard" in event.tags: return True, "event is a wildcard" - def emit_result(self, result, source_event, query, tags=None): + async def emit_result(self, result, source_event, query, tags=None): if not result == source_event: kwargs = {"abort_if": self.abort_if} if tags is not None: kwargs["tags"] = tags - self.emit_event(result, "DNS_NAME", source_event, **kwargs) + await self.emit_event(result, "DNS_NAME", source_event, **kwargs) def already_processed(self, hostname): if hash(hostname) in self.processed: @@ -380,7 +380,9 @@ def add_mutation(_domain_hash, m): if source_event is None: self.warning(f"Could not correlate source event from: {hostname}") source_event = self.scan.root_event - self.emit_result(hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"]) + await self.emit_result( + hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"] + ) if results: found_mutations = True continue diff --git a/bbot/modules/nmap.py b/bbot/modules/nmap.py index aeb28f9ae..6d8a1293b 100644 --- a/bbot/modules/nmap.py +++ b/bbot/modules/nmap.py @@ -52,10 +52,10 @@ async def handle_batch(self, *events): for port in host.open_ports: port_number = int(port.split("/")[0]) netloc = self.helpers.make_netloc(host.address, port_number) - self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) for hostname in host.hostnames: netloc = self.helpers.make_netloc(hostname, port_number) - self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) finally: output_file.unlink(missing_ok=True) diff --git a/bbot/modules/nsec.py b/bbot/modules/nsec.py index bfd770d44..7d313c140 100644 --- a/bbot/modules/nsec.py +++ b/bbot/modules/nsec.py @@ -18,12 
+18,12 @@ async def handle_event(self, event): async for result in self.nsec_walk(event.data): if not emitted_finding: emitted_finding = True - self.emit_event( + await self.emit_event( {"host": event.data, "description": f"DNSSEC NSEC Zone Walking Enabled for domain: {event.data}"}, "FINDING", source=event, ) - self.emit_event(result, "DNS_NAME", source=event) + await self.emit_event(result, "DNS_NAME", source=event) async def get_nsec_record(self, domain): domain = domain.replace("\\000.", "") diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 76e93c595..c69beb941 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -87,7 +87,7 @@ async def handle_event(self, event): for result, request_url in await self.handle_url(event): if result and request_url: self.found.add(found_hash) - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": request_url, @@ -98,7 +98,7 @@ async def handle_event(self, event): ) fqdn = result.get("FQDN", "") if fqdn: - self.emit_event(fqdn, "DNS_NAME", source=event) + await self.emit_event(fqdn, "DNS_NAME", source=event) break async def filter_event(self, event): diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index f4d592511..33cb38959 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -66,16 +66,16 @@ async def handle_event(self, event): source=event, ) finding_event.source_domain = source_domain - self.emit_event(finding_event) + await self.emit_event(finding_event) url_event = self.make_event( token_endpoint, "URL_UNVERIFIED", source=event, tags=["affiliate", "oauth-token-endpoint"] ) url_event.source_domain = source_domain - self.emit_event(url_event) + await self.emit_event(url_event) for result in oidc_results: if result not in (domain, event.data): event_type = "URL_UNVERIFIED" if self.helpers.is_url(result) else "DNS_NAME" - self.emit_event(result, event_type, source=event, tags=["affiliate"]) + await self.emit_event(result, event_type, source=event, 
tags=["affiliate"]) for oauth_task in oauth_tasks: url = await oauth_task @@ -90,7 +90,7 @@ async def handle_event(self, event): source=event, ) oauth_finding.source_domain = source_domain - self.emit_event(oauth_finding) + await self.emit_event(oauth_finding) def url_and_base(self, url): yield url diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index acb833daa..b16dd18fb 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -173,19 +173,19 @@ async def finish(self): self.add_custom_headers(list(asset.custom_fields)) if not is_ip(asset.host): host_event = self.make_event(asset.host, "DNS_NAME", source=self.scan.root_event) - self.emit_event(host_event) + await self.emit_event(host_event) for port in asset.ports: netloc = self.helpers.make_netloc(asset.host, port) open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=host_event) - self.emit_event(open_port_event) + await self.emit_event(open_port_event) else: for ip in asset.ip_addresses: ip_event = self.make_event(ip, "IP_ADDRESS", source=self.scan.root_event) - self.emit_event(ip_event) + await self.emit_event(ip_event) for port in asset.ports: netloc = self.helpers.make_netloc(ip, port) open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=ip_event) - self.emit_event(open_port_event) + await self.emit_event(open_port_event) else: self.warning( f"use_previous=True was set but no previous asset inventory was found at {self.output_file}" diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 65880d9c8..7044ae90a 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -126,7 +126,7 @@ def process_results(self, event, results): if reflection: tags = ["http_reflection"] description = f"[Paramminer] {self.compare_mode.capitalize()}: [{result}] Reasons: [{reasons}] Reflection: [{str(reflection)}]" - self.emit_event( + await 
self.emit_event( {"host": str(event.host), "url": url, "description": description}, "FINDING", event, diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index c1e0773c3..2c378f585 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -20,7 +20,7 @@ async def handle_event(self, event): if results: for hostname in results: if not hostname == event: - self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) + await self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) async def query(self, query): results = set() diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 619107b53..57d361b84 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -26,7 +26,7 @@ async def handle_event(self, event): query = self.make_query(event) self.verbose(f"Searching for any postman workspaces, collections, requests belonging to {query}") for url in await self.query(query): - self.emit_event(url, "URL_UNVERIFIED", source=event, tags="httpx-safe") + await self.emit_event(url, "URL_UNVERIFIED", source=event, tags="httpx-safe") async def query(self, query): interesting_urls = [] diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index a8f57709a..db8612a1e 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -42,9 +42,9 @@ async def handle_event(self, event): emails = asn.pop("emails", []) self.cache_put(asn) asn_event = self.make_event(asn, "ASN", source=event) - self.emit_event(asn_event) + await self.emit_event(asn_event) for email in emails: - self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) + await self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) async def report(self): asn_data = sorted(self.asn_cache.items(), key=lambda x: self.asn_counts[x[0]], reverse=True) diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index fd873b799..717900bee 100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -48,4 +48,4 @@ async def 
handle_event(self, event): tags = [] if self.helpers.is_spider_danger(event, unverified_url): tags.append("spider-danger") - self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) + await self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) diff --git a/bbot/modules/secretsdb.py b/bbot/modules/secretsdb.py index 83f305c61..3fc8ad539 100644 --- a/bbot/modules/secretsdb.py +++ b/bbot/modules/secretsdb.py @@ -54,7 +54,7 @@ async def handle_event(self, event): parsed_url = getattr(event, "parsed", None) if parsed_url: event_data["url"] = parsed_url.geturl() - self.emit_event( + await self.emit_event( event_data, "FINDING", source=event, diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index 87358a955..0c797296a 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -19,7 +19,7 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - await self.emit_event_wait(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) async def query(self, query, parse_fn=None, request_fn=None): results = set() diff --git a/bbot/modules/skymem.py b/bbot/modules/skymem.py index 71d0e883e..4bd76c70d 100644 --- a/bbot/modules/skymem.py +++ b/bbot/modules/skymem.py @@ -19,7 +19,7 @@ async def handle_event(self, event): if not r: return for email in self.helpers.extract_emails(r.text): - self.emit_event(email, "EMAIL_ADDRESS", source=event) + await self.emit_event(email, "EMAIL_ADDRESS", source=event) # iterate through other pages domain_ids = re.findall(r' {e.host}" description += f" ({source_hosts_str})" - self.emit_event({"host": event.host, "url": url, "description": description}, "FINDING", source=event) + await self.emit_event( + {"host": event.host, "url": url, "description": description}, "FINDING", source=event + ) else: self.debug(reason) 
diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index 71ebc4e08..6cbdfcf19 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -211,7 +211,7 @@ async def handle_event(self, event): version = "<= 2019 (Either Pre-2017 (vulnerable), or 2017-2019 w/ Encrypt-Then-Mac)" description = f"Telerik RAU AXD Handler detected. Verbose Errors Enabled: [{str(verbose_errors)}] Version Guess: [{version}]" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": f"{event.data}{webresource}", "description": description}, "FINDING", event, @@ -237,7 +237,7 @@ async def handle_event(self, event): description = f"[CVE-2017-11317] [{str(version)}] {webresource}" if "fileInfo" in output.stdout: self.debug(f"Confirmed Vulnerable Telerik (version: {str(version)}") - self.emit_event( + await self.emit_event( { "severity": "CRITICAL", "description": description, @@ -276,7 +276,7 @@ async def handle_event(self, event): await self.helpers.cancel_tasks(tasks) self.debug(f"Detected Telerik UI instance ({dh})") description = f"Telerik DialogHandler detected" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": f"{event.data}{dh}", "description": description}, "FINDING", event, @@ -297,7 +297,7 @@ async def handle_event(self, event): if validate_result.status_code != 500: self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") description = f"Telerik SpellCheckHandler detected" - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": f"{event.data}{spellcheckhandler}", @@ -317,7 +317,7 @@ async def handle_event(self, event): chartimagehandler_error = "ChartImage.axd?ImageName=" result_error, _ = await self.test_detector(event.data, chartimagehandler_error) if result_error.status_code != 200: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": f"{event.data}{chartimagehandler}", @@ -331,7 +331,7 @@ async def handle_event(self, event): resp_body = 
event.data.get("body", None) if resp_body: if '":{"SerializedParameters":"' in resp_body: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], @@ -341,7 +341,7 @@ async def handle_event(self, event): event, ) elif '"_serializedConfiguration":"' in resp_body: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py index eef8f5bee..f9681385b 100644 --- a/bbot/modules/templates/bucket.py +++ b/bbot/modules/templates/bucket.py @@ -52,7 +52,7 @@ async def handle_dns_name(self, event): for d in self.delimiters: buckets.add(d.join(split)) async for bucket_name, url, tags in self.brute_buckets(buckets, permutations=self.permutations): - self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) async def handle_storage_bucket(self, event): url = event.data["url"] @@ -61,12 +61,12 @@ async def handle_storage_bucket(self, event): description, tags = await self._check_bucket_open(bucket_name, url) if description: event_data = {"host": event.host, "url": url, "description": description} - self.emit_event(event_data, "FINDING", source=event, tags=tags) + await self.emit_event(event_data, "FINDING", source=event, tags=tags) async for bucket_name, url, tags in self.brute_buckets( [bucket_name], permutations=self.permutations, omit_base=True ): - self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) async def brute_buckets(self, buckets, permutations=False, omit_base=False): buckets = set(buckets) diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 0a0067628..790b35515 100644 --- 
a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -38,7 +38,7 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) async def request_url(self, query): url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" diff --git a/bbot/modules/url_manipulation.py b/bbot/modules/url_manipulation.py index f4d598c63..983595fe1 100644 --- a/bbot/modules/url_manipulation.py +++ b/bbot/modules/url_manipulation.py @@ -74,7 +74,7 @@ async def handle_event(self, event): if "body" in reasons: reported_signature = f"Modified URL: {sig[1]}" description = f"Url Manipulation: [{','.join(reasons)}] Sig: [{reported_signature}]" - self.emit_event( + await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", source=event, diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py index f1efe08e5..4c3811af0 100644 --- a/bbot/modules/urlscan.py +++ b/bbot/modules/urlscan.py @@ -25,16 +25,18 @@ async def handle_event(self, event): domain_event = self.make_event(domain, "DNS_NAME", source=event) if domain_event: if str(domain_event.host).endswith(query) and not str(domain_event.host) == str(event.host): - self.emit_event(domain_event, abort_if=self.abort_if) + await self.emit_event(domain_event, abort_if=self.abort_if) source_event = domain_event if url: url_event = self.make_event(url, "URL_UNVERIFIED", source=source_event) if url_event: if str(url_event.host).endswith(query): if self.urls: - self.emit_event(url_event, abort_if=self.abort_if) + await self.emit_event(url_event, abort_if=self.abort_if) else: - self.emit_event(str(url_event.host), "DNS_NAME", source=event, abort_if=self.abort_if) + await self.emit_event( + str(url_event.host), "DNS_NAME", 
source=event, abort_if=self.abort_if + ) else: self.debug(f"{url_event.host} does not match {query}") diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index c2a5e4431..d9a589845 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -26,7 +26,7 @@ async def setup(self): async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) for domain, _ in await self.query(query): - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) async def query(self, query): results = set() diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index f80979619..89c7ee1fa 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py @@ -32,12 +32,12 @@ async def handle_event(self, event): waf_detections = await self.scan.run_in_executor(WW.identwaf) if waf_detections: for waf in waf_detections: - self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event) + await self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event) else: if self.config.get("generic_detect") == True: generic = await self.scan.run_in_executor(WW.genericdetect) if generic: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": url, diff --git a/bbot/modules/wappalyzer.py b/bbot/modules/wappalyzer.py index 6d30fc057..c87274a29 100644 --- a/bbot/modules/wappalyzer.py +++ b/bbot/modules/wappalyzer.py @@ -28,7 +28,7 @@ async def setup(self): async def handle_event(self, event): for res in await self.scan.run_in_executor(self.wappalyze, event.data): - self.emit_event( + await self.emit_event( {"technology": res.lower(), "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event ) diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index 4bec112bf..bf4fb769e 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -27,7 +27,7 @@ async def 
setup(self): async def handle_event(self, event): query = self.make_query(event) for result, event_type in await self.query(query): - self.emit_event(result, event_type, event, abort_if=self.abort_if) + await self.emit_event(result, event_type, event, abort_if=self.abort_if) async def query(self, query): results = set() diff --git a/bbot/modules/zoomeye.py b/bbot/modules/zoomeye.py index 3c83fa828..b1d4e7670 100644 --- a/bbot/modules/zoomeye.py +++ b/bbot/modules/zoomeye.py @@ -36,7 +36,7 @@ async def handle_event(self, event): tags = [] if not hostname.endswith(f".{query}"): tags = ["affiliate"] - self.emit_event(hostname, "DNS_NAME", event, tags=tags) + await self.emit_event(hostname, "DNS_NAME", event, tags=tags) async def query(self, query): results = set() diff --git a/bbot/test/test_step_1/test_manager_deduplication.py b/bbot/test/test_step_1/test_manager_deduplication.py index 305796bea..e046988ae 100644 --- a/bbot/test/test_step_1/test_manager_deduplication.py +++ b/bbot/test/test_step_1/test_manager_deduplication.py @@ -15,7 +15,7 @@ async def setup(self): async def handle_event(self, event): self.events.append(event) - self.emit_event(f"{self.name}.test.notreal", "DNS_NAME", source=event) + await self.emit_event(f"{self.name}.test.notreal", "DNS_NAME", source=event) class EverythingModule(DefaultModule): _name = "everything_module" @@ -27,7 +27,7 @@ class EverythingModule(DefaultModule): async def handle_event(self, event): self.events.append(event) if event.type == "DNS_NAME": - self.emit_event(f"{event.data}:88", "OPEN_TCP_PORT", source=event) + await self.emit_event(f"{event.data}:88", "OPEN_TCP_PORT", source=event) class NoSuppressDupes(DefaultModule): _name = "no_suppress_dupes" diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index 08ac2c3ae..e8e5da391 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ 
-257,7 +257,7 @@ async def filter_event(self, event): return False, "bleh" async def handle_event(self, event): - self.emit_event( + await self.emit_event( {"host": str(event.host), "description": "yep", "severity": "CRITICAL"}, "VULNERABILITY", source=event ) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index b943144ac..7a25bb4ea 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -311,7 +311,7 @@ class dummy(BaseModule): watched_events = ["*"] async def handle_event(self, event): - self.emit_event( + await self.emit_event( {"host": "www.evilcorp.com", "url": "http://www.evilcorp.com", "description": "asdf"}, "FINDING", event ) diff --git a/docs/contribution.md b/docs/contribution.md index 65b074adb..2d36cfe44 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -74,7 +74,7 @@ class MyModule(BaseModule): self.hugeinfo(f"GOT EVENT: {event}") for ip in await self.helpers.resolve(event.data): self.hugesuccess(f"EMITTING IP_ADDRESS: {ip}") - self.emit_event(ip, "IP_ADDRESS", source=event) + await self.emit_event(ip, "IP_ADDRESS", source=event) ``` After saving the module, you can run it simply by specifying it with `-m`: From a40f5cb9709bb88011b4fe6e6376c14211deb729 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 11:56:43 -0500 Subject: [PATCH 18/52] add option to store httpx responses --- bbot/modules/httpx.py | 9 ++++++++- bbot/test/test_step_2/module_tests/test_module_httpx.py | 3 +++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0c12ad740..0ad4ea66c 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -13,12 +13,13 @@ class httpx(BaseModule): flags = ["active", "safe", "web-basic", "web-thorough", "social-enum", "subdomain-enum", "cloud-enum"] meta = {"description": "Visit webpages. 
Many other modules rely on httpx"} - options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880} + options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880, "store_responses": False} options_desc = { "threads": "Number of httpx threads to use", "in_scope_only": "Only visit web resources that are in scope.", "version": "httpx version", "max_response_size": "Max response size in bytes", + "store_responses": "Save raw HTTP responses to scan folder", } deps_ansible = [ { @@ -41,6 +42,7 @@ async def setup(self): self.timeout = self.scan.config.get("httpx_timeout", 5) self.retries = self.scan.config.get("httpx_retries", 1) self.max_response_size = self.config.get("max_response_size", 5242880) + self.store_responses = self.config.get("store_responses", False) self.visited = set() self.httpx_tempdir_regex = re.compile(r"^httpx\d+$") return True @@ -104,6 +106,11 @@ async def handle_batch(self, *events): f"{self.max_response_size}", ] + if self.store_responses: + response_dir = self.scan.home / "httpx" + self.helpers.mkdir(response_dir) + command += ["-srd", str(response_dir)] + dns_resolvers = ",".join(self.helpers.system_resolvers) if dns_resolvers: command += ["-r", dns_resolvers] diff --git a/bbot/test/test_step_2/module_tests/test_module_httpx.py b/bbot/test/test_step_2/module_tests/test_module_httpx.py index 77f3e98b8..ebd9bbdb1 100644 --- a/bbot/test/test_step_2/module_tests/test_module_httpx.py +++ b/bbot/test/test_step_2/module_tests/test_module_httpx.py @@ -3,6 +3,7 @@ class TestHTTPX(ModuleTestBase): targets = ["http://127.0.0.1:8888/url", "127.0.0.1:8888"] + config_overrides = {"modules": {"httpx": {"store_responses": True}}} # HTML for a page with a login form html_with_login = """ @@ -48,6 +49,8 @@ def check(self, module_test, events): url = True assert url, "Failed to visit target URL" assert open_port, "Failed to visit target OPEN_TCP_PORT" + saved_response = 
module_test.scan.home / "httpx" / "127.0.0.1.8888[slash]url.txt" + assert saved_response.is_file(), "Failed to save raw httpx response" class TestHTTPX_404(ModuleTestBase): From 2a2e70d0b52b9db5014ca94154e0bb3c10f8ac87 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 12:00:07 -0500 Subject: [PATCH 19/52] blacked --- bbot/modules/httpx.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0ad4ea66c..e469e2f0a 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -13,7 +13,13 @@ class httpx(BaseModule): flags = ["active", "safe", "web-basic", "web-thorough", "social-enum", "subdomain-enum", "cloud-enum"] meta = {"description": "Visit webpages. Many other modules rely on httpx"} - options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880, "store_responses": False} + options = { + "threads": 50, + "in_scope_only": True, + "version": "1.2.5", + "max_response_size": 5242880, + "store_responses": False, + } options_desc = { "threads": "Number of httpx threads to use", "in_scope_only": "Only visit web resources that are in scope.", From 451e67cf90e9bf556fdcd467e14824ecc630010a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 12:04:48 -0500 Subject: [PATCH 20/52] fix tests --- bbot/modules/generic_ssrf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index d4045993b..9d75f4a9e 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -188,7 +188,7 @@ async def handle_event(self, event): for s in self.submodules.values(): await s.test(event) - def interactsh_callback(self, r): + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." 
in full_id: @@ -229,6 +229,6 @@ async def finish(self): await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): - self.interactsh_callback(r) + await self.interactsh_callback(r) except InteractshError as e: self.debug(f"Error in interact.sh: {e}") From b4fea8a72b8142356942e34d70bbfe082c71a04f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 12:59:11 -0500 Subject: [PATCH 21/52] fix tests --- bbot/modules/masscan.py | 14 +++++++------- bbot/modules/paramminer_headers.py | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py index 02598a215..15881d5b2 100644 --- a/bbot/modules/masscan.py +++ b/bbot/modules/masscan.py @@ -105,7 +105,7 @@ async def setup(self): async def handle_batch(self, *events): if self.use_cache: - self.emit_from_cache() + await self.emit_from_cache() else: targets = [str(e.data) for e in events] if not targets: @@ -138,7 +138,7 @@ async def masscan(self, targets, result_callback, ping=False): try: with open(stats_file, "w") as stats_fh: async for line in self.helpers.run_live(command, sudo=True, stderr=stats_fh): - self.process_output(line, result_callback=result_callback) + await self.process_output(line, result_callback=result_callback) finally: for file in (stats_file, target_file): file.unlink() @@ -169,7 +169,7 @@ def _build_masscan_command(self, target_file=None, dry_run=False, ping=False): command += ("--echo",) return command - def process_output(self, line, result_callback): + async def process_output(self, line, result_callback): try: j = json.loads(line) except Exception: @@ -194,19 +194,19 @@ def process_output(self, line, result_callback): if source is None: source = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) await self.emit_event(source) - result_callback(result, source=source) + await result_callback(result, source=source) - def append_alive_host(self, host, source): + async def 
append_alive_host(self, host, source): host_event = self.make_event(host, "IP_ADDRESS", source=self.get_source_event(host)) self.alive_hosts[host] = host_event self._write_ping_result(host) await self.emit_event(host_event) - def emit_open_tcp_port(self, data, source): + async def emit_open_tcp_port(self, data, source): self._write_syn_result(data) await self.emit_event(data, "OPEN_TCP_PORT", source=source) - def emit_from_cache(self): + async def emit_from_cache(self): ip_events = {} # ping scan if self.ping_cache.is_file(): diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 7044ae90a..3d3861621 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -119,7 +119,7 @@ async def do_mining(self, wl, url, batch_size, compare_helper): pass return results - def process_results(self, event, results): + async def process_results(self, event, results): url = event.data.get("url") for result, reasons, reflection in results: tags = [] @@ -171,7 +171,7 @@ async def handle_event(self, event): results = await self.do_mining(wl, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{event.data}]") - self.process_results(event, results) + await self.process_results(event, results) async def count_test(self, url): baseline = await self.helpers.request(url) From 65757648b17b4b9f1299d3843ae14e6d9f833067 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Fri, 19 Jan 2024 21:44:17 +0000 Subject: [PATCH 22/52] Refresh module docs --- docs/scanning/configuration.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index ab57e4eb9..0ebfc89dd 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -238,6 +238,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.gowitness.version | str | gowitness version | 2.4.2 | | modules.httpx.in_scope_only | bool | Only visit web resources that are in scope. | True | | modules.httpx.max_response_size | int | Max response size in bytes | 5242880 | +| modules.httpx.store_responses | bool | Save raw HTTP responses to scan folder | False | | modules.httpx.threads | int | Number of httpx threads to use | 50 | | modules.httpx.version | str | httpx version | 1.2.5 | | modules.iis_shortnames.detect_only | bool | Only detect the vulnerability and do not run the shortname scanner | True | From ca3f3c28716efdeba57f6e686e150f183e6f2f5d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 17:21:32 -0500 Subject: [PATCH 23/52] fix tests --- bbot/core/helpers/helper.py | 4 +-- bbot/core/helpers/interactsh.py | 5 ++-- bbot/modules/host_header.py | 2 +- bbot/test/conftest.py | 22 ++++++++++++-- bbot/test/test_step_1/test_web.py | 48 +++++++++++++++++++++++++++---- 5 files changed, 68 insertions(+), 13 deletions(-) diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index dbe19f20c..36c6346c9 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -77,8 +77,8 @@ def __init__(self, config, scan=None): # cloud helpers self.cloud = CloudHelper(self) - def interactsh(self): - return Interactsh(self) + def interactsh(self, *args, **kwargs): + return Interactsh(self, *args, **kwargs) def http_compare(self, url, allow_redirects=False, include_cache_buster=True): return HttpCompare(url, self, allow_redirects=allow_redirects, include_cache_buster=include_cache_buster) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 95f76d2f5..929637dfe 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -78,12 +78,13 @@ class Interactsh: ``` """ - def __init__(self, parent_helper): + def __init__(self, parent_helper, poll_interval=10): self.parent_helper = 
parent_helper self.server = None self.correlation_id = None self.custom_server = self.parent_helper.config.get("interactsh_server", None) self.token = self.parent_helper.config.get("interactsh_token", None) + self.poll_interval = poll_interval self._poll_task = None async def register(self, callback=None): @@ -279,7 +280,7 @@ async def _poll_loop(self, callback): log.warning(e) log.trace(traceback.format_exc()) if not data_list: - await asyncio.sleep(10) + await asyncio.sleep(self.poll_interval) continue for data in data_list: if data: diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index bec77c15a..4adaa766a 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -58,7 +58,7 @@ async def finish(self): await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): - self.interactsh_callback(r) + await self.interactsh_callback(r) except InteractshError as e: self.debug(f"Error in interact.sh: {e}") diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 684ec18a2..b60a0633d 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -1,10 +1,12 @@ import ssl import shutil import pytest +import asyncio import logging from pathlib import Path from pytest_httpserver import HTTPServer +from bbot.core.helpers.misc import execute_sync_or_async from bbot.core.helpers.interactsh import server_list as interactsh_servers @@ -98,20 +100,34 @@ class Interactsh_mock: def __init__(self): self.interactions = [] self.correlation_id = "deadbeef-dead-beef-dead-beefdeadbeef" + self.stop = False def mock_interaction(self, subdomain_tag): self.interactions.append(subdomain_tag) async def register(self, callback=None): + if callable(callback): + asyncio.create_task(self.poll_loop(callback)) return "fakedomain.fakeinteractsh.com" async def deregister(self, callback=None): - pass + self.stop = True - async def poll(self): + async def poll_loop(self, callback=None): + while not self.stop: + data_list = await 
self.poll(callback) + if not data_list: + await asyncio.sleep(1) + continue + + async def poll(self, callback=None): poll_results = [] for subdomain_tag in self.interactions: - poll_results.append({"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": "HTTP"}) + result = {"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": "HTTP"} + poll_results.append(result) + if callback is not None: + await execute_sync_or_async(callback, result) + self.interactions = [] return poll_results diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 9179d42e6..13edaf725 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -104,23 +104,61 @@ async def test_web_helpers(bbot_scanner, bbot_config, bbot_httpserver): async def test_web_interactsh(bbot_scanner, bbot_config, bbot_httpserver): from bbot.core.helpers.interactsh import server_list + sync_called = False + async_called = False + + sync_correct_url = False + async_correct_url = False + scan1 = bbot_scanner("8.8.8.8", config=bbot_config) + scan1.status = "RUNNING" - interactsh_client = scan1.helpers.interactsh() + interactsh_client = scan1.helpers.interactsh(poll_interval=3) + interactsh_client2 = scan1.helpers.interactsh(poll_interval=3) async def async_callback(data): - log.debug(f"interactsh poll: {data}") + nonlocal async_called + nonlocal async_correct_url + async_called = True + d = data.get("raw-request", "") + async_correct_url |= "bbot_interactsh_test" in d + log.debug(f"interactsh poll (async): {d}") + + def sync_callback(data): + nonlocal sync_called + nonlocal sync_correct_url + sync_called = True + d = data.get("raw-request", "") + sync_correct_url |= "bbot_interactsh_test" in d + log.debug(f"interactsh poll (sync): {d}") interactsh_domain = await interactsh_client.register(callback=async_callback) - url = f"https://{interactsh_domain}/bbot_interactsh_test" + url = 
f"http://{interactsh_domain}/bbot_interactsh_test" response = await scan1.helpers.request(url) assert response.status_code == 200 - await asyncio.sleep(10) assert any(interactsh_domain.endswith(f"{s}") for s in server_list) + + interactsh_domain2 = await interactsh_client2.register(callback=sync_callback) + url2 = f"http://{interactsh_domain2}/bbot_interactsh_test" + response2 = await scan1.helpers.request(url2) + assert response2.status_code == 200 + assert any(interactsh_domain2.endswith(f"{s}") for s in server_list) + + await asyncio.sleep(10) + data_list = await interactsh_client.poll() + data_list2 = await interactsh_client2.poll() assert isinstance(data_list, list) - assert any("bbot_interactsh_test" in d.get("raw-request", "") for d in data_list) + assert isinstance(data_list2, list) + assert await interactsh_client.deregister() is None + assert await interactsh_client2.deregister() is None + + assert sync_called, "Interactsh synchrononous callback was not called" + assert async_called, "Interactsh async callback was not called" + + assert sync_correct_url, f"Data content was not correct for {url2}" + assert async_correct_url, f"Data content was not correct for {url}" @pytest.mark.asyncio From 48836874d277aa9ec212d11f98a8f0ca04e30d0c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 09:49:47 -0500 Subject: [PATCH 24/52] fix tests again --- bbot/modules/paramminer_headers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 3d3861621..3458edaa9 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -247,4 +247,4 @@ async def finish(self): results = await self.do_mining(untested_matches_copy, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{url}]") - self.process_results(event, results) + await self.process_results(event, results) From 
e60f4b7010908d0cc8d58842b413c3c991614339 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 11:35:01 -0500 Subject: [PATCH 25/52] fixed postman tests --- bbot/test/test_step_2/module_tests/test_module_postman.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_postman.py b/bbot/test/test_step_2/module_tests/test_module_postman.py index 2879b4838..4f7515162 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postman.py +++ b/bbot/test/test_step_2/module_tests/test_module_postman.py @@ -183,10 +183,10 @@ async def setup_after_prep(self, module_test): old_emit_event = module_test.module.emit_event - def new_emit_event(event_data, event_type, **kwargs): + async def new_emit_event(event_data, event_type, **kwargs): if event_data.startswith("https://www.postman.com"): event_data = event_data.replace("https://www.postman.com", "http://127.0.0.1:8888") - old_emit_event(event_data, event_type, **kwargs) + await old_emit_event(event_data, event_type, **kwargs) module_test.monkeypatch.setattr(module_test.module, "emit_event", new_emit_event) module_test.scan.helpers.dns.mock_dns({("asdf.blacklanternsecurity.com", "A"): "127.0.0.1"}) From 5af04829bf9ac9f95ffeb3f4993a8ab516152752 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Mon, 22 Jan 2024 17:33:51 +0000 Subject: [PATCH 26/52] Discard postman workspaces that do not explicitly contain the queried domain name --- bbot/modules/postman.py | 26 +++++---- .../module_tests/test_module_postman.py | 53 ++++++++++++++++++- 2 files changed, 67 insertions(+), 12 deletions(-) diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 619107b53..e67bb7dea 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -70,16 +70,22 @@ async def query(self, query): workspaces.append(workspace) for item in workspaces: id = item.get("id", "") - interesting_urls.append(f"{self.base_url}/workspace/{id}") - environments, collections 
= await self.search_workspace(id) - interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") - for e_id in environments: - interesting_urls.append(f"{self.base_url}/environment/{e_id}") - for c_id in collections: - interesting_urls.append(f"{self.base_url}/collection/{c_id}") - requests = await self.search_collections(id) - for r_id in requests: - interesting_urls.append(f"{self.base_url}/request/{r_id}") + name = item.get("name", "") + tldextract = self.helpers.tldextract(query) + if tldextract.domain.lower() in name.lower(): + self.verbose(f"Discovered workspace {name} ({id})") + interesting_urls.append(f"{self.base_url}/workspace/{id}") + environments, collections = await self.search_workspace(id) + interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") + for e_id in environments: + interesting_urls.append(f"{self.base_url}/environment/{e_id}") + for c_id in collections: + interesting_urls.append(f"{self.base_url}/collection/{c_id}") + requests = await self.search_collections(id) + for r_id in requests: + interesting_urls.append(f"{self.base_url}/request/{r_id}") + else: + self.verbose(f"Skipping workspace {name} ({id}) as it does not appear to be in scope") return interesting_urls async def search_workspace(self, id): diff --git a/bbot/test/test_step_2/module_tests/test_module_postman.py b/bbot/test/test_step_2/module_tests/test_module_postman.py index 2879b4838..2506d0546 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postman.py +++ b/bbot/test/test_step_2/module_tests/test_module_postman.py @@ -30,9 +30,9 @@ async def setup_after_prep(self, module_test): "workspaces": [ { "visibilityStatus": "public", - "name": "SpilledSecrets", + "name": "BlackLanternSecuritySpilledSecrets", "id": "afa061be-9cb0-4520-9d4d-fe63361daf0f", - "slug": "spilledsecrets", + "slug": "blacklanternsecurityspilledsecrets", } ], "collectionForkLabel": "", @@ -60,6 +60,52 @@ async def setup_after_prep(self, module_test): }, }, }, + { + "score": 
498.22398, + "normalizedScore": 8.43312266976538, + "document": { + "isPublisherVerified": False, + "publisherType": "user", + "curatedInList": [], + "publisherId": "28329861", + "publisherHandle": "", + "publisherLogo": "", + "isPublic": True, + "customHostName": "", + "id": "b7fa2137-b7fa2137-23bf-45d1-b176-35359af30ded", + "workspaces": [ + { + "visibilityStatus": "public", + "name": "SpilledSecrets", + "id": "92d0451b-119d-4ef0-b74c-22c400e5ce05", + "slug": "spilledsecrets", + } + ], + "collectionForkLabel": "", + "method": "POST", + "entityType": "request", + "url": "www.example.com/index", + "isBlacklisted": False, + "warehouse__updated_at_collection": "2023-12-11 02:00:00", + "isPrivateNetworkEntity": False, + "warehouse__updated_at_request": "2023-12-11 02:00:00", + "publisherName": "NA", + "name": "A test post request", + "privateNetworkMeta": "", + "privateNetworkFolders": [], + "documentType": "request", + "collection": { + "id": "007e8d67-007e8d67-932b-46ff-b95c-a2aa216edaf3", + "name": "Secret Collection", + "tags": [], + "forkCount": 0, + "watcherCount": 0, + "views": 31, + "apiId": "", + "apiName": "", + }, + }, + }, ], }, ) @@ -199,6 +245,9 @@ def check(self, module_test, events): assert any( e.data == "http://127.0.0.1:8888/_api/workspace/afa061be-9cb0-4520-9d4d-fe63361daf0f" for e in events ), "Failed to detect workspace" + assert any( + e.data != "http://127.0.0.1:8888/_api/workspace/92d0451b-119d-4ef0-b74c-22c400e5ce05" for e in events + ), "Workspace should not be detected" assert any( e.data == "http://127.0.0.1:8888/_api/workspace/afa061be-9cb0-4520-9d4d-fe63361daf0f/globals" for e in events From 6df8bffa2189e5fd2945950b3f6924c72a551ee8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 14:32:37 -0500 Subject: [PATCH 27/52] improve task counting for batch modules --- bbot/core/helpers/async_helpers.py | 19 ++++++++++--------- bbot/modules/base.py | 5 +++-- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git 
a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py index 916764e53..4cc701161 100644 --- a/bbot/core/helpers/async_helpers.py +++ b/bbot/core/helpers/async_helpers.py @@ -57,33 +57,34 @@ def __init__(self): @property def value(self): - return len(self.tasks) + return sum([t.n for t in self.tasks.values()]) - def count(self, task_name, _log=True): + def count(self, task_name, n=1, _log=True): if callable(task_name): task_name = f"{task_name.__qualname__}()" - return self.Task(self, task_name, _log) + return self.Task(self, task_name, n=n, _log=_log) class Task: - def __init__(self, manager, task_name, _log=True): + def __init__(self, manager, task_name, n=1, _log=True): self.manager = manager self.task_name = task_name self.task_id = None self.start_time = None self.log = _log + self.n = n async def __aenter__(self): - self.task_id = uuid.uuid4() # generate a unique ID for the task + self.task_id = uuid.uuid4() # if self.log: # log.trace(f"Starting task {self.task_name} ({self.task_id})") - async with self.manager.lock: # acquire the lock + async with self.manager.lock: self.start_time = datetime.now() self.manager.tasks[self.task_id] = self - return self.task_id # this will be passed as 'task_id' to __aexit__ + return self async def __aexit__(self, exc_type, exc_val, exc_tb): - async with self.manager.lock: # acquire the lock - self.manager.tasks.pop(self.task_id, None) # remove only current task + async with self.manager.lock: + self.manager.tasks.pop(self.task_id, None) # if self.log: # log.trace(f"Finished task {self.task_name} ({self.task_id})") diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 6ae93c7ea..d049f711a 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -175,7 +175,7 @@ async def handle_event(self, event): """ pass - def handle_batch(self, *events): + async def handle_batch(self, *events): """Handles incoming events in batches for optimized processing. 
This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module. @@ -350,13 +350,14 @@ async def _handle_batch(self): - If a "FINISHED" event is found, invokes 'finish()' method of the module. """ finish = False - async with self._task_counter.count(f"{self.name}.handle_batch()"): + async with self._task_counter.count(f"{self.name}.handle_batch()") as counter: submitted = False if self.batch_size <= 1: return if self.num_incoming_events > 0: events, finish = await self._events_waiting() if events and not self.errored: + counter.n = len(events) self.debug(f"Handling batch of {len(events):,} events") submitted = True async with self.scan._acatch(f"{self.name}.handle_batch()"): From 3778ff977671d3a55d8b499556f428eee008e8e9 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 14:32:50 -0500 Subject: [PATCH 28/52] lovecraftian entities --- bbot/core/helpers/names_generator.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 49ed866d6..16432cb0e 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -298,6 +298,7 @@ "ashley", "audrey", "austin", + "azathoth", "baggins", "bailey", "barbara", @@ -347,8 +348,10 @@ "courtney", "craig", "crystal", + "cthulu", "curtis", "cynthia", + "dagon", "dale", "dandelion", "daniel", @@ -554,6 +557,7 @@ "noah", "norma", "norman", + "nyarlathotep", "obama", "olivia", "padme", From 9af5ad83de83d83795914cb06857f980db358401 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 24 Jan 2024 08:50:15 -0500 Subject: [PATCH 29/52] gowitness - don't visit out-of-scope URLs (except social media pages) --- bbot/core/event/base.py | 1 + bbot/modules/gowitness.py | 26 ++++++---- 
.../module_tests/test_module_gowitness.py | 47 +++++++++++++------ 3 files changed, 50 insertions(+), 24 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 54b9dae02..f46faad01 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1165,6 +1165,7 @@ class SOCIAL(DictEvent): class WEBSCREENSHOT(DictHostEvent): _always_emit = True + _quick_emit = True class AZURE_TENANT(DictEvent): diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 5592eaa65..a335bf021 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -7,7 +7,7 @@ class gowitness(BaseModule): - watched_events = ["URL"] + watched_events = ["URL", "SOCIAL"] produced_events = ["WEBSCREENSHOT", "URL", "URL_UNVERIFIED", "TECHNOLOGY"] flags = ["active", "safe", "web-screenshots"] meta = {"description": "Take screenshots of webpages"} @@ -76,9 +76,8 @@ class gowitness(BaseModule): }, ] _batch_size = 100 - # visit up to and including the scan's configured search distance plus one - # this is one hop further than the default - scope_distance_modifier = 1 + # gowitness accepts SOCIAL events up to distance 2, otherwise it is in-scope-only + scope_distance_modifier = 2 async def setup(self): self.timeout = self.config.get("timeout", 10) @@ -120,12 +119,21 @@ async def filter_event(self, event): # ignore events from self if event.type == "URL" and event.module == self: return False, "event is from self" + # Accept out-of-scope SOCIAL pages, but not URLs + if event.scope_distance > 0: + if event.type != "SOCIAL": + return False, "event is not in-scope" return True async def handle_batch(self, *events): self.prep() - stdin = "\n".join([str(e.data) for e in events]) - events = {e.data: e for e in events} + event_dict = {} + for e in events: + key = e.data + if e.type == "SOCIAL": + key = e.data["url"] + event_dict[key] = e + stdin = "\n".join(list(event_dict)) async for line in self.helpers.run_live(self.command, input=stdin): 
self.debug(line) @@ -136,7 +144,7 @@ async def handle_batch(self, *events): final_url = screenshot["final_url"] filename = screenshot["filename"] webscreenshot_data = {"filename": filename, "url": final_url} - source_event = events[url] + source_event = event_dict[url] await self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) # emit URLs @@ -147,7 +155,7 @@ async def handle_batch(self, *events): _id = row["url_id"] source_url = self.screenshots_taken[_id] - source_event = events[source_url] + source_event = event_dict[source_url] if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): @@ -157,7 +165,7 @@ async def handle_batch(self, *events): for _, row in self.new_technologies.items(): source_id = row["url_id"] source_url = self.screenshots_taken[source_id] - source_event = events[source_url] + source_event = event_dict[source_url] technology = row["value"] tech_data = {"technology": technology, "url": source_url, "host": str(source_event.host)} await self.emit_event(tech_data, "TECHNOLOGY", source=source_event) diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index 35e0db799..89fed5da5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -3,7 +3,7 @@ class TestGowitness(ModuleTestBase): targets = ["127.0.0.1:8888"] - modules_overrides = ["gowitness", "httpx"] + modules_overrides = ["gowitness", "httpx", "social", "excavate"] import shutil from pathlib import Path @@ -14,6 +14,7 @@ class TestGowitness(ModuleTestBase): async def setup_after_prep(self, module_test): respond_args = { "response_data": """BBOT is life + @@ -21,21 +22,37 @@ async def setup_after_prep(self, module_test): "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, } module_test.set_expect_requests(respond_args=respond_args) + request_args = 
dict(uri="/blacklanternsecurity") + respond_args = dict(response_data="blacklanternsecurity github") + module_test.set_expect_requests(request_args, respond_args) + + # monkeypatch social + old_emit_event = module_test.scan.modules["social"].emit_event + + async def new_emit_event(event_data, event_type, **kwargs): + if event_data["url"] == "https://github.com/blacklanternsecurity": + event_data["url"] = event_data["url"].replace("https://github.com", "http://127.0.0.1:8888") + await old_emit_event(event_data, event_type, **kwargs) + + module_test.monkeypatch.setattr(module_test.scan.modules["social"], "emit_event", new_emit_event) def check(self, module_test, events): screenshots_path = self.home_dir / "scans" / module_test.scan.name / "gowitness" / "screenshots" screenshots = list(screenshots_path.glob("*.png")) - assert screenshots, f"No .png files found at {screenshots_path}" - url = False - webscreenshot = False - technology = False - for event in events: - if event.type == "URL_UNVERIFIED": - url = True - elif event.type == "WEBSCREENSHOT": - webscreenshot = True - elif event.type == "TECHNOLOGY": - technology = True - assert url, "No URL emitted" - assert webscreenshot, "No WEBSCREENSHOT emitted" - assert technology, "No TECHNOLOGY emitted" + assert ( + len(screenshots) == 2 + ), f"{len(screenshots):,} .png files found at {screenshots_path}, should have been 2" + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) + assert 1 == len( + [e for e in events if e.type == "SOCIAL" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity"] + ) + assert 2 == len([e for e in events if e.type == "WEBSCREENSHOT"]) + assert 1 == len([e for e in events if e.type == "WEBSCREENSHOT" and e.data["url"] == "http://127.0.0.1:8888/"]) + assert 1 == len( + [ + e + for e in events + if e.type == "WEBSCREENSHOT" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity" + ] + ) + assert len([e for e in events if e.type == 
"TECHNOLOGY"]) From c9a4af4b0f6a54120a5787348bca8fd9d2073520 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 24 Jan 2024 13:02:13 -0500 Subject: [PATCH 30/52] ensure we don't get any unwanted web screenshots --- bbot/test/test_step_2/module_tests/test_module_gowitness.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index 89fed5da5..09ad6144c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -9,7 +9,7 @@ class TestGowitness(ModuleTestBase): home_dir = Path("/tmp/.bbot_gowitness_test") shutil.rmtree(home_dir, ignore_errors=True) - config_overrides = {"force_deps": True, "home": str(home_dir)} + config_overrides = {"force_deps": True, "home": str(home_dir), "scope_report_distance": 2, "omit_event_types": []} async def setup_after_prep(self, module_test): respond_args = { @@ -43,6 +43,10 @@ def check(self, module_test, events): len(screenshots) == 2 ), f"{len(screenshots):,} .png files found at {screenshots_path}, should have been 2" assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) + assert 1 == len( + [e for e in events if e.type == "URL_UNVERIFIED" and e.data == "https://fonts.googleapis.com/"] + ) + assert 0 == len([e for e in events if e.type == "URL" and e.data == "https://fonts.googleapis.com/"]) assert 1 == len( [e for e in events if e.type == "SOCIAL" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity"] ) From 36659a5bcb9e330a18917382f9adfe0548db7a11 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Wed, 24 Jan 2024 22:26:13 +0000 Subject: [PATCH 31/52] Refresh module docs --- docs/modules/list_of_modules.md | 2 +- docs/scanning/events.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/modules/list_of_modules.md 
b/docs/modules/list_of_modules.md index 89bb6d24e..3c4154ed8 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -19,7 +19,7 @@ | fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | | generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | | git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | | host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | | httpx | scan | No | Visit webpages. 
Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | | hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index defcf46a8..a6a50d554 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -68,7 +68,7 @@ Below is a full list of event types along with which modules produce/consume the | ORG_STUB | 1 | 1 | github_org | speculate | | PASSWORD | 0 | 2 | | credshed, dehashed | | PROTOCOL | 0 | 1 | | fingerprintx | -| SOCIAL | 2 | 1 | github_org, speculate | social | +| SOCIAL | 3 | 1 | github_org, gowitness, speculate | social | | STORAGE_BUCKET | 7 | 5 | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | | TECHNOLOGY | 2 | 4 | asset_inventory, web_report | badsecrets, gowitness, internetdb, wappalyzer | | URL | 19 | 2 | ajaxpro, asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | From 021a0f63392df5448625673256c77f4721aad7c5 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 27 Jan 2024 16:36:12 -0500 Subject: [PATCH 32/52] update, pin black --- bbot/test/test_step_1/test_events.py | 8 +++-- poetry.lock | 49 ++++++++++++++-------------- pyproject.toml | 2 +- 3 files changed, 31 insertions(+), 28 deletions(-) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 1fc01a046..0f30f6498 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -321,7 +321,10 @@ async def test_events(events, scan, helpers, bbot_config): assert 
scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" - assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/テスト" + assert ( + scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) # thai assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" @@ -352,8 +355,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" assert ( - scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data - == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) assert ( scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data diff --git a/poetry.lock b/poetry.lock index 8cdf62957..fb069c1aa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -139,33 +139,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.12.1" +version = "24.1.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = 
"black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, + {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, + {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, + {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, + {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, + 
{file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, + {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, + {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, + {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, + {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, + {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, + {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, + {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, + {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, + {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, + {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, + {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, + {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, + {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, + {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, + {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, + {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, ] [package.dependencies] @@ -1860,6 +1860,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2423,4 +2424,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "55d756e108a8f225f7974eab2b98b7f1b3feeabfb23aed0107f7f0616b201bc6" +content-hash = "2f14476c664bdc8d200619c7959f1e52f18b01b252d3e0dd58be239965881bb1" diff --git a/pyproject.toml b/pyproject.toml index c845bbc60..61b6a70a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,6 @@ 
tldextract = "^5.1.1" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" -black = "^23.1.0" pytest-cov = "^4.0.0" poetry-dynamic-versioning = "^0.21.4" pytest-rerunfailures = "^11.1.2" @@ -62,6 +61,7 @@ pytest-env = "^0.8.2" pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" +black = "24.1.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From 056206fc3afa3ce9839957e31efa684b6eda3792 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 27 Jan 2024 17:14:26 -0500 Subject: [PATCH 33/52] fix massdns cname wildcard bug --- bbot/modules/massdns.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index a345b79f6..965909d31 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -147,8 +147,7 @@ async def massdns(self, domain, subdomains): for rdtype, results in rdtypes.items(): if results: domain_wildcard_rdtypes.add(rdtype) - - if "A" in domain_wildcard_rdtypes: + if any([r in domain_wildcard_rdtypes for r in ("A", "CNAME")]): self.info( f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(domain_wildcard_rdtypes)})" ) From 30de135c969ba31327f20ebefe289730becf4d73 Mon Sep 17 00:00:00 2001 From: Amir Emami Date: Sat, 27 Jan 2024 16:32:19 +0100 Subject: [PATCH 34/52] added missing web-thorough flag to three web-basic modules: oauth, filedownload, azure_realm --- bbot/modules/azure_realm.py | 2 +- bbot/modules/filedownload.py | 2 +- bbot/modules/oauth.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index a3d6ad6ba..33772921e 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -4,7 +4,7 @@ class azure_realm(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] - flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "passive", "safe"] + flags = ["affiliates", "subdomain-enum", "cloud-enum", 
"web-basic", "web-thorough", "passive", "safe"] meta = {"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm'} async def setup(self): diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py index 05db2b9df..5cf190d1f 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -14,7 +14,7 @@ class filedownload(BaseModule): watched_events = ["URL_UNVERIFIED", "HTTP_RESPONSE"] produced_events = [] - flags = ["active", "safe", "web-basic"] + flags = ["active", "safe", "web-basic", "web-thorough"] meta = {"description": "Download common filetypes such as PDF, DOCX, PPTX, etc."} options = { "extensions": [ diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index 33cb38959..0c9c26dbb 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -6,7 +6,7 @@ class OAUTH(BaseModule): watched_events = ["DNS_NAME", "URL_UNVERIFIED"] produced_events = ["DNS_NAME"] - flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "active", "safe"] + flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "web-thorough", "active", "safe"] meta = {"description": "Enumerate OAUTH and OpenID Connect services"} options = {"try_all": False} options_desc = {"try_all": "Check for OAUTH/IODC on every subdomain and URL."} From f89cf0e32d599134b14ccfd9794f60c10e8daeb3 Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Sat, 27 Jan 2024 17:53:24 -0500 Subject: [PATCH 35/52] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 61b6a70a0..fdb9b4e00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ extend-exclude = "(test_step_1/test_manager_*)" [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v1.1.5{% if branch == "dev" %}.{{ distance }}rc{% endif %}' +format-jinja = 'v1.1.6{% if branch == "dev" %}.{{ 
distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From 4e33bb7b7d3a6da276fd5c2dc7fba21aa8e000cf Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Sun, 28 Jan 2024 01:17:57 +0000 Subject: [PATCH 36/52] Refresh module docs --- README.md | 2 +- docs/modules/list_of_modules.md | 6 +++--- docs/scanning/index.md | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index a76c2079e..c84d3c278 100644 --- a/README.md +++ b/README.md @@ -263,7 +263,7 @@ For a full list of modules, including the data types consumed and emitted by eac | passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | | subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | | active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, 
bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | | aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | | web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | | cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | diff --git a/docs/modules/list_of_modules.md 
b/docs/modules/list_of_modules.md index 3c4154ed8..d292efced 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -15,7 +15,7 @@ | dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | | ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | | ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic | HTTP_RESPONSE, URL_UNVERIFIED | | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | | fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | | generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | | git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | @@ -28,7 +28,7 @@ | nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | | ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | | nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | | paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | | paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | | paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | @@ -45,7 +45,7 @@ | affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | | anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, 
web-thorough | DNS_NAME | URL_UNVERIFIED | | azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | | binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 40e90038a..9d36837ce 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -113,7 +113,7 @@ A single module can have multiple flags. For example, the `securitytrails` modul | passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | | subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | | active | 39 | Makes active connections to target systems | ajaxpro, 
badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | | aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | | web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | | cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, 
oauth, subdomain_hijack | From 6026e22bc9b2c017ef7b20423d9827dab75b1a02 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 27 Jan 2024 22:33:20 -0500 Subject: [PATCH 37/52] fix dependabot.yml --- dependabot.yml => .github/dependabot.yml | 1 + 1 file changed, 1 insertion(+) rename dependabot.yml => .github/dependabot.yml (84%) diff --git a/dependabot.yml b/.github/dependabot.yml similarity index 84% rename from dependabot.yml rename to .github/dependabot.yml index 43b90890f..bd3b2c06f 100644 --- a/dependabot.yml +++ b/.github/dependabot.yml @@ -4,4 +4,5 @@ updates: directory: "/" schedule: interval: "weekly" + target-branch: "dev" open-pull-requests-limit: 10 From be093aaacab1192b634a7d51e82c6f8b90b47682 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 29 Jan 2024 14:15:16 -0500 Subject: [PATCH 38/52] update release history --- docs/release_history.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/release_history.md b/docs/release_history.md index 1b68587ef..2f0b16a17 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,3 +1,23 @@ +## v1.1.6 +January 29, 2024 + +## Improvements +- https://github.com/blacklanternsecurity/bbot/pull/1001 +- https://github.com/blacklanternsecurity/bbot/pull/1006 +- https://github.com/blacklanternsecurity/bbot/pull/1010 +- https://github.com/blacklanternsecurity/bbot/pull/1013 +- https://github.com/blacklanternsecurity/bbot/pull/1014 +- https://github.com/blacklanternsecurity/bbot/pull/1015 +- https://github.com/blacklanternsecurity/bbot/pull/1032 + +## Bugfixes +- https://github.com/blacklanternsecurity/bbot/pull/1005 +- https://github.com/blacklanternsecurity/bbot/pull/1022 +- https://github.com/blacklanternsecurity/bbot/pull/1030 +- https://github.com/blacklanternsecurity/bbot/pull/1033 +- https://github.com/blacklanternsecurity/bbot/pull/1034 + + ## v1.1.4 January 11, 2024 From 5d8246a573cf7aee2e2591a482f366fa8949eb19 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 29 
Jan 2024 14:21:47 -0500 Subject: [PATCH 39/52] fixing black pin --- poetry.lock | 69 ++++++++++++++++++-------------------------------- pyproject.toml | 2 +- 2 files changed, 25 insertions(+), 46 deletions(-) diff --git a/poetry.lock b/poetry.lock index fb069c1aa..61374ea00 100644 --- a/poetry.lock +++ b/poetry.lock @@ -139,33 +139,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.1.0" +version = "24.1.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, - {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, - {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, - {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, - {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, - {file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, - {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, - {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, - {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, - {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, - 
{file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, - {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, - {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, - {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, - {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, - {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, - {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, - {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, - {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, - {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, - {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, - {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + 
{file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -992,16 +992,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1847,7 +1837,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1855,16 +1844,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1881,7 +1862,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1889,7 +1869,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2424,4 +2403,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "2f14476c664bdc8d200619c7959f1e52f18b01b252d3e0dd58be239965881bb1" +content-hash = "1d40c762ee4808a285f17096c5140f4e6fa7d236a56f97f0415aa81aef917ea1" diff --git a/pyproject.toml b/pyproject.toml index fdb9b4e00..53d178185 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ pytest-env = "^0.8.2" pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" -black = "24.1.0" +black = "^24.1.1" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From edab5a82fe89a106eba0039ff8d46ca7cfff076f Mon Sep 17 00:00:00 2001 From: TheTechromancer 
Date: Mon, 29 Jan 2024 15:29:38 -0500 Subject: [PATCH 40/52] clarify interactsh polling error --- bbot/core/helpers/interactsh.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 929637dfe..871ecb1c4 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -235,6 +235,8 @@ async def poll(self): r = await self.parent_helper.request( f"https://{self.server}/poll?id={self.correlation_id}&secret={self.secret}", headers=headers ) + if r is None: + raise InteractshError("Error polling interact.sh: No response from server") ret = [] data_list = r.json().get("data", None) From 8bba54cadde118f64c910d3f3104b1d251d79729 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 29 Jan 2024 15:30:32 -0500 Subject: [PATCH 41/52] update release history --- docs/release_history.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/release_history.md b/docs/release_history.md index 2f0b16a17..eef23ba39 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -9,6 +9,7 @@ January 29, 2024 - https://github.com/blacklanternsecurity/bbot/pull/1014 - https://github.com/blacklanternsecurity/bbot/pull/1015 - https://github.com/blacklanternsecurity/bbot/pull/1032 +- https://github.com/blacklanternsecurity/bbot/pull/1040 ## Bugfixes - https://github.com/blacklanternsecurity/bbot/pull/1005 From 47c4c83d8a75276542fac968204d43ca2aaf507d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 29 Jan 2024 17:46:48 -0500 Subject: [PATCH 42/52] poetry update --- poetry.lock | 680 +++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 344 insertions(+), 338 deletions(-) diff --git a/poetry.lock b/poetry.lock index 61374ea00..d1204c670 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,13 +38,13 @@ ansible-core = ">=2.14.7,<2.15.0" [[package]] name = "ansible-core" -version = "2.14.13" +version = "2.14.14" description = "Radically simple IT 
automation" optional = false python-versions = ">=3.9" files = [ - {file = "ansible-core-2.14.13.tar.gz", hash = "sha256:4e1bb334f0c3226ab48c599efe49cd5fe03f25d4558bc06c274ade2ba3e2576a"}, - {file = "ansible_core-2.14.13-py3-none-any.whl", hash = "sha256:65e96d04dce1e5dd415b4681d464f7f9a949d515f623145c4a8bc3468e75f3b0"}, + {file = "ansible-core-2.14.14.tar.gz", hash = "sha256:f06a94a88a372d4db4b3973e465022fbe3545602580864115d21a280accb7ca3"}, + {file = "ansible_core-2.14.14-py3-none-any.whl", hash = "sha256:d1d282b71b9d8fdd515ae045e5909cfa393cfa0e9fecaae2dbbb4d326ab58681"}, ] [package.dependencies] @@ -121,19 +121,22 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] @@ -384,13 +387,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "2.1.0.181" +version = "2.2.0.203" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = ">=3.9,<4.0" files = [ - {file = "cloudcheck-2.1.0.181-py3-none-any.whl", hash = 
"sha256:cb9ebd5d546038974ed13bd7c9edc83fe481af791bf53decf7ecdd9070deb23a"}, - {file = "cloudcheck-2.1.0.181.tar.gz", hash = "sha256:c3b24c72b3f082f87a81bf8542dc2a5177593fe4ea0e983acd2f486a5d127cc9"}, + {file = "cloudcheck-2.2.0.203-py3-none-any.whl", hash = "sha256:8f17944c6183c7ad96b031c4cc3efb1c1addaa23c2d17f3c81ff54c4cd494716"}, + {file = "cloudcheck-2.2.0.203.tar.gz", hash = "sha256:83eb239b024579712a19d736dc9649eff4a3f6e9314db3cd94faa883adbfe46c"}, ] [package.dependencies] @@ -410,63 +413,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = 
"coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = 
"coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = 
"coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = 
"sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = 
"coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -477,47 +480,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, + {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, + {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, + {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, + {file = 
"cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, + {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, + {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, + {file = 
"cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, + {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -551,22 +563,23 @@ files = [ [[package]] name = "dnspython" -version = "2.4.2" +version = "2.5.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, + {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"}, + {file = 
"dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"}, ] [package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"] doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +idna = ["idna (>=2.1)"] +trio = ["trio (>=0.14)"] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "docutils" @@ -658,13 +671,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.38.1" +version = "0.39.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.38.1-py3-none-any.whl", hash = "sha256:334c79d3b5964ade65c05dfcaf53518c576dedd387aaba5c9fd71212f34f1483"}, - {file = "griffe-0.38.1.tar.gz", hash = "sha256:bd68d7da7f3d87bc57eb9962b250db123efd9bbcc06c11c1a91b6e583b2a9361"}, + {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, + {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, ] [package.dependencies] @@ -783,13 +796,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -949,13 +962,13 @@ source = ["Cython (==0.29.37)"] [[package]] name = "markdown" -version = "3.5.1" +version = "3.5.2" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.1-py3-none-any.whl", hash = "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc"}, - {file = "Markdown-3.5.1.tar.gz", hash = "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.dependencies] @@ -967,61 +980,71 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = 
"MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = 
"sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -1110,13 +1133,13 @@ mkdocs = ">=1.1" [[package]] name = "mkdocs-material" -version = "9.5.3" +version = "9.5.6" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.3-py3-none-any.whl", hash = "sha256:76c93a8525cceb0b395b9cedab3428bf518cf6439adef2b940f1c1574b775d89"}, - {file = "mkdocs_material-9.5.3.tar.gz", hash = "sha256:5899219f422f0a6de784232d9d40374416302ffae3c160cacc72969fcc1ee372"}, + {file = "mkdocs_material-9.5.6-py3-none-any.whl", hash 
= "sha256:e115b90fccf5cd7f5d15b0c2f8e6246b21041628b8f590630e7fca66ed7fcf6c"}, + {file = "mkdocs_material-9.5.6.tar.gz", hash = "sha256:5b24df36d8ac6cecd611241ce6f6423ccde3e1ad89f8360c3f76d5565fc2d82a"}, ] [package.dependencies] @@ -1134,7 +1157,7 @@ requests = ">=2.26,<3.0" [package.extras] git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] -imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] @@ -1306,13 +1329,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1358,27 +1381,27 @@ virtualenv = ">=20.10.0" [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = 
"sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -1419,59 +1442,59 @@ files = [ [[package]] name = "pycryptodome" -version = "3.19.1" +version = "3.20.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:694020d2ff985cd714381b9da949a21028c24b86f562526186f6af7c7547e986"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4464b0e8fd5508bff9baf18e6fd4c6548b1ac2ce9862d6965ff6a84ec9cb302a"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:420972f9c62978e852c74055d81c354079ce3c3a2213a92c9d7e37bbc63a26e2"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1bc0c49d986a1491d66d2a56570f12e960b12508b7e71f2423f532e28857f36"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e038ab77fec0956d7aa989a3c647652937fc142ef41c9382c2ebd13c127d5b4a"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-win32.whl", hash = 
"sha256:a991f8ffe8dfe708f86690948ae46442eebdd0fff07dc1b605987939a34ec979"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-win_amd64.whl", hash = "sha256:2c16426ef49d9cba018be2340ea986837e1dfa25c2ea181787971654dd49aadd"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6d0d2b97758ebf2f36c39060520447c26455acb3bcff309c28b1c816173a6ff5"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b8b80ff92049fd042177282917d994d344365ab7e8ec2bc03e853d93d2401786"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd4e7e8bf0fc1ada854688b9b309ee607e2aa85a8b44180f91021a4dd330a928"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:8cf5d3d6cf921fa81acd1f632f6cedcc03f5f68fc50c364cd39490ba01d17c49"}, - {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297"}, - {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = 
"sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2"}, - {file = "pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03"}, - {file = "pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7"}, - {file = "pycryptodome-3.19.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:ed932eb6c2b1c4391e166e1a562c9d2f020bfff44a0e1b108f67af38b390ea89"}, - {file = "pycryptodome-3.19.1-pp27-pypy_73-win32.whl", hash = "sha256:81e9d23c0316fc1b45d984a44881b220062336bbdc340aa9218e8d0656587934"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37e531bf896b70fe302f003d3be5a0a8697737a8d177967da7e23eff60d6483c"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd4e95b0eb4b28251c825fe7aa941fe077f993e5ca9b855665935b86fbb1cc08"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22c80246c3c880c6950d2a8addf156cee74ec0dc5757d01e8e7067a3c7da015"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e70f5c839c7798743a948efa2a65d1fe96bb397fe6d7f2bde93d869fe4f0ad69"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6c3df3613592ea6afaec900fd7189d23c8c28b75b550254f4bd33fe94acb84b9"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08b445799d571041765e7d5c9ca09c5d3866c2f22eeb0dd4394a4169285184f4"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:954d156cd50130afd53f8d77f830fe6d5801bd23e97a69d358fed068f433fbfe"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b7efd46b0b4ac869046e814d83244aeab14ef787f4850644119b1c8b0ec2d637"}, - {file = "pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = 
"sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = 
"pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.0" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = 
"pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, + {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.1" typing-extensions = ">=4.6.1" [package.extras] @@ -1479,116 +1502,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = 
"pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = 
"pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = 
"pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file 
= "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, + {file = 
"pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, + {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, + {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, + {file = 
"pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, + {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, + {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, + {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, + {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, + {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, + {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = 
"sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, + {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, + {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, + 
{file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, + {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, + {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, + {file = 
"pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, + {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, ] [package.dependencies] @@ -1746,18 +1743,18 @@ Werkzeug = ">=2.0.0" [[package]] name = "pytest-httpx" -version = "0.28.0" +version = "0.29.0" description = "Send responses to httpx." optional = false python-versions = ">=3.9" files = [ - {file = "pytest_httpx-0.28.0-py3-none-any.whl", hash = "sha256:045774556a3633688742315a6981aab2806ce93bcbcc8444253ab87bca286800"}, - {file = "pytest_httpx-0.28.0.tar.gz", hash = "sha256:a82505fdf59f19eaaf2853db3f3832b3dee35d3bc58000232db2b65c5fca0614"}, + {file = "pytest_httpx-0.29.0-py3-none-any.whl", hash = "sha256:7d6fd29042e7b98ed98199ded120bc8100c8078ca306952666e89bf8807b95ff"}, + {file = "pytest_httpx-0.29.0.tar.gz", hash = "sha256:ed08ed802e2b315b83cdd16f0b26cbb2b836c29e0fde5c18bc3105f1073e0332"}, ] [package.dependencies] httpx = "==0.26.*" -pytest = "==7.*" +pytest = ">=7,<9" [package.extras] testing = ["pytest-asyncio (==0.23.*)", "pytest-cov (==4.*)"] @@ -1837,6 +1834,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1844,8 +1842,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1862,6 +1868,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1869,6 +1876,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2013,18 +2021,16 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "1.5.1" +version = "2.0.0" description = "File transport adapter for Requests" optional = false python-versions = "*" files = [ - {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, - {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, + {file = "requests-file-2.0.0.tar.gz", hash = 
"sha256:20c5931629c558fda566cacc10cfe2cd502433e628f568c34c80d96a0cc95972"}, ] [package.dependencies] requests = ">=1.0.0" -six = "*" [[package]] name = "resolvelib" @@ -2403,4 +2409,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "1d40c762ee4808a285f17096c5140f4e6fa7d236a56f97f0415aa81aef917ea1" +content-hash = "8d9864610f54050aec62bf75415e5b683a851323d054a38ff36e54d9d5c284e3" diff --git a/pyproject.toml b/pyproject.toml index 53d178185..93172c060 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,6 @@ aioconsole = "^0.6.2" pydantic = "^2.4.2" httpx = "^0.26.0" cloudcheck = "^2.1.0.181" -pytest-httpx = "^0.28.0" tldextract = "^5.1.1" [tool.poetry.group.dev.dependencies] @@ -62,6 +61,7 @@ pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" black = "^24.1.1" +pytest-httpx = "^0.29.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From 22792c825815747414a11dbafcb889fb339ed39a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 29 Jan 2024 17:47:09 -0500 Subject: [PATCH 43/52] fix event distribution stats --- bbot/modules/base.py | 1 + bbot/scanner/manager.py | 2 -- bbot/scanner/stats.py | 4 ---- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index d049f711a..a4dd81001 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -720,6 +720,7 @@ async def __event_postcheck(self, event): if self._type == "output" and not event._stats_recorded: event._stats_recorded = True + self.scan.stats.event_distributed(event) self.scan.stats.event_produced(event) self.debug(f"{event} passed post-check") diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 1fa47fef4..a511e6316 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -412,8 +412,6 @@ async def distribute_event(self, event): if acceptable_dup or graph_important: await 
mod.queue_event(event) - self.scan.stats.event_distributed(event) - async def _worker_loop(self): try: while not self.scan.stopped: diff --git a/bbot/scanner/stats.py b/bbot/scanner/stats.py index cc2b3e576..02be9a801 100644 --- a/bbot/scanner/stats.py +++ b/bbot/scanner/stats.py @@ -15,7 +15,6 @@ def __init__(self, scan): self.scan = scan self.module_stats = {} self.events_emitted_by_type = {} - self.perf_stats = [] def event_distributed(self, event): _increment(self.events_emitted_by_type, event.type) @@ -66,9 +65,6 @@ def table(self): return [header] + table def _make_table(self): - self.perf_stats.sort(key=lambda x: x[-1]) - for callback, runtime in self.perf_stats: - log.info(f"{callback}\t{runtime}") table = self.table() if len(table) == 1: table += [["None", "None", "None"]] From 373682ad54e0ddf57956f5bf23ed76719621b6bd Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 30 Jan 2024 12:54:25 -0500 Subject: [PATCH 44/52] improve event stats counter --- bbot/modules/base.py | 5 ----- bbot/modules/output/base.py | 8 ++++++++ bbot/modules/sslcert.py | 2 +- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index a4dd81001..be0d118c6 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -718,11 +718,6 @@ async def __event_postcheck(self, event): if not filter_result: return False, msg - if self._type == "output" and not event._stats_recorded: - event._stats_recorded = True - self.scan.stats.event_distributed(event) - self.scan.stats.event_produced(event) - self.debug(f"{event} passed post-check") return True, "" diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index a294c23bb..1d4ec9a36 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -41,6 +41,14 @@ def _event_precheck(self, event): return True, "precheck succeeded" + async def _event_postcheck(self, event): + acceptable, reason = await super()._event_postcheck(event) + if acceptable and not 
event._stats_recorded and event.type not in ("FINISHED",): + event._stats_recorded = True + self.scan.stats.event_distributed(event) + self.scan.stats.event_produced(event) + return acceptable, reason + def is_incoming_duplicate(self, event, add=False): is_incoming_duplicate, reason = super().is_incoming_duplicate(event, add=add) # make exception for graph-important events diff --git a/bbot/modules/sslcert.py b/bbot/modules/sslcert.py index b7455ddcd..caca5feca 100644 --- a/bbot/modules/sslcert.py +++ b/bbot/modules/sslcert.py @@ -17,7 +17,7 @@ class sslcert(BaseModule): options = {"timeout": 5.0, "skip_non_ssl": True} options_desc = {"timeout": "Socket connect timeout in seconds", "skip_non_ssl": "Don't try common non-SSL ports"} deps_apt = ["openssl"] - deps_pip = ["pyOpenSSL~=23.1.1"] + deps_pip = ["pyOpenSSL~=24.0.0"] _max_event_handlers = 25 scope_distance_modifier = 1 _priority = 2 From 6cca54b2bef0516d30f9e6e69bf414bb6b0c347d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 30 Jan 2024 14:15:30 -0500 Subject: [PATCH 45/52] fix aggregate tests --- bbot/test/test_step_2/module_tests/test_module_aggregate.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_aggregate.py b/bbot/test/test_step_2/module_tests/test_module_aggregate.py index 1049fb2a2..59f5370b3 100644 --- a/bbot/test/test_step_2/module_tests/test_module_aggregate.py +++ b/bbot/test/test_step_2/module_tests/test_module_aggregate.py @@ -2,7 +2,10 @@ class TestAggregate(ModuleTestBase): - config_overrides = {"dns_resolution": True} + config_overrides = {"dns_resolution": True, "scope_report_distance": 1} + + async def setup_before_prep(self, module_test): + module_test.scan.helpers.dns.mock_dns({("blacklanternsecurity.com", "A"): "1.2.3.4"}) def check(self, module_test, events): filename = next(module_test.scan.home.glob("scan-stats-table*.txt")) From 240bb4b23e38417e4a486b60edb3c0961fd0e325 Mon Sep 17 00:00:00 2001 From: 
liquidsec Date: Thu, 1 Feb 2024 11:02:36 -0500 Subject: [PATCH 46/52] improving serialization regexes, fixing excavate bug, excavate tests --- bbot/modules/internal/excavate.py | 14 ++++--- .../module_tests/test_module_excavate.py | 38 +++++++++++++++++++ 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index da2a16986..5ccc8d36a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -21,8 +21,8 @@ def __init__(self, excavate): async def search(self, content, event, **kwargs): results = set() async for result, name in self._search(content, event, **kwargs): - results.add(result) - for result in results: + results.add((result, name)) + for result, name in results: await self.report(result, name, event, **kwargs) async def _search(self, content, event, **kwargs): @@ -254,13 +254,15 @@ async def report(self, result, name, event, **kwargs): class SerializationExtractor(BaseExtractor): regexes = { "Java": r"(?:[^a-zA-Z0-9+/]|^)(rO0[a-zA-Z0-9+/]+={,2})", - ".NET": r"AAEAAAD//[a-zA-Z0-9+/]+={,2}", - "PHP": r"YTo[xyz0123456][a-zA-Z0-9+/]+={,2}", - "Possible Compressed": r"H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2}", + ".NET": r"(?:[^a-zA-Z0-9+/]|^)(AAEAAAD//[a-zA-Z0-9+/]+={,2})", + "PHP (Array)": r"(?:[^a-zA-Z0-9+/]|^)(YTo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "PHP (String)": r"(?:[^a-zA-Z0-9+/]|^)(czo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "PHP (Object)": r"(?:[^a-zA-Z0-9+/]|^)(Tzo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "Possible Compressed": r"(?:[^a-zA-Z0-9+/]|^)(H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2})", } async def report(self, result, name, event, **kwargs): - description = f"{name} serialized object found" + description = f"{name} serialized object found: [{self.excavate.helpers.truncate_string(result,2000)}]" await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) diff --git 
a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 0add8aea6..2be83574b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -280,3 +280,41 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert any(e.data == "asdffoo.test.notreal" for e in events) assert any(e.data == "https://asdffoo.test.notreal/some/path" for e in events) + + +class TestExcavateSerializationNegative(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "

llsdtVVFlJxhcGGYTo2PMGTRNFVKZxeKTVbhyosM3Sm/5apoY1/yUmN6HVcn+Xt798SPzgXQlZMttsqp1U1iJFmFO2aCGL/v3tmm/fs7itYsoNnJCelWvm9P4ic1nlKTBOpMjT5B5NmriZwTAzZ5ASjCKcmN8Vh=

" + ) + + def check(self, module_test, events): + assert not any(e.type == "FINDING" for e in events), "Found Results without word boundary" + + +class TestExcavateSerializationPositive(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + """ +

.NET

+

AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==

+

Java

+

rO0ABXQADUhlbGxvLCB3b3JsZCE=

+

PHP (string)

+

czoyNDoiSGVsbG8sIHdvcmxkISBNb3JlIHRleHQuIjs=

+

PHP (array)

+

YTo0OntpOjA7aToxO2k6MTtzOjE0OiJzZWNvbmQgZWxlbWVudCI7aToyO2k6MztpOjM7czoxODoiTW9yZSB0ZXh0IGluIGFycmF5Ijt9

+

PHP (object)

+

TzoxMjoiU2FtcGxlT2JqZWN0IjoyOntzOjg6InByb3BlcnR5IjtzOjEzOiJJbml0aWFsIHZhbHVlIjtzOjE2OiJhZGRpdGlvbmFsU3RyaW5nIjtzOjIxOiJFeHRyYSB0ZXh0IGluIG9iamVjdC4iO30=

+

Compression

+

H4sIAAAAAAAA/yu2MjS2UvJIzcnJ11Eozy/KSVFUsgYAZN5upRUAAAA=

+ +""" + ) + + def check(self, module_test, events): + + for serialize_type in ["Java", ".NET", "PHP (Array)", "PHP (String)", "PHP (Object)", "Possible Compressed"]: + assert any( + e.type == "FINDING" and serialize_type in e.data["description"] for e in events + ), f"Did not find {serialize_type} Serialized Object" From ec1ea92ef8b739b9e455bbbe4bbf2ad6183256a8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 13:44:53 -0500 Subject: [PATCH 47/52] include source email address in PASSWORD, USERNAME, and HASHED_PASSWORD --- bbot/core/event/base.py | 9 ++--- bbot/modules/credshed.py | 19 +++++----- bbot/modules/dehashed.py | 19 +++++----- bbot/modules/internal/speculate.py | 10 ++++++ bbot/modules/templates/credential_leak.py | 33 ----------------- bbot/scanner/manager.py | 2 +- bbot/test/test_step_1/test_events.py | 4 +++ .../module_tests/test_module_credshed.py | 23 +++++++++--- .../module_tests/test_module_dehashed.py | 36 +++++++++++++++---- 9 files changed, 88 insertions(+), 67 deletions(-) delete mode 100644 bbot/modules/templates/credential_leak.py diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index f46faad01..895b8e2d5 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1246,10 +1246,11 @@ def make_event( """ # allow tags to be either a string or an array - if tags is not None: - if isinstance(tags, str): - tags = [tags] - tags = list(tags) + if not tags: + tags = [] + elif isinstance(tags, str): + tags = [tags] + tags = list(tags) if is_event(data): if scan is not None and not data.scan: diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py index 09ed57377..382644007 100644 --- a/bbot/modules/credshed.py +++ b/bbot/modules/credshed.py @@ -1,9 +1,9 @@ from contextlib import suppress -from bbot.modules.templates.credential_leak import credential_leak +from bbot.modules.base import BaseModule -class credshed(credential_leak): +class credshed(BaseModule): watched_events = ["DNS_NAME"] 
produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME", "EMAIL_ADDRESS"] flags = ["passive", "safe"] @@ -17,6 +17,7 @@ class credshed(credential_leak): "password": "Credshed password", "credshed_url": "URL of credshed server", } + target_only = True async def setup(self): self.base_url = self.config.get("credshed_url", "").rstrip("/") @@ -40,7 +41,7 @@ async def setup(self): return await super().setup() async def handle_event(self, event): - query = self.make_query(event) + query = event.data cs_query = await self.helpers.request( f"{self.base_url}/api/search", method="POST", @@ -77,10 +78,10 @@ async def handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: await self.emit_event(email_event) - if user and not self.already_seen(f"{email}:{user}"): - await self.emit_event(user, "USERNAME", source=email_event, tags=tags) - if pw and not self.already_seen(f"{email}:{pw}"): - await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + if user: + await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + if pw: + await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) for h_pw in hashes: - if h_pw and not self.already_seen(f"{email}:{h_pw}"): - await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + if h_pw: + await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py index 4b3546712..c1a35c419 100644 --- a/bbot/modules/dehashed.py +++ b/bbot/modules/dehashed.py @@ -1,15 +1,16 @@ from contextlib import suppress -from bbot.modules.templates.credential_leak import credential_leak +from bbot.modules.base import BaseModule -class dehashed(credential_leak): +class dehashed(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME"] flags = ["passive", "safe", 
"email-enum"] meta = {"description": "Execute queries against dehashed.com for exposed credentials", "auth_required": True} options = {"username": "", "api_key": ""} options_desc = {"username": "Email Address associated with your API key", "api_key": "DeHashed API Key"} + target_only = True base_url = "https://api.dehashed.com/search" @@ -50,15 +51,15 @@ async def handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: await self.emit_event(email_event) - if user and not self.already_seen(f"{email}:{user}"): - await self.emit_event(user, "USERNAME", source=email_event, tags=tags) - if pw and not self.already_seen(f"{email}:{pw}"): - await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) - if h_pw and not self.already_seen(f"{email}:{h_pw}"): - await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + if user: + await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + if pw: + await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) + if h_pw: + await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) async def query(self, event): - query = f"domain:{self.make_query(event)}" + query = f"domain:{event.data}" url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}" page = 0 num_entries = 0 diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 8f51d5d95..7aaf12d30 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -1,6 +1,7 @@ import random import ipaddress +from bbot.core.helpers import validators from bbot.modules.internal.base import BaseInternalModule @@ -21,6 +22,7 @@ class speculate(BaseInternalModule): "STORAGE_BUCKET", "SOCIAL", "AZURE_TENANT", + "USERNAME", ] produced_events = ["DNS_NAME", "OPEN_TCP_PORT", "IP_ADDRESS", "FINDING", "ORG_STUB"] flags = ["passive"] @@ -156,6 
+158,14 @@ async def handle_event(self, event): stub_event.scope_distance = event.scope_distance await self.emit_event(stub_event) + # USERNAME --> EMAIL + if event.type == "USERNAME": + email = event.data.split(":", 1)[-1] + if validators.soft_validate(email, "email"): + email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=["affiliate"]) + email_event.scope_distance = event.scope_distance + await self.emit_event(email_event) + async def filter_event(self, event): # don't accept errored DNS_NAMEs if any(t in event.tags for t in ("unresolved", "a-error", "aaaa-error")): diff --git a/bbot/modules/templates/credential_leak.py b/bbot/modules/templates/credential_leak.py deleted file mode 100644 index 5085e197a..000000000 --- a/bbot/modules/templates/credential_leak.py +++ /dev/null @@ -1,33 +0,0 @@ -from bbot.modules.base import BaseModule - - -class credential_leak(BaseModule): - """ - A typical free API-based subdomain enumeration module - Inherited by many other modules including sublist3r, dnsdumpster, etc. 
- """ - - async def setup(self): - self.queries_processed = set() - self.data_seen = set() - return True - - async def filter_event(self, event): - query = self.make_query(event) - query_hash = hash(query) - if query_hash not in self.queries_processed: - self.queries_processed.add(query_hash) - return True - return False, f'Already processed "{query}"' - - def make_query(self, event): - if "target" in event.tags: - return event.data - _, domain = self.helpers.split_domain(event.data) - return domain - - def already_seen(self, item): - h = hash(item) - already_seen = h in self.data_seen - self.data_seen.add(h) - return already_seen diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index a511e6316..c0f598230 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -275,7 +275,7 @@ async def _emit_event(self, event, **kwargs): if ( event.host and event.type not in ("DNS_NAME", "DNS_NAME_UNRESOLVED", "IP_ADDRESS", "IP_RANGE") - and not str(event.module) == "speculate" + and not (event.type in ("OPEN_TCP_PORT", "URL_UNVERIFIED") and str(event.module) == "speculate") ): source_module = self.scan.helpers._make_dummy_module("host", _type="internal") source_module._priority = 4 diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 0f30f6498..ccbf17c1f 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -250,6 +250,10 @@ async def test_events(events, scan, helpers, bbot_config): corrected_event3 = scan.make_event("wat.asdf.com", "IP_ADDRESS", dummy=True) assert corrected_event3.type == "DNS_NAME" + corrected_event4 = scan.make_event("bob@evilcorp.com", "USERNAME", dummy=True) + assert corrected_event4.type == "EMAIL_ADDRESS" + assert "affiliate" in corrected_event4.tags + test_vuln = scan.make_event( {"host": "EVILcorp.com", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True ) diff --git 
a/bbot/test/test_step_2/module_tests/test_module_credshed.py b/bbot/test/test_step_2/module_tests/test_module_credshed.py index 7de642412..4b9566077 100644 --- a/bbot/test/test_step_2/module_tests/test_module_credshed.py +++ b/bbot/test/test_step_2/module_tests/test_module_credshed.py @@ -74,18 +74,31 @@ def check(self, module_test, events): assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "judy@blacklanternsecurity.com"]) assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "tim@blacklanternsecurity.com"]) assert 1 == len( - [e for e in events if e.type == "HASHED_PASSWORD" and e.data == "539FE8942DEADBEEFBC49E6EB2F175AC"] + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:539FE8942DEADBEEFBC49E6EB2F175AC" + ] ) assert 1 == len( - [e for e in events if e.type == "HASHED_PASSWORD" and e.data == "D2D8F0E9A4A2DEADBEEF1AC80F36D61F"] + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:D2D8F0E9A4A2DEADBEEF1AC80F36D61F" + ] ) assert 1 == len( [ e for e in events if e.type == "HASHED_PASSWORD" - and e.data == "$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" + and e.data + == "judy@blacklanternsecurity.com:$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" ] ) - assert 1 == len([e for e in events if e.type == "PASSWORD" and e.data == "TimTamSlam69"]) - assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim"]) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:tim"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_dehashed.py b/bbot/test/test_step_2/module_tests/test_module_dehashed.py index 8b20c85c5..ab1cc20aa 100644 --- 
a/bbot/test/test_step_2/module_tests/test_module_dehashed.py +++ b/bbot/test/test_step_2/module_tests/test_module_dehashed.py @@ -37,7 +37,11 @@ class TestDehashed(ModuleTestBase): - config_overrides = {"modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}} + modules_overrides = ["dehashed", "speculate"] + config_overrides = { + "scope_report_distance": 2, + "modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}, + } async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( @@ -46,8 +50,9 @@ async def setup_before_prep(self, module_test): ) def check(self, module_test, events): - assert len(events) == 9 - assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert len(events) == 11 + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com"]) + assert 1 == len([e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity"]) assert 1 == len( [ e @@ -56,6 +61,22 @@ def check(self, module_test, events): and e.data == "bob@bob.com" and e.scope_distance == 1 and "affiliate" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "bob.com" and e.scope_distance == 1 and "affiliate" in e.tags + ] + ) + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "USERNAME" + and e.data == "bob@blacklanternsecurity.com:bob@bob.com" and e.source.data == "bob@blacklanternsecurity.com" ] ) @@ -65,8 +86,11 @@ def check(self, module_test, events): e for e in events if e.type == "HASHED_PASSWORD" - and e.data == "$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" + and e.data + == "bob@blacklanternsecurity.com:$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" ] ) - assert 1 == len([e for e in events if e.type == "PASSWORD" and 
e.data == "TimTamSlam69"]) - assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "timmy"]) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:timmy"]) From 6ede3229f9f0d7a1d945d717ccda71ce98b07f92 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 17:43:02 -0500 Subject: [PATCH 48/52] JSON module: option for SIEM-friendly output --- bbot/modules/output/json.py | 14 +++++++++++--- .../test_step_2/module_tests/test_module_json.py | 15 +++++++++++++++ 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index f13cf7808..a380ac9a1 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -7,16 +7,24 @@ class JSON(BaseOutputModule): watched_events = ["*"] meta = {"description": "Output to Newline-Delimited JSON (NDJSON)"} - options = {"output_file": "", "console": False} - options_desc = {"output_file": "Output to file", "console": "Output to console"} + options = {"output_file": "", "console": False, "siem_friendly": False} + options_desc = { + "output_file": "Output to file", + "console": "Output to console", + "siem_friendly": "Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc.", + } _preserve_graph = True async def setup(self): self._prep_output_dir("output.ndjson") + self.siem_friendly = self.config.get("siem_friendly", False) return True async def handle_event(self, event): - event_str = json.dumps(dict(event)) + event_json = dict(event) + if self.siem_friendly: + event_json["data"] = {event.type: event_json.pop("data", "")} + event_str = json.dumps(event_json) if self.file is not None: self.file.write(event_str + "\n") self.file.flush() diff --git a/bbot/test/test_step_2/module_tests/test_module_json.py 
b/bbot/test/test_step_2/module_tests/test_module_json.py index 6dafb68a5..1e67db085 100644 --- a/bbot/test/test_step_2/module_tests/test_module_json.py +++ b/bbot/test/test_step_2/module_tests/test_module_json.py @@ -12,3 +12,18 @@ def check(self, module_test, events): e = event_from_json(json.loads(lines[0])) assert e.type == "SCAN" assert e.data == f"{module_test.scan.name} ({module_test.scan.id})" + + +class TestJSONSIEMFriendly(ModuleTestBase): + modules_overrides = ["json"] + config_overrides = {"output_modules": {"json": {"siem_friendly": True}}} + + def check(self, module_test, events): + txt_file = module_test.scan.home / "output.ndjson" + lines = list(module_test.scan.helpers.read_file(txt_file)) + passed = False + for line in lines: + e = json.loads(line) + if e["data"] == {"DNS_NAME": "blacklanternsecurity.com"}: + passed = True + assert passed From 13af94a529e803071b423b34cc3618c7827d89e8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 17:50:24 -0500 Subject: [PATCH 49/52] updated docs --- docs/scanning/tips_and_tricks.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 0572bedb2..1ed370e52 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -53,6 +53,24 @@ You can also pair the web spider with subdomain enumeration: bbot -t evilcorp.com -f subdomain-enum -c spider.yml ``` +### Ingesting BBOT Data Into SIEM (Elastic, Splunk) + +If your goal is to feed BBOT data into a SIEM such as Elastic, make sure to enable this option when scanning: + +```bash +bbot -t evilcorp.com -c output_modules.json.siem_friendly=true +``` + +This nests the event's `.data` beneath its event type like so: +```json +{ + "type": "DNS_NAME", + "data": { + "DNS_NAME": "blacklanternsecurity.com" + } +} +``` + ### Custom HTTP Proxy Web pentesters may appreciate BBOT's ability to quickly populate Burp Suite site maps for all subdomains 
in a target. If your scan includes gowitness, this will capture the traffic as if you manually visited each website in your browser -- including auxiliary web resources and javascript API calls. To accomplish this, set the `http_proxy` config option like so: From a83dc73990933f504c274d9be5355a3096ac8133 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Thu, 1 Feb 2024 22:51:11 +0000 Subject: [PATCH 50/52] Refresh module docs --- docs/modules/list_of_modules.md | 224 ++++++++++++++++---------------- docs/scanning/events.md | 2 +- 2 files changed, 113 insertions(+), 113 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index d292efced..e67a11152 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -1,118 +1,118 @@ # List of Modules -| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | -|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| -| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | -| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, 
cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | -| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | -| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | -| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | -| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. 
| active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | -| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | -| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | -| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | -| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | -| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | -| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | -| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | -| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | -| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | -| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | -| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | -| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | -| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | -| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| 
url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | -| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | -| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | -| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | -| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | -| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | -| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | -| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | -| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | -| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | -| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | -| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | -| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | -| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | -| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | -| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | -| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | -| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | -| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | -| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | -| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | -| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | -| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, 
URL_UNVERIFIED | -| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | -| csv | output | No | Output to CSV | | * | | -| discord | output | No | Message a Discord channel when certain events are encountered | | * | | -| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | -| http | output | No | Send every event to a custom URL via a web request | | * | | -| human | output | No | Output to text | | * | | -| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | -| neo4j | output | No | Output to Neo4j | | * | | -| python | output | No | Output via Python API | | * | | -| slack | output | No | Message a Slack channel when certain events are encountered | | * | | -| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | -| teams | output | No | Message a Teams channel when certain events are encountered | | * | | -| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | -| websocket | output | No | Output to websockets | | * | | -| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | -| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | -| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | +| Module 
| Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| +| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | +| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | +| dastardly | scan | No | Lightweight web 
application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | +| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | +| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | +| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | +| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | +| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | +| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | +| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | +| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | +| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. 
| active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. | active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | +| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | +| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | +| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | +| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | +| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | +| 
telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | +| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | +| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | +| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | +| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | +| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | +| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | +| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | +| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | +| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | +| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | +| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | +| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | +| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | +| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | +| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | +| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | +| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | +| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | +| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | +| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | +| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, 
URL_UNVERIFIED | +| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | +| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | +| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | +| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | 
For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/scanning/events.md b/docs/scanning/events.md index a6a50d554..362a0b958 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -74,7 +74,7 @@ Below is a full list of event types along with which modules produce/consume the | URL | 19 | 2 | ajaxpro, asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | | URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | | URL_UNVERIFIED | 5 | 13 | filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, postman, robots, urlscan, wayback | -| USERNAME | 0 | 2 | | credshed, dehashed | +| USERNAME | 1 | 2 | speculate | credshed, dehashed | | VHOST | 1 | 1 | web_report | vhost | | VULNERABILITY | 2 | 7 | asset_inventory, web_report | ajaxpro, badsecrets, dastardly, generic_ssrf, internetdb, nuclei, telerik | | WAF | 1 | 1 | asset_inventory | wafw00f | From bdac5d66562ad4a54429d76410e9a65b5b93b210 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 17:52:28 -0500 Subject: [PATCH 51/52] updated docs --- docs/scanning/tips_and_tricks.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 1ed370e52..5e115bcde 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -55,7 +55,7 @@ bbot -t evilcorp.com -f subdomain-enum -c spider.yml ### Ingesting BBOT Data Into SIEM (Elastic, Splunk) -If your goal is to feed BBOT data into a SIEM such as Elastic, make sure to enable this option when scanning: +If your goal is to feed BBOT data into a SIEM such as Elastic, be sure to enable this option when scanning: ```bash bbot 
-t evilcorp.com -c output_modules.json.siem_friendly=true From 3f7c3ac29539bd61c4df85a775a996ed3a8dd02a Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Fri, 2 Feb 2024 22:03:08 +0000 Subject: [PATCH 52/52] Refresh module docs --- docs/scanning/configuration.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 0ebfc89dd..2ed2d61de 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -353,6 +353,7 @@ Many modules accept their own configuration options. These options have the abil | output_modules.human.output_file | str | Output to file | | | output_modules.json.console | bool | Output to console | False | | output_modules.json.output_file | str | Output to file | | +| output_modules.json.siem_friendly | bool | Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc. | False | | output_modules.neo4j.password | str | Neo4j password | bbotislife | | output_modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 | | output_modules.neo4j.username | str | Neo4j username | neo4j |