From 00afbfdd78868693f953ff295e498793c44ee659 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 16 Jan 2024 13:54:46 -0500 Subject: [PATCH 001/159] fix event stats tracking bug --- bbot/scanner/manager.py | 6 +-- bbot/test/test_step_1/test_modules_basic.py | 54 +++++++++++++++++++++ 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 7f320dbe2..731787926 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -168,7 +168,6 @@ async def _emit_event(self, event, **kwargs): - Updating scan statistics. """ log.debug(f"Emitting {event}") - event_distributed = False try: on_success_callback = kwargs.pop("on_success_callback", None) abort_if = kwargs.pop("abort_if", None) @@ -269,7 +268,6 @@ async def _emit_event(self, event, **kwargs): await self.scan.helpers.execute_sync_or_async(on_success_callback, event) await self.distribute_event(event) - event_distributed = True # speculate DNS_NAMES and IP_ADDRESSes from other event types source_event = event @@ -328,8 +326,6 @@ async def _emit_event(self, event, **kwargs): finally: event._resolved.set() - if event_distributed: - self.scan.stats.event_distributed(event) log.debug(f"{event.module}.emit_event() finished for {event}") def hash_event_graph(self, event): @@ -415,6 +411,8 @@ async def distribute_event(self, event): if acceptable_dup or graph_important: await mod.queue_event(event) + self.scan.stats.event_distributed(event) + async def _worker_loop(self): try: while not self.scan.stopped: diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index e21a43aa0..4503582d8 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -289,3 +289,57 @@ async def test_modules_basic_perdomainonly(scan, helpers, events, bbot_config, b else: assert valid_1 == True assert valid_2 == True + + +@pytest.mark.asyncio +async def 
test_modules_basic_stats(helpers, events, bbot_config, bbot_scanner, httpx_mock, monkeypatch): + from bbot.modules.base import BaseModule + + class dummy(BaseModule): + _name = "dummy" + watched_events = ["*"] + + async def handle_event(self, event): + self.emit_event( + {"host": "www.evilcorp.com", "url": "http://www.evilcorp.com", "description": "asdf"}, "FINDING", event + ) + + scan = bbot_scanner( + "evilcorp.com", + config=bbot_config, + force_start=True, + ) + scan.helpers.dns.mock_dns({("evilcorp.com", "A"): "127.0.254.1", ("www.evilcorp.com", "A"): "127.0.254.2"}) + + scan.modules["dummy"] = dummy(scan) + events = [e async for e in scan.async_start()] + + assert len(events) == 3 + + assert set(scan.stats.module_stats) == {"dummy", "python", "TARGET"} + + target_stats = scan.stats.module_stats["TARGET"] + assert target_stats.emitted == {"SCAN": 1, "DNS_NAME": 1} + assert target_stats.emitted_total == 2 + assert target_stats.produced == {"SCAN": 1, "DNS_NAME": 1} + assert target_stats.produced_total == 2 + assert target_stats.consumed == {} + assert target_stats.consumed_total == 0 + + dummy_stats = scan.stats.module_stats["dummy"] + assert dummy_stats.emitted == {"FINDING": 1} + assert dummy_stats.emitted_total == 1 + assert dummy_stats.produced == {"FINDING": 1} + assert dummy_stats.produced_total == 1 + assert dummy_stats.consumed == {"SCAN": 1, "DNS_NAME": 1} + assert dummy_stats.consumed_total == 2 + + python_stats = scan.stats.module_stats["python"] + assert python_stats.emitted == {} + assert python_stats.emitted_total == 0 + assert python_stats.produced == {} + assert python_stats.produced_total == 0 + assert python_stats.consumed == {"SCAN": 1, "FINDING": 1, "DNS_NAME": 1} + assert python_stats.consumed_total == 3 + + assert scan.stats.events_emitted_by_type == {"SCAN": 1, "FINDING": 1, "DNS_NAME": 1} From ff6939eecf45f417fcf2383b82a405a4099d6846 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 16 Jan 2024 17:32:01 -0500 Subject: [PATCH 
002/159] added http status code, http title, waf to asset inventory --- bbot/core/event/base.py | 20 ++++++++++- bbot/core/helpers/misc.py | 44 +++++++++++++++++++++++ bbot/modules/output/asset_inventory.py | 49 +++++++++++++++++++++++--- bbot/modules/output/base.py | 2 +- bbot/modules/wafw00f.py | 6 ++++ bbot/scanner/manager.py | 2 ++ bbot/test/test_step_1/test_events.py | 4 ++- bbot/test/test_step_1/test_helpers.py | 6 ++++ 8 files changed, 126 insertions(+), 7 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb16fda7b..142878f58 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1,3 +1,4 @@ +import re import json import asyncio import logging @@ -866,6 +867,8 @@ def _words(self): class URL_UNVERIFIED(BaseEvent): + _status_code_regex = re.compile(r"^status-(\d{1,3})$") + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.web_spider_distance = getattr(self.source, "web_spider_distance", 0) @@ -921,6 +924,14 @@ def _data_id(self): data = "spider-danger" + data return data + @property + def status_code(self): + for t in self.tags: + match = self._status_code_regex.match(t) + if match: + return int(match.groups()[0]) + return 0 + class URL(URL_UNVERIFIED): def sanitize_data(self, data): @@ -973,7 +984,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # count number of consecutive redirects self.num_redirects = getattr(self.source, "num_redirects", 0) - if str(self.data.get("status_code", 0)).startswith("3"): + if str(self.status_code).startswith("3"): self.num_redirects += 1 def sanitize_data(self, data): @@ -1001,6 +1012,13 @@ def _words(self): def _pretty_string(self): return f'{self.data["hash"]["header_mmh3"]}:{self.data["hash"]["body_mmh3"]}' + @property + def status_code(self): + try: + return int(self.data.get("status_code", 0)) + except (ValueError, TypeError): + return 0 + class VULNERABILITY(DictHostEvent): _always_emit = True diff --git 
a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index f62560704..8f788aae6 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -389,6 +389,50 @@ def url_parents(u): u = parent +def best_http_status(code1, code2): + """ + Determine the better HTTP status code between two given codes. + + The 'better' status code is considered based on typical usage and priority in HTTP communication. + Lower codes are generally better than higher codes. Within the same class (e.g., 2xx), a lower code is better. + Between different classes, the order of preference is 2xx > 3xx > 1xx > 4xx > 5xx. + + Args: + code1 (int): The first HTTP status code. + code2 (int): The second HTTP status code. + + Returns: + int: The better HTTP status code between the two provided codes. + + Examples: + >>> best_http_status(200, 404) + 200 + >>> best_http_status(500, 400) + 400 + >>> best_http_status(301, 302) + 301 + """ + + # Classify the codes into their respective categories (1xx, 2xx, 3xx, 4xx, 5xx) + def classify_code(code): + return int(code) // 100 + + class1 = classify_code(code1) + class2 = classify_code(code2) + + # Priority order for classes + priority_order = {2: 1, 3: 2, 1: 3, 4: 4, 5: 5} + + # Compare based on class priority + p1 = priority_order.get(class1, 10) + p2 = priority_order.get(class2, 10) + if p1 != p2: + return code1 if p1 < p2 else code2 + + # If in the same class, the lower code is better + return min(code1, code2) + + def tldextract(data): """ Extracts the subdomain, domain, and suffix from a URL string. 
diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index db9fcd946..c899d9723 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -3,7 +3,7 @@ from contextlib import suppress from .csv import CSV -from bbot.core.helpers.misc import make_ip_type, is_ip, is_port +from bbot.core.helpers.misc import make_ip_type, is_ip, is_port, best_http_status severity_map = { "INFO": 0, @@ -21,7 +21,17 @@ class asset_inventory(CSV): - watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "URL", "FINDING", "VULNERABILITY", "TECHNOLOGY", "IP_ADDRESS"] + watched_events = [ + "OPEN_TCP_PORT", + "DNS_NAME", + "URL", + "FINDING", + "VULNERABILITY", + "TECHNOLOGY", + "IP_ADDRESS", + "WAF", + "HTTP_RESPONSE", + ] produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] meta = {"description": "Output to an asset inventory style flattened CSV file"} options = {"output_file": "", "use_previous": False, "summary_netmask": 16} @@ -31,7 +41,18 @@ class asset_inventory(CSV): "summary_netmask": "Subnet mask to use when summarizing IP addresses at end of scan", } - header_row = ["Host", "Provider", "IP(s)", "Status", "Open Ports", "Risk Rating", "Findings", "Description"] + header_row = [ + "Host", + "Provider", + "IP(s)", + "HTTP Status", + "HTTP Title", + "Open Ports", + "Risk Rating", + "Findings", + "Description", + "WAF", + ] filename = "asset-inventory.csv" async def setup(self): @@ -103,11 +124,13 @@ def sort_key(asset): "Host": host, "Provider": getattr(asset, "provider", ""), "IP(s)": ",".join(ips), - "Status": "Active" if asset.ports else "N/A", + "HTTP Status": str(getattr(asset, "http_status", 0)), + "HTTP Title": str(getattr(asset, "http_title", "")), "Open Ports": ",".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), + "WAF": getattr(asset, "waf", ""), } 
row.update(asset.custom_fields) self.writerow(row) @@ -191,8 +214,11 @@ def __init__(self, host): self.status = "UNKNOWN" self.risk_rating = 0 self.provider = "" + self.waf = "" self.technologies = set() self.custom_fields = {} + self.http_status = 0 + self.http_title = "" def absorb_csv_row(self, row): # host @@ -230,6 +256,11 @@ def absorb_event(self, event): if not is_ip(event.host): self.host = event.host + http_status = getattr(event, "status_code", 0) + update_http_status = best_http_status(http_status, self.http_status) == http_status + if update_http_status: + self.http_status = http_status + self.ip_addresses = set(_make_ip_list(event.resolved_hosts)) if event.port: @@ -251,6 +282,16 @@ def absorb_event(self, event): if event.type == "TECHNOLOGY": self.technologies.add(event.data["technology"]) + if event.type == "WAF": + if waf := event.data.get("WAF", ""): + if update_http_status or not self.waf: + self.waf = waf + + if event.type == "HTTP_RESPONSE": + if title := event.data.get("title", ""): + if update_http_status or not self.http_title: + self.http_title = title + for tag in event.tags: if tag.startswith("cdn-") or tag.startswith("cloud-"): self.provider = tag diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index c82d96304..a294c23bb 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -27,7 +27,7 @@ def _event_precheck(self, event): # output module specific stuff # omitted events such as HTTP_RESPONSE etc. 
- if event._omit: + if event._omit and not event.type in self.get_watched_events(): return False, "_omit is True" # force-output certain events to the graph diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index 836d14ea2..f80979619 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py @@ -1,6 +1,12 @@ from bbot.modules.base import BaseModule from wafw00f import main as wafw00f_main +# disable wafw00f logging +import logging + +wafw00f_logger = logging.getLogger("wafw00f") +wafw00f_logger.setLevel(logging.CRITICAL + 100) + class wafw00f(BaseModule): """ diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 731787926..efeddeb82 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -195,6 +195,8 @@ async def _emit_event(self, event, **kwargs): for ip in ips: resolved_hosts.add(ip) + event._dns_children = dns_children + # kill runaway DNS chains dns_resolve_distance = getattr(event, "dns_resolve_distance", 0) if dns_resolve_distance >= self.scan.helpers.dns.max_dns_resolve_distance: diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 675385784..d5c4a1b65 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -91,7 +91,9 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("https://evilcorp.com.:666", dummy=True) == "https://evilcorp.com:666/" assert scan.make_event("https://[bad::c0de]", dummy=True).with_port().geturl() == "https://[bad::c0de]:443/" assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" - assert "status-200" in scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]).tags + url_event = scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]) + assert "status-200" in url_event.tags + assert url_event.status_code == 200 with pytest.raises(ValidationError, match=".*status 
tag.*"): scan.make_event("https://evilcorp.com", "URL", events.ipv4_url) diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 0c50f65eb..ee62aa8d8 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -159,6 +159,12 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "]:22/my-file.csv", ) + assert helpers.best_http_status(200, 404) == 200 + assert helpers.best_http_status(500, 400) == 400 + assert helpers.best_http_status(301, 302) == 301 + assert helpers.best_http_status(0, 302) == 302 + assert helpers.best_http_status(500, 0) == 500 + assert helpers.split_domain("www.evilcorp.co.uk") == ("www", "evilcorp.co.uk") assert helpers.split_domain("asdf.www.test.notreal") == ("asdf.www", "test.notreal") assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") From c389563b1aae908708a9d26589def19f6d5f1dba Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:14:09 -0500 Subject: [PATCH 003/159] include DNS records in asset inventory CSV --- bbot/modules/output/asset_inventory.py | 11 ++++++++++- bbot/scanner/manager.py | 3 +-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index c899d9723..962609a92 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -33,7 +33,7 @@ class asset_inventory(CSV): "HTTP_RESPONSE", ] produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] - meta = {"description": "Output to an asset inventory style flattened CSV file"} + meta = {"description": "Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV"} options = {"output_file": "", "use_previous": False, "summary_netmask": 16} options_desc = { "output_file": "Set a custom output file", @@ -52,6 +52,7 @@ class asset_inventory(CSV): "Findings", "Description", "WAF", + "DNS Records", ] filename = "asset-inventory.csv" @@ -131,6 +132,7 @@ def sort_key(asset): "Findings": "\n".join(findings_and_vulns), "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), + "DNS Records": ",".join(getattr(asset, "dns_records", [])), } row.update(asset.custom_fields) self.writerow(row) @@ -208,6 +210,7 @@ class Asset: def __init__(self, host): self.host = host self.ip_addresses = set() + self.dns_records = [] self.ports = set() self.findings = set() self.vulnerabilities = set() @@ -256,6 +259,12 @@ def absorb_event(self, event): if not is_ip(event.host): self.host = event.host + dns_children = getattr(event, "_dns_children", {}) + if dns_children and not self.dns_records: + for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): + for record in sorted(records): + self.dns_records.append(f"{rdtype}:{record}") + http_status = getattr(event, "status_code", 0) update_http_status = best_http_status(http_status, self.http_status) == http_status if update_http_status: diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index efeddeb82..1fa47fef4 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -195,8 +195,6 @@ async def _emit_event(self, event, **kwargs): for ip in ips: resolved_hosts.add(ip) - event._dns_children = dns_children - # kill runaway DNS chains dns_resolve_distance = getattr(event, "dns_resolve_distance", 0) if dns_resolve_distance >= self.scan.helpers.dns.max_dns_resolve_distance: @@ -206,6 +204,7 @@ async def _emit_event(self, event, **kwargs): dns_children = {} if event.type in ("DNS_NAME", "IP_ADDRESS"): + event._dns_children = dns_children for tag in dns_tags: event.add_tag(tag) 
From 31ae4d603e97aeb95473f247ff5db74a4e264f66 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:16:11 -0500 Subject: [PATCH 004/159] Description --> Technologies --- bbot/modules/output/asset_inventory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 962609a92..6328334c5 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -50,7 +50,7 @@ class asset_inventory(CSV): "Open Ports", "Risk Rating", "Findings", - "Description", + "Technologies", "WAF", "DNS Records", ] @@ -130,7 +130,7 @@ def sort_key(asset): "Open Ports": ",".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), - "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), + "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), "DNS Records": ",".join(getattr(asset, "dns_records", [])), } @@ -237,7 +237,7 @@ def absorb_csv_row(self, row): findings = [i.strip() for i in row.get("Findings", "").splitlines()] self.findings.update(set(i for i in findings if i)) # technologies - technologies = [i.strip() for i in row.get("Description", "").splitlines()] + technologies = [i.strip() for i in row.get("Technologies", "").splitlines()] self.technologies.update(set(i for i in technologies if i)) # risk rating risk_rating = row.get("Risk Rating", "").strip() From 9c96cb4eb68c38b8d9f3d6a7a482956415052996 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:35:29 -0500 Subject: [PATCH 005/159] fix tests --- bbot/test/test_step_1/test_modules_basic.py | 23 +++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index 4503582d8..b943144ac 100644 --- 
a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -18,16 +18,27 @@ async def test_modules_basic(scan, helpers, events, bbot_config, bbot_scanner, h httpx_mock.add_response(method=http_method, url=re.compile(r".*"), json={"test": "test"}) # output module specific event filtering tests - base_output_module = BaseOutputModule(scan) - base_output_module.watched_events = ["IP_ADDRESS"] + base_output_module_1 = BaseOutputModule(scan) + base_output_module_1.watched_events = ["IP_ADDRESS"] + localhost = scan.make_event("127.0.0.1", source=scan.root_event) + assert base_output_module_1._event_precheck(localhost)[0] == True + localhost._internal = True + assert base_output_module_1._event_precheck(localhost)[0] == False + localhost._internal = False + assert base_output_module_1._event_precheck(localhost)[0] == True + localhost._omit = True + assert base_output_module_1._event_precheck(localhost)[0] == True + + base_output_module_2 = BaseOutputModule(scan) + base_output_module_2.watched_events = ["*"] localhost = scan.make_event("127.0.0.1", source=scan.root_event) - assert base_output_module._event_precheck(localhost)[0] == True + assert base_output_module_2._event_precheck(localhost)[0] == True localhost._internal = True - assert base_output_module._event_precheck(localhost)[0] == False + assert base_output_module_2._event_precheck(localhost)[0] == False localhost._internal = False - assert base_output_module._event_precheck(localhost)[0] == True + assert base_output_module_2._event_precheck(localhost)[0] == True localhost._omit = True - assert base_output_module._event_precheck(localhost)[0] == False + assert base_output_module_2._event_precheck(localhost)[0] == False # common event filtering tests for module_class in (BaseModule, BaseOutputModule, BaseReportModule, BaseInternalModule): From 1a48f7537854996c77dffa42cacd895736102ef9 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 17 Jan 2024 16:38:07 -0500 Subject: 
[PATCH 006/159] fix visual issue with ports in libreoffice --- bbot/modules/output/asset_inventory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 6328334c5..acb833daa 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -124,15 +124,15 @@ def sort_key(asset): row = { "Host": host, "Provider": getattr(asset, "provider", ""), - "IP(s)": ",".join(ips), + "IP(s)": ", ".join(ips), "HTTP Status": str(getattr(asset, "http_status", 0)), "HTTP Title": str(getattr(asset, "http_title", "")), - "Open Ports": ",".join(ports), + "Open Ports": ", ".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), - "DNS Records": ",".join(getattr(asset, "dns_records", [])), + "DNS Records": ", ".join(getattr(asset, "dns_records", [])), } row.update(asset.custom_fields) self.writerow(row) From 491753062cecd82ceaca4a9ac3bf8fede13fbb0b Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Thu, 18 Jan 2024 15:55:06 +0000 Subject: [PATCH 007/159] Refresh module docs --- docs/modules/list_of_modules.md | 2 +- docs/scanning/events.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 60e0c7b62..89bb6d24e 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -96,7 +96,7 @@ | virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | | zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | 
DNS_NAME | DNS_NAME | -| asset_inventory | output | No | Output to an asset inventory style flattened CSV file | | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY | IP_ADDRESS, OPEN_TCP_PORT | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | | csv | output | No | Output to CSV | | * | | | discord | output | No | Message a Discord channel when certain events are encountered | | * | | | emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index d5ce37b64..defcf46a8 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -61,7 +61,7 @@ Below is a full list of event types along with which modules produce/consume the | FINDING | 2 | 24 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | | GEOLOCATION | 0 | 2 | | ip2location, ipstack | | HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 15 | 1 | ajaxpro, badsecrets, dastardly, excavate, filedownload, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | +| HTTP_RESPONSE | 16 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, excavate, filedownload, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | | IP_ADDRESS | 9 | 3 | asn, asset_inventory, internetdb, ip2location, 
ipneighbor, ipstack, masscan, nmap, speculate | asset_inventory, ipneighbor, speculate | | IP_RANGE | 3 | 0 | masscan, nmap, speculate | | | OPEN_TCP_PORT | 4 | 5 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, masscan, nmap, speculate | @@ -77,7 +77,7 @@ Below is a full list of event types along with which modules produce/consume the | USERNAME | 0 | 2 | | credshed, dehashed | | VHOST | 1 | 1 | web_report | vhost | | VULNERABILITY | 2 | 7 | asset_inventory, web_report | ajaxpro, badsecrets, dastardly, generic_ssrf, internetdb, nuclei, telerik | -| WAF | 0 | 1 | | wafw00f | +| WAF | 1 | 1 | asset_inventory | wafw00f | | WEBSCREENSHOT | 0 | 1 | | gowitness | From 2227c89cd2167581ee63ef53267a4136e7c0c3a7 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 11:36:00 -0500 Subject: [PATCH 008/159] fix filedownload options --- bbot/modules/filedownload.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py index 2f0c26c59..05db2b9df 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -79,8 +79,8 @@ class filedownload(BaseModule): scope_distance_modifier = 3 async def setup(self): - self.extensions = list(set([e.lower().strip(".") for e in self.options.get("extensions", [])])) - self.max_filesize = self.options.get("max_filesize", "10MB") + self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) + self.max_filesize = self.config.get("max_filesize", "10MB") self.download_dir = self.scan.home / "filedownload" self.helpers.mkdir(self.download_dir) self.urls_downloaded = set() From 66a28f2ee7b9154bb6472127e7c3aa5a3f95c7ef Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 11:50:38 -0500 Subject: [PATCH 009/159] improve chain_lists helper --- bbot/core/helpers/misc.py | 4 ++++ bbot/test/test_step_1/test_helpers.py | 1 + 2 files changed, 5 insertions(+) diff --git 
a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index f62560704..10c2dc8a6 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -1121,6 +1121,8 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): This function takes a list `l` and flattens it by splitting its entries on commas. It also allows you to optionally open entries as files and add their contents to the list. + The order of entries is preserved, and deduplication is performed automatically. + Args: l (list): The list of strings to chain together. try_files (bool, optional): Whether to try to open entries as files. Defaults to False. @@ -1137,6 +1139,8 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): >>> chain_lists(["a,file.txt", "c,d"], try_files=True) ['a', 'f_line1', 'f_line2', 'f_line3', 'c', 'd'] """ + if isinstance(l, str): + l = [l] final_list = dict() for entry in l: for s in entry.split(","): diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 0c50f65eb..1f7fdb9be 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -338,6 +338,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert "asdf" in helpers.str_or_file(str(test_file)) assert "nope" in helpers.str_or_file("nope") assert tuple(helpers.chain_lists([str(test_file), "nope"], try_files=True)) == ("asdf", "fdsa", "nope") + assert tuple(helpers.chain_lists("one, two", try_files=True)) == ("one", "two") assert test_file.is_file() with pytest.raises(DirectoryCreationError, match="Failed to create.*"): From 0143b53c67a7a1a32d2f8e1812959d9ec9cc4c63 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 16:12:44 -0500 Subject: [PATCH 010/159] asset inventory bugfixes and improvements --- bbot/core/event/base.py | 21 ++++++++++++++++++--- bbot/modules/internal/excavate.py | 7 +------ bbot/modules/output/asset_inventory.py | 23 
+++++++++++++++++++---- bbot/test/test_step_1/test_events.py | 16 +++++++++++++++- 4 files changed, 53 insertions(+), 14 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 142878f58..616c7ecf4 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -7,6 +7,7 @@ from typing import Optional from datetime import datetime from contextlib import suppress +from urllib.parse import urljoin from pydantic import BaseModel, field_validator from .helpers import * @@ -19,6 +20,7 @@ is_subdomain, is_ip, is_ptr, + is_uri, domain_stem, make_netloc, make_ip_type, @@ -925,7 +927,7 @@ def _data_id(self): return data @property - def status_code(self): + def http_status(self): for t in self.tags: match = self._status_code_regex.match(t) if match: @@ -984,7 +986,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # count number of consecutive redirects self.num_redirects = getattr(self.source, "num_redirects", 0) - if str(self.status_code).startswith("3"): + if str(self.http_status).startswith("3"): self.num_redirects += 1 def sanitize_data(self, data): @@ -1013,12 +1015,25 @@ def _pretty_string(self): return f'{self.data["hash"]["header_mmh3"]}:{self.data["hash"]["body_mmh3"]}' @property - def status_code(self): + def http_status(self): try: return int(self.data.get("status_code", 0)) except (ValueError, TypeError): return 0 + @property + def redirect_location(self): + location = self.data.get("location", "") + # if it's a redirect + if location: + # get the url scheme + scheme = is_uri(location, return_scheme=True) + # if there's no scheme (i.e. 
it's a relative redirect) + if not scheme: + # then join the location with the current url + location = urljoin(self.parsed.geturl(), location) + return location + class VULNERABILITY(DictHostEvent): _always_emit = True diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f3f15f83f..cf67bf7f6 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -366,16 +366,11 @@ async def handle_event(self, event): # handle redirects web_spider_distance = getattr(event, "web_spider_distance", 0) num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) - location = event.data.get("location", "") + location = getattr(event, "redirect_location", "") # if it's a redirect if location: # get the url scheme scheme = self.helpers.is_uri(location, return_scheme=True) - # if there's no scheme (i.e. it's a relative redirect) - if not scheme: - # then join the location with the current url - location = urljoin(event.parsed.geturl(), location) - scheme = self.helpers.is_uri(location, return_scheme=True) if scheme in ("http", "https"): if num_redirects <= self.max_redirects: # tag redirects to out-of-scope hosts as affiliates diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index acb833daa..79eae5f71 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -125,7 +125,7 @@ def sort_key(asset): "Host": host, "Provider": getattr(asset, "provider", ""), "IP(s)": ", ".join(ips), - "HTTP Status": str(getattr(asset, "http_status", 0)), + "HTTP Status": asset.http_status_full, "HTTP Title": str(getattr(asset, "http_title", "")), "Open Ports": ", ".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], @@ -222,6 +222,7 @@ def __init__(self, host): self.custom_fields = {} self.http_status = 0 self.http_title = "" + self.redirect_location = "" def absorb_csv_row(self, row): # host @@ -265,12 +266,22 @@ def 
absorb_event(self, event): for record in sorted(records): self.dns_records.append(f"{rdtype}:{record}") - http_status = getattr(event, "status_code", 0) - update_http_status = best_http_status(http_status, self.http_status) == http_status + http_status = getattr(event, "http_status", 0) + # log.hugewarning(event) + # log.hugewarning(f"http_status: {http_status}") + update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status if update_http_status: self.http_status = http_status + if str(http_status).startswith("3"): + if event.type == "HTTP_RESPONSE": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + self.redirect_location = redirect_location + else: + self.redirect_location = "" - self.ip_addresses = set(_make_ip_list(event.resolved_hosts)) + if event.resolved_hosts: + self.ip_addresses.update(set(_make_ip_list(event.resolved_hosts))) if event.port: self.ports.add(str(event.port)) @@ -310,6 +321,10 @@ def absorb_event(self, event): def hostkey(self): return _make_hostkey(self.host, self.ip_addresses) + @property + def http_status_full(self): + return str(self.http_status) + (f" -> {self.redirect_location}" if self.redirect_location else "") + def _make_hostkey(host, ips): """ diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index d5c4a1b65..3181a1688 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -93,7 +93,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" url_event = scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]) assert "status-200" in url_event.tags - assert url_event.status_code == 200 + assert url_event.http_status == 200 with pytest.raises(ValidationError, match=".*status tag.*"): scan.make_event("https://evilcorp.com", "URL", 
events.ipv4_url) @@ -103,6 +103,20 @@ async def test_events(events, scan, helpers, bbot_config): assert events.http_response.parsed.scheme == "http" assert events.http_response.with_port().geturl() == "http://example.com:80/" + http_response = scan.make_event( + { + "port": "80", + "url": "http://www.evilcorp.com:80", + "input": "http://www.evilcorp.com:80", + "location": "/asdf", + "status_code": 301, + }, + "HTTP_RESPONSE", + dummy=True, + ) + assert http_response.http_status == 301 + assert http_response.redirect_location == "http://www.evilcorp.com/asdf" + # open port tests assert events.open_port in events.domain assert "api.publicapis.org:443" in events.open_port From 4bbb92832e042d498f679665e1fede2bcac876fc Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 17:09:41 -0500 Subject: [PATCH 011/159] removed debugging messages --- bbot/modules/output/asset_inventory.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 79eae5f71..df11e6f34 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -267,8 +267,6 @@ def absorb_event(self, event): self.dns_records.append(f"{rdtype}:{record}") http_status = getattr(event, "http_status", 0) - # log.hugewarning(event) - # log.hugewarning(f"http_status: {http_status}") update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status if update_http_status: self.http_status = http_status From ed93357ac18267200229584fabfc28ec5e154eae Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 17:21:45 -0500 Subject: [PATCH 012/159] url-decode http title --- bbot/core/event/base.py | 14 +++++++++++--- bbot/test/test_step_1/test_events.py | 2 ++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 616c7ecf4..1990fba68 100644 --- a/bbot/core/event/base.py +++ 
b/bbot/core/event/base.py @@ -14,7 +14,7 @@ from bbot.core.errors import * from bbot.core.helpers import ( extract_words, - split_host_port, + get_file_extension, host_in_host, is_domain, is_subdomain, @@ -24,10 +24,11 @@ domain_stem, make_netloc, make_ip_type, + recursive_decode, smart_decode, - get_file_extension, - validators, + split_host_port, tagify, + validators, ) @@ -1021,6 +1022,13 @@ def http_status(self): except (ValueError, TypeError): return 0 + @property + def http_title(self): + try: + return recursive_decode(self.data.get("title", "")) + except Exception: + return "" + @property def redirect_location(self): location = self.data.get("location", "") diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 3181a1688..1fc01a046 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -106,6 +106,7 @@ async def test_events(events, scan, helpers, bbot_config): http_response = scan.make_event( { "port": "80", + "title": "HTTP%20RESPONSE", "url": "http://www.evilcorp.com:80", "input": "http://www.evilcorp.com:80", "location": "/asdf", @@ -115,6 +116,7 @@ async def test_events(events, scan, helpers, bbot_config): dummy=True, ) assert http_response.http_status == 301 + assert http_response.http_title == "HTTP RESPONSE" assert http_response.redirect_location == "http://www.evilcorp.com/asdf" # open port tests From e342a1e099167be2f0080b0c02a3939ead4c4287 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 18 Jan 2024 17:29:59 -0500 Subject: [PATCH 013/159] http title parsing --- bbot/core/event/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 1990fba68..54b9dae02 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1024,10 +1024,11 @@ def http_status(self): @property def http_title(self): + http_title = self.data.get("title", "") try: - return 
recursive_decode(self.data.get("title", "")) + return recursive_decode(http_title) except Exception: - return "" + return http_title @property def redirect_location(self): From 7d6fcc7dc8c30f5c357ceb3bfca48a11fd28ae1f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 09:39:11 -0500 Subject: [PATCH 014/159] bugfix, add internal ip address column --- bbot/modules/output/asset_inventory.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index df11e6f34..7092a6461 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -44,10 +44,11 @@ class asset_inventory(CSV): header_row = [ "Host", "Provider", - "IP(s)", + "IP (External)", + "IP (Internal)", + "Open Ports", "HTTP Status", "HTTP Title", - "Open Ports", "Risk Rating", "Findings", "Technologies", @@ -110,13 +111,15 @@ def sort_key(asset): findings_and_vulns = asset.findings.union(asset.vulnerabilities) ports = getattr(asset, "ports", set()) ports = [str(p) for p in sorted([int(p) for p in asset.ports])] - ips = sorted([str(i) for i in getattr(asset, "ip_addresses", [])]) + ips_all = getattr(asset, "ip_addresses", []) + ips_external = sorted([str(ip) for ip in [i for i in ips_all if not i.is_private]]) + ips_internal = sorted([str(ip) for ip in [i for i in ips_all if i.is_private]]) host = self.helpers.make_ip_type(getattr(asset, "host", "")) if host and isinstance(host, str): _, domain = self.helpers.split_domain(host) if domain: increment_stat("Domains", domain) - for ip in ips: + for ip in ips_all: net = ipaddress.ip_network(f"{ip}/{self.summary_netmask}", strict=False) increment_stat("IP Addresses", str(net)) for port in ports: @@ -124,10 +127,11 @@ def sort_key(asset): row = { "Host": host, "Provider": getattr(asset, "provider", ""), - "IP(s)": ", ".join(ips), + "IP (External)": ", ".join(ips_external), + "IP (Internal)": ", 
".join(ips_internal), + "Open Ports": ", ".join(ports), "HTTP Status": asset.http_status_full, "HTTP Title": str(getattr(asset, "http_title", "")), - "Open Ports": ", ".join(ports), "Risk Rating": severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), @@ -161,7 +165,7 @@ async def finish(self): # yield to event loop to make sure we don't hold up the scan await self.helpers.sleep(0) host = row.get("Host", "").strip() - ips = row.get("IP(s)", "") + ips = row.get("IP (External)", "") + "," + row.get("IP (Internal)", "") if not host or not ips: continue hostkey = _make_hostkey(host, ips) @@ -263,7 +267,7 @@ def absorb_event(self, event): dns_children = getattr(event, "_dns_children", {}) if dns_children and not self.dns_records: for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): - for record in sorted(records): + for record in sorted([str(r) for r in records]): self.dns_records.append(f"{rdtype}:{record}") http_status = getattr(event, "http_status", 0) From d9a8074394b2f19fc84e30a9c6f81f95f35f2dd2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 10:19:44 -0500 Subject: [PATCH 015/159] fix tests --- bbot/modules/output/asset_inventory.py | 14 ++++++------- .../test_module_asset_inventory.py | 20 ++++++++----------- 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 7092a6461..c89d7e60f 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -136,7 +136,7 @@ def sort_key(asset): "Findings": "\n".join(findings_and_vulns), "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), "WAF": getattr(asset, "waf", ""), - "DNS Records": ", ".join(getattr(asset, "dns_records", [])), + "DNS Records": ", ".join(sorted([str(r) for r in getattr(asset, 
"dns_records", [])])), } row.update(asset.custom_fields) self.writerow(row) @@ -214,7 +214,7 @@ class Asset: def __init__(self, host): self.host = host self.ip_addresses = set() - self.dns_records = [] + self.dns_records = set() self.ports = set() self.findings = set() self.vulnerabilities = set() @@ -234,7 +234,8 @@ def absorb_csv_row(self, row): if host and not is_ip(host): self.host = host # ips - self.ip_addresses = set(_make_ip_list(row.get("IP(s)", ""))) + self.ip_addresses = set(_make_ip_list(row.get("IP (External)", ""))) + self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", "")))) # ports ports = [i.strip() for i in row.get("Open Ports", "").split(",")] self.ports.update(set(i for i in ports if i and is_port(i))) @@ -265,10 +266,9 @@ def absorb_event(self, event): self.host = event.host dns_children = getattr(event, "_dns_children", {}) - if dns_children and not self.dns_records: - for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): - for record in sorted([str(r) for r in records]): - self.dns_records.append(f"{rdtype}:{record}") + for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): + for record in sorted([str(r) for r in records]): + self.dns_records.add(f"{rdtype}:{record}") http_status = getattr(event, "http_status", 0) update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status diff --git a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py index 2f4303013..cc4399266 100644 --- a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +++ b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py @@ -8,16 +8,12 @@ class TestAsset_Inventory(ModuleTestBase): modules_overrides = ["asset_inventory", "nmap", "sslcert"] async def setup_before_prep(self, module_test): - old_resolve_fn = module_test.scan.helpers.dns.resolve_event - - async def 
resolve_event(event, minimal=False): - if event.data == "www.bbottest.notreal": - return ["a-record"], True, False, {"A": {"127.0.0.1"}} - elif event.data == "127.0.0.1": - return ["ptr-record"], False, False, {"PTR": {"asdf.bbottest.notreal"}} - return await old_resolve_fn(event, minimal) - - module_test.monkeypatch.setattr(module_test.scan.helpers.dns, "resolve_event", resolve_event) + module_test.scan.helpers.dns.mock_dns( + { + ("127.0.0.1", "PTR"): "www.bbottest.notreal", + ("www.bbottest.notreal", "A"): "127.0.0.1", + } + ) def check(self, module_test, events): assert any(e.data == "127.0.0.1:9999" for e in events), "No open port found" @@ -25,7 +21,7 @@ def check(self, module_test, events): filename = next(module_test.scan.home.glob("asset-inventory.csv")) with open(filename) as f: content = f.read() - assert "www.bbottest.notreal,,127.0.0.1" in content + assert "www.bbottest.notreal,,,127.0.0.1" in content filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) with open(filename) as f: assert "127.0.0.0/16" in f.read() @@ -45,7 +41,7 @@ def check(self, module_test, events): filename = next(module_test.scan.home.glob("asset-inventory.csv")) with open(filename) as f: content = f.read() - assert "www.bbottest.notreal,,127.0.0.1" in content + assert "www.bbottest.notreal,,,127.0.0.1" in content filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) with open(filename) as f: assert "127.0.0.0/16" in f.read() From bbbe7c6ed86421486c8a390ba283c5dc2ecf9e5c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 11:26:05 -0500 Subject: [PATCH 016/159] Increase subdomaincenter timeout --- bbot/modules/subdomaincenter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py index 6d1825b8b..c5c69293c 100644 --- a/bbot/modules/subdomaincenter.py +++ b/bbot/modules/subdomaincenter.py @@ -21,7 +21,7 @@ async def 
request_url(self, query): for i, _ in enumerate(range(self.retries + 1)): if i > 0: self.verbose(f"Retry #{i} for {query} after response code {status_code}") - response = await self.helpers.request(url) + response = await self.helpers.request(url, timeout=self.http_timeout + 30) status_code = getattr(response, "status_code", 0) if status_code == 429: await self.sleep(20) From e7c716c74efc32614df61cdb07cc77e4a984c5bd Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 11:31:39 -0500 Subject: [PATCH 017/159] asyncify emit_event, default qsize=100 --- bbot/modules/ajaxpro.py | 4 +- bbot/modules/azure_realm.py | 2 +- bbot/modules/azure_tenant.py | 4 +- bbot/modules/badsecrets.py | 6 +-- bbot/modules/base.py | 38 ++++---------- bbot/modules/bevigil.py | 4 +- bbot/modules/bucket_file_enum.py | 2 +- bbot/modules/builtwith.py | 4 +- bbot/modules/bypass403.py | 4 +- bbot/modules/credshed.py | 8 +-- bbot/modules/deadly/dastardly.py | 4 +- bbot/modules/deadly/ffuf.py | 2 +- bbot/modules/deadly/nuclei.py | 4 +- bbot/modules/deadly/vhost.py | 4 +- bbot/modules/dehashed.py | 8 +-- bbot/modules/dnscommonsrv.py | 2 +- bbot/modules/dnszonetransfer.py | 8 +-- bbot/modules/emailformat.py | 2 +- bbot/modules/ffuf_shortnames.py | 16 +++--- bbot/modules/fingerprintx.py | 2 +- bbot/modules/generic_ssrf.py | 2 +- bbot/modules/git.py | 2 +- bbot/modules/github_codesearch.py | 4 +- bbot/modules/github_org.py | 6 +-- bbot/modules/gowitness.py | 6 +-- bbot/modules/host_header.py | 8 +-- bbot/modules/httpx.py | 4 +- bbot/modules/hunt.py | 2 +- bbot/modules/hunterio.py | 6 +-- bbot/modules/iis_shortnames.py | 4 +- bbot/modules/internal/excavate.py | 50 +++++++++---------- bbot/modules/internal/speculate.py | 12 ++--- bbot/modules/internetdb.py | 12 ++--- bbot/modules/ip2location.py | 2 +- bbot/modules/ipneighbor.py | 2 +- bbot/modules/ipstack.py | 2 +- bbot/modules/masscan.py | 12 ++--- bbot/modules/massdns.py | 10 ++-- bbot/modules/nmap.py | 4 +- bbot/modules/nsec.py | 4 +- 
bbot/modules/ntlm.py | 4 +- bbot/modules/oauth.py | 8 +-- bbot/modules/output/asset_inventory.py | 8 +-- bbot/modules/paramminer_headers.py | 2 +- bbot/modules/pgp.py | 2 +- bbot/modules/postman.py | 2 +- bbot/modules/report/asn.py | 4 +- bbot/modules/robots.py | 2 +- bbot/modules/secretsdb.py | 2 +- bbot/modules/sitedossier.py | 2 +- bbot/modules/skymem.py | 4 +- bbot/modules/smuggler.py | 2 +- bbot/modules/social.py | 2 +- bbot/modules/sslcert.py | 2 +- bbot/modules/subdomain_hijack.py | 4 +- bbot/modules/telerik.py | 14 +++--- bbot/modules/templates/bucket.py | 6 +-- bbot/modules/templates/subdomain_enum.py | 2 +- bbot/modules/url_manipulation.py | 2 +- bbot/modules/urlscan.py | 8 +-- bbot/modules/viewdns.py | 2 +- bbot/modules/wafw00f.py | 4 +- bbot/modules/wappalyzer.py | 2 +- bbot/modules/wayback.py | 2 +- bbot/modules/zoomeye.py | 2 +- .../test_step_1/test_manager_deduplication.py | 4 +- .../test_manager_scope_accuracy.py | 2 +- bbot/test/test_step_1/test_modules_basic.py | 2 +- docs/contribution.md | 2 +- 69 files changed, 189 insertions(+), 197 deletions(-) diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index 924f88835..3a0061315 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -24,7 +24,7 @@ async def handle_event(self, event): probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx") if probe_confirm: if probe_confirm.status_code != 200: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data, @@ -40,7 +40,7 @@ async def handle_event(self, event): ajaxpro_regex_result = self.ajaxpro_regex.search(resp_body) if ajaxpro_regex_result: ajax_pro_path = ajaxpro_regex_result.group(0) - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index f299ca3dc..a3d6ad6ba 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -22,7 +22,7 @@ async def 
handle_event(self, event): auth_url, "URL_UNVERIFIED", source=event, tags=["affiliate", "ms-auth-url"] ) url_event.source_domain = domain - self.emit_event(url_event) + await self.emit_event(url_event) async def getuserrealm(self, domain): url = f"https://login.microsoftonline.com/getuserrealm.srf?login=test@{domain}" diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index a15fbecf4..909acbe20 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -34,7 +34,7 @@ async def handle_event(self, event): self.verbose(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}') for domain in domains: if domain != query: - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) + await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) # tenant names if domain.lower().endswith(".onmicrosoft.com"): tenantname = domain.split(".")[0].lower() @@ -44,7 +44,7 @@ async def handle_event(self, event): event_data = {"tenant-names": sorted(tenant_names), "domains": sorted(domains)} if tenant_id is not None: event_data["tenant-id"] = tenant_id - self.emit_event(event_data, "AZURE_TENANT", source=event) + await self.emit_event(event_data, "AZURE_TENANT", source=event) async def query(self, domain): url = f"{self.base_url}/autodiscover/autodiscover.svc" diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py index 0e188bced..a7a31a48c 100644 --- a/bbot/modules/badsecrets.py +++ b/bbot/modules/badsecrets.py @@ -52,11 +52,11 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - self.emit_event(data, "VULNERABILITY", event) + await self.emit_event(data, "VULNERABILITY", event) elif r["type"] == "IdentifyOnly": # There is little value to presenting a non-vulnerable asp.net viewstate, as it is not crackable without a Matrioshka brain. Just emit a technology instead. 
if r["detecting_module"] == "ASPNET_Viewstate": - self.emit_event( + await self.emit_event( {"technology": "microsoft asp.net", "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event, @@ -67,4 +67,4 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 7a4a78342..6ae93c7ea 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -104,7 +104,7 @@ class BaseModule: _preserve_graph = False _stats_exclude = False - _qsize = 0 + _qsize = 100 _priority = 3 _name = "base" _type = "scan" @@ -381,7 +381,7 @@ def make_event(self, *args, **kwargs): Examples: >>> new_event = self.make_event("1.2.3.4", source=event) - >>> self.emit_event(new_event) + >>> await self.emit_event(new_event) Returns: Event or None: The created event, or None if a validation error occurred and raise_error was False. @@ -401,7 +401,7 @@ def make_event(self, *args, **kwargs): event.module = self return event - def emit_event(self, *args, **kwargs): + async def emit_event(self, *args, **kwargs): """Emit an event to the event queue and distribute it to interested modules. This is how modules "return" data. 
@@ -419,10 +419,10 @@ def emit_event(self, *args, **kwargs): ``` Examples: - >>> self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) >>> new_event = self.make_event("1.2.3.4", source=event) - >>> self.emit_event(new_event) + >>> await self.emit_event(new_event) Returns: None @@ -438,27 +438,7 @@ def emit_event(self, *args, **kwargs): emit_kwargs[o] = v event = self.make_event(*args, **event_kwargs) if event: - self.queue_outgoing_event(event, **emit_kwargs) - - async def emit_event_wait(self, *args, **kwargs): - """Emit an event to the event queue and await until there is space in the outgoing queue. - - This method is similar to `emit_event`, but it waits until there's sufficient space in the outgoing - event queue before emitting the event. It utilizes the queue size threshold defined in `self._qsize`. - - Args: - *args: Positional arguments to be passed to `emit_event()` for event creation. - **kwargs: Keyword arguments to be passed to `emit_event()` for event creation or configuration. - - Returns: - None - - See Also: - emit_event: For emitting an event without waiting on the queue size. - """ - while self.outgoing_event_queue.qsize() > self._qsize: - await self.helpers.sleep(0.2) - return self.emit_event(*args, **kwargs) + await self.queue_outgoing_event(event, **emit_kwargs) async def _events_waiting(self): """ @@ -808,7 +788,7 @@ async def queue_event(self, event, precheck=True): except AttributeError: self.debug(f"Not in an acceptable state to queue incoming event") - def queue_outgoing_event(self, event, **kwargs): + async def queue_outgoing_event(self, event, **kwargs): """ Queues an outgoing event to the module's outgoing event queue for further processing. @@ -829,7 +809,7 @@ def queue_outgoing_event(self, event, **kwargs): AttributeError: If the module is not in an acceptable state to queue outgoing events. 
""" try: - self.outgoing_event_queue.put_nowait((event, kwargs)) + await self.outgoing_event_queue.put((event, kwargs)) except AttributeError: self.debug(f"Not in an acceptable state to queue outgoing event") @@ -1076,7 +1056,7 @@ def incoming_event_queue(self): @property def outgoing_event_queue(self): if self._outgoing_event_queue is None: - self._outgoing_event_queue = ShuffleQueue() + self._outgoing_event_queue = ShuffleQueue(self._qsize) return self._outgoing_event_queue @property diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index e6c9990dd..ff868e969 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -29,13 +29,13 @@ async def handle_event(self, event): subdomains = await self.query(query, request_fn=self.request_subdomains, parse_fn=self.parse_subdomains) if subdomains: for subdomain in subdomains: - self.emit_event(subdomain, "DNS_NAME", source=event) + await self.emit_event(subdomain, "DNS_NAME", source=event) if self.urls: urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls) if urls: for parsed_url in await self.scan.run_in_executor(self.helpers.validators.collapse_urls, urls): - self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) + await self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) async def request_subdomains(self, query): url = f"{self.base_url}/{self.helpers.quote(query)}/subdomains/" diff --git a/bbot/modules/bucket_file_enum.py b/bbot/modules/bucket_file_enum.py index 7eb6926c0..facaa021e 100644 --- a/bbot/modules/bucket_file_enum.py +++ b/bbot/modules/bucket_file_enum.py @@ -42,7 +42,7 @@ async def handle_aws(self, event): bucket_file = url + "/" + key file_extension = self.helpers.get_file_extension(key) if file_extension not in self.scan.url_extension_blacklist: - self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, tags="filedownload") + await self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, tags="filedownload") 
urls_emitted += 1 if urls_emitted >= self.file_limit: return diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py index 25a46ddf5..0b5793657 100644 --- a/bbot/modules/builtwith.py +++ b/bbot/modules/builtwith.py @@ -33,14 +33,14 @@ async def handle_event(self, event): if subdomains: for s in subdomains: if s != event: - self.emit_event(s, "DNS_NAME", source=event) + await self.emit_event(s, "DNS_NAME", source=event) # redirects if self.config.get("redirects", True): redirects = await self.query(query, parse_fn=self.parse_redirects, request_fn=self.request_redirects) if redirects: for r in redirects: if r != event: - self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) async def request_domains(self, query): url = f"{self.base_url}/v20/api.json?KEY={self.api_key}&LOOKUP={query}&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes" diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 182f4b4db..e4d8ed3f1 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -132,7 +132,7 @@ async def handle_event(self, event): if results is None: return if len(results) > collapse_threshold: - self.emit_event( + await self.emit_event( { "description": f"403 Bypass MULTIPLE SIGNATURES (exceeded threshold {str(collapse_threshold)})", "host": str(event.host), @@ -143,7 +143,7 @@ async def handle_event(self, event): ) else: for description in results: - self.emit_event( + await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", source=event, diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py index c493199d7..09ed57377 100644 --- a/bbot/modules/credshed.py +++ b/bbot/modules/credshed.py @@ -76,11 +76,11 @@ async def handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: - self.emit_event(email_event) + await 
self.emit_event(email_event) if user and not self.already_seen(f"{email}:{user}"): - self.emit_event(user, "USERNAME", source=email_event, tags=tags) + await self.emit_event(user, "USERNAME", source=email_event, tags=tags) if pw and not self.already_seen(f"{email}:{pw}"): - self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) for h_pw in hashes: if h_pw and not self.already_seen(f"{email}:{h_pw}"): - self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) diff --git a/bbot/modules/deadly/dastardly.py b/bbot/modules/deadly/dastardly.py index 403e9511e..47fd834fd 100644 --- a/bbot/modules/deadly/dastardly.py +++ b/bbot/modules/deadly/dastardly.py @@ -60,7 +60,7 @@ async def handle_event(self, event): for testcase in testsuite.testcases: for failure in testcase.failures: if failure.severity == "Info": - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": url, @@ -70,7 +70,7 @@ async def handle_event(self, event): event, ) else: - self.emit_event( + await self.emit_event( { "severity": failure.severity, "host": str(event.host), diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index cadaf4990..f7ba3d96e 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -83,7 +83,7 @@ async def handle_event(self, event): filters = await self.baseline_ffuf(fixed_url, exts=exts) async for r in self.execute_ffuf(self.tempfile, fixed_url, exts=exts, filters=filters): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) async def filter_event(self, event): if "endpoint" in event.tags: diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 33bcbe7e0..f3ef4ad67 100644 --- 
a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -150,7 +150,7 @@ async def handle_batch(self, *events): description_string += f" Extracted Data: [{','.join(extracted_results)}]" if severity in ["INFO", "UNKNOWN"]: - self.emit_event( + await self.emit_event( { "host": str(source_event.host), "url": url, @@ -160,7 +160,7 @@ async def handle_batch(self, *events): source_event, ) else: - self.emit_event( + await self.emit_event( { "severity": severity, "host": str(source_event.host), diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index f4675e10f..e2908dbbe 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -92,9 +92,9 @@ async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=F found_vhost_b64 = r["input"]["FUZZ"] vhost_dict = {"host": str(event.host), "url": host, "vhost": base64.b64decode(found_vhost_b64).decode()} if f"{vhost_dict['vhost']}{basehost}" != event.parsed.netloc: - self.emit_event(vhost_dict, "VHOST", source=event) + await self.emit_event(vhost_dict, "VHOST", source=event) if skip_dns_host == False: - self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) + await self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) yield vhost_dict["vhost"] diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py index a09de454e..4b3546712 100644 --- a/bbot/modules/dehashed.py +++ b/bbot/modules/dehashed.py @@ -49,13 +49,13 @@ async def handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: - self.emit_event(email_event) + await self.emit_event(email_event) if user and not self.already_seen(f"{email}:{user}"): - self.emit_event(user, "USERNAME", source=email_event, tags=tags) + await self.emit_event(user, "USERNAME", source=email_event, tags=tags) if pw and not self.already_seen(f"{email}:{pw}"): - 
self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) if h_pw and not self.already_seen(f"{email}:{h_pw}"): - self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) async def query(self, event): query = f"domain:{self.make_query(event)}" diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 538f51621..958b6b612 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -106,4 +106,4 @@ async def handle_event(self, event): queries = [event.data] + [f"{srv}.{event.data}" for srv in common_srvs] async for query, results in self.helpers.resolve_batch(queries, type="srv"): if results: - self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) + await self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) diff --git a/bbot/modules/dnszonetransfer.py b/bbot/modules/dnszonetransfer.py index 1ec5bf5eb..0a48526dc 100644 --- a/bbot/modules/dnszonetransfer.py +++ b/bbot/modules/dnszonetransfer.py @@ -42,7 +42,9 @@ async def handle_event(self, event): continue self.hugesuccess(f"Successful zone transfer against {nameserver} for domain {domain}!") finding_description = f"Successful DNS zone transfer against {nameserver} for {domain}" - self.emit_event({"host": str(event.host), "description": finding_description}, "FINDING", source=event) + await self.emit_event( + {"host": str(event.host), "description": finding_description}, "FINDING", source=event + ) for name, ttl, rdata in zone.iterate_rdatas(): if str(name) == "@": parent_data = domain @@ -52,13 +54,13 @@ async def handle_event(self, event): if not parent_event or parent_event == event: parent_event = event else: - self.emit_event(parent_event) + await self.emit_event(parent_event) for rdtype, t in self.helpers.dns.extract_targets(rdata): if not self.helpers.is_ip(t): t = 
f"{t}.{domain}" module = self.helpers.dns._get_dummy_module(rdtype) child_event = self.scan.make_event(t, "DNS_NAME", parent_event, module=module) - self.emit_event(child_event) + await self.emit_event(child_event) else: self.debug(f"No data returned by {nameserver} for domain {domain}") diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 3fd47ee2d..000c3d5cf 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -19,4 +19,4 @@ async def handle_event(self, event): return for email in self.helpers.extract_emails(r.text): if email.endswith(query): - self.emit_event(email, "EMAIL_ADDRESS", source=event) + await self.emit_event(email, "EMAIL_ADDRESS", source=event) diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py index ca45402e6..c062f09df 100644 --- a/bbot/modules/ffuf_shortnames.py +++ b/bbot/modules/ffuf_shortnames.py @@ -158,12 +158,14 @@ async def handle_event(self, event): if "shortname-file" in event.tags: for ext in used_extensions: async for r in self.execute_ffuf(tempfile, root_url, suffix=f".{ext}"): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + ) elif "shortname-directory" in event.tags: async for r in self.execute_ffuf(tempfile, root_url, exts=["/"]): r_url = f"{r['url'].rstrip('/')}/" - self.emit_event(r_url, "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event(r_url, "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) if self.config.get("find_delimiters"): if "shortname-directory" in event.tags: @@ -175,7 +177,9 @@ async def handle_event(self, event): async for r in self.execute_ffuf( tempfile, root_url, prefix=f"{prefix}{delimeter}", exts=["/"] ): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], 
"URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + ) elif "shortname-file" in event.tags: for ext in used_extensions: @@ -187,7 +191,7 @@ async def handle_event(self, event): async for r in self.execute_ffuf( tempfile, root_url, prefix=f"{prefix}{delimeter}", suffix=f".{ext}" ): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] ) @@ -217,7 +221,7 @@ async def finish(self): ) async for r in self.execute_ffuf(tempfile, url, prefix=prefix, exts=["/"]): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=self.shortname_to_event[hint], @@ -233,7 +237,7 @@ async def finish(self): async for r in self.execute_ffuf( tempfile, url, prefix=prefix, suffix=f".{ext}" ): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=self.shortname_to_event[hint], diff --git a/bbot/modules/fingerprintx.py b/bbot/modules/fingerprintx.py index be5695541..a7d8f2ea0 100644 --- a/bbot/modules/fingerprintx.py +++ b/bbot/modules/fingerprintx.py @@ -52,4 +52,4 @@ async def handle_batch(self, *events): protocol_data["port"] = port if banner: protocol_data["banner"] = banner - self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) + await self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index bf2c37097..d4045993b 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -200,7 +200,7 @@ def interactsh_callback(self, r): matched_severity = match[2] matched_read_response = str(match[3]) - self.emit_event( + await self.emit_event( { "severity": matched_severity, "host": str(matched_event.host), diff --git a/bbot/modules/git.py b/bbot/modules/git.py index dafe151d1..fc61402de 100644 --- a/bbot/modules/git.py +++ b/bbot/modules/git.py @@ -29,7 +29,7 @@ async def handle_event(self, event): text = "" if text: if getattr(result, 
"status_code", 0) == 200 and "[core]" in text and not self.fp_regex.match(text): - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": url, "description": f"Exposed .git config at {url}"}, "FINDING", event, diff --git a/bbot/modules/github_codesearch.py b/bbot/modules/github_codesearch.py index c98335722..a138b4399 100644 --- a/bbot/modules/github_codesearch.py +++ b/bbot/modules/github_codesearch.py @@ -21,13 +21,13 @@ async def handle_event(self, event): repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", source=event) if repo_event is None: continue - self.emit_event(repo_event) + await self.emit_event(repo_event) for raw_url in raw_urls: url_event = self.make_event(raw_url, "URL_UNVERIFIED", source=repo_event, tags=["httpx-safe"]) if not url_event: continue url_event.scope_distance = repo_event.scope_distance - self.emit_event(url_event) + await self.emit_event(url_event) async def query(self, query): repos = {} diff --git a/bbot/modules/github_org.py b/bbot/modules/github_org.py index 66182a2a6..66fa038a7 100644 --- a/bbot/modules/github_org.py +++ b/bbot/modules/github_org.py @@ -57,7 +57,7 @@ async def handle_event(self, event): for repo_url in repos: repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", source=event) repo_event.scope_distance = event.scope_distance - self.emit_event(repo_event) + await self.emit_event(repo_event) # find members from org (SOCIAL --> SOCIAL) if is_org and self.include_members: @@ -66,7 +66,7 @@ async def handle_event(self, event): for member in org_members: event_data = {"platform": "github", "profile_name": member, "url": f"https://github.com/{member}"} member_event = self.make_event(event_data, "SOCIAL", tags="github-org-member", source=event) - self.emit_event(member_event) + await self.emit_event(member_event) # find valid orgs from stub (ORG_STUB --> SOCIAL) elif event.type == "ORG_STUB": @@ -80,7 +80,7 @@ async def handle_event(self, event): event_data = {"platform": 
"github", "profile_name": user, "url": f"https://github.com/{user}"} github_org_event = self.make_event(event_data, "SOCIAL", tags="github-org", source=event) github_org_event.scope_distance = event.scope_distance - self.emit_event(github_org_event) + await self.emit_event(github_org_event) async def query_org_repos(self, query): repos = [] diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 5f4c4a5e8..5592eaa65 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -137,7 +137,7 @@ async def handle_batch(self, *events): filename = screenshot["filename"] webscreenshot_data = {"filename": filename, "url": final_url} source_event = events[url] - self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) + await self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) # emit URLs for url, row in self.new_network_logs.items(): @@ -151,7 +151,7 @@ async def handle_batch(self, *events): if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): - self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) + await self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) # emit technologies for _, row in self.new_technologies.items(): @@ -160,7 +160,7 @@ async def handle_batch(self, *events): source_event = events[source_url] technology = row["value"] tech_data = {"technology": technology, "url": source_url, "host": str(source_event.host)} - self.emit_event(tech_data, "TECHNOLOGY", source=source_event) + await self.emit_event(tech_data, "TECHNOLOGY", source=source_event) def construct_command(self): # base executable diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index bae721990..bec77c15a 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -30,7 +30,7 @@ async def setup(self): def rand_string(self, *args, **kwargs): return self.helpers.rand_string(*args, **kwargs) - def 
interactsh_callback(self, r): + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." in full_id: @@ -40,7 +40,7 @@ def interactsh_callback(self, r): matched_event = match[0] matched_technique = match[1] - self.emit_event( + await self.emit_event( { "host": str(matched_event.host), "url": matched_event.data["url"], @@ -128,7 +128,7 @@ async def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], @@ -168,7 +168,7 @@ async def handle_event(self, event): # emit all the domain reflections we found for dr in domain_reflections: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0c12ad740..393d2a402 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -153,11 +153,11 @@ async def handle_batch(self, *events): url_event = self.make_event(url, "URL", source_event, tags=tags) if url_event: if url_event != source_event: - self.emit_event(url_event) + await self.emit_event(url_event) else: url_event._resolved.set() # HTTP response - self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) + await self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) for tempdir in Path(tempfile.gettempdir()).iterdir(): if tempdir.is_dir() and self.httpx_tempdir_regex.match(tempdir.name): diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index 0ccf0391b..add45b665 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -289,4 +289,4 @@ async def handle_event(self, event): url = event.data.get("url", "") if url: data["url"] = url - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 1e65c6e4c..792ca6d98 100644 --- a/bbot/modules/hunterio.py +++ 
b/bbot/modules/hunterio.py @@ -26,14 +26,14 @@ async def handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", event) if email_event: - self.emit_event(email_event) + await self.emit_event(email_event) for source in sources: domain = source.get("domain", "") if domain: - self.emit_event(domain, "DNS_NAME", email_event) + await self.emit_event(domain, "DNS_NAME", email_event) url = source.get("uri", "") if url: - self.emit_event(url, "URL_UNVERIFIED", email_event) + await self.emit_event(url, "URL_UNVERIFIED", email_event) async def query(self, query): emails = [] diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index ff7941dc2..7d558a23a 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -221,7 +221,7 @@ class safety_counter_obj: technique_strings.append(f"{method} ({technique})") description = f"IIS Shortname Vulnerability Detected. Potentially Vulnerable Method/Techniques: [{','.join(technique_strings)}]" - self.emit_event( + await self.emit_event( {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, @@ -314,7 +314,7 @@ class safety_counter_obj: hint_type = "shortname-file" else: hint_type = "shortname-directory" - self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) + await self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) async def filter_event(self, event): if "dir" in event.tags: diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f3f15f83f..f5c028c95 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -23,7 +23,7 @@ async def search(self, content, event, **kwargs): async for result, name in self._search(content, event, **kwargs): results.add(result) for result in results: - self.report(result, name, event, **kwargs) + await self.report(result, name, event, 
**kwargs) async def _search(self, content, event, **kwargs): for name, regex in self.compiled_regexes.items(): @@ -32,7 +32,7 @@ async def _search(self, content, event, **kwargs): for result in regex.findall(content): yield result, name - def report(self, result, name, event): + async def report(self, result, name, event): pass @@ -48,10 +48,10 @@ async def search(self, content, event, **kwargs): async for csp, name in self._search(content, event, **kwargs): extracted_domains = self.extract_domains(csp) for domain in extracted_domains: - self.report(domain, event, **kwargs) + await self.report(domain, event, **kwargs) - def report(self, domain, event, **kwargs): - self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + async def report(self, domain, event, **kwargs): + await self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) class HostnameExtractor(BaseExtractor): @@ -62,8 +62,8 @@ def __init__(self, excavate): self.regexes[f"dns_name_{i+1}"] = r.pattern super().__init__(excavate) - def report(self, result, name, event, **kwargs): - self.excavate.emit_event(result, "DNS_NAME", source=event) + async def report(self, result, name, event, **kwargs): + await self.excavate.emit_event(result, "DNS_NAME", source=event) class URLExtractor(BaseExtractor): @@ -95,7 +95,7 @@ async def search(self, content, event, **kwargs): urls_found = 0 for result, name in results: - url_event = self.report(result, name, event, **kwargs) + url_event = await self.report(result, name, event, **kwargs) if url_event is not None: url_in_scope = self.excavate.scan.in_scope(url_event) is_spider_danger = self.excavate.helpers.is_spider_danger(event, result) @@ -115,7 +115,7 @@ async def search(self, content, event, **kwargs): url_event.add_tag("spider-danger") self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") - self.excavate.emit_event(url_event) + await self.excavate.emit_event(url_event) if 
url_in_scope: urls_found += 1 @@ -150,7 +150,7 @@ async def _search(self, content, event, **kwargs): yield result, name - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): parsed_uri = None try: parsed_uri = self.excavate.helpers.urlparse(result) @@ -168,7 +168,7 @@ def report(self, result, name, event, **kwargs): parsed_url = getattr(event, "parsed", None) if parsed_url: event_data["url"] = parsed_url.geturl() - self.excavate.emit_event( + await self.excavate.emit_event( event_data, "FINDING", source=event, @@ -177,7 +177,7 @@ def report(self, result, name, event, **kwargs): protocol_data = {"protocol": parsed_uri.scheme, "host": str(host)} if port: protocol_data["port"] = port - self.excavate.emit_event( + await self.excavate.emit_event( protocol_data, "PROTOCOL", source=event, @@ -192,12 +192,12 @@ class EmailExtractor(BaseExtractor): regexes = {"email": _email_regex} tld_blacklist = ["png", "jpg", "jpeg", "bmp", "ico", "gif", "svg", "css", "ttf", "woff", "woff2"] - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): result = result.lower() tld = result.split(".")[-1] if tld not in self.tld_blacklist: self.excavate.debug(f"Found email address [{result}] from parsing [{event.data.get('url')}]") - self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) + await self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) class ErrorExtractor(BaseExtractor): @@ -218,10 +218,10 @@ class ErrorExtractor(BaseExtractor): "ASP.NET:4": r"Error ([\d-]+) \([\dA-F]+\)", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found error message from parsing [{event.data.get('url')}] with regex [{name}]") description = f"Error message Detected at Error Type: {name}" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", 
""), "description": description}, "FINDING", source=event, @@ -231,7 +231,7 @@ def report(self, result, name, event, **kwargs): class JWTExtractor(BaseExtractor): regexes = {"JWT": r"eyJ(?:[\w-]*\.)(?:[\w-]*\.)[\w-]*"} - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found JWT candidate [{result}]") try: j.decode(result, options={"verify_signature": False}) @@ -240,7 +240,7 @@ def report(self, result, name, event, **kwargs): if jwt_headers["alg"].upper()[0:2] == "HS": tags = ["crackable"] description = f"JWT Identified [{result}]" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event, @@ -259,9 +259,9 @@ class SerializationExtractor(BaseExtractor): "Possible Compressed": r"H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2}", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): description = f"{name} serialized object found" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) @@ -272,9 +272,9 @@ class FunctionalityExtractor(BaseExtractor): "Web Service WSDL": r"(?i)((?:http|https)://[^\s]*?.(?:wsdl))", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): description = f"{name} found" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) @@ -314,7 +314,7 @@ class JavascriptExtractor(BaseExtractor): "possible_creds_var": r"(?:password|passwd|pwd|pass)\s*=+\s*['\"][^\s'\"]{1,60}['\"]", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): # ensure that basic auth matches aren't false positives if name == 
"authorization_basic": try: @@ -326,7 +326,7 @@ def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found Possible Secret in Javascript [{result}]") description = f"Possible secret in JS [{result}] Signature [{name}]" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event ) @@ -384,7 +384,7 @@ async def handle_event(self, event): # inherit web spider distance from parent (don't increment) source_web_spider_distance = getattr(event, "web_spider_distance", 0) url_event.web_spider_distance = source_web_spider_distance - self.emit_event(url_event) + await self.emit_event(url_event) else: self.verbose(f"Exceeded max HTTP redirects ({self.max_redirects}): {location}") diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 9b57b0e81..8f51d5d95 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -73,13 +73,13 @@ async def handle_event(self, event): ips = list(net) random.shuffle(ips) for ip in ips: - self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) + await self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) # parent domains if event.type == "DNS_NAME": parent = self.helpers.parent_domain(event.data) if parent != event.data: - self.emit_event(parent, "DNS_NAME", source=event, internal=True) + await self.emit_event(parent, "DNS_NAME", source=event, internal=True) # generate open ports @@ -91,7 +91,7 @@ async def handle_event(self, event): if event.type == "URL" or (event.type == "URL_UNVERIFIED" and self.open_port_consumers): # only speculate port from a URL if it wouldn't be speculated naturally from the host if event.host and (event.port not in self.ports or not speculate_open_ports): - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.host, event.port), "OPEN_TCP_PORT", source=event, @@ -108,7 +108,7 @@ async def 
handle_event(self, event): # inherit web spider distance from parent (don't increment) source_web_spider_distance = getattr(event, "web_spider_distance", 0) url_event.web_spider_distance = source_web_spider_distance - self.emit_event(url_event) + await self.emit_event(url_event) # from hosts if speculate_open_ports: @@ -120,7 +120,7 @@ async def handle_event(self, event): if event.type == "IP_ADDRESS" or usable_dns: for port in self.ports: - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, @@ -154,7 +154,7 @@ async def handle_event(self, event): stub_event = self.make_event(stub, "ORG_STUB", source=event) if event.scope_distance > 0: stub_event.scope_distance = event.scope_distance - self.emit_event(stub_event) + await self.emit_event(stub_event) async def filter_event(self, event): # don't accept errored DNS_NAMEs diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index 0384a4d4d..b3e98b9fc 100644 --- a/bbot/modules/internetdb.py +++ b/bbot/modules/internetdb.py @@ -76,7 +76,7 @@ async def handle_event(self, event): return if data: if r.status_code == 200: - self._parse_response(data=data, event=event) + await self._parse_response(data=data, event=event) elif r.status_code == 404: detail = data.get("detail", "") if detail: @@ -86,22 +86,22 @@ async def handle_event(self, event): err_msg = data.get("msg", "") self.verbose(f"Shodan error for {ip}: {err_data}: {err_msg}") - def _parse_response(self, data: dict, event): + async def _parse_response(self, data: dict, event): """Handles emitting events from returned JSON""" data: dict # has keys: cpes, hostnames, ip, ports, tags, vulns # ip is a string, ports is a list of ports, the rest is a list of strings for hostname in data.get("hostnames", []): - self.emit_event(hostname, "DNS_NAME", source=event) + await self.emit_event(hostname, "DNS_NAME", source=event) for cpe in data.get("cpes", []): - self.emit_event({"technology": cpe, 
"host": str(event.host)}, "TECHNOLOGY", source=event) + await self.emit_event({"technology": cpe, "host": str(event.host)}, "TECHNOLOGY", source=event) for port in data.get("ports", []): - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, internal=True, quick=True ) vulns = data.get("vulns", []) if vulns: vulns_str = ", ".join([str(v) for v in vulns]) - self.emit_event( + await self.emit_event( {"description": f"Shodan reported verified vulnerabilities: {vulns_str}", "host": str(event.host)}, "FINDING", source=event, diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index d0a66ce4c..e192b2abb 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -57,4 +57,4 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - self.emit_event(geo_data, "GEOLOCATION", event) + await self.emit_event(geo_data, "GEOLOCATION", event) diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index b6688abee..2b139f807 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -34,4 +34,4 @@ async def handle_event(self, event): ip_event = self.make_event(str(ip), "IP_ADDRESS", event, internal=True) # keep the scope distance low to give it one more hop for DNS resolution # ip_event.scope_distance = max(1, event.scope_distance) - self.emit_event(ip_event) + await self.emit_event(ip_event) diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index a8a143fdc..98f139505 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -47,4 +47,4 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - self.emit_event(geo_data, "GEOLOCATION", event) + await self.emit_event(geo_data, "GEOLOCATION", event) diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py index 4d9c9db68..02598a215 100644 --- a/bbot/modules/masscan.py +++ b/bbot/modules/masscan.py @@ 
-193,18 +193,18 @@ def process_output(self, line, result_callback): result = self.helpers.make_netloc(result, port_number) if source is None: source = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) - self.emit_event(source) + await self.emit_event(source) result_callback(result, source=source) def append_alive_host(self, host, source): host_event = self.make_event(host, "IP_ADDRESS", source=self.get_source_event(host)) self.alive_hosts[host] = host_event self._write_ping_result(host) - self.emit_event(host_event) + await self.emit_event(host_event) def emit_open_tcp_port(self, data, source): self._write_syn_result(data) - self.emit_event(data, "OPEN_TCP_PORT", source=source) + await self.emit_event(data, "OPEN_TCP_PORT", source=source) def emit_from_cache(self): ip_events = {} @@ -220,7 +220,7 @@ def emit_from_cache(self): break ip_event = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) ip_events[ip] = ip_event - self.emit_event(ip_event) + await self.emit_event(ip_event) # syn scan if self.syn_cache.is_file(): cached_syns = list(self.helpers.read_file(self.syn_cache)) @@ -237,8 +237,8 @@ def emit_from_cache(self): if source_event is None: self.verbose(f"Source event not found for {line}") source_event = self.make_event(line, "IP_ADDRESS", source=self.get_source_event(line)) - self.emit_event(source_event) - self.emit_event(line, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(source_event) + await self.emit_event(line, "OPEN_TCP_PORT", source=source_event) def get_source_event(self, host): source_event = self.scan.whitelist.get(host) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 5a0865476..a345b79f6 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -119,7 +119,7 @@ async def handle_event(self, event): self.info(f"Brute-forcing subdomains for {query} (source: {event.data})") for hostname in await self.massdns(query, self.subdomain_list): - self.emit_result(hostname, 
event, query) + await self.emit_result(hostname, event, query) def abort_if(self, event): if not event.scope_distance == 0: @@ -127,12 +127,12 @@ def abort_if(self, event): if "wildcard" in event.tags: return True, "event is a wildcard" - def emit_result(self, result, source_event, query, tags=None): + async def emit_result(self, result, source_event, query, tags=None): if not result == source_event: kwargs = {"abort_if": self.abort_if} if tags is not None: kwargs["tags"] = tags - self.emit_event(result, "DNS_NAME", source_event, **kwargs) + await self.emit_event(result, "DNS_NAME", source_event, **kwargs) def already_processed(self, hostname): if hash(hostname) in self.processed: @@ -380,7 +380,9 @@ def add_mutation(_domain_hash, m): if source_event is None: self.warning(f"Could not correlate source event from: {hostname}") source_event = self.scan.root_event - self.emit_result(hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"]) + await self.emit_result( + hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"] + ) if results: found_mutations = True continue diff --git a/bbot/modules/nmap.py b/bbot/modules/nmap.py index aeb28f9ae..6d8a1293b 100644 --- a/bbot/modules/nmap.py +++ b/bbot/modules/nmap.py @@ -52,10 +52,10 @@ async def handle_batch(self, *events): for port in host.open_ports: port_number = int(port.split("/")[0]) netloc = self.helpers.make_netloc(host.address, port_number) - self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) for hostname in host.hostnames: netloc = self.helpers.make_netloc(hostname, port_number) - self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) finally: output_file.unlink(missing_ok=True) diff --git a/bbot/modules/nsec.py b/bbot/modules/nsec.py index bfd770d44..7d313c140 100644 --- a/bbot/modules/nsec.py +++ b/bbot/modules/nsec.py @@ -18,12 
+18,12 @@ async def handle_event(self, event): async for result in self.nsec_walk(event.data): if not emitted_finding: emitted_finding = True - self.emit_event( + await self.emit_event( {"host": event.data, "description": f"DNSSEC NSEC Zone Walking Enabled for domain: {event.data}"}, "FINDING", source=event, ) - self.emit_event(result, "DNS_NAME", source=event) + await self.emit_event(result, "DNS_NAME", source=event) async def get_nsec_record(self, domain): domain = domain.replace("\\000.", "") diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 76e93c595..c69beb941 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -87,7 +87,7 @@ async def handle_event(self, event): for result, request_url in await self.handle_url(event): if result and request_url: self.found.add(found_hash) - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": request_url, @@ -98,7 +98,7 @@ async def handle_event(self, event): ) fqdn = result.get("FQDN", "") if fqdn: - self.emit_event(fqdn, "DNS_NAME", source=event) + await self.emit_event(fqdn, "DNS_NAME", source=event) break async def filter_event(self, event): diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index f4d592511..33cb38959 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -66,16 +66,16 @@ async def handle_event(self, event): source=event, ) finding_event.source_domain = source_domain - self.emit_event(finding_event) + await self.emit_event(finding_event) url_event = self.make_event( token_endpoint, "URL_UNVERIFIED", source=event, tags=["affiliate", "oauth-token-endpoint"] ) url_event.source_domain = source_domain - self.emit_event(url_event) + await self.emit_event(url_event) for result in oidc_results: if result not in (domain, event.data): event_type = "URL_UNVERIFIED" if self.helpers.is_url(result) else "DNS_NAME" - self.emit_event(result, event_type, source=event, tags=["affiliate"]) + await self.emit_event(result, event_type, source=event, 
tags=["affiliate"]) for oauth_task in oauth_tasks: url = await oauth_task @@ -90,7 +90,7 @@ async def handle_event(self, event): source=event, ) oauth_finding.source_domain = source_domain - self.emit_event(oauth_finding) + await self.emit_event(oauth_finding) def url_and_base(self, url): yield url diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index acb833daa..b16dd18fb 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -173,19 +173,19 @@ async def finish(self): self.add_custom_headers(list(asset.custom_fields)) if not is_ip(asset.host): host_event = self.make_event(asset.host, "DNS_NAME", source=self.scan.root_event) - self.emit_event(host_event) + await self.emit_event(host_event) for port in asset.ports: netloc = self.helpers.make_netloc(asset.host, port) open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=host_event) - self.emit_event(open_port_event) + await self.emit_event(open_port_event) else: for ip in asset.ip_addresses: ip_event = self.make_event(ip, "IP_ADDRESS", source=self.scan.root_event) - self.emit_event(ip_event) + await self.emit_event(ip_event) for port in asset.ports: netloc = self.helpers.make_netloc(ip, port) open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=ip_event) - self.emit_event(open_port_event) + await self.emit_event(open_port_event) else: self.warning( f"use_previous=True was set but no previous asset inventory was found at {self.output_file}" diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 65880d9c8..7044ae90a 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -126,7 +126,7 @@ def process_results(self, event, results): if reflection: tags = ["http_reflection"] description = f"[Paramminer] {self.compare_mode.capitalize()}: [{result}] Reasons: [{reasons}] Reflection: [{str(reflection)}]" - self.emit_event( + await 
self.emit_event( {"host": str(event.host), "url": url, "description": description}, "FINDING", event, diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index c1e0773c3..2c378f585 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -20,7 +20,7 @@ async def handle_event(self, event): if results: for hostname in results: if not hostname == event: - self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) + await self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) async def query(self, query): results = set() diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 619107b53..57d361b84 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -26,7 +26,7 @@ async def handle_event(self, event): query = self.make_query(event) self.verbose(f"Searching for any postman workspaces, collections, requests belonging to {query}") for url in await self.query(query): - self.emit_event(url, "URL_UNVERIFIED", source=event, tags="httpx-safe") + await self.emit_event(url, "URL_UNVERIFIED", source=event, tags="httpx-safe") async def query(self, query): interesting_urls = [] diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index a8f57709a..db8612a1e 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -42,9 +42,9 @@ async def handle_event(self, event): emails = asn.pop("emails", []) self.cache_put(asn) asn_event = self.make_event(asn, "ASN", source=event) - self.emit_event(asn_event) + await self.emit_event(asn_event) for email in emails: - self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) + await self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) async def report(self): asn_data = sorted(self.asn_cache.items(), key=lambda x: self.asn_counts[x[0]], reverse=True) diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index fd873b799..717900bee 100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -48,4 +48,4 @@ async def 
handle_event(self, event): tags = [] if self.helpers.is_spider_danger(event, unverified_url): tags.append("spider-danger") - self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) + await self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) diff --git a/bbot/modules/secretsdb.py b/bbot/modules/secretsdb.py index 83f305c61..3fc8ad539 100644 --- a/bbot/modules/secretsdb.py +++ b/bbot/modules/secretsdb.py @@ -54,7 +54,7 @@ async def handle_event(self, event): parsed_url = getattr(event, "parsed", None) if parsed_url: event_data["url"] = parsed_url.geturl() - self.emit_event( + await self.emit_event( event_data, "FINDING", source=event, diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index 87358a955..0c797296a 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -19,7 +19,7 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - await self.emit_event_wait(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) async def query(self, query, parse_fn=None, request_fn=None): results = set() diff --git a/bbot/modules/skymem.py b/bbot/modules/skymem.py index 71d0e883e..4bd76c70d 100644 --- a/bbot/modules/skymem.py +++ b/bbot/modules/skymem.py @@ -19,7 +19,7 @@ async def handle_event(self, event): if not r: return for email in self.helpers.extract_emails(r.text): - self.emit_event(email, "EMAIL_ADDRESS", source=event) + await self.emit_event(email, "EMAIL_ADDRESS", source=event) # iterate through other pages domain_ids = re.findall(r' {e.host}" description += f" ({source_hosts_str})" - self.emit_event({"host": event.host, "url": url, "description": description}, "FINDING", source=event) + await self.emit_event( + {"host": event.host, "url": url, "description": description}, "FINDING", source=event + ) else: self.debug(reason) 
diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index 71ebc4e08..6cbdfcf19 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -211,7 +211,7 @@ async def handle_event(self, event): version = "<= 2019 (Either Pre-2017 (vulnerable), or 2017-2019 w/ Encrypt-Then-Mac)" description = f"Telerik RAU AXD Handler detected. Verbose Errors Enabled: [{str(verbose_errors)}] Version Guess: [{version}]" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": f"{event.data}{webresource}", "description": description}, "FINDING", event, @@ -237,7 +237,7 @@ async def handle_event(self, event): description = f"[CVE-2017-11317] [{str(version)}] {webresource}" if "fileInfo" in output.stdout: self.debug(f"Confirmed Vulnerable Telerik (version: {str(version)}") - self.emit_event( + await self.emit_event( { "severity": "CRITICAL", "description": description, @@ -276,7 +276,7 @@ async def handle_event(self, event): await self.helpers.cancel_tasks(tasks) self.debug(f"Detected Telerik UI instance ({dh})") description = f"Telerik DialogHandler detected" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": f"{event.data}{dh}", "description": description}, "FINDING", event, @@ -297,7 +297,7 @@ async def handle_event(self, event): if validate_result.status_code != 500: self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") description = f"Telerik SpellCheckHandler detected" - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": f"{event.data}{spellcheckhandler}", @@ -317,7 +317,7 @@ async def handle_event(self, event): chartimagehandler_error = "ChartImage.axd?ImageName=" result_error, _ = await self.test_detector(event.data, chartimagehandler_error) if result_error.status_code != 200: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": f"{event.data}{chartimagehandler}", @@ -331,7 +331,7 @@ async def handle_event(self, event): resp_body = 
event.data.get("body", None) if resp_body: if '":{"SerializedParameters":"' in resp_body: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], @@ -341,7 +341,7 @@ async def handle_event(self, event): event, ) elif '"_serializedConfiguration":"' in resp_body: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py index eef8f5bee..f9681385b 100644 --- a/bbot/modules/templates/bucket.py +++ b/bbot/modules/templates/bucket.py @@ -52,7 +52,7 @@ async def handle_dns_name(self, event): for d in self.delimiters: buckets.add(d.join(split)) async for bucket_name, url, tags in self.brute_buckets(buckets, permutations=self.permutations): - self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) async def handle_storage_bucket(self, event): url = event.data["url"] @@ -61,12 +61,12 @@ async def handle_storage_bucket(self, event): description, tags = await self._check_bucket_open(bucket_name, url) if description: event_data = {"host": event.host, "url": url, "description": description} - self.emit_event(event_data, "FINDING", source=event, tags=tags) + await self.emit_event(event_data, "FINDING", source=event, tags=tags) async for bucket_name, url, tags in self.brute_buckets( [bucket_name], permutations=self.permutations, omit_base=True ): - self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) async def brute_buckets(self, buckets, permutations=False, omit_base=False): buckets = set(buckets) diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 0a0067628..790b35515 100644 --- 
a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -38,7 +38,7 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) async def request_url(self, query): url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" diff --git a/bbot/modules/url_manipulation.py b/bbot/modules/url_manipulation.py index f4d598c63..983595fe1 100644 --- a/bbot/modules/url_manipulation.py +++ b/bbot/modules/url_manipulation.py @@ -74,7 +74,7 @@ async def handle_event(self, event): if "body" in reasons: reported_signature = f"Modified URL: {sig[1]}" description = f"Url Manipulation: [{','.join(reasons)}] Sig: [{reported_signature}]" - self.emit_event( + await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", source=event, diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py index f1efe08e5..4c3811af0 100644 --- a/bbot/modules/urlscan.py +++ b/bbot/modules/urlscan.py @@ -25,16 +25,18 @@ async def handle_event(self, event): domain_event = self.make_event(domain, "DNS_NAME", source=event) if domain_event: if str(domain_event.host).endswith(query) and not str(domain_event.host) == str(event.host): - self.emit_event(domain_event, abort_if=self.abort_if) + await self.emit_event(domain_event, abort_if=self.abort_if) source_event = domain_event if url: url_event = self.make_event(url, "URL_UNVERIFIED", source=source_event) if url_event: if str(url_event.host).endswith(query): if self.urls: - self.emit_event(url_event, abort_if=self.abort_if) + await self.emit_event(url_event, abort_if=self.abort_if) else: - self.emit_event(str(url_event.host), "DNS_NAME", source=event, abort_if=self.abort_if) + await self.emit_event( + str(url_event.host), "DNS_NAME", 
source=event, abort_if=self.abort_if + ) else: self.debug(f"{url_event.host} does not match {query}") diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index c2a5e4431..d9a589845 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -26,7 +26,7 @@ async def setup(self): async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) for domain, _ in await self.query(query): - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) async def query(self, query): results = set() diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index f80979619..89c7ee1fa 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py @@ -32,12 +32,12 @@ async def handle_event(self, event): waf_detections = await self.scan.run_in_executor(WW.identwaf) if waf_detections: for waf in waf_detections: - self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event) + await self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event) else: if self.config.get("generic_detect") == True: generic = await self.scan.run_in_executor(WW.genericdetect) if generic: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": url, diff --git a/bbot/modules/wappalyzer.py b/bbot/modules/wappalyzer.py index 6d30fc057..c87274a29 100644 --- a/bbot/modules/wappalyzer.py +++ b/bbot/modules/wappalyzer.py @@ -28,7 +28,7 @@ async def setup(self): async def handle_event(self, event): for res in await self.scan.run_in_executor(self.wappalyze, event.data): - self.emit_event( + await self.emit_event( {"technology": res.lower(), "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event ) diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index 4bec112bf..bf4fb769e 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -27,7 +27,7 @@ async def 
setup(self): async def handle_event(self, event): query = self.make_query(event) for result, event_type in await self.query(query): - self.emit_event(result, event_type, event, abort_if=self.abort_if) + await self.emit_event(result, event_type, event, abort_if=self.abort_if) async def query(self, query): results = set() diff --git a/bbot/modules/zoomeye.py b/bbot/modules/zoomeye.py index 3c83fa828..b1d4e7670 100644 --- a/bbot/modules/zoomeye.py +++ b/bbot/modules/zoomeye.py @@ -36,7 +36,7 @@ async def handle_event(self, event): tags = [] if not hostname.endswith(f".{query}"): tags = ["affiliate"] - self.emit_event(hostname, "DNS_NAME", event, tags=tags) + await self.emit_event(hostname, "DNS_NAME", event, tags=tags) async def query(self, query): results = set() diff --git a/bbot/test/test_step_1/test_manager_deduplication.py b/bbot/test/test_step_1/test_manager_deduplication.py index 305796bea..e046988ae 100644 --- a/bbot/test/test_step_1/test_manager_deduplication.py +++ b/bbot/test/test_step_1/test_manager_deduplication.py @@ -15,7 +15,7 @@ async def setup(self): async def handle_event(self, event): self.events.append(event) - self.emit_event(f"{self.name}.test.notreal", "DNS_NAME", source=event) + await self.emit_event(f"{self.name}.test.notreal", "DNS_NAME", source=event) class EverythingModule(DefaultModule): _name = "everything_module" @@ -27,7 +27,7 @@ class EverythingModule(DefaultModule): async def handle_event(self, event): self.events.append(event) if event.type == "DNS_NAME": - self.emit_event(f"{event.data}:88", "OPEN_TCP_PORT", source=event) + await self.emit_event(f"{event.data}:88", "OPEN_TCP_PORT", source=event) class NoSuppressDupes(DefaultModule): _name = "no_suppress_dupes" diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index 08ac2c3ae..e8e5da391 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ 
-257,7 +257,7 @@ async def filter_event(self, event): return False, "bleh" async def handle_event(self, event): - self.emit_event( + await self.emit_event( {"host": str(event.host), "description": "yep", "severity": "CRITICAL"}, "VULNERABILITY", source=event ) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index b943144ac..7a25bb4ea 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -311,7 +311,7 @@ class dummy(BaseModule): watched_events = ["*"] async def handle_event(self, event): - self.emit_event( + await self.emit_event( {"host": "www.evilcorp.com", "url": "http://www.evilcorp.com", "description": "asdf"}, "FINDING", event ) diff --git a/docs/contribution.md b/docs/contribution.md index 65b074adb..2d36cfe44 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -74,7 +74,7 @@ class MyModule(BaseModule): self.hugeinfo(f"GOT EVENT: {event}") for ip in await self.helpers.resolve(event.data): self.hugesuccess(f"EMITTING IP_ADDRESS: {ip}") - self.emit_event(ip, "IP_ADDRESS", source=event) + await self.emit_event(ip, "IP_ADDRESS", source=event) ``` After saving the module, you can run it simply by specifying it with `-m`: From a40f5cb9709bb88011b4fe6e6376c14211deb729 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 11:56:43 -0500 Subject: [PATCH 018/159] add option to store httpx responses --- bbot/modules/httpx.py | 9 ++++++++- bbot/test/test_step_2/module_tests/test_module_httpx.py | 3 +++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0c12ad740..0ad4ea66c 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -13,12 +13,13 @@ class httpx(BaseModule): flags = ["active", "safe", "web-basic", "web-thorough", "social-enum", "subdomain-enum", "cloud-enum"] meta = {"description": "Visit webpages. 
Many other modules rely on httpx"} - options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880} + options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880, "store_responses": False} options_desc = { "threads": "Number of httpx threads to use", "in_scope_only": "Only visit web resources that are in scope.", "version": "httpx version", "max_response_size": "Max response size in bytes", + "store_responses": "Save raw HTTP responses to scan folder", } deps_ansible = [ { @@ -41,6 +42,7 @@ async def setup(self): self.timeout = self.scan.config.get("httpx_timeout", 5) self.retries = self.scan.config.get("httpx_retries", 1) self.max_response_size = self.config.get("max_response_size", 5242880) + self.store_responses = self.config.get("store_responses", False) self.visited = set() self.httpx_tempdir_regex = re.compile(r"^httpx\d+$") return True @@ -104,6 +106,11 @@ async def handle_batch(self, *events): f"{self.max_response_size}", ] + if self.store_responses: + response_dir = self.scan.home / "httpx" + self.helpers.mkdir(response_dir) + command += ["-srd", str(response_dir)] + dns_resolvers = ",".join(self.helpers.system_resolvers) if dns_resolvers: command += ["-r", dns_resolvers] diff --git a/bbot/test/test_step_2/module_tests/test_module_httpx.py b/bbot/test/test_step_2/module_tests/test_module_httpx.py index 77f3e98b8..ebd9bbdb1 100644 --- a/bbot/test/test_step_2/module_tests/test_module_httpx.py +++ b/bbot/test/test_step_2/module_tests/test_module_httpx.py @@ -3,6 +3,7 @@ class TestHTTPX(ModuleTestBase): targets = ["http://127.0.0.1:8888/url", "127.0.0.1:8888"] + config_overrides = {"modules": {"httpx": {"store_responses": True}}} # HTML for a page with a login form html_with_login = """ @@ -48,6 +49,8 @@ def check(self, module_test, events): url = True assert url, "Failed to visit target URL" assert open_port, "Failed to visit target OPEN_TCP_PORT" + saved_response = 
module_test.scan.home / "httpx" / "127.0.0.1.8888[slash]url.txt" + assert saved_response.is_file(), "Failed to save raw httpx response" class TestHTTPX_404(ModuleTestBase): From 2a2e70d0b52b9db5014ca94154e0bb3c10f8ac87 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 12:00:07 -0500 Subject: [PATCH 019/159] blacked --- bbot/modules/httpx.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0ad4ea66c..e469e2f0a 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -13,7 +13,13 @@ class httpx(BaseModule): flags = ["active", "safe", "web-basic", "web-thorough", "social-enum", "subdomain-enum", "cloud-enum"] meta = {"description": "Visit webpages. Many other modules rely on httpx"} - options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880, "store_responses": False} + options = { + "threads": 50, + "in_scope_only": True, + "version": "1.2.5", + "max_response_size": 5242880, + "store_responses": False, + } options_desc = { "threads": "Number of httpx threads to use", "in_scope_only": "Only visit web resources that are in scope.", From 451e67cf90e9bf556fdcd467e14824ecc630010a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 12:04:48 -0500 Subject: [PATCH 020/159] fix tests --- bbot/modules/generic_ssrf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index d4045993b..9d75f4a9e 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -188,7 +188,7 @@ async def handle_event(self, event): for s in self.submodules.values(): await s.test(event) - def interactsh_callback(self, r): + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." 
in full_id: @@ -229,6 +229,6 @@ async def finish(self): await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): - self.interactsh_callback(r) + await self.interactsh_callback(r) except InteractshError as e: self.debug(f"Error in interact.sh: {e}") From b4fea8a72b8142356942e34d70bbfe082c71a04f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 12:59:11 -0500 Subject: [PATCH 021/159] fix tests --- bbot/modules/masscan.py | 14 +++++++------- bbot/modules/paramminer_headers.py | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py index 02598a215..15881d5b2 100644 --- a/bbot/modules/masscan.py +++ b/bbot/modules/masscan.py @@ -105,7 +105,7 @@ async def setup(self): async def handle_batch(self, *events): if self.use_cache: - self.emit_from_cache() + await self.emit_from_cache() else: targets = [str(e.data) for e in events] if not targets: @@ -138,7 +138,7 @@ async def masscan(self, targets, result_callback, ping=False): try: with open(stats_file, "w") as stats_fh: async for line in self.helpers.run_live(command, sudo=True, stderr=stats_fh): - self.process_output(line, result_callback=result_callback) + await self.process_output(line, result_callback=result_callback) finally: for file in (stats_file, target_file): file.unlink() @@ -169,7 +169,7 @@ def _build_masscan_command(self, target_file=None, dry_run=False, ping=False): command += ("--echo",) return command - def process_output(self, line, result_callback): + async def process_output(self, line, result_callback): try: j = json.loads(line) except Exception: @@ -194,19 +194,19 @@ def process_output(self, line, result_callback): if source is None: source = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) await self.emit_event(source) - result_callback(result, source=source) + await result_callback(result, source=source) - def append_alive_host(self, host, source): + async def 
append_alive_host(self, host, source): host_event = self.make_event(host, "IP_ADDRESS", source=self.get_source_event(host)) self.alive_hosts[host] = host_event self._write_ping_result(host) await self.emit_event(host_event) - def emit_open_tcp_port(self, data, source): + async def emit_open_tcp_port(self, data, source): self._write_syn_result(data) await self.emit_event(data, "OPEN_TCP_PORT", source=source) - def emit_from_cache(self): + async def emit_from_cache(self): ip_events = {} # ping scan if self.ping_cache.is_file(): diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 7044ae90a..3d3861621 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -119,7 +119,7 @@ async def do_mining(self, wl, url, batch_size, compare_helper): pass return results - def process_results(self, event, results): + async def process_results(self, event, results): url = event.data.get("url") for result, reasons, reflection in results: tags = [] @@ -171,7 +171,7 @@ async def handle_event(self, event): results = await self.do_mining(wl, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{event.data}]") - self.process_results(event, results) + await self.process_results(event, results) async def count_test(self, url): baseline = await self.helpers.request(url) From 65757648b17b4b9f1299d3843ae14e6d9f833067 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Fri, 19 Jan 2024 21:44:17 +0000 Subject: [PATCH 022/159] Refresh module docs --- docs/scanning/configuration.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index ab57e4eb9..0ebfc89dd 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -238,6 +238,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.gowitness.version | str | gowitness version | 2.4.2 | | modules.httpx.in_scope_only | bool | Only visit web resources that are in scope. | True | | modules.httpx.max_response_size | int | Max response size in bytes | 5242880 | +| modules.httpx.store_responses | bool | Save raw HTTP responses to scan folder | False | | modules.httpx.threads | int | Number of httpx threads to use | 50 | | modules.httpx.version | str | httpx version | 1.2.5 | | modules.iis_shortnames.detect_only | bool | Only detect the vulnerability and do not run the shortname scanner | True | From ca3f3c28716efdeba57f6e686e150f183e6f2f5d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 19 Jan 2024 17:21:32 -0500 Subject: [PATCH 023/159] fix tests --- bbot/core/helpers/helper.py | 4 +-- bbot/core/helpers/interactsh.py | 5 ++-- bbot/modules/host_header.py | 2 +- bbot/test/conftest.py | 22 ++++++++++++-- bbot/test/test_step_1/test_web.py | 48 +++++++++++++++++++++++++++---- 5 files changed, 68 insertions(+), 13 deletions(-) diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index dbe19f20c..36c6346c9 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -77,8 +77,8 @@ def __init__(self, config, scan=None): # cloud helpers self.cloud = CloudHelper(self) - def interactsh(self): - return Interactsh(self) + def interactsh(self, *args, **kwargs): + return Interactsh(self, *args, **kwargs) def http_compare(self, url, allow_redirects=False, include_cache_buster=True): return HttpCompare(url, self, allow_redirects=allow_redirects, include_cache_buster=include_cache_buster) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 95f76d2f5..929637dfe 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -78,12 +78,13 @@ class Interactsh: ``` """ - def __init__(self, parent_helper): + def __init__(self, parent_helper, poll_interval=10): self.parent_helper = 
parent_helper self.server = None self.correlation_id = None self.custom_server = self.parent_helper.config.get("interactsh_server", None) self.token = self.parent_helper.config.get("interactsh_token", None) + self.poll_interval = poll_interval self._poll_task = None async def register(self, callback=None): @@ -279,7 +280,7 @@ async def _poll_loop(self, callback): log.warning(e) log.trace(traceback.format_exc()) if not data_list: - await asyncio.sleep(10) + await asyncio.sleep(self.poll_interval) continue for data in data_list: if data: diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index bec77c15a..4adaa766a 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -58,7 +58,7 @@ async def finish(self): await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): - self.interactsh_callback(r) + await self.interactsh_callback(r) except InteractshError as e: self.debug(f"Error in interact.sh: {e}") diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 684ec18a2..b60a0633d 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -1,10 +1,12 @@ import ssl import shutil import pytest +import asyncio import logging from pathlib import Path from pytest_httpserver import HTTPServer +from bbot.core.helpers.misc import execute_sync_or_async from bbot.core.helpers.interactsh import server_list as interactsh_servers @@ -98,20 +100,34 @@ class Interactsh_mock: def __init__(self): self.interactions = [] self.correlation_id = "deadbeef-dead-beef-dead-beefdeadbeef" + self.stop = False def mock_interaction(self, subdomain_tag): self.interactions.append(subdomain_tag) async def register(self, callback=None): + if callable(callback): + asyncio.create_task(self.poll_loop(callback)) return "fakedomain.fakeinteractsh.com" async def deregister(self, callback=None): - pass + self.stop = True - async def poll(self): + async def poll_loop(self, callback=None): + while not self.stop: + data_list = await 
self.poll(callback) + if not data_list: + await asyncio.sleep(1) + continue + + async def poll(self, callback=None): poll_results = [] for subdomain_tag in self.interactions: - poll_results.append({"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": "HTTP"}) + result = {"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": "HTTP"} + poll_results.append(result) + if callback is not None: + await execute_sync_or_async(callback, result) + self.interactions = [] return poll_results diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 9179d42e6..13edaf725 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -104,23 +104,61 @@ async def test_web_helpers(bbot_scanner, bbot_config, bbot_httpserver): async def test_web_interactsh(bbot_scanner, bbot_config, bbot_httpserver): from bbot.core.helpers.interactsh import server_list + sync_called = False + async_called = False + + sync_correct_url = False + async_correct_url = False + scan1 = bbot_scanner("8.8.8.8", config=bbot_config) + scan1.status = "RUNNING" - interactsh_client = scan1.helpers.interactsh() + interactsh_client = scan1.helpers.interactsh(poll_interval=3) + interactsh_client2 = scan1.helpers.interactsh(poll_interval=3) async def async_callback(data): - log.debug(f"interactsh poll: {data}") + nonlocal async_called + nonlocal async_correct_url + async_called = True + d = data.get("raw-request", "") + async_correct_url |= "bbot_interactsh_test" in d + log.debug(f"interactsh poll (async): {d}") + + def sync_callback(data): + nonlocal sync_called + nonlocal sync_correct_url + sync_called = True + d = data.get("raw-request", "") + sync_correct_url |= "bbot_interactsh_test" in d + log.debug(f"interactsh poll (sync): {d}") interactsh_domain = await interactsh_client.register(callback=async_callback) - url = f"https://{interactsh_domain}/bbot_interactsh_test" + url = 
f"http://{interactsh_domain}/bbot_interactsh_test" response = await scan1.helpers.request(url) assert response.status_code == 200 - await asyncio.sleep(10) assert any(interactsh_domain.endswith(f"{s}") for s in server_list) + + interactsh_domain2 = await interactsh_client2.register(callback=sync_callback) + url2 = f"http://{interactsh_domain2}/bbot_interactsh_test" + response2 = await scan1.helpers.request(url2) + assert response2.status_code == 200 + assert any(interactsh_domain2.endswith(f"{s}") for s in server_list) + + await asyncio.sleep(10) + data_list = await interactsh_client.poll() + data_list2 = await interactsh_client2.poll() assert isinstance(data_list, list) - assert any("bbot_interactsh_test" in d.get("raw-request", "") for d in data_list) + assert isinstance(data_list2, list) + assert await interactsh_client.deregister() is None + assert await interactsh_client2.deregister() is None + + assert sync_called, "Interactsh synchrononous callback was not called" + assert async_called, "Interactsh async callback was not called" + + assert sync_correct_url, f"Data content was not correct for {url2}" + assert async_correct_url, f"Data content was not correct for {url}" @pytest.mark.asyncio From 48836874d277aa9ec212d11f98a8f0ca04e30d0c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 09:49:47 -0500 Subject: [PATCH 024/159] fix tests again --- bbot/modules/paramminer_headers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 3d3861621..3458edaa9 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -247,4 +247,4 @@ async def finish(self): results = await self.do_mining(untested_matches_copy, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{url}]") - self.process_results(event, results) + await self.process_results(event, results) From 
e60f4b7010908d0cc8d58842b413c3c991614339 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 11:35:01 -0500 Subject: [PATCH 025/159] fixed postman tests --- bbot/test/test_step_2/module_tests/test_module_postman.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_postman.py b/bbot/test/test_step_2/module_tests/test_module_postman.py index 2879b4838..4f7515162 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postman.py +++ b/bbot/test/test_step_2/module_tests/test_module_postman.py @@ -183,10 +183,10 @@ async def setup_after_prep(self, module_test): old_emit_event = module_test.module.emit_event - def new_emit_event(event_data, event_type, **kwargs): + async def new_emit_event(event_data, event_type, **kwargs): if event_data.startswith("https://www.postman.com"): event_data = event_data.replace("https://www.postman.com", "http://127.0.0.1:8888") - old_emit_event(event_data, event_type, **kwargs) + await old_emit_event(event_data, event_type, **kwargs) module_test.monkeypatch.setattr(module_test.module, "emit_event", new_emit_event) module_test.scan.helpers.dns.mock_dns({("asdf.blacklanternsecurity.com", "A"): "127.0.0.1"}) From 5af04829bf9ac9f95ffeb3f4993a8ab516152752 Mon Sep 17 00:00:00 2001 From: Dom Whewell Date: Mon, 22 Jan 2024 17:33:51 +0000 Subject: [PATCH 026/159] Discard postman workspaces that do not explicitly contain the queried domain name --- bbot/modules/postman.py | 26 +++++---- .../module_tests/test_module_postman.py | 53 ++++++++++++++++++- 2 files changed, 67 insertions(+), 12 deletions(-) diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 619107b53..e67bb7dea 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -70,16 +70,22 @@ async def query(self, query): workspaces.append(workspace) for item in workspaces: id = item.get("id", "") - interesting_urls.append(f"{self.base_url}/workspace/{id}") - environments, 
collections = await self.search_workspace(id) - interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") - for e_id in environments: - interesting_urls.append(f"{self.base_url}/environment/{e_id}") - for c_id in collections: - interesting_urls.append(f"{self.base_url}/collection/{c_id}") - requests = await self.search_collections(id) - for r_id in requests: - interesting_urls.append(f"{self.base_url}/request/{r_id}") + name = item.get("name", "") + tldextract = self.helpers.tldextract(query) + if tldextract.domain.lower() in name.lower(): + self.verbose(f"Discovered workspace {name} ({id})") + interesting_urls.append(f"{self.base_url}/workspace/{id}") + environments, collections = await self.search_workspace(id) + interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") + for e_id in environments: + interesting_urls.append(f"{self.base_url}/environment/{e_id}") + for c_id in collections: + interesting_urls.append(f"{self.base_url}/collection/{c_id}") + requests = await self.search_collections(id) + for r_id in requests: + interesting_urls.append(f"{self.base_url}/request/{r_id}") + else: + self.verbose(f"Skipping workspace {name} ({id}) as it does not appear to be in scope") return interesting_urls async def search_workspace(self, id): diff --git a/bbot/test/test_step_2/module_tests/test_module_postman.py b/bbot/test/test_step_2/module_tests/test_module_postman.py index 2879b4838..2506d0546 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postman.py +++ b/bbot/test/test_step_2/module_tests/test_module_postman.py @@ -30,9 +30,9 @@ async def setup_after_prep(self, module_test): "workspaces": [ { "visibilityStatus": "public", - "name": "SpilledSecrets", + "name": "BlackLanternSecuritySpilledSecrets", "id": "afa061be-9cb0-4520-9d4d-fe63361daf0f", - "slug": "spilledsecrets", + "slug": "blacklanternsecurityspilledsecrets", } ], "collectionForkLabel": "", @@ -60,6 +60,52 @@ async def setup_after_prep(self, module_test): }, }, }, + { + 
"score": 498.22398, + "normalizedScore": 8.43312266976538, + "document": { + "isPublisherVerified": False, + "publisherType": "user", + "curatedInList": [], + "publisherId": "28329861", + "publisherHandle": "", + "publisherLogo": "", + "isPublic": True, + "customHostName": "", + "id": "b7fa2137-b7fa2137-23bf-45d1-b176-35359af30ded", + "workspaces": [ + { + "visibilityStatus": "public", + "name": "SpilledSecrets", + "id": "92d0451b-119d-4ef0-b74c-22c400e5ce05", + "slug": "spilledsecrets", + } + ], + "collectionForkLabel": "", + "method": "POST", + "entityType": "request", + "url": "www.example.com/index", + "isBlacklisted": False, + "warehouse__updated_at_collection": "2023-12-11 02:00:00", + "isPrivateNetworkEntity": False, + "warehouse__updated_at_request": "2023-12-11 02:00:00", + "publisherName": "NA", + "name": "A test post request", + "privateNetworkMeta": "", + "privateNetworkFolders": [], + "documentType": "request", + "collection": { + "id": "007e8d67-007e8d67-932b-46ff-b95c-a2aa216edaf3", + "name": "Secret Collection", + "tags": [], + "forkCount": 0, + "watcherCount": 0, + "views": 31, + "apiId": "", + "apiName": "", + }, + }, + }, ], }, ) @@ -199,6 +245,9 @@ def check(self, module_test, events): assert any( e.data == "http://127.0.0.1:8888/_api/workspace/afa061be-9cb0-4520-9d4d-fe63361daf0f" for e in events ), "Failed to detect workspace" + assert any( + e.data != "http://127.0.0.1:8888/_api/workspace/92d0451b-119d-4ef0-b74c-22c400e5ce05" for e in events + ), "Workspace should not be detected" assert any( e.data == "http://127.0.0.1:8888/_api/workspace/afa061be-9cb0-4520-9d4d-fe63361daf0f/globals" for e in events From 6df8bffa2189e5fd2945950b3f6924c72a551ee8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 14:32:37 -0500 Subject: [PATCH 027/159] improve task counting for batch modules --- bbot/core/helpers/async_helpers.py | 19 ++++++++++--------- bbot/modules/base.py | 5 +++-- 2 files changed, 13 insertions(+), 11 deletions(-) diff 
--git a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py index 916764e53..4cc701161 100644 --- a/bbot/core/helpers/async_helpers.py +++ b/bbot/core/helpers/async_helpers.py @@ -57,33 +57,34 @@ def __init__(self): @property def value(self): - return len(self.tasks) + return sum([t.n for t in self.tasks.values()]) - def count(self, task_name, _log=True): + def count(self, task_name, n=1, _log=True): if callable(task_name): task_name = f"{task_name.__qualname__}()" - return self.Task(self, task_name, _log) + return self.Task(self, task_name, n=n, _log=_log) class Task: - def __init__(self, manager, task_name, _log=True): + def __init__(self, manager, task_name, n=1, _log=True): self.manager = manager self.task_name = task_name self.task_id = None self.start_time = None self.log = _log + self.n = n async def __aenter__(self): - self.task_id = uuid.uuid4() # generate a unique ID for the task + self.task_id = uuid.uuid4() # if self.log: # log.trace(f"Starting task {self.task_name} ({self.task_id})") - async with self.manager.lock: # acquire the lock + async with self.manager.lock: self.start_time = datetime.now() self.manager.tasks[self.task_id] = self - return self.task_id # this will be passed as 'task_id' to __aexit__ + return self async def __aexit__(self, exc_type, exc_val, exc_tb): - async with self.manager.lock: # acquire the lock - self.manager.tasks.pop(self.task_id, None) # remove only current task + async with self.manager.lock: + self.manager.tasks.pop(self.task_id, None) # if self.log: # log.trace(f"Finished task {self.task_name} ({self.task_id})") diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 6ae93c7ea..d049f711a 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -175,7 +175,7 @@ async def handle_event(self, event): """ pass - def handle_batch(self, *events): + async def handle_batch(self, *events): """Handles incoming events in batches for optimized processing. 
This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module. @@ -350,13 +350,14 @@ async def _handle_batch(self): - If a "FINISHED" event is found, invokes 'finish()' method of the module. """ finish = False - async with self._task_counter.count(f"{self.name}.handle_batch()"): + async with self._task_counter.count(f"{self.name}.handle_batch()") as counter: submitted = False if self.batch_size <= 1: return if self.num_incoming_events > 0: events, finish = await self._events_waiting() if events and not self.errored: + counter.n = len(events) self.debug(f"Handling batch of {len(events):,} events") submitted = True async with self.scan._acatch(f"{self.name}.handle_batch()"): From 3778ff977671d3a55d8b499556f428eee008e8e9 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 22 Jan 2024 14:32:50 -0500 Subject: [PATCH 028/159] lovecraftian entities --- bbot/core/helpers/names_generator.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 49ed866d6..16432cb0e 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -298,6 +298,7 @@ "ashley", "audrey", "austin", + "azathoth", "baggins", "bailey", "barbara", @@ -347,8 +348,10 @@ "courtney", "craig", "crystal", + "cthulu", "curtis", "cynthia", + "dagon", "dale", "dandelion", "daniel", @@ -554,6 +557,7 @@ "noah", "norma", "norman", + "nyarlathotep", "obama", "olivia", "padme", From 9af5ad83de83d83795914cb06857f980db358401 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 24 Jan 2024 08:50:15 -0500 Subject: [PATCH 029/159] gowitness - don't visit out-of-scope URLs (except social media pages) --- bbot/core/event/base.py | 1 + bbot/modules/gowitness.py | 26 ++++++---- 
.../module_tests/test_module_gowitness.py | 47 +++++++++++++------ 3 files changed, 50 insertions(+), 24 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 54b9dae02..f46faad01 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1165,6 +1165,7 @@ class SOCIAL(DictEvent): class WEBSCREENSHOT(DictHostEvent): _always_emit = True + _quick_emit = True class AZURE_TENANT(DictEvent): diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 5592eaa65..a335bf021 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -7,7 +7,7 @@ class gowitness(BaseModule): - watched_events = ["URL"] + watched_events = ["URL", "SOCIAL"] produced_events = ["WEBSCREENSHOT", "URL", "URL_UNVERIFIED", "TECHNOLOGY"] flags = ["active", "safe", "web-screenshots"] meta = {"description": "Take screenshots of webpages"} @@ -76,9 +76,8 @@ class gowitness(BaseModule): }, ] _batch_size = 100 - # visit up to and including the scan's configured search distance plus one - # this is one hop further than the default - scope_distance_modifier = 1 + # gowitness accepts SOCIAL events up to distance 2, otherwise it is in-scope-only + scope_distance_modifier = 2 async def setup(self): self.timeout = self.config.get("timeout", 10) @@ -120,12 +119,21 @@ async def filter_event(self, event): # ignore events from self if event.type == "URL" and event.module == self: return False, "event is from self" + # Accept out-of-scope SOCIAL pages, but not URLs + if event.scope_distance > 0: + if event.type != "SOCIAL": + return False, "event is not in-scope" return True async def handle_batch(self, *events): self.prep() - stdin = "\n".join([str(e.data) for e in events]) - events = {e.data: e for e in events} + event_dict = {} + for e in events: + key = e.data + if e.type == "SOCIAL": + key = e.data["url"] + event_dict[key] = e + stdin = "\n".join(list(event_dict)) async for line in self.helpers.run_live(self.command, input=stdin): 
self.debug(line) @@ -136,7 +144,7 @@ async def handle_batch(self, *events): final_url = screenshot["final_url"] filename = screenshot["filename"] webscreenshot_data = {"filename": filename, "url": final_url} - source_event = events[url] + source_event = event_dict[url] await self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) # emit URLs @@ -147,7 +155,7 @@ async def handle_batch(self, *events): _id = row["url_id"] source_url = self.screenshots_taken[_id] - source_event = events[source_url] + source_event = event_dict[source_url] if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): @@ -157,7 +165,7 @@ async def handle_batch(self, *events): for _, row in self.new_technologies.items(): source_id = row["url_id"] source_url = self.screenshots_taken[source_id] - source_event = events[source_url] + source_event = event_dict[source_url] technology = row["value"] tech_data = {"technology": technology, "url": source_url, "host": str(source_event.host)} await self.emit_event(tech_data, "TECHNOLOGY", source=source_event) diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index 35e0db799..89fed5da5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -3,7 +3,7 @@ class TestGowitness(ModuleTestBase): targets = ["127.0.0.1:8888"] - modules_overrides = ["gowitness", "httpx"] + modules_overrides = ["gowitness", "httpx", "social", "excavate"] import shutil from pathlib import Path @@ -14,6 +14,7 @@ class TestGowitness(ModuleTestBase): async def setup_after_prep(self, module_test): respond_args = { "response_data": """BBOT is life + @@ -21,21 +22,37 @@ async def setup_after_prep(self, module_test): "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, } module_test.set_expect_requests(respond_args=respond_args) + request_args = 
dict(uri="/blacklanternsecurity") + respond_args = dict(response_data="blacklanternsecurity github") + module_test.set_expect_requests(request_args, respond_args) + + # monkeypatch social + old_emit_event = module_test.scan.modules["social"].emit_event + + async def new_emit_event(event_data, event_type, **kwargs): + if event_data["url"] == "https://github.com/blacklanternsecurity": + event_data["url"] = event_data["url"].replace("https://github.com", "http://127.0.0.1:8888") + await old_emit_event(event_data, event_type, **kwargs) + + module_test.monkeypatch.setattr(module_test.scan.modules["social"], "emit_event", new_emit_event) def check(self, module_test, events): screenshots_path = self.home_dir / "scans" / module_test.scan.name / "gowitness" / "screenshots" screenshots = list(screenshots_path.glob("*.png")) - assert screenshots, f"No .png files found at {screenshots_path}" - url = False - webscreenshot = False - technology = False - for event in events: - if event.type == "URL_UNVERIFIED": - url = True - elif event.type == "WEBSCREENSHOT": - webscreenshot = True - elif event.type == "TECHNOLOGY": - technology = True - assert url, "No URL emitted" - assert webscreenshot, "No WEBSCREENSHOT emitted" - assert technology, "No TECHNOLOGY emitted" + assert ( + len(screenshots) == 2 + ), f"{len(screenshots):,} .png files found at {screenshots_path}, should have been 2" + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) + assert 1 == len( + [e for e in events if e.type == "SOCIAL" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity"] + ) + assert 2 == len([e for e in events if e.type == "WEBSCREENSHOT"]) + assert 1 == len([e for e in events if e.type == "WEBSCREENSHOT" and e.data["url"] == "http://127.0.0.1:8888/"]) + assert 1 == len( + [ + e + for e in events + if e.type == "WEBSCREENSHOT" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity" + ] + ) + assert len([e for e in events if e.type == 
"TECHNOLOGY"]) From c9a4af4b0f6a54120a5787348bca8fd9d2073520 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 24 Jan 2024 13:02:13 -0500 Subject: [PATCH 030/159] ensure we don't get any unwanted web screenshots --- bbot/test/test_step_2/module_tests/test_module_gowitness.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index 89fed5da5..09ad6144c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -9,7 +9,7 @@ class TestGowitness(ModuleTestBase): home_dir = Path("/tmp/.bbot_gowitness_test") shutil.rmtree(home_dir, ignore_errors=True) - config_overrides = {"force_deps": True, "home": str(home_dir)} + config_overrides = {"force_deps": True, "home": str(home_dir), "scope_report_distance": 2, "omit_event_types": []} async def setup_after_prep(self, module_test): respond_args = { @@ -43,6 +43,10 @@ def check(self, module_test, events): len(screenshots) == 2 ), f"{len(screenshots):,} .png files found at {screenshots_path}, should have been 2" assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) + assert 1 == len( + [e for e in events if e.type == "URL_UNVERIFIED" and e.data == "https://fonts.googleapis.com/"] + ) + assert 0 == len([e for e in events if e.type == "URL" and e.data == "https://fonts.googleapis.com/"]) assert 1 == len( [e for e in events if e.type == "SOCIAL" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity"] ) From 36659a5bcb9e330a18917382f9adfe0548db7a11 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Wed, 24 Jan 2024 22:26:13 +0000 Subject: [PATCH 031/159] Refresh module docs --- docs/modules/list_of_modules.md | 2 +- docs/scanning/events.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/modules/list_of_modules.md 
b/docs/modules/list_of_modules.md index 89bb6d24e..3c4154ed8 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -19,7 +19,7 @@ | fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | | generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | | git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | | host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | | httpx | scan | No | Visit webpages. 
Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | | hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index defcf46a8..a6a50d554 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -68,7 +68,7 @@ Below is a full list of event types along with which modules produce/consume the | ORG_STUB | 1 | 1 | github_org | speculate | | PASSWORD | 0 | 2 | | credshed, dehashed | | PROTOCOL | 0 | 1 | | fingerprintx | -| SOCIAL | 2 | 1 | github_org, speculate | social | +| SOCIAL | 3 | 1 | github_org, gowitness, speculate | social | | STORAGE_BUCKET | 7 | 5 | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | | TECHNOLOGY | 2 | 4 | asset_inventory, web_report | badsecrets, gowitness, internetdb, wappalyzer | | URL | 19 | 2 | ajaxpro, asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | From 021a0f63392df5448625673256c77f4721aad7c5 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 27 Jan 2024 16:36:12 -0500 Subject: [PATCH 032/159] update, pin black --- bbot/test/test_step_1/test_events.py | 8 +++-- poetry.lock | 49 ++++++++++++++-------------- pyproject.toml | 2 +- 3 files changed, 31 insertions(+), 28 deletions(-) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 1fc01a046..0f30f6498 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -321,7 +321,10 @@ async def test_events(events, scan, helpers, bbot_config): assert 
scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" - assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/テスト" + assert ( + scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) # thai assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" @@ -352,8 +355,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" assert ( - scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data - == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) assert ( scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data diff --git a/poetry.lock b/poetry.lock index 8cdf62957..fb069c1aa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -139,33 +139,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.12.1" +version = "24.1.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = 
"black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, + {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, + {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, + {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, + {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, + 
{file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, + {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, + {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, + {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, + {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, + {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, + {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, + {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, + {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, + {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, + {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, + {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, + {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, + {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, + {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, + {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, + {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, ] [package.dependencies] @@ -1860,6 +1860,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2423,4 +2424,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "55d756e108a8f225f7974eab2b98b7f1b3feeabfb23aed0107f7f0616b201bc6" +content-hash = "2f14476c664bdc8d200619c7959f1e52f18b01b252d3e0dd58be239965881bb1" diff --git a/pyproject.toml b/pyproject.toml index c845bbc60..61b6a70a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,6 @@ 
tldextract = "^5.1.1" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" -black = "^23.1.0" pytest-cov = "^4.0.0" poetry-dynamic-versioning = "^0.21.4" pytest-rerunfailures = "^11.1.2" @@ -62,6 +61,7 @@ pytest-env = "^0.8.2" pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" +black = "24.1.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From 056206fc3afa3ce9839957e31efa684b6eda3792 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 27 Jan 2024 17:14:26 -0500 Subject: [PATCH 033/159] fix massdns cname wildcard bug --- bbot/modules/massdns.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index a345b79f6..965909d31 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -147,8 +147,7 @@ async def massdns(self, domain, subdomains): for rdtype, results in rdtypes.items(): if results: domain_wildcard_rdtypes.add(rdtype) - - if "A" in domain_wildcard_rdtypes: + if any([r in domain_wildcard_rdtypes for r in ("A", "CNAME")]): self.info( f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(domain_wildcard_rdtypes)})" ) From 30de135c969ba31327f20ebefe289730becf4d73 Mon Sep 17 00:00:00 2001 From: Amir Emami Date: Sat, 27 Jan 2024 16:32:19 +0100 Subject: [PATCH 034/159] added missing web-thorough flag to three web-basic modules: oauth, filedownload, azure_realm --- bbot/modules/azure_realm.py | 2 +- bbot/modules/filedownload.py | 2 +- bbot/modules/oauth.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index a3d6ad6ba..33772921e 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -4,7 +4,7 @@ class azure_realm(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] - flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "passive", "safe"] + flags = ["affiliates", "subdomain-enum", "cloud-enum", 
"web-basic", "web-thorough", "passive", "safe"] meta = {"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm'} async def setup(self): diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py index 05db2b9df..5cf190d1f 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -14,7 +14,7 @@ class filedownload(BaseModule): watched_events = ["URL_UNVERIFIED", "HTTP_RESPONSE"] produced_events = [] - flags = ["active", "safe", "web-basic"] + flags = ["active", "safe", "web-basic", "web-thorough"] meta = {"description": "Download common filetypes such as PDF, DOCX, PPTX, etc."} options = { "extensions": [ diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index 33cb38959..0c9c26dbb 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -6,7 +6,7 @@ class OAUTH(BaseModule): watched_events = ["DNS_NAME", "URL_UNVERIFIED"] produced_events = ["DNS_NAME"] - flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "active", "safe"] + flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "web-thorough", "active", "safe"] meta = {"description": "Enumerate OAUTH and OpenID Connect services"} options = {"try_all": False} options_desc = {"try_all": "Check for OAUTH/IODC on every subdomain and URL."} From f89cf0e32d599134b14ccfd9794f60c10e8daeb3 Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Sat, 27 Jan 2024 17:53:24 -0500 Subject: [PATCH 035/159] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 61b6a70a0..fdb9b4e00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,7 @@ extend-exclude = "(test_step_1/test_manager_*)" [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v1.1.5{% if branch == "dev" %}.{{ distance }}rc{% endif %}' +format-jinja = 'v1.1.6{% if branch == "dev" %}.{{ 
distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From 4e33bb7b7d3a6da276fd5c2dc7fba21aa8e000cf Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Sun, 28 Jan 2024 01:17:57 +0000 Subject: [PATCH 036/159] Refresh module docs --- README.md | 2 +- docs/modules/list_of_modules.md | 6 +++--- docs/scanning/index.md | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index a76c2079e..c84d3c278 100644 --- a/README.md +++ b/README.md @@ -263,7 +263,7 @@ For a full list of modules, including the data types consumed and emitted by eac | passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | | subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | | active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, 
bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | | aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | | web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | | cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | diff --git a/docs/modules/list_of_modules.md 
b/docs/modules/list_of_modules.md index 3c4154ed8..d292efced 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -15,7 +15,7 @@ | dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | | ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | | ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic | HTTP_RESPONSE, URL_UNVERIFIED | | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | | fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | | generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | | git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | @@ -28,7 +28,7 @@ | nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | | ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | | nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | | paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | | paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | | paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | @@ -45,7 +45,7 @@ | affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | | anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, 
web-thorough | DNS_NAME | URL_UNVERIFIED | | azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | | binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 40e90038a..9d36837ce 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -113,7 +113,7 @@ A single module can have multiple flags. For example, the `securitytrails` modul | passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | | subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | | active | 39 | Makes active connections to target systems | ajaxpro, 
badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | | aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | | web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | | cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, 
oauth, subdomain_hijack | From 6026e22bc9b2c017ef7b20423d9827dab75b1a02 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 27 Jan 2024 22:33:20 -0500 Subject: [PATCH 037/159] fix dependabot.yml --- dependabot.yml => .github/dependabot.yml | 1 + 1 file changed, 1 insertion(+) rename dependabot.yml => .github/dependabot.yml (84%) diff --git a/dependabot.yml b/.github/dependabot.yml similarity index 84% rename from dependabot.yml rename to .github/dependabot.yml index 43b90890f..bd3b2c06f 100644 --- a/dependabot.yml +++ b/.github/dependabot.yml @@ -4,4 +4,5 @@ updates: directory: "/" schedule: interval: "weekly" + target-branch: "dev" open-pull-requests-limit: 10 From be093aaacab1192b634a7d51e82c6f8b90b47682 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 29 Jan 2024 14:15:16 -0500 Subject: [PATCH 038/159] update release history --- docs/release_history.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/release_history.md b/docs/release_history.md index 1b68587ef..2f0b16a17 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,3 +1,23 @@ +## v1.1.6 +January 29, 2024 + +## Improvements +- https://github.com/blacklanternsecurity/bbot/pull/1001 +- https://github.com/blacklanternsecurity/bbot/pull/1006 +- https://github.com/blacklanternsecurity/bbot/pull/1010 +- https://github.com/blacklanternsecurity/bbot/pull/1013 +- https://github.com/blacklanternsecurity/bbot/pull/1014 +- https://github.com/blacklanternsecurity/bbot/pull/1015 +- https://github.com/blacklanternsecurity/bbot/pull/1032 + +## Bugfixes +- https://github.com/blacklanternsecurity/bbot/pull/1005 +- https://github.com/blacklanternsecurity/bbot/pull/1022 +- https://github.com/blacklanternsecurity/bbot/pull/1030 +- https://github.com/blacklanternsecurity/bbot/pull/1033 +- https://github.com/blacklanternsecurity/bbot/pull/1034 + + ## v1.1.4 January 11, 2024 From 5d8246a573cf7aee2e2591a482f366fa8949eb19 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 
29 Jan 2024 14:21:47 -0500 Subject: [PATCH 039/159] fixing black pin --- poetry.lock | 69 ++++++++++++++++++-------------------------------- pyproject.toml | 2 +- 2 files changed, 25 insertions(+), 46 deletions(-) diff --git a/poetry.lock b/poetry.lock index fb069c1aa..61374ea00 100644 --- a/poetry.lock +++ b/poetry.lock @@ -139,33 +139,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.1.0" +version = "24.1.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, - {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, - {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, - {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, - {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, - {file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, - {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, - {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, - {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, - {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, 
- {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, - {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, - {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, - {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, - {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, - {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, - {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, - {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, - {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, - {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, - {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, - {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + 
{file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -992,16 +992,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1847,7 +1837,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1855,16 +1844,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1881,7 +1862,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1889,7 +1869,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2424,4 +2403,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "2f14476c664bdc8d200619c7959f1e52f18b01b252d3e0dd58be239965881bb1" +content-hash = "1d40c762ee4808a285f17096c5140f4e6fa7d236a56f97f0415aa81aef917ea1" diff --git a/pyproject.toml b/pyproject.toml index fdb9b4e00..53d178185 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ pytest-env = "^0.8.2" pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" -black = "24.1.0" +black = "^24.1.1" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From edab5a82fe89a106eba0039ff8d46ca7cfff076f Mon Sep 17 00:00:00 2001 From: TheTechromancer 
Date: Mon, 29 Jan 2024 15:29:38 -0500 Subject: [PATCH 040/159] clarify interactsh polling error --- bbot/core/helpers/interactsh.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 929637dfe..871ecb1c4 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -235,6 +235,8 @@ async def poll(self): r = await self.parent_helper.request( f"https://{self.server}/poll?id={self.correlation_id}&secret={self.secret}", headers=headers ) + if r is None: + raise InteractshError("Error polling interact.sh: No response from server") ret = [] data_list = r.json().get("data", None) From 8bba54cadde118f64c910d3f3104b1d251d79729 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 29 Jan 2024 15:30:32 -0500 Subject: [PATCH 041/159] update release history --- docs/release_history.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/release_history.md b/docs/release_history.md index 2f0b16a17..eef23ba39 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -9,6 +9,7 @@ January 29, 2024 - https://github.com/blacklanternsecurity/bbot/pull/1014 - https://github.com/blacklanternsecurity/bbot/pull/1015 - https://github.com/blacklanternsecurity/bbot/pull/1032 +- https://github.com/blacklanternsecurity/bbot/pull/1040 ## Bugfixes - https://github.com/blacklanternsecurity/bbot/pull/1005 From 47c4c83d8a75276542fac968204d43ca2aaf507d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 29 Jan 2024 17:46:48 -0500 Subject: [PATCH 042/159] poetry update --- poetry.lock | 680 +++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 344 insertions(+), 338 deletions(-) diff --git a/poetry.lock b/poetry.lock index 61374ea00..d1204c670 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,13 +38,13 @@ ansible-core = ">=2.14.7,<2.15.0" [[package]] name = "ansible-core" -version = "2.14.13" +version = "2.14.14" description = "Radically simple IT 
automation" optional = false python-versions = ">=3.9" files = [ - {file = "ansible-core-2.14.13.tar.gz", hash = "sha256:4e1bb334f0c3226ab48c599efe49cd5fe03f25d4558bc06c274ade2ba3e2576a"}, - {file = "ansible_core-2.14.13-py3-none-any.whl", hash = "sha256:65e96d04dce1e5dd415b4681d464f7f9a949d515f623145c4a8bc3468e75f3b0"}, + {file = "ansible-core-2.14.14.tar.gz", hash = "sha256:f06a94a88a372d4db4b3973e465022fbe3545602580864115d21a280accb7ca3"}, + {file = "ansible_core-2.14.14-py3-none-any.whl", hash = "sha256:d1d282b71b9d8fdd515ae045e5909cfa393cfa0e9fecaae2dbbb4d326ab58681"}, ] [package.dependencies] @@ -121,19 +121,22 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] @@ -384,13 +387,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "2.1.0.181" +version = "2.2.0.203" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = ">=3.9,<4.0" files = [ - {file = "cloudcheck-2.1.0.181-py3-none-any.whl", hash = 
"sha256:cb9ebd5d546038974ed13bd7c9edc83fe481af791bf53decf7ecdd9070deb23a"}, - {file = "cloudcheck-2.1.0.181.tar.gz", hash = "sha256:c3b24c72b3f082f87a81bf8542dc2a5177593fe4ea0e983acd2f486a5d127cc9"}, + {file = "cloudcheck-2.2.0.203-py3-none-any.whl", hash = "sha256:8f17944c6183c7ad96b031c4cc3efb1c1addaa23c2d17f3c81ff54c4cd494716"}, + {file = "cloudcheck-2.2.0.203.tar.gz", hash = "sha256:83eb239b024579712a19d736dc9649eff4a3f6e9314db3cd94faa883adbfe46c"}, ] [package.dependencies] @@ -410,63 +413,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = 
"coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = 
"coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = 
"coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = 
"sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = 
"coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -477,47 +480,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, + {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, + {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, + {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, + {file = 
"cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, + {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, + {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, + {file = 
"cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, + {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -551,22 +563,23 @@ files = [ [[package]] name = "dnspython" -version = "2.4.2" +version = "2.5.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, + {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"}, + {file = 
"dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"}, ] [package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"] doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +idna = ["idna (>=2.1)"] +trio = ["trio (>=0.14)"] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "docutils" @@ -658,13 +671,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.38.1" +version = "0.39.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.38.1-py3-none-any.whl", hash = "sha256:334c79d3b5964ade65c05dfcaf53518c576dedd387aaba5c9fd71212f34f1483"}, - {file = "griffe-0.38.1.tar.gz", hash = "sha256:bd68d7da7f3d87bc57eb9962b250db123efd9bbcc06c11c1a91b6e583b2a9361"}, + {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, + {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, ] [package.dependencies] @@ -783,13 +796,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -949,13 +962,13 @@ source = ["Cython (==0.29.37)"] [[package]] name = "markdown" -version = "3.5.1" +version = "3.5.2" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.1-py3-none-any.whl", hash = "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc"}, - {file = "Markdown-3.5.1.tar.gz", hash = "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.dependencies] @@ -967,61 +980,71 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = 
"MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = 
"sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -1110,13 +1133,13 @@ mkdocs = ">=1.1" [[package]] name = "mkdocs-material" -version = "9.5.3" +version = "9.5.6" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.3-py3-none-any.whl", hash = "sha256:76c93a8525cceb0b395b9cedab3428bf518cf6439adef2b940f1c1574b775d89"}, - {file = "mkdocs_material-9.5.3.tar.gz", hash = "sha256:5899219f422f0a6de784232d9d40374416302ffae3c160cacc72969fcc1ee372"}, + {file = "mkdocs_material-9.5.6-py3-none-any.whl", hash 
= "sha256:e115b90fccf5cd7f5d15b0c2f8e6246b21041628b8f590630e7fca66ed7fcf6c"}, + {file = "mkdocs_material-9.5.6.tar.gz", hash = "sha256:5b24df36d8ac6cecd611241ce6f6423ccde3e1ad89f8360c3f76d5565fc2d82a"}, ] [package.dependencies] @@ -1134,7 +1157,7 @@ requests = ">=2.26,<3.0" [package.extras] git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] -imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] @@ -1306,13 +1329,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1358,27 +1381,27 @@ virtualenv = ">=20.10.0" [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = 
"sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -1419,59 +1442,59 @@ files = [ [[package]] name = "pycryptodome" -version = "3.19.1" +version = "3.20.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:694020d2ff985cd714381b9da949a21028c24b86f562526186f6af7c7547e986"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4464b0e8fd5508bff9baf18e6fd4c6548b1ac2ce9862d6965ff6a84ec9cb302a"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:420972f9c62978e852c74055d81c354079ce3c3a2213a92c9d7e37bbc63a26e2"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1bc0c49d986a1491d66d2a56570f12e960b12508b7e71f2423f532e28857f36"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e038ab77fec0956d7aa989a3c647652937fc142ef41c9382c2ebd13c127d5b4a"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-win32.whl", hash = 
"sha256:a991f8ffe8dfe708f86690948ae46442eebdd0fff07dc1b605987939a34ec979"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-win_amd64.whl", hash = "sha256:2c16426ef49d9cba018be2340ea986837e1dfa25c2ea181787971654dd49aadd"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6d0d2b97758ebf2f36c39060520447c26455acb3bcff309c28b1c816173a6ff5"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b8b80ff92049fd042177282917d994d344365ab7e8ec2bc03e853d93d2401786"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd4e7e8bf0fc1ada854688b9b309ee607e2aa85a8b44180f91021a4dd330a928"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:8cf5d3d6cf921fa81acd1f632f6cedcc03f5f68fc50c364cd39490ba01d17c49"}, - {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297"}, - {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = 
"sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2"}, - {file = "pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03"}, - {file = "pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7"}, - {file = "pycryptodome-3.19.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:ed932eb6c2b1c4391e166e1a562c9d2f020bfff44a0e1b108f67af38b390ea89"}, - {file = "pycryptodome-3.19.1-pp27-pypy_73-win32.whl", hash = "sha256:81e9d23c0316fc1b45d984a44881b220062336bbdc340aa9218e8d0656587934"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37e531bf896b70fe302f003d3be5a0a8697737a8d177967da7e23eff60d6483c"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd4e95b0eb4b28251c825fe7aa941fe077f993e5ca9b855665935b86fbb1cc08"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22c80246c3c880c6950d2a8addf156cee74ec0dc5757d01e8e7067a3c7da015"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e70f5c839c7798743a948efa2a65d1fe96bb397fe6d7f2bde93d869fe4f0ad69"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6c3df3613592ea6afaec900fd7189d23c8c28b75b550254f4bd33fe94acb84b9"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08b445799d571041765e7d5c9ca09c5d3866c2f22eeb0dd4394a4169285184f4"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:954d156cd50130afd53f8d77f830fe6d5801bd23e97a69d358fed068f433fbfe"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b7efd46b0b4ac869046e814d83244aeab14ef787f4850644119b1c8b0ec2d637"}, - {file = "pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = 
"sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = 
"pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.0" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = 
"pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, + {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.1" typing-extensions = ">=4.6.1" [package.extras] @@ -1479,116 +1502,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = 
"pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = 
"pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = 
"pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file 
= "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, + {file = 
"pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, + {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, + {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, + {file = 
"pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, + {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, + {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, + {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, + {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, + {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, + {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = 
"sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, + {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, + {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, + 
{file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, + {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, + {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, + {file = 
"pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, + {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, ] [package.dependencies] @@ -1746,18 +1743,18 @@ Werkzeug = ">=2.0.0" [[package]] name = "pytest-httpx" -version = "0.28.0" +version = "0.29.0" description = "Send responses to httpx." optional = false python-versions = ">=3.9" files = [ - {file = "pytest_httpx-0.28.0-py3-none-any.whl", hash = "sha256:045774556a3633688742315a6981aab2806ce93bcbcc8444253ab87bca286800"}, - {file = "pytest_httpx-0.28.0.tar.gz", hash = "sha256:a82505fdf59f19eaaf2853db3f3832b3dee35d3bc58000232db2b65c5fca0614"}, + {file = "pytest_httpx-0.29.0-py3-none-any.whl", hash = "sha256:7d6fd29042e7b98ed98199ded120bc8100c8078ca306952666e89bf8807b95ff"}, + {file = "pytest_httpx-0.29.0.tar.gz", hash = "sha256:ed08ed802e2b315b83cdd16f0b26cbb2b836c29e0fde5c18bc3105f1073e0332"}, ] [package.dependencies] httpx = "==0.26.*" -pytest = "==7.*" +pytest = ">=7,<9" [package.extras] testing = ["pytest-asyncio (==0.23.*)", "pytest-cov (==4.*)"] @@ -1837,6 +1834,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1844,8 +1842,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1862,6 +1868,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1869,6 +1876,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2013,18 +2021,16 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "1.5.1" +version = "2.0.0" description = "File transport adapter for Requests" optional = false python-versions = "*" files = [ - {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, - {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, + {file = "requests-file-2.0.0.tar.gz", hash = 
"sha256:20c5931629c558fda566cacc10cfe2cd502433e628f568c34c80d96a0cc95972"}, ] [package.dependencies] requests = ">=1.0.0" -six = "*" [[package]] name = "resolvelib" @@ -2403,4 +2409,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "1d40c762ee4808a285f17096c5140f4e6fa7d236a56f97f0415aa81aef917ea1" +content-hash = "8d9864610f54050aec62bf75415e5b683a851323d054a38ff36e54d9d5c284e3" diff --git a/pyproject.toml b/pyproject.toml index 53d178185..93172c060 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,6 @@ aioconsole = "^0.6.2" pydantic = "^2.4.2" httpx = "^0.26.0" cloudcheck = "^2.1.0.181" -pytest-httpx = "^0.28.0" tldextract = "^5.1.1" [tool.poetry.group.dev.dependencies] @@ -62,6 +61,7 @@ pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" black = "^24.1.1" +pytest-httpx = "^0.29.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From 22792c825815747414a11dbafcb889fb339ed39a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 29 Jan 2024 17:47:09 -0500 Subject: [PATCH 043/159] fix event distribution stats --- bbot/modules/base.py | 1 + bbot/scanner/manager.py | 2 -- bbot/scanner/stats.py | 4 ---- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index d049f711a..a4dd81001 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -720,6 +720,7 @@ async def __event_postcheck(self, event): if self._type == "output" and not event._stats_recorded: event._stats_recorded = True + self.scan.stats.event_distributed(event) self.scan.stats.event_produced(event) self.debug(f"{event} passed post-check") diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 1fa47fef4..a511e6316 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -412,8 +412,6 @@ async def distribute_event(self, event): if acceptable_dup or graph_important: await 
mod.queue_event(event) - self.scan.stats.event_distributed(event) - async def _worker_loop(self): try: while not self.scan.stopped: diff --git a/bbot/scanner/stats.py b/bbot/scanner/stats.py index cc2b3e576..02be9a801 100644 --- a/bbot/scanner/stats.py +++ b/bbot/scanner/stats.py @@ -15,7 +15,6 @@ def __init__(self, scan): self.scan = scan self.module_stats = {} self.events_emitted_by_type = {} - self.perf_stats = [] def event_distributed(self, event): _increment(self.events_emitted_by_type, event.type) @@ -66,9 +65,6 @@ def table(self): return [header] + table def _make_table(self): - self.perf_stats.sort(key=lambda x: x[-1]) - for callback, runtime in self.perf_stats: - log.info(f"{callback}\t{runtime}") table = self.table() if len(table) == 1: table += [["None", "None", "None"]] From 373682ad54e0ddf57956f5bf23ed76719621b6bd Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 30 Jan 2024 12:54:25 -0500 Subject: [PATCH 044/159] improve event stats counter --- bbot/modules/base.py | 5 ----- bbot/modules/output/base.py | 8 ++++++++ bbot/modules/sslcert.py | 2 +- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index a4dd81001..be0d118c6 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -718,11 +718,6 @@ async def __event_postcheck(self, event): if not filter_result: return False, msg - if self._type == "output" and not event._stats_recorded: - event._stats_recorded = True - self.scan.stats.event_distributed(event) - self.scan.stats.event_produced(event) - self.debug(f"{event} passed post-check") return True, "" diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index a294c23bb..1d4ec9a36 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -41,6 +41,14 @@ def _event_precheck(self, event): return True, "precheck succeeded" + async def _event_postcheck(self, event): + acceptable, reason = await super()._event_postcheck(event) + if acceptable and 
not event._stats_recorded and event.type not in ("FINISHED",): + event._stats_recorded = True + self.scan.stats.event_distributed(event) + self.scan.stats.event_produced(event) + return acceptable, reason + def is_incoming_duplicate(self, event, add=False): is_incoming_duplicate, reason = super().is_incoming_duplicate(event, add=add) # make exception for graph-important events diff --git a/bbot/modules/sslcert.py b/bbot/modules/sslcert.py index b7455ddcd..caca5feca 100644 --- a/bbot/modules/sslcert.py +++ b/bbot/modules/sslcert.py @@ -17,7 +17,7 @@ class sslcert(BaseModule): options = {"timeout": 5.0, "skip_non_ssl": True} options_desc = {"timeout": "Socket connect timeout in seconds", "skip_non_ssl": "Don't try common non-SSL ports"} deps_apt = ["openssl"] - deps_pip = ["pyOpenSSL~=23.1.1"] + deps_pip = ["pyOpenSSL~=24.0.0"] _max_event_handlers = 25 scope_distance_modifier = 1 _priority = 2 From 6cca54b2bef0516d30f9e6e69bf414bb6b0c347d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 30 Jan 2024 14:15:30 -0500 Subject: [PATCH 045/159] fix aggregate tests --- bbot/test/test_step_2/module_tests/test_module_aggregate.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_aggregate.py b/bbot/test/test_step_2/module_tests/test_module_aggregate.py index 1049fb2a2..59f5370b3 100644 --- a/bbot/test/test_step_2/module_tests/test_module_aggregate.py +++ b/bbot/test/test_step_2/module_tests/test_module_aggregate.py @@ -2,7 +2,10 @@ class TestAggregate(ModuleTestBase): - config_overrides = {"dns_resolution": True} + config_overrides = {"dns_resolution": True, "scope_report_distance": 1} + + async def setup_before_prep(self, module_test): + module_test.scan.helpers.dns.mock_dns({("blacklanternsecurity.com", "A"): "1.2.3.4"}) def check(self, module_test, events): filename = next(module_test.scan.home.glob("scan-stats-table*.txt")) From 240bb4b23e38417e4a486b60edb3c0961fd0e325 Mon Sep 17 00:00:00 2001 
From: liquidsec Date: Thu, 1 Feb 2024 11:02:36 -0500 Subject: [PATCH 046/159] improving serialization regexes, fixing excavate bug, excavate tests --- bbot/modules/internal/excavate.py | 14 ++++--- .../module_tests/test_module_excavate.py | 38 +++++++++++++++++++ 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index da2a16986..5ccc8d36a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -21,8 +21,8 @@ def __init__(self, excavate): async def search(self, content, event, **kwargs): results = set() async for result, name in self._search(content, event, **kwargs): - results.add(result) - for result in results: + results.add((result, name)) + for result, name in results: await self.report(result, name, event, **kwargs) async def _search(self, content, event, **kwargs): @@ -254,13 +254,15 @@ async def report(self, result, name, event, **kwargs): class SerializationExtractor(BaseExtractor): regexes = { "Java": r"(?:[^a-zA-Z0-9+/]|^)(rO0[a-zA-Z0-9+/]+={,2})", - ".NET": r"AAEAAAD//[a-zA-Z0-9+/]+={,2}", - "PHP": r"YTo[xyz0123456][a-zA-Z0-9+/]+={,2}", - "Possible Compressed": r"H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2}", + ".NET": r"(?:[^a-zA-Z0-9+/]|^)(AAEAAAD//[a-zA-Z0-9+/]+={,2})", + "PHP (Array)": r"(?:[^a-zA-Z0-9+/]|^)(YTo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "PHP (String)": r"(?:[^a-zA-Z0-9+/]|^)(czo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "PHP (Object)": r"(?:[^a-zA-Z0-9+/]|^)(Tzo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "Possible Compressed": r"(?:[^a-zA-Z0-9+/]|^)(H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2})", } async def report(self, result, name, event, **kwargs): - description = f"{name} serialized object found" + description = f"{name} serialized object found: [{self.excavate.helpers.truncate_string(result,2000)}]" await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) diff --git 
a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 0add8aea6..2be83574b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -280,3 +280,41 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert any(e.data == "asdffoo.test.notreal" for e in events) assert any(e.data == "https://asdffoo.test.notreal/some/path" for e in events) + + +class TestExcavateSerializationNegative(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "

llsdtVVFlJxhcGGYTo2PMGTRNFVKZxeKTVbhyosM3Sm/5apoY1/yUmN6HVcn+Xt798SPzgXQlZMttsqp1U1iJFmFO2aCGL/v3tmm/fs7itYsoNnJCelWvm9P4ic1nlKTBOpMjT5B5NmriZwTAzZ5ASjCKcmN8Vh=

" + ) + + def check(self, module_test, events): + assert not any(e.type == "FINDING" for e in events), "Found Results without word boundary" + + +class TestExcavateSerializationPositive(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + """ +

.NET

+

AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==

+

Java

+

rO0ABXQADUhlbGxvLCB3b3JsZCE=

+

PHP (string)

+

czoyNDoiSGVsbG8sIHdvcmxkISBNb3JlIHRleHQuIjs=

+

PHP (array)

+

YTo0OntpOjA7aToxO2k6MTtzOjE0OiJzZWNvbmQgZWxlbWVudCI7aToyO2k6MztpOjM7czoxODoiTW9yZSB0ZXh0IGluIGFycmF5Ijt9

+

PHP (object)

+

TzoxMjoiU2FtcGxlT2JqZWN0IjoyOntzOjg6InByb3BlcnR5IjtzOjEzOiJJbml0aWFsIHZhbHVlIjtzOjE2OiJhZGRpdGlvbmFsU3RyaW5nIjtzOjIxOiJFeHRyYSB0ZXh0IGluIG9iamVjdC4iO30=

+

Compression

+

H4sIAAAAAAAA/yu2MjS2UvJIzcnJ11Eozy/KSVFUsgYAZN5upRUAAAA=

+ +""" + ) + + def check(self, module_test, events): + + for serialize_type in ["Java", ".NET", "PHP (Array)", "PHP (String)", "PHP (Object)", "Possible Compressed"]: + assert any( + e.type == "FINDING" and serialize_type in e.data["description"] for e in events + ), f"Did not find {serialize_type} Serialized Object" From ec1ea92ef8b739b9e455bbbe4bbf2ad6183256a8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 13:44:53 -0500 Subject: [PATCH 047/159] include source email address in PASSWORD, USERNAME, and HASHED_PASSWORD --- bbot/core/event/base.py | 9 ++--- bbot/modules/credshed.py | 19 +++++----- bbot/modules/dehashed.py | 19 +++++----- bbot/modules/internal/speculate.py | 10 ++++++ bbot/modules/templates/credential_leak.py | 33 ----------------- bbot/scanner/manager.py | 2 +- bbot/test/test_step_1/test_events.py | 4 +++ .../module_tests/test_module_credshed.py | 23 +++++++++--- .../module_tests/test_module_dehashed.py | 36 +++++++++++++++---- 9 files changed, 88 insertions(+), 67 deletions(-) delete mode 100644 bbot/modules/templates/credential_leak.py diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index f46faad01..895b8e2d5 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1246,10 +1246,11 @@ def make_event( """ # allow tags to be either a string or an array - if tags is not None: - if isinstance(tags, str): - tags = [tags] - tags = list(tags) + if not tags: + tags = [] + elif isinstance(tags, str): + tags = [tags] + tags = list(tags) if is_event(data): if scan is not None and not data.scan: diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py index 09ed57377..382644007 100644 --- a/bbot/modules/credshed.py +++ b/bbot/modules/credshed.py @@ -1,9 +1,9 @@ from contextlib import suppress -from bbot.modules.templates.credential_leak import credential_leak +from bbot.modules.base import BaseModule -class credshed(credential_leak): +class credshed(BaseModule): watched_events = ["DNS_NAME"] 
produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME", "EMAIL_ADDRESS"] flags = ["passive", "safe"] @@ -17,6 +17,7 @@ class credshed(credential_leak): "password": "Credshed password", "credshed_url": "URL of credshed server", } + target_only = True async def setup(self): self.base_url = self.config.get("credshed_url", "").rstrip("/") @@ -40,7 +41,7 @@ async def setup(self): return await super().setup() async def handle_event(self, event): - query = self.make_query(event) + query = event.data cs_query = await self.helpers.request( f"{self.base_url}/api/search", method="POST", @@ -77,10 +78,10 @@ async def handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: await self.emit_event(email_event) - if user and not self.already_seen(f"{email}:{user}"): - await self.emit_event(user, "USERNAME", source=email_event, tags=tags) - if pw and not self.already_seen(f"{email}:{pw}"): - await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + if user: + await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + if pw: + await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) for h_pw in hashes: - if h_pw and not self.already_seen(f"{email}:{h_pw}"): - await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + if h_pw: + await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py index 4b3546712..c1a35c419 100644 --- a/bbot/modules/dehashed.py +++ b/bbot/modules/dehashed.py @@ -1,15 +1,16 @@ from contextlib import suppress -from bbot.modules.templates.credential_leak import credential_leak +from bbot.modules.base import BaseModule -class dehashed(credential_leak): +class dehashed(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME"] flags = ["passive", "safe", 
"email-enum"] meta = {"description": "Execute queries against dehashed.com for exposed credentials", "auth_required": True} options = {"username": "", "api_key": ""} options_desc = {"username": "Email Address associated with your API key", "api_key": "DeHashed API Key"} + target_only = True base_url = "https://api.dehashed.com/search" @@ -50,15 +51,15 @@ async def handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: await self.emit_event(email_event) - if user and not self.already_seen(f"{email}:{user}"): - await self.emit_event(user, "USERNAME", source=email_event, tags=tags) - if pw and not self.already_seen(f"{email}:{pw}"): - await self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) - if h_pw and not self.already_seen(f"{email}:{h_pw}"): - await self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + if user: + await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + if pw: + await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) + if h_pw: + await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) async def query(self, event): - query = f"domain:{self.make_query(event)}" + query = f"domain:{event.data}" url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}" page = 0 num_entries = 0 diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 8f51d5d95..7aaf12d30 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -1,6 +1,7 @@ import random import ipaddress +from bbot.core.helpers import validators from bbot.modules.internal.base import BaseInternalModule @@ -21,6 +22,7 @@ class speculate(BaseInternalModule): "STORAGE_BUCKET", "SOCIAL", "AZURE_TENANT", + "USERNAME", ] produced_events = ["DNS_NAME", "OPEN_TCP_PORT", "IP_ADDRESS", "FINDING", "ORG_STUB"] flags = ["passive"] @@ -156,6 
+158,14 @@ async def handle_event(self, event): stub_event.scope_distance = event.scope_distance await self.emit_event(stub_event) + # USERNAME --> EMAIL + if event.type == "USERNAME": + email = event.data.split(":", 1)[-1] + if validators.soft_validate(email, "email"): + email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=["affiliate"]) + email_event.scope_distance = event.scope_distance + await self.emit_event(email_event) + async def filter_event(self, event): # don't accept errored DNS_NAMEs if any(t in event.tags for t in ("unresolved", "a-error", "aaaa-error")): diff --git a/bbot/modules/templates/credential_leak.py b/bbot/modules/templates/credential_leak.py deleted file mode 100644 index 5085e197a..000000000 --- a/bbot/modules/templates/credential_leak.py +++ /dev/null @@ -1,33 +0,0 @@ -from bbot.modules.base import BaseModule - - -class credential_leak(BaseModule): - """ - A typical free API-based subdomain enumeration module - Inherited by many other modules including sublist3r, dnsdumpster, etc. 
- """ - - async def setup(self): - self.queries_processed = set() - self.data_seen = set() - return True - - async def filter_event(self, event): - query = self.make_query(event) - query_hash = hash(query) - if query_hash not in self.queries_processed: - self.queries_processed.add(query_hash) - return True - return False, f'Already processed "{query}"' - - def make_query(self, event): - if "target" in event.tags: - return event.data - _, domain = self.helpers.split_domain(event.data) - return domain - - def already_seen(self, item): - h = hash(item) - already_seen = h in self.data_seen - self.data_seen.add(h) - return already_seen diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index a511e6316..c0f598230 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -275,7 +275,7 @@ async def _emit_event(self, event, **kwargs): if ( event.host and event.type not in ("DNS_NAME", "DNS_NAME_UNRESOLVED", "IP_ADDRESS", "IP_RANGE") - and not str(event.module) == "speculate" + and not (event.type in ("OPEN_TCP_PORT", "URL_UNVERIFIED") and str(event.module) == "speculate") ): source_module = self.scan.helpers._make_dummy_module("host", _type="internal") source_module._priority = 4 diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 0f30f6498..ccbf17c1f 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -250,6 +250,10 @@ async def test_events(events, scan, helpers, bbot_config): corrected_event3 = scan.make_event("wat.asdf.com", "IP_ADDRESS", dummy=True) assert corrected_event3.type == "DNS_NAME" + corrected_event4 = scan.make_event("bob@evilcorp.com", "USERNAME", dummy=True) + assert corrected_event4.type == "EMAIL_ADDRESS" + assert "affiliate" in corrected_event4.tags + test_vuln = scan.make_event( {"host": "EVILcorp.com", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True ) diff --git 
a/bbot/test/test_step_2/module_tests/test_module_credshed.py b/bbot/test/test_step_2/module_tests/test_module_credshed.py index 7de642412..4b9566077 100644 --- a/bbot/test/test_step_2/module_tests/test_module_credshed.py +++ b/bbot/test/test_step_2/module_tests/test_module_credshed.py @@ -74,18 +74,31 @@ def check(self, module_test, events): assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "judy@blacklanternsecurity.com"]) assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "tim@blacklanternsecurity.com"]) assert 1 == len( - [e for e in events if e.type == "HASHED_PASSWORD" and e.data == "539FE8942DEADBEEFBC49E6EB2F175AC"] + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:539FE8942DEADBEEFBC49E6EB2F175AC" + ] ) assert 1 == len( - [e for e in events if e.type == "HASHED_PASSWORD" and e.data == "D2D8F0E9A4A2DEADBEEF1AC80F36D61F"] + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:D2D8F0E9A4A2DEADBEEF1AC80F36D61F" + ] ) assert 1 == len( [ e for e in events if e.type == "HASHED_PASSWORD" - and e.data == "$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" + and e.data + == "judy@blacklanternsecurity.com:$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" ] ) - assert 1 == len([e for e in events if e.type == "PASSWORD" and e.data == "TimTamSlam69"]) - assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim"]) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:tim"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_dehashed.py b/bbot/test/test_step_2/module_tests/test_module_dehashed.py index 8b20c85c5..ab1cc20aa 100644 --- 
a/bbot/test/test_step_2/module_tests/test_module_dehashed.py +++ b/bbot/test/test_step_2/module_tests/test_module_dehashed.py @@ -37,7 +37,11 @@ class TestDehashed(ModuleTestBase): - config_overrides = {"modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}} + modules_overrides = ["dehashed", "speculate"] + config_overrides = { + "scope_report_distance": 2, + "modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}, + } async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( @@ -46,8 +50,9 @@ async def setup_before_prep(self, module_test): ) def check(self, module_test, events): - assert len(events) == 9 - assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert len(events) == 11 + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com"]) + assert 1 == len([e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity"]) assert 1 == len( [ e @@ -56,6 +61,22 @@ def check(self, module_test, events): and e.data == "bob@bob.com" and e.scope_distance == 1 and "affiliate" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "bob.com" and e.scope_distance == 1 and "affiliate" in e.tags + ] + ) + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "USERNAME" + and e.data == "bob@blacklanternsecurity.com:bob@bob.com" and e.source.data == "bob@blacklanternsecurity.com" ] ) @@ -65,8 +86,11 @@ def check(self, module_test, events): e for e in events if e.type == "HASHED_PASSWORD" - and e.data == "$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" + and e.data + == "bob@blacklanternsecurity.com:$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" ] ) - assert 1 == len([e for e in events if e.type == "PASSWORD" and 
e.data == "TimTamSlam69"]) - assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "timmy"]) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:timmy"]) From 6ede3229f9f0d7a1d945d717ccda71ce98b07f92 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 17:43:02 -0500 Subject: [PATCH 048/159] JSON module: option for SIEM-friendly output --- bbot/modules/output/json.py | 14 +++++++++++--- .../test_step_2/module_tests/test_module_json.py | 15 +++++++++++++++ 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index f13cf7808..a380ac9a1 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -7,16 +7,24 @@ class JSON(BaseOutputModule): watched_events = ["*"] meta = {"description": "Output to Newline-Delimited JSON (NDJSON)"} - options = {"output_file": "", "console": False} - options_desc = {"output_file": "Output to file", "console": "Output to console"} + options = {"output_file": "", "console": False, "siem_friendly": False} + options_desc = { + "output_file": "Output to file", + "console": "Output to console", + "siem_friendly": "Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc.", + } _preserve_graph = True async def setup(self): self._prep_output_dir("output.ndjson") + self.siem_friendly = self.config.get("siem_friendly", False) return True async def handle_event(self, event): - event_str = json.dumps(dict(event)) + event_json = dict(event) + if self.siem_friendly: + event_json["data"] = {event.type: event_json.pop("data", "")} + event_str = json.dumps(event_json) if self.file is not None: self.file.write(event_str + "\n") self.file.flush() diff --git a/bbot/test/test_step_2/module_tests/test_module_json.py 
b/bbot/test/test_step_2/module_tests/test_module_json.py index 6dafb68a5..1e67db085 100644 --- a/bbot/test/test_step_2/module_tests/test_module_json.py +++ b/bbot/test/test_step_2/module_tests/test_module_json.py @@ -12,3 +12,18 @@ def check(self, module_test, events): e = event_from_json(json.loads(lines[0])) assert e.type == "SCAN" assert e.data == f"{module_test.scan.name} ({module_test.scan.id})" + + +class TestJSONSIEMFriendly(ModuleTestBase): + modules_overrides = ["json"] + config_overrides = {"output_modules": {"json": {"siem_friendly": True}}} + + def check(self, module_test, events): + txt_file = module_test.scan.home / "output.ndjson" + lines = list(module_test.scan.helpers.read_file(txt_file)) + passed = False + for line in lines: + e = json.loads(line) + if e["data"] == {"DNS_NAME": "blacklanternsecurity.com"}: + passed = True + assert passed From 13af94a529e803071b423b34cc3618c7827d89e8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 17:50:24 -0500 Subject: [PATCH 049/159] updated docs --- docs/scanning/tips_and_tricks.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 0572bedb2..1ed370e52 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -53,6 +53,24 @@ You can also pair the web spider with subdomain enumeration: bbot -t evilcorp.com -f subdomain-enum -c spider.yml ``` +### Ingesting BBOT Data Into SIEM (Elastic, Splunk) + +If your goal is to feed BBOT data into a SIEM such as Elastic, make sure to enable this option when scanning: + +```bash +bbot -t evilcorp.com -c output_modules.json.siem_friendly=true +``` + +This nests the event's `.data` beneath its event type like so: +```json +{ + "type": "DNS_NAME", + "data": { + "DNS_NAME": "blacklanternsecurity.com" + } +} +``` + ### Custom HTTP Proxy Web pentesters may appreciate BBOT's ability to quickly populate Burp Suite site maps for all subdomains 
in a target. If your scan includes gowitness, this will capture the traffic as if you manually visited each website in your browser -- including auxiliary web resources and javascript API calls. To accomplish this, set the `http_proxy` config option like so: From a83dc73990933f504c274d9be5355a3096ac8133 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Thu, 1 Feb 2024 22:51:11 +0000 Subject: [PATCH 050/159] Refresh module docs --- docs/modules/list_of_modules.md | 224 ++++++++++++++++---------------- docs/scanning/events.md | 2 +- 2 files changed, 113 insertions(+), 113 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index d292efced..e67a11152 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -1,118 +1,118 @@ # List of Modules -| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | -|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| -| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | -| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, 
cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | -| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | -| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | -| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | -| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. 
| active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | -| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | -| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | -| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | -| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | -| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | -| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | -| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | -| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | -| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | -| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | -| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | -| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | -| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | -| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| 
url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | -| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | -| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | -| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | -| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | -| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | -| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | -| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | -| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | -| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | -| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | -| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | -| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | -| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | -| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | -| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | -| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | -| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | -| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | -| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | -| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | -| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | -| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, 
URL_UNVERIFIED | -| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | -| csv | output | No | Output to CSV | | * | | -| discord | output | No | Message a Discord channel when certain events are encountered | | * | | -| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | -| http | output | No | Send every event to a custom URL via a web request | | * | | -| human | output | No | Output to text | | * | | -| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | -| neo4j | output | No | Output to Neo4j | | * | | -| python | output | No | Output via Python API | | * | | -| slack | output | No | Message a Slack channel when certain events are encountered | | * | | -| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | -| teams | output | No | Message a Teams channel when certain events are encountered | | * | | -| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | -| websocket | output | No | Output to websockets | | * | | -| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | -| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | -| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | +| Module 
| Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| +| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | +| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | +| dastardly | scan | No | Lightweight web 
application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | +| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | +| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | +| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | +| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | +| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | +| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | +| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | +| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | +| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. 
| active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. | active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | +| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | +| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | +| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | +| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | +| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | +| 
telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | +| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | +| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | +| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | +| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | +| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | +| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | +| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | +| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | +| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | +| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | +| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | +| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | +| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | +| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | +| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | +| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | +| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | +| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | +| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | +| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | +| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, 
URL_UNVERIFIED | +| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | +| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | +| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | +| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | 
For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/scanning/events.md b/docs/scanning/events.md index a6a50d554..362a0b958 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -74,7 +74,7 @@ Below is a full list of event types along with which modules produce/consume the | URL | 19 | 2 | ajaxpro, asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | | URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | | URL_UNVERIFIED | 5 | 13 | filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, postman, robots, urlscan, wayback | -| USERNAME | 0 | 2 | | credshed, dehashed | +| USERNAME | 1 | 2 | speculate | credshed, dehashed | | VHOST | 1 | 1 | web_report | vhost | | VULNERABILITY | 2 | 7 | asset_inventory, web_report | ajaxpro, badsecrets, dastardly, generic_ssrf, internetdb, nuclei, telerik | | WAF | 1 | 1 | asset_inventory | wafw00f | From bdac5d66562ad4a54429d76410e9a65b5b93b210 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 17:52:28 -0500 Subject: [PATCH 051/159] updated docs --- docs/scanning/tips_and_tricks.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 1ed370e52..5e115bcde 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -55,7 +55,7 @@ bbot -t evilcorp.com -f subdomain-enum -c spider.yml ### Ingesting BBOT Data Into SIEM (Elastic, Splunk) -If your goal is to feed BBOT data into a SIEM such as Elastic, make sure to enable this option when scanning: +If your goal is to feed BBOT data into a SIEM such as Elastic, be sure to enable this option when scanning: ```bash 
bbot -t evilcorp.com -c output_modules.json.siem_friendly=true From 52429f38e7851d5628f6799a37af549d094d19e8 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Fri, 2 Feb 2024 09:56:34 -0600 Subject: [PATCH 052/159] Added Newsletter Module --- bbot/modules/newsletters.py | 72 +++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 bbot/modules/newsletters.py diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py new file mode 100644 index 000000000..f81cfedd1 --- /dev/null +++ b/bbot/modules/newsletters.py @@ -0,0 +1,72 @@ +# Created a new module called 'newsletters' that will scrap the websites (or recursive websites, +# thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also +# contains a 'placeholder'. The combination of these two HTML items usually signify the presence +# of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used +# to find newsletters for a future email bombing attack and/or find user-input fields that could +# be be susceptible to overflows or injections. + +from .base import BaseModule +import requests +import re +from bs4 import BeautifulSoup + +# Known Websites with Newsletters +# https://futureparty.com/ +# https://www.marketingbrew.com/ +# https://buffer.com/ +# https://www.milkkarten.net/ +# https://geekout.mattnavarra.com/ + +deps_pip = ["requests", "beautifulsoup4"] + +headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36" +} + + +class newsletters(BaseModule): + watched_events = ["URL"] + produced_events = ["NEWSLETTER"] + flags = ["passive", "safe"] + meta = {"description": "Searches for Newsletter Registrations"} + + # Parse through Website to find a Text Entry Box of 'type = email' + # and ensure that there is placeholder text within it. 
+ def find_type(self, soup): + email_type = soup.find(type="email") + if email_type: + regex = re.compile(r"placeholder") + if regex.search(str(email_type)): + return True + else: + return False + else: + return False + + async def handle_event(self, event): + if event.type == "URL": + # self.hugeinfo(f"event = {event}") + + req_url = event.data + req = requests.request("GET", req_url, headers=headers) + # req = self.helpers.request("GET", req_url, headers=headers) + + if req.status_code == 200: + # print(f'\n[+] {req_url}') + # print('[+] Status Code: 200') + soup = BeautifulSoup(req.content, "html.parser") + result = self.find_type(soup) + if result: + newsletter_result = self.make_event( + data=f"Newsletter {event.data}", + event_type="NEWSLETTER", + source=event, + tags=event.tags, + ) + self.hugesuccess(f"Yippie! There is a Newsletter at {event.data}") + self.emit_event(newsletter_result) + return + else: + return + else: + return From 3f7c3ac29539bd61c4df85a775a996ed3a8dd02a Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Fri, 2 Feb 2024 22:03:08 +0000 Subject: [PATCH 053/159] Refresh module docs --- docs/scanning/configuration.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 0ebfc89dd..2ed2d61de 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -353,6 +353,7 @@ Many modules accept their own configuration options. These options have the abil | output_modules.human.output_file | str | Output to file | | | output_modules.json.console | bool | Output to console | False | | output_modules.json.output_file | str | Output to file | | +| output_modules.json.siem_friendly | bool | Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc. 
| False | | output_modules.neo4j.password | str | Neo4j password | bbotislife | | output_modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 | | output_modules.neo4j.username | str | Neo4j username | neo4j | From c4540cd3c36f4f84df82c1b0ef8b38ca9afec320 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 3 Feb 2024 22:38:14 -0500 Subject: [PATCH 054/159] moving _baselined assignement to prevent race condition --- bbot/core/helpers/diff.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index fd8d3474f..3a385a4db 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -7,6 +7,8 @@ log = logging.getLogger("bbot.core.helpers.diff") +from time import sleep + class HttpCompare: def __init__(self, baseline_url, parent_helper, method="GET", allow_redirects=False, include_cache_buster=True): @@ -19,7 +21,6 @@ def __init__(self, baseline_url, parent_helper, method="GET", allow_redirects=Fa async def _baseline(self): if not self._baselined: - self._baselined = True # vanilla URL if self.include_cache_buster: url_1 = self.parent_helper.add_get_params(self.baseline_url, self.gen_cache_buster()).geturl() @@ -86,6 +87,7 @@ async def _baseline(self): self.baseline_ignore_headers += [x.lower() for x in dynamic_headers] self.baseline_body_distance = self.compare_body(baseline_1_json, baseline_2_json) + self._baselined = True def gen_cache_buster(self): return {self.parent_helper.rand_string(6): "1"} From 499ec3e646fc6a0f2063c7a36c1d78ed9d26b7b1 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 3 Feb 2024 22:39:52 -0500 Subject: [PATCH 055/159] removing debug sleep import --- bbot/core/helpers/diff.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index 3a385a4db..670ab83f5 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -7,8 +7,6 @@ log = logging.getLogger("bbot.core.helpers.diff") -from 
time import sleep - class HttpCompare: def __init__(self, baseline_url, parent_helper, method="GET", allow_redirects=False, include_cache_buster=True): From c302cb48f83bad5233a0635579c2673916e4fe39 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 3 Feb 2024 22:42:10 -0500 Subject: [PATCH 056/159] fix whitespace --- bbot/core/helpers/diff.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index 670ab83f5..ca83eb915 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -85,7 +85,7 @@ async def _baseline(self): self.baseline_ignore_headers += [x.lower() for x in dynamic_headers] self.baseline_body_distance = self.compare_body(baseline_1_json, baseline_2_json) - self._baselined = True + self._baselined = True def gen_cache_buster(self): return {self.parent_helper.rand_string(6): "1"} From 38982add006a04e3358a13a3d04d748345124791 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Sat, 3 Feb 2024 22:44:08 -0500 Subject: [PATCH 057/159] lowering bypass403 collapse threshold --- bbot/modules/bypass403.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 182f4b4db..1ea884c84 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -127,7 +127,7 @@ async def handle_event(self, event): self.debug(e) return - collapse_threshold = 10 + collapse_threshold = 6 results = await self.do_checks(compare_helper, event, collapse_threshold) if results is None: return From 66a33f9176e6fdb44db4037b7ad6025842ae1cb2 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Mon, 5 Feb 2024 12:40:22 -0600 Subject: [PATCH 058/159] Now using helpers.request --- bbot/modules/newsletters.py | 59 +++++++++---------- .../module_tests/test_module_newsletters.py | 15 +++++ 2 files changed, 44 insertions(+), 30 deletions(-) create mode 100644 bbot/test/test_step_2/module_tests/test_module_newsletters.py diff --git 
a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index f81cfedd1..d60441e58 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -1,8 +1,8 @@ -# Created a new module called 'newsletters' that will scrap the websites (or recursive websites, -# thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also -# contains a 'placeholder'. The combination of these two HTML items usually signify the presence -# of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used -# to find newsletters for a future email bombing attack and/or find user-input fields that could +# Created a new module called 'newsletters' that will scrap the websites (or recursive websites, +# thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also +# contains a 'placeholder'. The combination of these two HTML items usually signify the presence +# of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used +# to find newsletters for a future email bombing attack and/or find user-input fields that could # be be susceptible to overflows or injections. from .base import BaseModule @@ -25,12 +25,12 @@ class newsletters(BaseModule): - watched_events = ["URL"] + watched_events = ["HTTP_RESPONSE"] produced_events = ["NEWSLETTER"] flags = ["passive", "safe"] - meta = {"description": "Searches for Newsletter Registrations"} + meta = {"description": "Searches for Newsletter Submission Entry Fields on Websites"} - # Parse through Website to find a Text Entry Box of 'type = email' + # Parse through Website to find a Text Entry Box of 'type = email' # and ensure that there is placeholder text within it. 
def find_type(self, soup): email_type = soup.find(type="email") @@ -44,29 +44,28 @@ def find_type(self, soup): return False async def handle_event(self, event): - if event.type == "URL": - # self.hugeinfo(f"event = {event}") + req_url = event.data - req_url = event.data - req = requests.request("GET", req_url, headers=headers) - # req = self.helpers.request("GET", req_url, headers=headers) + # req = requests.request("GET", req_url, headers=headers) ## If it ain't broke, don't fix it + # req = self.helpers.request(method="GET", url=req_url, headers=headers) # Doesn't return a status_code + # req = await self.helpers.curl(url=req_url, headers=headers) # Doesn't return a status_code - if req.status_code == 200: - # print(f'\n[+] {req_url}') - # print('[+] Status Code: 200') - soup = BeautifulSoup(req.content, "html.parser") - result = self.find_type(soup) - if result: - newsletter_result = self.make_event( - data=f"Newsletter {event.data}", - event_type="NEWSLETTER", - source=event, - tags=event.tags, - ) - self.hugesuccess(f"Yippie! There is a Newsletter at {event.data}") - self.emit_event(newsletter_result) - return - else: - return + if event.data["status_code"] == 200: + soup = BeautifulSoup(event.data["body"], "html.parser") + result = self.find_type(soup) + if result: + newsletter_result = self.make_event( + data=f"Newsletter {event.data['url']}", + event_type="NEWSLETTER", + source=event, + tags=event.tags + ) + # self.hugesuccess(f"Yippie! 
There is a Newsletter at {event.data}") + self.emit_event(newsletter_result) + return else: return + else: + return + + diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py new file mode 100644 index 000000000..f7a23a166 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -0,0 +1,15 @@ +from .base import ModuleTestBase + + +class TestNewsletters(ModuleTestBase): + targets = ["blacklanternsecurity.com", "futureparty.com"] + modules_overrides = ["speculate", "httpx", "newsletters"] + + config_overrides = {} + + def check(self, module_test, events): + newsletter = False + for event in events: + if event.type == "NEWSLETTER": + newsletter = True + assert newsletter, "No NEWSLETTER emitted" From 6fe98a7c68a6fa49fbd008e475f705d50d0f97a1 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 5 Feb 2024 13:41:18 -0500 Subject: [PATCH 059/159] truncate human output to prevent blockingioerror --- bbot/core/event/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb16fda7b..c1fa9fff2 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -485,7 +485,7 @@ def data_human(self): return self._data_human() def _data_human(self): - return str(self.data) + return truncate_string(str(self.data), n=2000) def _data_load(self, data): """ From b427b1716662f0099df5b3607efaeaa2348d6689 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 5 Feb 2024 13:47:41 -0500 Subject: [PATCH 060/159] fix truncate string bug --- bbot/core/event/base.py | 1 + bbot/modules/newsletters.py | 7 +------ 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index c1fa9fff2..5cf9bea18 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -25,6 +25,7 @@ get_file_extension, validators, tagify, + truncate_string, ) diff 
--git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index d60441e58..e88d86910 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -55,10 +55,7 @@ async def handle_event(self, event): result = self.find_type(soup) if result: newsletter_result = self.make_event( - data=f"Newsletter {event.data['url']}", - event_type="NEWSLETTER", - source=event, - tags=event.tags + data=f"Newsletter {event.data['url']}", event_type="NEWSLETTER", source=event, tags=event.tags ) # self.hugesuccess(f"Yippie! There is a Newsletter at {event.data}") self.emit_event(newsletter_result) @@ -67,5 +64,3 @@ async def handle_event(self, event): return else: return - - From e980afd84f6f9d80d305a3c3a5c62fa1a045e9e3 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 10:27:09 -0500 Subject: [PATCH 061/159] log bbot version, start time, and full command at the beginning of every scan --- bbot/cli.py | 2 -- bbot/scanner/scanner.py | 3 +++ 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index 1eaf66996..9a8ab51dd 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -88,8 +88,6 @@ async def _main(): sys.exit(0) return - log.verbose(f'Command: {" ".join(sys.argv)}') - if options.agent_mode: from bbot.agent import Agent diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 31bc37680..48833d150 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -1,4 +1,5 @@ import re +import sys import asyncio import logging import traceback @@ -12,6 +13,7 @@ from collections import OrderedDict from concurrent.futures import ProcessPoolExecutor +from bbot import __version__ from bbot import config as bbot_config from .target import Target @@ -329,6 +331,7 @@ async def async_start(self): await self._prep() self._start_log_handlers() + log.verbose(f'Ran BBOT {__version__} at {scan_start_time}, command: {" ".join(sys.argv)}') if not self.target: self.warning(f"No scan targets specified") From 
738b7af1db11edc7ec4559fb5fe2d09c556ff0cb Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 10:48:25 -0500 Subject: [PATCH 062/159] fix rare telerik bug --- bbot/modules/telerik.py | 69 ++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 35 deletions(-) diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index 6cbdfcf19..45b9b31d7 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -288,44 +288,43 @@ async def handle_event(self, event): spellcheckhandler = "Telerik.Web.UI.SpellCheckHandler.axd" result, _ = await self.test_detector(event.data, spellcheckhandler) - try: - # The standard behavior for the spellcheck handler without parameters is a 500 - if result.status_code == 500: - # Sometimes webapps will just return 500 for everything, so rule out the false positive - validate_result, _ = await self.test_detector(event.data, self.helpers.rand_string()) - self.debug(validate_result) - if validate_result.status_code != 500: - self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") - description = f"Telerik SpellCheckHandler detected" - await self.emit_event( - { - "host": str(event.host), - "url": f"{event.data}{spellcheckhandler}", - "description": description, - }, - "FINDING", - event, - ) - except Exception: - pass + status_code = getattr(result, "status_code", 0) + # The standard behavior for the spellcheck handler without parameters is a 500 + if status_code == 500: + # Sometimes webapps will just return 500 for everything, so rule out the false positive + validate_result, _ = await self.test_detector(event.data, self.helpers.rand_string()) + self.debug(validate_result) + validate_status_code = getattr(validate_result, "status_code", 0) + if validate_status_code not in (0, 500): + self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") + description = f"Telerik SpellCheckHandler detected" + await self.emit_event( + { + "host": 
str(event.host), + "url": f"{event.data}{spellcheckhandler}", + "description": description, + }, + "FINDING", + event, + ) chartimagehandler = "ChartImage.axd?ImageName=bqYXJAqm315eEd6b%2bY4%2bGqZpe7a1kY0e89gfXli%2bjFw%3d" result, _ = await self.test_detector(event.data, chartimagehandler) - - if result: - if result.status_code == 200: - chartimagehandler_error = "ChartImage.axd?ImageName=" - result_error, _ = await self.test_detector(event.data, chartimagehandler_error) - if result_error.status_code != 200: - await self.emit_event( - { - "host": str(event.host), - "url": f"{event.data}{chartimagehandler}", - "description": "Telerik ChartImage AXD Handler Detected", - }, - "FINDING", - event, - ) + status_code = getattr(result, "status_code", 0) + if status_code == 200: + chartimagehandler_error = "ChartImage.axd?ImageName=" + result_error, _ = await self.test_detector(event.data, chartimagehandler_error) + error_status_code = getattr(result_error, "status_code", 0) + if error_status_code not in (0, 200): + await self.emit_event( + { + "host": str(event.host), + "url": f"{event.data}{chartimagehandler}", + "description": "Telerik ChartImage AXD Handler Detected", + }, + "FINDING", + event, + ) elif event.type == "HTTP_RESPONSE": resp_body = event.data.get("body", None) From 0c896e23429f6328bd9f36526309f2bfbe0866e0 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 11:38:32 -0500 Subject: [PATCH 063/159] Fix rare postman error --- bbot/modules/postman.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index e58169d02..6c31e893f 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -96,6 +96,8 @@ async def search_workspace(self, id): status_code = getattr(r, "status_code", 0) try: json = r.json() + if not isinstance(json, dict): + raise ValueError(f"Got unexpected value for JSON: {json}" except Exception as e: self.warning(f"Failed to decode JSON for {r.url} (HTTP status: 
{status_code}): {e}") return [], [] From 553cb8bf0726c96ad8c7659a1ab34b9ed28b43cf Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 11:45:54 -0500 Subject: [PATCH 064/159] fix syntax error --- bbot/modules/postman.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 6c31e893f..5a63f824e 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -97,7 +97,7 @@ async def search_workspace(self, id): try: json = r.json() if not isinstance(json, dict): - raise ValueError(f"Got unexpected value for JSON: {json}" + raise ValueError(f"Got unexpected value for JSON: {json}") except Exception as e: self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") return [], [] From d8a6418773c2d77ad81235293adde7e5c5665104 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 19:24:43 -0500 Subject: [PATCH 065/159] don't create asyncio tasks for dns stuff --- bbot/core/helpers/dns.py | 34 +++++++++------------------------- 1 file changed, 9 insertions(+), 25 deletions(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 9ad22116f..cc3cded6c 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -627,24 +627,13 @@ def event_cache_get(self, host): except KeyError: return set(), None, None, set() - async def _resolve_batch_coro_wrapper(self, q, **kwargs): - """ - Helps us correlate task results back to their original arguments - """ - result = await self.resolve(q, **kwargs) - return (q, result) - async def resolve_batch(self, queries, **kwargs): """ - Asynchronously resolves a batch of queries in parallel and yields the results as they are completed. - - This method wraps around `_resolve_batch_coro_wrapper` to resolve a list of queries in parallel. - It batches the queries to a manageable size and executes them asynchronously, respecting - global rate limits. + A helper to execute a bunch of DNS requests. 
Args: queries (list): List of queries to resolve. - **kwargs: Additional keyword arguments to pass to `_resolve_batch_coro_wrapper`. + **kwargs: Additional keyword arguments to pass to `resolve()`. Yields: tuple: A tuple containing the original query and its resolved value. @@ -658,13 +647,8 @@ async def resolve_batch(self, queries, **kwargs): ('evilcorp.com', {'2.2.2.2'}) """ - queries = list(queries) - batch_size = 250 - for i in range(0, len(queries), batch_size): - batch = queries[i : i + batch_size] - tasks = [asyncio.create_task(self._resolve_batch_coro_wrapper(q, **kwargs)) for q in batch] - async for task in as_completed(tasks): - yield await task + for q in queries: + (q, await self.resolve(q, **kwargs)) def extract_targets(self, record): """ @@ -837,9 +821,7 @@ async def is_wildcard(self, query, ips=None, rdtype=None): # if the caller hasn't already done the work of resolving the IPs if ips is None: # then resolve the query for all rdtypes - base_query_tasks = { - t: asyncio.create_task(self.resolve_raw(query, type=t, use_cache=True)) for t in rdtypes_to_check - } + base_query_tasks = {t: self.resolve_raw(query, type=t, use_cache=True) for t in rdtypes_to_check} for _rdtype, task in base_query_tasks.items(): raw_results, errors = await task if errors and not raw_results: @@ -949,6 +931,8 @@ async def is_wildcard_domain(self, domain, log_info=False): wildcard_domain_results[host] = self._wildcard_cache[host_hash] continue + log.verbose(f"Checking if {host} is a wildcard") + # determine if this is a wildcard domain wildcard_tasks = {t: [] for t in rdtypes_to_check} # resolve a bunch of random subdomains of the same parent @@ -958,14 +942,14 @@ async def is_wildcard_domain(self, domain, log_info=False): # continue for _ in range(self.wildcard_tests): rand_query = f"{rand_string(digits=False, length=10)}.{host}" - wildcard_task = asyncio.create_task(self.resolve(rand_query, type=rdtype, use_cache=False)) + wildcard_task = self.resolve(rand_query, type=rdtype, 
use_cache=False) wildcard_tasks[rdtype].append(wildcard_task) # combine the random results is_wildcard = False wildcard_results = dict() for rdtype, tasks in wildcard_tasks.items(): - async for task in as_completed(tasks): + for task in tasks: results = await task if results: is_wildcard = True From ce75405e022e9b8b5f7f24557e65858ec3f12cb8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 19:27:47 -0500 Subject: [PATCH 066/159] fixed resolve_batch bug --- bbot/core/helpers/dns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index cc3cded6c..f1c38cad3 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -648,7 +648,7 @@ async def resolve_batch(self, queries, **kwargs): """ for q in queries: - (q, await self.resolve(q, **kwargs)) + yield (q, await self.resolve(q, **kwargs)) def extract_targets(self, record): """ From 60e095f2eff0bade531199941f9205a04d15d363 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 19:32:51 -0500 Subject: [PATCH 067/159] multiprocessize collapse_url --- bbot/modules/bevigil.py | 2 +- bbot/modules/wayback.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index ff868e969..435ceae08 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -34,7 +34,7 @@ async def handle_event(self, event): if self.urls: urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls) if urls: - for parsed_url in await self.scan.run_in_executor(self.helpers.validators.collapse_urls, urls): + for parsed_url in await self.scan.run_in_executor_mp(self.helpers.validators.collapse_urls, urls): await self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) async def request_subdomains(self, query): diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index bf4fb769e..92dc78db5 100644 --- a/bbot/modules/wayback.py 
+++ b/bbot/modules/wayback.py @@ -56,7 +56,7 @@ async def query(self, query): dns_names = set() collapsed_urls = 0 start_time = datetime.now() - parsed_urls = await self.scan.run_in_executor( + parsed_urls = await self.scan.run_in_executor_mp( self.helpers.validators.collapse_urls, urls, threshold=self.garbage_threshold, From b6babc78024fde6d0117ad1117a9b0bd223108cd Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 19:41:38 -0500 Subject: [PATCH 068/159] logging for wildcards --- bbot/core/helpers/dns.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index f1c38cad3..4d9f4e497 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -969,6 +969,8 @@ async def is_wildcard_domain(self, domain, log_info=False): if log_info: log_fn = log.info log_fn(f"Encountered domain with wildcard DNS ({wildcard_rdtypes_str}): {host}") + else: + log.verbose(f"Finished checking {host}, it is not a wildcard") return wildcard_domain_results From a06081527d524c180b69617807b97643d65764ed Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 19:57:29 -0500 Subject: [PATCH 069/159] make sure things are awaited --- bbot/core/helpers/dns.py | 24 ++++++++---------------- 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 4d9f4e497..40b7ec1e1 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -526,9 +526,8 @@ async def resolve_event(self, event, minimal=False): types = ("A", "AAAA") if types: - tasks = [self.resolve_raw(event_host, type=t, use_cache=True) for t in types] - async for task in as_completed(tasks): - resolved_raw, errors = await task + for t in types: + resolved_raw, errors = await self.resolve_raw(event_host, type=t, use_cache=True) for rdtype, e in errors: if rdtype not in resolved_raw: event_tags.add(f"{rdtype.lower()}-error") @@ -821,9 +820,8 @@ async def is_wildcard(self, query, 
ips=None, rdtype=None): # if the caller hasn't already done the work of resolving the IPs if ips is None: # then resolve the query for all rdtypes - base_query_tasks = {t: self.resolve_raw(query, type=t, use_cache=True) for t in rdtypes_to_check} - for _rdtype, task in base_query_tasks.items(): - raw_results, errors = await task + for t in rdtypes_to_check: + raw_results, errors = await self.resolve_raw(query, type=t, use_cache=True) if errors and not raw_results: self.debug(f"Failed to resolve {query} ({_rdtype}) during wildcard detection") result[_rdtype] = (None, parent) @@ -934,23 +932,17 @@ async def is_wildcard_domain(self, domain, log_info=False): log.verbose(f"Checking if {host} is a wildcard") # determine if this is a wildcard domain - wildcard_tasks = {t: [] for t in rdtypes_to_check} + # resolve a bunch of random subdomains of the same parent + is_wildcard = False + wildcard_results = dict() for rdtype in rdtypes_to_check: # continue if a wildcard was already found for this rdtype # if rdtype in self._wildcard_cache[host_hash]: # continue for _ in range(self.wildcard_tests): rand_query = f"{rand_string(digits=False, length=10)}.{host}" - wildcard_task = self.resolve(rand_query, type=rdtype, use_cache=False) - wildcard_tasks[rdtype].append(wildcard_task) - - # combine the random results - is_wildcard = False - wildcard_results = dict() - for rdtype, tasks in wildcard_tasks.items(): - for task in tasks: - results = await task + results = await self.resolve(rand_query, type=rdtype, use_cache=False) if results: is_wildcard = True if not rdtype in wildcard_results: From 0e6254eb00337c7a377ab492ce473db9b1fb5af4 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 20:02:22 -0500 Subject: [PATCH 070/159] fix bug in dns.py --- bbot/core/helpers/dns.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 40b7ec1e1..97696b96b 100644 --- a/bbot/core/helpers/dns.py +++ 
b/bbot/core/helpers/dns.py @@ -823,8 +823,8 @@ async def is_wildcard(self, query, ips=None, rdtype=None): for t in rdtypes_to_check: raw_results, errors = await self.resolve_raw(query, type=t, use_cache=True) if errors and not raw_results: - self.debug(f"Failed to resolve {query} ({_rdtype}) during wildcard detection") - result[_rdtype] = (None, parent) + self.debug(f"Failed to resolve {query} ({t}) during wildcard detection") + result[t] = (None, parent) continue for __rdtype, answers in raw_results: base_query_results = set() From bf539b33728c6d5f0ffb93c9b6fe028c33d214e1 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 20:59:27 -0500 Subject: [PATCH 071/159] flaked --- bbot/core/helpers/dns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 97696b96b..0c1c17c01 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -13,7 +13,7 @@ from bbot.core.helpers.ratelimiter import RateLimiter from bbot.core.helpers.async_helpers import NamedLock from bbot.core.errors import ValidationError, DNSError, DNSWildcardBreak -from .misc import is_ip, is_domain, is_dns_name, domain_parents, parent_domain, rand_string, cloudcheck, as_completed +from .misc import is_ip, is_domain, is_dns_name, domain_parents, parent_domain, rand_string, cloudcheck log = logging.getLogger("bbot.core.helpers.dns") From 19fe685bad40452b444b1e339daae9ebaaff1dfd Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 21:16:38 -0500 Subject: [PATCH 072/159] fix rare dns bug, verbosify abort_if --- bbot/core/helpers/dns.py | 2 -- bbot/scanner/manager.py | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 0c1c17c01..74dc43a4a 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -950,8 +950,6 @@ async def is_wildcard_domain(self, domain, log_info=False): wildcard_results[rdtype].update(results) 
# we know this rdtype is a wildcard # so we don't need to check it anymore - with suppress(KeyError): - rdtypes_to_check.remove(rdtype) self._wildcard_cache.update({host_hash: wildcard_results}) wildcard_domain_results.update({host: wildcard_results}) diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index c0f598230..4c08ff847 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -260,7 +260,7 @@ async def _emit_event(self, event, **kwargs): abort_result, reason = abort_result msg += f": {reason}" if abort_result: - log.debug(msg) + log.verbose(msg) return # run success callback before distributing event (so it can add tags, etc.) From 126e39b21b89bf31333b2328d05f3ed6a7d2901c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 21:35:38 -0500 Subject: [PATCH 073/159] limit anubisdb due to excessive garbage results --- bbot/modules/anubisdb.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index 7b0cda171..299122295 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -6,6 +6,8 @@ class anubisdb(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] meta = {"description": "Query jldc.me's database for subdomains"} + options = {"limit": 1000} + options_desc = {"limit": "Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API)"} base_url = "https://jldc.me/anubis/subdomains" dns_abort_depth = 5 @@ -38,4 +40,4 @@ def parse_results(self, r, query): hostname = str(hostname).lower() if hostname.endswith(f".{query}") and not self.abort_if_pre(hostname): results.add(hostname) - return results + return sorted(results)[:self.config.get("limit", 1000)] From 99042a8c39a8e90631cb54533c3cf2101c778fe8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 21:41:01 -0500 Subject: [PATCH 074/159] clean up code --- bbot/modules/anubisdb.py | 5 
++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index 299122295..ceaed2bbb 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -38,6 +38,9 @@ def parse_results(self, r, query): if json: for hostname in json: hostname = str(hostname).lower() - if hostname.endswith(f".{query}") and not self.abort_if_pre(hostname): + in_scope = hostname.endswith(f".{query}") + is_ptr = self.helpers.is_ptr(hostname) + too_long = self.abort_if_pre(hostname) + if in_scope and not is_ptr and not too_long: results.add(hostname) return sorted(results)[:self.config.get("limit", 1000)] From 9c92e937c2d030bfec0241ef955e96f8f86dd908 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 1 Feb 2024 21:41:23 -0500 Subject: [PATCH 075/159] blacked --- bbot/modules/anubisdb.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index ceaed2bbb..9864e3c6d 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -7,7 +7,9 @@ class anubisdb(subdomain_enum): produced_events = ["DNS_NAME"] meta = {"description": "Query jldc.me's database for subdomains"} options = {"limit": 1000} - options_desc = {"limit": "Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API)"} + options_desc = { + "limit": "Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API)" + } base_url = "https://jldc.me/anubis/subdomains" dns_abort_depth = 5 @@ -43,4 +45,4 @@ def parse_results(self, r, query): too_long = self.abort_if_pre(hostname) if in_scope and not is_ptr and not too_long: results.add(hostname) - return sorted(results)[:self.config.get("limit", 1000)] + return sorted(results)[: self.config.get("limit", 1000)] From 43133a1422a162f15c9382698b7fe090b71629ec Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 2 
Feb 2024 10:55:17 -0500 Subject: [PATCH 076/159] remove custom cache, use cachetools --- bbot/core/helpers/async_helpers.py | 9 ++- bbot/core/helpers/cache.py | 83 --------------------------- bbot/core/helpers/dns.py | 9 +-- bbot/core/helpers/helper.py | 2 +- bbot/modules/output/emails.py | 7 ++- bbot/modules/output/subdomains.py | 4 +- bbot/test/test_step_1/test_helpers.py | 14 ----- poetry.lock | 13 ++++- pyproject.toml | 1 + 9 files changed, 32 insertions(+), 110 deletions(-) diff --git a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py index 4cc701161..e73afc515 100644 --- a/bbot/core/helpers/async_helpers.py +++ b/bbot/core/helpers/async_helpers.py @@ -5,13 +5,12 @@ import threading from datetime import datetime from queue import Queue, Empty +from cachetools import LRUCache from .misc import human_timedelta from contextlib import asynccontextmanager log = logging.getLogger("bbot.core.helpers.async_helpers") -from .cache import CacheDict - class ShuffleQueue(asyncio.Queue): def _put(self, item): @@ -37,15 +36,15 @@ class NamedLock: """ def __init__(self, max_size=1000): - self._cache = CacheDict(max_size=max_size) + self._cache = LRUCache(maxsize=max_size) @asynccontextmanager async def lock(self, name): try: - lock = self._cache.get(name) + lock = self._cache[name] except KeyError: lock = _Lock(name) - self._cache.put(name, lock) + self._cache[name] = lock async with lock: yield diff --git a/bbot/core/helpers/cache.py b/bbot/core/helpers/cache.py index 3eb54daf7..3a70fbd24 100644 --- a/bbot/core/helpers/cache.py +++ b/bbot/core/helpers/cache.py @@ -1,8 +1,6 @@ import os import time import logging -from contextlib import suppress -from collections import OrderedDict from .misc import sha1 @@ -53,84 +51,3 @@ def is_cached(self, key, cache_hrs=24 * 7): def cache_filename(self, key): return self.cache_dir / sha1(key).hexdigest() - - -_sentinel = object() - - -class CacheDict: - """ - Dictionary to store cached values, with a maximum size 
limit - """ - - def __init__(self, max_size=1000): - self._cache = OrderedDict() - self._max_size = int(max_size) - - def get(self, name, fallback=_sentinel): - name_hash = self._hash(name) - try: - return self._cache[name_hash] - except KeyError: - if fallback is not _sentinel: - return fallback - raise - finally: - with suppress(KeyError): - self._cache.move_to_end(name_hash) - self._truncate() - - def put(self, name, value): - name_hash = self._hash(name) - try: - self._cache[name_hash] = value - finally: - with suppress(KeyError): - self._cache.move_to_end(name_hash) - self._truncate() - - def _truncate(self): - if not self or len(self) <= self._max_size: - return - for nh in list(self._cache.keys()): - try: - del self._cache[nh] - except KeyError: - pass - if not self or len(self) <= self._max_size: - break - - def keys(self): - return self._cache.keys() - - def values(self): - return self._cache.values() - - def items(self): - return self._cache.items() - - def clear(self): - return self._cache.clear() - - def _hash(self, v): - if type(v) == int: - return v - return hash(str(v)) - - def __contains__(self, item): - return self._hash(item) in self._cache - - def __iter__(self): - return iter(self._cache) - - def __getitem__(self, item): - return self.get(item) - - def __setitem__(self, item, value): - self.put(item, value) - - def __bool__(self): - return bool(self._cache) - - def __len__(self): - return len(self._cache) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 74dc43a4a..35cf97c8b 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -7,6 +7,7 @@ import contextlib import dns.exception import dns.asyncresolver +from cachetools import LRUCache from contextlib import suppress from .regexes import dns_name_regex @@ -64,8 +65,8 @@ class DNSHelper: wildcard_ignore (tuple): Domains to be ignored during wildcard detection. wildcard_tests (int): Number of tests to be run for wildcard detection. Defaults to 5. 
_wildcard_cache (dict): Cache for wildcard detection results. - _dns_cache (CacheDict): Cache for DNS resolution results, limited in size. - _event_cache (CacheDict): Cache for event resolution results, tags. Limited in size. + _dns_cache (LRUCache): Cache for DNS resolution results, limited in size. + _event_cache (LRUCache): Cache for event resolution results, tags. Limited in size. resolver_file (Path): File containing system's current resolver nameservers. filter_bad_ptrs (bool): Whether to filter out DNS names that appear to be auto-generated PTR records. Defaults to True. @@ -130,8 +131,8 @@ def __init__(self, parent_helper): self.fallback_nameservers_file = self.parent_helper.wordlist_dir / "nameservers.txt" self._debug = self.parent_helper.config.get("dns_debug", False) self._dummy_modules = dict() - self._dns_cache = self.parent_helper.CacheDict(max_size=100000) - self._event_cache = self.parent_helper.CacheDict(max_size=10000) + self._dns_cache = LRUCache(maxsize=10000) + self._event_cache = LRUCache(maxsize=10000) self._event_cache_locks = NamedLock() # for mocking DNS queries diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 36c6346c9..899f3ab0b 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -48,8 +48,8 @@ class ConfigAwareHelper: from . import regexes from . 
import validators from .files import tempfile, feed_pipe, _feed_pipe, tempfile_tail + from .cache import cache_get, cache_put, cache_filename, is_cached from .command import run, run_live, _spawn_proc, _prepare_command_kwargs - from .cache import cache_get, cache_put, cache_filename, is_cached, CacheDict def __init__(self, config, scan=None): self.config = config diff --git a/bbot/modules/output/emails.py b/bbot/modules/output/emails.py index 029bc5aca..e96c5d97c 100644 --- a/bbot/modules/output/emails.py +++ b/bbot/modules/output/emails.py @@ -12,14 +12,19 @@ class Emails(Human): output_filename = "emails.txt" + async def setup(self): + self.emails_written = 0 + return await super().setup() + def _scope_distance_check(self, event): return BaseModule._scope_distance_check(self, event) async def handle_event(self, event): if self.file is not None: + self.emails_written += 1 self.file.write(f"{event.data}\n") self.file.flush() async def report(self): if getattr(self, "_file", None) is not None: - self.info(f"Saved email addresses to {self.output_file}") + self.info(f"Saved {self.emails_written:,} email addresses to {self.output_file}") diff --git a/bbot/modules/output/subdomains.py b/bbot/modules/output/subdomains.py index 49dea2db8..bfb7174ac 100644 --- a/bbot/modules/output/subdomains.py +++ b/bbot/modules/output/subdomains.py @@ -15,6 +15,7 @@ class Subdomains(Human): async def setup(self): self.include_unresolved = self.config.get("include_unresolved", False) + self.subdomains_written = 0 return await super().setup() async def filter_event(self, event): @@ -27,9 +28,10 @@ def _scope_distance_check(self, event): async def handle_event(self, event): if self.file is not None: + self.subdomains_written += 1 self.file.write(f"{event.data}\n") self.file.flush() async def report(self): if getattr(self, "_file", None) is not None: - self.info(f"Saved subdomains to {self.output_file}") + self.info(f"Saved {self.subdomains_written:,} subdomains to {self.output_file}") diff 
--git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index b972c8561..ba72d41dc 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -441,20 +441,6 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.cache_get("string", cache_hrs=24 * 7) is None assert helpers.cache_get("string", cache_hrs=24 * 14) == "wat" - cache_dict = helpers.CacheDict(max_size=10) - cache_dict.put("1", 2) - assert cache_dict["1"] == 2 - assert cache_dict.get("1") == 2 - assert len(cache_dict) == 1 - cache_dict["2"] = 3 - assert cache_dict["2"] == 3 - assert cache_dict.get("2") == 3 - assert len(cache_dict) == 2 - for i in range(20): - cache_dict[str(i)] = i + 1 - assert len(cache_dict) == 10 - assert tuple(cache_dict) == tuple(hash(str(x)) for x in range(10, 20)) - test_file = Path(scan.config["home"]) / "testfile.asdf" with open(test_file, "w") as f: for i in range(100): diff --git a/poetry.lock b/poetry.lock index d1204c670..3a7c834a5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -186,6 +186,17 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + [[package]] name = "certifi" version = "2023.11.17" @@ -2409,4 +2420,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "8d9864610f54050aec62bf75415e5b683a851323d054a38ff36e54d9d5c284e3" +content-hash = 
"4eb296ea314405bf39920f67d20eebb13cc8974254fd1643538bcb3a338976d2" diff --git a/pyproject.toml b/pyproject.toml index 93172c060..7d1b2fb32 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,7 @@ pydantic = "^2.4.2" httpx = "^0.26.0" cloudcheck = "^2.1.0.181" tldextract = "^5.1.1" +cachetools = "^5.3.2" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" From d2fbaf52079f90eb636ff3449c840eaf531bca90 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 2 Feb 2024 14:58:30 -0500 Subject: [PATCH 077/159] increase max dnscommonsrv handlers, small masscan bugfix --- bbot/modules/dnscommonsrv.py | 2 +- bbot/modules/masscan.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 958b6b612..4d7e09e4b 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -94,7 +94,7 @@ class dnscommonsrv(BaseModule): produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] meta = {"description": "Check for common SRV records"} - _max_event_handlers = 5 + _max_event_handlers = 10 async def filter_event(self, event): # skip SRV wildcards diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py index 15881d5b2..895ffe243 100644 --- a/bbot/modules/masscan.py +++ b/bbot/modules/masscan.py @@ -241,9 +241,11 @@ async def emit_from_cache(self): await self.emit_event(line, "OPEN_TCP_PORT", source=source_event) def get_source_event(self, host): - source_event = self.scan.whitelist.get(host) + source_event = self.scan.target.get(host) if source_event is None: - source_event = self.scan.root_event + source_event = self.scan.whitelist.get(host) + if source_event is None: + source_event = self.scan.root_event return source_event async def cleanup(self): From 2958a3b957db43bdfdbee95622ad36ca1f48a532 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 17:33:05 -0500 Subject: [PATCH 078/159] massdns speed optimizations --- 
bbot/modules/massdns.py | 73 +++++++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 28 deletions(-) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 965909d31..1bce928de 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -1,6 +1,7 @@ import re import json import random +import asyncio import subprocess from bbot.modules.templates.subdomain_enum import subdomain_enum @@ -13,8 +14,8 @@ class massdns(subdomain_enum): It uses massdns to brute-force subdomains. At the end of a scan, it will leverage BBOT's word cloud to recursively discover target-specific subdomain mutations. - Each subdomain discovered via mutations is tagged with the "mutation" tag. This tag includes the depth at which - the mutations is found. I.e. the first mutation will be tagged "mutation-1". The second one (a mutation of a + Each subdomain discovered via mutations is tagged with the "mutation" tag. This tag indicates the depth at which + the mutation was found. I.e. the first mutation will be tagged "mutation-1". The second one (a mutation of a mutation) will be "mutation-2". Mutations of mutations of mutations will be "mutation-3", etc. This is especially use for bug bounties because it enables you to recognize distant/rare subdomains at a glance. 
@@ -97,7 +98,10 @@ async def setup(self): cache_hrs=24 * 7, ) self.devops_mutations = list(self.helpers.word_cloud.devops_mutations) - self._mutation_run = 1 + self.mutation_run = 1 + + self.resolve_and_emit_queue = asyncio.Queue() + self.resolve_and_emit_task = asyncio.create_task(self.resolve_and_emit()) return await super().setup() async def filter_event(self, event): @@ -116,23 +120,19 @@ async def filter_event(self, event): async def handle_event(self, event): query = self.make_query(event) self.source_events.add_target(event) - self.info(f"Brute-forcing subdomains for {query} (source: {event.data})") - for hostname in await self.massdns(query, self.subdomain_list): - await self.emit_result(hostname, event, query) + results = await self.massdns(query, self.subdomain_list) + await self.resolve_and_emit_queue.put((results, event, None)) def abort_if(self, event): if not event.scope_distance == 0: return True, "event is not in scope" if "wildcard" in event.tags: return True, "event is a wildcard" - - async def emit_result(self, result, source_event, query, tags=None): - if not result == source_event: - kwargs = {"abort_if": self.abort_if} - if tags is not None: - kwargs["tags"] = tags - await self.emit_event(result, "DNS_NAME", source_event, **kwargs) + if "unresolved" in event.tags: + self.critical(f"{event} IS UNRESOLVED") + return True, "event is unresolved" + return False, "" def already_processed(self, hostname): if hash(hostname) in self.processed: @@ -204,12 +204,36 @@ async def massdns(self, domain, subdomains): ) # everything checks out - self.verbose(f"Resolving batch of {len(results):,} results") - resolved = dict([l async for l in self.helpers.resolve_batch(results, type=("A", "CNAME"))]) - resolved = {k: v for k, v in resolved.items() if v} - for hostname in resolved: - self.add_found(hostname) - return list(resolved) + return results + + async def resolve_and_emit(self): + """ + When results are found, they are placed into 
self.resolve_and_emit_queue. + The purpose of this function (which is started as a task in the module's setup()) is to consume results from + the queue, resolve them, and if they resolve, emit them. + + This exists to prevent disrupting the scan with huge batches of DNS resolutions. + """ + while 1: + results, source_event, tags = await self.resolve_and_emit_queue.get() + self.verbose(f"Resolving batch of {len(results):,} results") + async with self._task_counter.count(f"{self.name}.resolve_and_emit()"): + async for hostname, r in self.helpers.resolve_batch(results, type=("A", "CNAME")): + if not r: + self.debug(f"Discarding {hostname} because it didn't resolve") + continue + self.add_found(hostname) + if source_event is None: + source_event = self.source_events.get(hostname) + if source_event is None: + self.warning(f"Could not correlate source event from: {hostname}") + source_event = self.scan.root_event + kwargs = {"abort_if": self.abort_if, "tags": tags} + await self.emit_event(hostname, "DNS_NAME", source_event, **kwargs) + + @property + def running(self): + return super().running or self.resolve_and_emit_queue.qsize() > 0 async def _canary_check(self, domain, num_checks=50): random_subdomains = list(self.gen_random_subdomains(num_checks)) @@ -374,15 +398,8 @@ def add_mutation(_domain_hash, m): if mutations: self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") results = list(await self.massdns(query, mutations)) - for hostname in results: - source_event = self.source_events.get(hostname) - if source_event is None: - self.warning(f"Could not correlate source event from: {hostname}") - source_event = self.scan.root_event - await self.emit_result( - hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"] - ) if results: + await self.resolve_and_emit_queue.put((results, None, [f"mutation-{self.mutation_run}"])) found_mutations = True continue break @@ -390,7 +407,7 @@ def add_mutation(_domain_hash, m): 
self.warning(e) if found_mutations: - self._mutation_run += 1 + self.mutation_run += 1 def add_found(self, host): if not isinstance(host, str): From fd984cd15920920f73dc9af717f536d189e6a74f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 18:18:31 -0500 Subject: [PATCH 079/159] dnscommonsrv rework, spellchecking --- bbot/core/helpers/async_helpers.py | 2 +- bbot/core/helpers/depsinstaller/installer.py | 4 +- bbot/core/helpers/misc.py | 14 +- bbot/core/helpers/ntlm.py | 8 +- bbot/core/helpers/web.py | 2 +- bbot/modules/base.py | 2 +- bbot/modules/deadly/ffuf.py | 2 +- bbot/modules/deadly/nuclei.py | 14 +- bbot/modules/dnscommonsrv.py | 246 +++++++++++------- bbot/modules/ffuf_shortnames.py | 22 +- bbot/modules/github_codesearch.py | 4 +- bbot/test/test_step_1/test_helpers.py | 9 + bbot/test/test_step_1/test_web.py | 2 +- .../test_module_ffuf_shortnames.py | 18 +- .../module_tests/test_module_telerik.py | 2 +- 15 files changed, 219 insertions(+), 132 deletions(-) diff --git a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py index e73afc515..8434ccb0f 100644 --- a/bbot/core/helpers/async_helpers.py +++ b/bbot/core/helpers/async_helpers.py @@ -31,7 +31,7 @@ class NamedLock: """ Returns a unique asyncio.Lock() based on a provided string - Useful for preventing multiple operations from occuring on the same data in parallel + Useful for preventing multiple operations from occurring on the same data in parallel E.g. 
simultaneous DNS lookups on the same hostname """ diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index 00662b969..049baef86 100644 --- a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -157,9 +157,9 @@ async def pip_install(self, packages, constraints=None): command = [sys.executable, "-m", "pip", "install", "--upgrade"] + packages if constraints: - contraints_tempfile = self.parent_helper.tempfile(constraints, pipe=False) + constraints_tempfile = self.parent_helper.tempfile(constraints, pipe=False) command.append("--constraint") - command.append(contraints_tempfile) + command.append(constraints_tempfile) process = None try: diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 283bc3782..d02f50584 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -67,8 +67,11 @@ def is_domain(d): """ d, _ = split_host_port(d) extracted = tldextract(d) - if extracted.domain and not extracted.subdomain: - return True + if extracted.registered_domain: + if not extracted.subdomain: + return True + else: + return d.count(".") == 1 return False @@ -97,8 +100,11 @@ def is_subdomain(d): """ d, _ = split_host_port(d) extracted = tldextract(d) - if extracted.domain and extracted.subdomain: - return True + if extracted.registered_domain: + if extracted.subdomain: + return True + else: + return d.count(".") > 1 return False diff --git a/bbot/core/helpers/ntlm.py b/bbot/core/helpers/ntlm.py index e4d9cd1ca..8605ef34a 100644 --- a/bbot/core/helpers/ntlm.py +++ b/bbot/core/helpers/ntlm.py @@ -38,7 +38,7 @@ def __init__(self, pos_tup, raw): def decode_ntlm_challenge(st): hdr_tup = struct.unpack(" Date: Sat, 3 Feb 2024 18:29:21 -0500 Subject: [PATCH 080/159] fix tests --- bbot/core/helpers/misc.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index d02f50584..43fd0595f 
100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -66,6 +66,8 @@ def is_domain(d): - Port, if present in input, is ignored. """ d, _ = split_host_port(d) + if is_ip(d): + return False extracted = tldextract(d) if extracted.registered_domain: if not extracted.subdomain: @@ -99,6 +101,8 @@ def is_subdomain(d): - Port, if present in input, is ignored. """ d, _ = split_host_port(d) + if is_ip(d): + return False extracted = tldextract(d) if extracted.registered_domain: if extracted.subdomain: @@ -607,9 +611,6 @@ def is_ip(d, version=None): >>> is_ip('evilcorp.com') False """ - if isinstance(d, (ipaddress.IPv4Address, ipaddress.IPv6Address)): - if version is None or version == d.version: - return True try: ip = ipaddress.ip_address(d) if version is None or ip.version == version: From d11ad068b9465a0347b4e19de990727c143c291b Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 18:54:31 -0500 Subject: [PATCH 081/159] fix scope accuracy tests --- .../test_step_1/test_manager_scope_accuracy.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index e8e5da391..a927da8a3 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ -681,7 +681,7 @@ def custom_setup(scan): assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) assert 0 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) - assert len(all_events) == 14 + assert len(all_events) == 13 assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) assert 2 == len([e for e in 
all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) @@ -692,9 +692,8 @@ def custom_setup(scan): assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert len(all_events_nodups) == 12 + assert len(all_events_nodups) == 11 assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) @@ -705,7 +704,6 @@ def custom_setup(scan): assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and 
e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 6 @@ -719,7 +717,6 @@ def custom_setup(scan): assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999"]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) - assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) # sslcert with out-of-scope chain events, all_events, all_events_nodups, graph_output_events, graph_output_batch_events = await do_scan( @@ -739,9 +736,8 @@ def custom_setup(scan): assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) - assert 0 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) - assert len(all_events) == 12 + assert len(all_events) == 11 assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 1]) assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal 
== True and e.scope_distance == 1]) @@ -750,9 +746,8 @@ def custom_setup(scan): assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert len(all_events_nodups) == 10 + assert len(all_events_nodups) == 9 assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 1]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 1]) @@ -761,7 +756,6 @@ def custom_setup(scan): assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == 
"DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 5 @@ -773,7 +767,6 @@ def custom_setup(scan): assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) - assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) @pytest.mark.asyncio From 5da0e431d9c682fbb3f7f7705acf004c26842fc9 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 19:44:26 -0500 Subject: [PATCH 082/159] just telerik things --- bbot/test/test_step_2/module_tests/test_module_telerik.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_telerik.py b/bbot/test/test_step_2/module_tests/test_module_telerik.py index 21c4d2b86..98c511f2a 100644 --- a/bbot/test/test_step_2/module_tests/test_module_telerik.py +++ b/bbot/test/test_step_2/module_tests/test_module_telerik.py @@ -11,7 +11,7 @@ async def setup_before_prep(self, module_test): # Simulate Telerik.Web.UI.WebResource.axd?type=rau detection expect_args = {"method": "GET", "uri": "/Telerik.Web.UI.WebResource.axd", "query_string": "type=rau"} respond_args = { - "response_data": '{ "message" : "RadAsyncUpload handler is registered successfully, however, it may not be accessed directly." }' + "response_data": '{ "message" : "RadAsyncUpload handler is registered succesfully, however, it may not be accessed directly." 
}' } module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) From 834ed0c21218c2b59aa177943fec48974efa3290 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 21:32:20 -0500 Subject: [PATCH 083/159] increase dnscommonsrv threads --- bbot/modules/dnscommonsrv.py | 4 ++-- bbot/modules/sitedossier.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 4500e7fc0..eef8e2d8c 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -154,12 +154,12 @@ class dnscommonsrv(BaseModule): produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] meta = {"description": "Check for common SRV records"} - options = {"top": 50, "max_event_handlers": 5} + options = {"top": 50, "max_event_handlers": 10} options_desc = { "top": "How many of the top SRV records to check", "max_event_handlers": "How many instances of the module to run concurrently", } - _max_event_handlers = 5 + _max_event_handlers = 10 def _incoming_dedup_hash(self, event): # dedupe by parent diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index 0c797296a..86872c052 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -43,5 +43,5 @@ async def query(self, query, parse_fn=None, request_fn=None): results.add(hostname) yield hostname if '
Date: Sat, 3 Feb 2024 22:09:59 -0500 Subject: [PATCH 084/159] limit massdns brute force depth --- bbot/core/helpers/misc.py | 17 +++++++++++++++++ bbot/modules/internetdb.py | 3 +-- bbot/modules/massdns.py | 16 +++++++++++++++- bbot/test/test_step_1/test_helpers.py | 6 ++++++ 4 files changed, 39 insertions(+), 3 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 43fd0595f..aa3b645e0 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -337,6 +337,23 @@ def domain_parents(d, include_self=False): break +def subdomain_depth(d): + """ + Calculate the depth of subdomains within a given domain name. + + Args: + d (str): The domain name to analyze. + + Returns: + int: The depth of the subdomain. For example, a hostname "5.4.3.2.1.evilcorp.com" + has a subdomain depth of 5. + """ + subdomain, domain = split_domain(d) + if not subdomain: + return 0 + return subdomain.count(".") + 1 + + def parent_url(u): """ Retrieve the parent URL of a given URL. 
diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index b3e98b9fc..cc6bde575 100644 --- a/bbot/modules/internetdb.py +++ b/bbot/modules/internetdb.py @@ -40,8 +40,7 @@ class internetdb(BaseModule): flags = ["passive", "safe", "portscan", "subdomain-enum"] meta = {"description": "Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities"} - # limit outgoing queue size to help avoid rate limiting - _qsize = 100 + _qsize = 500 base_url = "https://internetdb.shodan.io" diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 1bce928de..ab913128d 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -30,11 +30,13 @@ class massdns(subdomain_enum): "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", "max_resolvers": 1000, "max_mutations": 500, + "max_depth": 5, } options_desc = { "wordlist": "Subdomain wordlist URL", "max_resolvers": "Number of concurrent massdns resolvers", "max_mutations": "Max number of smart mutations per subdomain", + "max_depth": "How many subdomains deep to brute force, i.e. 
5.4.3.2.1.evilcorp.com", } subdomain_file = None deps_ansible = [ @@ -90,6 +92,7 @@ async def setup(self): self.max_resolvers = self.config.get("max_resolvers", 1000) self.max_mutations = self.config.get("max_mutations", 500) + self.max_depth = max(1, self.config.get("max_depth", 5)) nameservers_url = ( "https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt" ) @@ -107,6 +110,18 @@ async def setup(self): async def filter_event(self, event): query = self.make_query(event) eligible, reason = await self.eligible_for_enumeration(event) + + # limit brute force depth + subdomain_depth = self.helpers.subdomain_depth(query) + 1 + if subdomain_depth > self.max_depth: + eligible = False + reason = f"subdomain depth of *.{event.data} ({subdomain_depth}) > max_depth ({self.max_depth})" + + # don't brute-force things that look like autogenerated PTRs + if self.helpers.is_ptr(query): + eligible = False + reason = f'"{query}" looks like an autogenerated PTR' + if eligible: self.add_found(event) # reject if already processed @@ -130,7 +145,6 @@ def abort_if(self, event): if "wildcard" in event.tags: return True, "event is a wildcard" if "unresolved" in event.tags: - self.critical(f"{event} IS UNRESOLVED") return True, "event is unresolved" return False, "" diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index a078110ce..c8045e595 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -182,6 +182,12 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.split_domain("192.168.0.1") == ("", "192.168.0.1") assert helpers.split_domain("dead::beef") == ("", "dead::beef") + assert helpers.subdomain_depth("a.s.d.f.evilcorp.co.uk") == 4 + assert helpers.subdomain_depth("a.s.d.f.evilcorp.com") == 4 + assert helpers.subdomain_depth("evilcorp.com") == 0 + assert helpers.subdomain_depth("a.evilcorp.com") == 1 + assert 
helpers.subdomain_depth("a.s.d.f.evilcorp.notreal") == 4 + assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) assert helpers.split_host_port("evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) From cab606df8e3c5b92f390dd172896570d3aac6298 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 22:24:01 -0500 Subject: [PATCH 085/159] small wildcard tweak --- bbot/core/helpers/dns.py | 5 +++-- bbot/modules/massdns.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 35cf97c8b..6bcac6257 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -849,12 +849,13 @@ async def is_wildcard(self, query, ips=None, rdtype=None): # for every parent domain, starting with the shortest try: for host in parents[::-1]: + # make sure we've checked that domain for wildcards + await self.is_wildcard_domain(host) + # for every rdtype for _rdtype in list(base_query_ips): # get the IPs from above query_ips = base_query_ips.get(_rdtype, set()) - # make sure we've checked that domain for wildcards - await self.is_wildcard_domain(host) host_hash = hash(host) if host_hash in self._wildcard_cache: diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index ab913128d..caf0b7fab 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -115,7 +115,7 @@ async def filter_event(self, event): subdomain_depth = self.helpers.subdomain_depth(query) + 1 if subdomain_depth > self.max_depth: eligible = False - reason = f"subdomain depth of *.{event.data} ({subdomain_depth}) > max_depth ({self.max_depth})" + reason = f"subdomain depth of *.{query} ({subdomain_depth}) > max_depth ({self.max_depth})" # don't brute-force things that look like autogenerated PTRs if self.helpers.is_ptr(query): @@ -127,6 +127,7 @@ async def filter_event(self, event): # reject if already 
processed if self.already_processed(query): return False, f'Query "{query}" was already processed' + if eligible: self.processed.add(hash(query)) return True, reason From bca67d84adb7ba706e55ea9b1117042fe0367eac Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 3 Feb 2024 22:29:59 -0500 Subject: [PATCH 086/159] internetdb speed optimization --- bbot/modules/internetdb.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index cc6bde575..5e33f1155 100644 --- a/bbot/modules/internetdb.py +++ b/bbot/modules/internetdb.py @@ -115,10 +115,11 @@ def get_ip(self, event): elif event.type == "DNS_NAME": # always try IPv4 first ipv6 = [] - for host in event.resolved_hosts: - if self.helpers.is_ip(host, version=4): - return host - elif self.helpers.is_ip(host, version=6): - ipv6.append(host) + ips = [self.helpers.make_ip_type(h) for h in event.resolved_hosts if self.helpers.is_ip(h)] + for ip in sorted(ips): + if self.helpers.is_ip(ip, version=4): + return ip + elif self.helpers.is_ip(ip, version=6): + ipv6.append(ip) for ip in ipv6: return ip From ed36be588ab6a39e14ad29d7f9b3c19b2f392dd2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sun, 4 Feb 2024 19:06:49 -0500 Subject: [PATCH 087/159] massdns tweaks --- bbot/modules/massdns.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index caf0b7fab..c71138ef3 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -158,7 +158,7 @@ async def massdns(self, domain, subdomains): subdomains = list(subdomains) domain_wildcard_rdtypes = set() - for domain, rdtypes in (await self.helpers.is_wildcard_domain(domain)).items(): + for _domain, rdtypes in (await self.helpers.is_wildcard_domain(domain)).items(): for rdtype, results in rdtypes.items(): if results: domain_wildcard_rdtypes.add(rdtype) @@ -385,10 +385,7 @@ def 
add_mutation(_domain_hash, m): for s in _subdomains: first_segment = s.split(".")[0] # skip stuff with lots of numbers (e.g. PTRs) - digits = self.digit_regex.findall(first_segment) - excessive_digits = len(digits) > 2 - long_digits = any(len(d) > 3 for d in digits) - if excessive_digits or long_digits: + if self.has_excessive_digits(first_segment): continue add_mutation(domain_hash, first_segment) for word in self.helpers.extract_words( @@ -454,3 +451,16 @@ def gen_random_subdomains(self, n=50): yield subdomain for _ in range(5): yield self.helpers.rand_string(length=8, digits=False) + + def has_excessive_digits(self, d): + """ + Identifies dns names with excessive numbers, e.g.: + - w1-2-3.evilcorp.com + - ptr1234.evilcorp.com + """ + digits = self.digit_regex.findall(d) + excessive_digits = len(digits) > 2 + long_digits = any(len(d) > 3 for d in digits) + if excessive_digits or long_digits: + return True + return False From d89cda2f5032990aa4a53c5237598b5a8eaa23d9 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 5 Feb 2024 11:17:13 -0500 Subject: [PATCH 088/159] fix internetdb bug --- bbot/modules/internetdb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index 5e33f1155..847db0c7a 100644 --- a/bbot/modules/internetdb.py +++ b/bbot/modules/internetdb.py @@ -115,8 +115,8 @@ def get_ip(self, event): elif event.type == "DNS_NAME": # always try IPv4 first ipv6 = [] - ips = [self.helpers.make_ip_type(h) for h in event.resolved_hosts if self.helpers.is_ip(h)] - for ip in sorted(ips): + ips = [h for h in event.resolved_hosts if self.helpers.is_ip(h)] + for ip in sorted([str(ip) for ip in ips]): if self.helpers.is_ip(ip, version=4): return ip elif self.helpers.is_ip(ip, version=6): From 4be75af2155196d66712683939a66dfab5ee02f4 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 5 Feb 2024 14:56:29 -0500 Subject: [PATCH 089/159] fix \s warning --- bbot/core/helpers/misc.py | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index aa3b645e0..a67e89402 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -1451,7 +1451,7 @@ def search_dict_values(d, *regexes): ... ] ... } ... } - >>> url_regexes = re.compile(r'https?://[^\s<>"]+|www\.[^\s<>"]+') + >>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\.[^\\s<>"]+') >>> list(search_dict_values(dict_to_search, url_regexes)) ["https://www.evilcorp.com"] """ From 1c5a23411a6cbe3ac5b5f675b81634b8a77a1577 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 5 Feb 2024 15:38:00 -0500 Subject: [PATCH 090/159] increase massdns qsize --- bbot/modules/massdns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index c71138ef3..84e406628 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -75,7 +75,7 @@ class massdns(subdomain_enum): }, ] reject_wildcards = "strict" - _qsize = 100 + _qsize = 10000 digit_regex = re.compile(r"\d+") From 86c0171099d38411a0cc2202d725abcc12ae2461 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 12:54:29 -0500 Subject: [PATCH 091/159] increase qsize for speculate and excavate --- bbot/modules/internal/excavate.py | 1 + bbot/modules/internal/speculate.py | 1 + 2 files changed, 2 insertions(+) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 5ccc8d36a..e4fe1c476 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -340,6 +340,7 @@ class excavate(BaseInternalModule): meta = {"description": "Passively extract juicy tidbits from scan data"} scope_distance_modifier = None + _qsize = 10000 async def setup(self): self.csp = CSPExtractor(self) diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 7aaf12d30..c039b3b1b 100644 --- a/bbot/modules/internal/speculate.py +++ 
b/bbot/modules/internal/speculate.py @@ -35,6 +35,7 @@ class speculate(BaseInternalModule): } scope_distance_modifier = 1 _priority = 4 + _qsize = 10000 async def setup(self): scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"] From e02c6865acdcd5cb3415e78492ae9041d74ea799 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 12:59:06 -0500 Subject: [PATCH 092/159] log version command: verbose() --> trace() --- bbot/scanner/scanner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 48833d150..311c2048c 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -331,7 +331,7 @@ async def async_start(self): await self._prep() self._start_log_handlers() - log.verbose(f'Ran BBOT {__version__} at {scan_start_time}, command: {" ".join(sys.argv)}') + self.trace(f'Ran BBOT {__version__} at {scan_start_time}, command: {" ".join(sys.argv)}') if not self.target: self.warning(f"No scan targets specified") From e9cb4fdfa6c8688e0322c28b6bc91d22fd83bb58 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 13:17:12 -0500 Subject: [PATCH 093/159] allow independent http/dns debugging (without needing -d) --- bbot/core/helpers/dns.py | 2 +- bbot/core/helpers/web.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 6bcac6257..2d4016754 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -1009,7 +1009,7 @@ def _parse_rdtype(self, t, default=None): def debug(self, *args, **kwargs): if self._debug: - log.debug(*args, **kwargs) + log.trace(*args, **kwargs) def _get_dummy_module(self, name): try: diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index 41a755c0e..27b1c8717 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -55,7 +55,7 @@ def __init__(self, *args, **kwargs): http_debug = 
self._bbot_scan.config.get("http_debug", None) if http_debug: - log.debug(f"Creating AsyncClient: {args}, {kwargs}") + log.trace(f"Creating AsyncClient: {args}, {kwargs}") self._persist_cookies = kwargs.pop("persist_cookies", True) @@ -224,10 +224,10 @@ async def request(self, *args, **kwargs): async with self._acatch(url, raise_error): if self.http_debug: logstr = f"Web request: {str(args)}, {str(kwargs)}" - log.debug(logstr) + log.trace(logstr) response = await client.request(*args, **kwargs) if self.http_debug: - log.debug( + log.trace( f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}" ) return response From f54081121dcba2b030f93c928bfe677d8626b77b Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Tue, 6 Feb 2024 12:50:03 -0600 Subject: [PATCH 094/159] Added/Fixed Newsletters Tests; which added Newsletters to the Docs --- .gitignore | 1 + README.md | 44 ++++++++--------- bbot/modules/newsletters.py | 9 +--- .../module_tests/test_module_newsletters.py | 48 ++++++++++++++++--- docs/modules/list_of_modules.md | 1 + docs/scanning/advanced.md | 22 ++++----- docs/scanning/events.md | 3 +- docs/scanning/index.md | 44 ++++++++--------- 8 files changed, 102 insertions(+), 70 deletions(-) diff --git a/.gitignore b/.gitignore index c18dd8d83..57ff75186 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ __pycache__/ +.coverage* diff --git a/README.md b/README.md index a76c2079e..14613258f 100644 --- a/README.md +++ b/README.md @@ -257,27 +257,27 @@ BBOT consistently finds 20-50% more subdomains than other tools. The bigger the For a full list of modules, including the data types consumed and emitted by each one, see [List of Modules](https://www.blacklanternsecurity.com/bbot/modules/list_of_modules/). 
-| Flag | # Modules | Description | Modules | -|------------------|-------------|-----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 75 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, 
censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | -| aggressive | 19 | Generates a large amount 
of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | -| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | -| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | -| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | -| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | Modules | 
+|------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 76 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 58 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, 
chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, newsletters, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| aggressive | 19 | Generates a large amount of 
network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | +| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index e88d86910..5aec4457e 
100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -6,7 +6,6 @@ # be be susceptible to overflows or injections. from .base import BaseModule -import requests import re from bs4 import BeautifulSoup @@ -44,18 +43,12 @@ def find_type(self, soup): return False async def handle_event(self, event): - req_url = event.data - - # req = requests.request("GET", req_url, headers=headers) ## If it ain't broke, don't fix it - # req = self.helpers.request(method="GET", url=req_url, headers=headers) # Doesn't return a status_code - # req = await self.helpers.curl(url=req_url, headers=headers) # Doesn't return a status_code - if event.data["status_code"] == 200: soup = BeautifulSoup(event.data["body"], "html.parser") result = self.find_type(soup) if result: newsletter_result = self.make_event( - data=f"Newsletter {event.data['url']}", event_type="NEWSLETTER", source=event, tags=event.tags + data=event.data['url'], event_type="NEWSLETTER", source=event, tags=event.tags ) # self.hugesuccess(f"Yippie! 
There is a Newsletter at {event.data}") self.emit_event(newsletter_result) diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index f7a23a166..d070ae052 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -2,14 +2,50 @@ class TestNewsletters(ModuleTestBase): - targets = ["blacklanternsecurity.com", "futureparty.com"] + found_tgt = "http://127.0.0.1:8888/found" + missing_tgt = "http://127.0.0.1:8888/missing" + targets = [found_tgt, missing_tgt] modules_overrides = ["speculate", "httpx", "newsletters"] - config_overrides = {} + html_with_newsletter = """ + + """ + + html_without_newsletter = """ + + """ + + async def setup_after_prep(self, module_test): + request_args = dict(uri="/found", headers={"test": "header"}) + respond_args = dict(response_data=self.html_with_newsletter) + module_test.set_expect_requests(request_args, respond_args) + request_args = dict(uri="/missing", headers={"test": "header"}) + respond_args = dict(response_data=self.html_without_newsletter) + module_test.set_expect_requests(request_args, respond_args) def check(self, module_test, events): - newsletter = False - for event in events: + count = 0 + for event in events: if event.type == "NEWSLETTER": - newsletter = True - assert newsletter, "No NEWSLETTER emitted" + # Verify Positive Result + if event.data == self.found_tgt: + count += 1 + # Verify Negative Result (should skip this statement if correct) + elif event.data == self.missing_tgt: + count += -1 + assert count==1, f"NEWSLETTER Error - Expect count of 1 but got {count}" \ No newline at end of file diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 60e0c7b62..925283b9c 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -76,6 +76,7 @@ | leakix | scan | No | Query leakix.net for subdomains | 
passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | | myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | passive, safe | HTTP_RESPONSE | NEWSLETTER | | nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 3b134df5d..67528bed1 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -33,16 +33,16 @@ asyncio.run(main()) ```text -usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] - [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] - [--strict-scope] [-m MODULE [MODULE ...]] [-l] - [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] - [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] - [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] - [-o DIR] [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] - [--dry-run] [--current-config] - [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] - [-a] [--version] +usage: pytest [-h] [--help-all] [-t TARGET [TARGET ...]] + [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] + [--strict-scope] [-m MODULE [MODULE ...]] [-l] + [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] + [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] + [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] + [-o DIR] [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] + [--dry-run] [--current-config] + [--no-deps | --force-deps | 
--retry-deps | --ignore-failed-deps | --install-all-deps] + [-a] [--version] Bighuge BLS OSINT Tool @@ -61,7 +61,7 @@ Target: Modules: -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. Choices: affiliates,ajaxpro,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,credshed,crobat,crt,dastardly,dehashed,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,github_codesearch,github_org,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,leakix,masscan,massdns,myssl,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,postman,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye + Modules to enable. 
Choices: affiliates,ajaxpro,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,credshed,crobat,crt,dastardly,dehashed,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,github_codesearch,github_org,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,leakix,masscan,massdns,myssl,newsletters,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,postman,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye -l, --list-modules List available modules. -em MODULE [MODULE ...], --exclude-modules MODULE [MODULE ...] Exclude these modules. 
diff --git a/docs/scanning/events.md b/docs/scanning/events.md index d5ce37b64..44637587a 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -61,9 +61,10 @@ Below is a full list of event types along with which modules produce/consume the | FINDING | 2 | 24 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | | GEOLOCATION | 0 | 2 | | ip2location, ipstack | | HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 15 | 1 | ajaxpro, badsecrets, dastardly, excavate, filedownload, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | +| HTTP_RESPONSE | 16 | 1 | ajaxpro, badsecrets, dastardly, excavate, filedownload, host_header, hunt, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | | IP_ADDRESS | 9 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, masscan, nmap, speculate | asset_inventory, ipneighbor, speculate | | IP_RANGE | 3 | 0 | masscan, nmap, speculate | | +| NEWSLETTER | 0 | 1 | | newsletters | | OPEN_TCP_PORT | 4 | 5 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, masscan, nmap, speculate | | ORG_STUB | 1 | 1 | github_org | speculate | | PASSWORD | 0 | 2 | | credshed, dehashed | diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 40e90038a..ce2575a2b 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -107,28 +107,28 @@ A single module can have multiple flags. 
For example, the `securitytrails` modul ### List of Flags -| Flag | # Modules | Description | Modules | -|------------------|-------------|-----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 75 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, 
bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, 
wappalyzer | -| aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | -| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | -| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | -| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | -| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | 
Modules | +|------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 76 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 58 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, 
certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, newsletters, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| aggressive | 19 | Generates a large 
amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | +| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | ## Dependencies From 923c3cb4a49e1f77f6d761e2958df493742db441 Mon Sep 17 00:00:00 2001 
From: Jack Ward Date: Tue, 6 Feb 2024 13:12:19 -0600 Subject: [PATCH 095/159] Undo 'usage: pytest' and fix endline in Newsletter --- bbot/test/test_step_2/module_tests/test_module_newsletters.py | 3 ++- docs/scanning/advanced.md | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index d070ae052..3fbf22dd9 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -48,4 +48,5 @@ def check(self, module_test, events): # Verify Negative Result (should skip this statement if correct) elif event.data == self.missing_tgt: count += -1 - assert count==1, f"NEWSLETTER Error - Expect count of 1 but got {count}" \ No newline at end of file + assert count==1, f"NEWSLETTER Error - Expect count of 1 but got {count}" + \ No newline at end of file diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 67528bed1..9a035b806 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -33,7 +33,7 @@ asyncio.run(main()) ```text -usage: pytest [-h] [--help-all] [-t TARGET [TARGET ...]] +usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] [--strict-scope] [-m MODULE [MODULE ...]] [-l] [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] From 3ff35723167d6b508c980d1a0a78fb21da3fcbf0 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Tue, 6 Feb 2024 13:15:42 -0600 Subject: [PATCH 096/159] Fixed endline --- bbot/test/test_step_2/module_tests/test_module_newsletters.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index 3fbf22dd9..deb280c89 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ 
b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -49,4 +49,3 @@ def check(self, module_test, events): elif event.data == self.missing_tgt: count += -1 assert count==1, f"NEWSLETTER Error - Expect count of 1 but got {count}" - \ No newline at end of file From e73196c5c6f3c919a05a7274f303dff13fe1d7d9 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Tue, 6 Feb 2024 14:19:56 -0600 Subject: [PATCH 097/159] run_tests, black, lint, etc --- bbot/modules/newsletters.py | 2 +- bbot/test/test_step_2/module_tests/test_module_newsletters.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 5aec4457e..4494bc4ce 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -48,7 +48,7 @@ async def handle_event(self, event): result = self.find_type(soup) if result: newsletter_result = self.make_event( - data=event.data['url'], event_type="NEWSLETTER", source=event, tags=event.tags + data=event.data["url"], event_type="NEWSLETTER", source=event, tags=event.tags ) # self.hugesuccess(f"Yippie! 
There is a Newsletter at {event.data}") self.emit_event(newsletter_result) diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index deb280c89..4bd552b12 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -40,7 +40,7 @@ async def setup_after_prep(self, module_test): def check(self, module_test, events): count = 0 - for event in events: + for event in events: if event.type == "NEWSLETTER": # Verify Positive Result if event.data == self.found_tgt: @@ -48,4 +48,4 @@ def check(self, module_test, events): # Verify Negative Result (should skip this statement if correct) elif event.data == self.missing_tgt: count += -1 - assert count==1, f"NEWSLETTER Error - Expect count of 1 but got {count}" + assert count == 1, f"NEWSLETTER Error - Expect count of 1 but got {count}" From 12451d0915b89a85c4c96ac597745afcd284c832 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Tue, 6 Feb 2024 15:53:13 -0500 Subject: [PATCH 098/159] better bypass403 error handling --- bbot/modules/bypass403.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 6799a5bb1..3fd40a1cf 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -63,6 +63,7 @@ "X-Host": "127.0.0.1", } +# This is planned to be replaced in the future: https://github.com/blacklanternsecurity/bbot/issues/1068 waf_strings = ["The requested URL was rejected"] for qp in query_payloads: @@ -83,8 +84,13 @@ class bypass403(BaseModule): async def do_checks(self, compare_helper, event, collapse_threshold): results = set() + error_count = 0 for sig in signatures: + if error_count > 3: + self.warning(f"Received too many errors for URL {event.data} aborting bypass403") + return None + sig = self.format_signature(sig, event) if sig[2] != None: headers = dict(sig[2]) 
@@ -95,6 +101,7 @@ async def do_checks(self, compare_helper, event, collapse_threshold): sig[1], headers=headers, method=sig[0], allow_redirects=True ) except HttpCompareError as e: + error_count += 1 self.debug(e) continue @@ -106,7 +113,8 @@ async def do_checks(self, compare_helper, event, collapse_threshold): return if match == False: - if str(subject_response.status_code)[0] != "4": + self.critical(subject_response.status_code) + if str(subject_response.status_code)[0] != "4" and subject_response.status_code != 503: if sig[2]: added_header_tuple = next(iter(sig[2].items())) reported_signature = f"Added Header: {added_header_tuple[0]}: {added_header_tuple[1]}" @@ -149,6 +157,7 @@ async def handle_event(self, event): source=event, ) + # When a WAF-check helper is available in the future, we will convert to HTTP_RESPONSE and check for the WAF string here. async def filter_event(self, event): if ("status-403" in event.tags) or ("status-401" in event.tags): return True From 1da63d2bfb68a6ef0c48eb3bf13c615bbe783d48 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Tue, 6 Feb 2024 15:55:29 -0500 Subject: [PATCH 099/159] remove testing code --- bbot/modules/bypass403.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 3fd40a1cf..c58463401 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -113,8 +113,7 @@ async def do_checks(self, compare_helper, event, collapse_threshold): return if match == False: - self.critical(subject_response.status_code) - if str(subject_response.status_code)[0] != "4" and subject_response.status_code != 503: + if str(subject_response.status_code)[0] != "4": if sig[2]: added_header_tuple = next(iter(sig[2].items())) reported_signature = f"Added Header: {added_header_tuple[0]}: {added_header_tuple[1]}" From 3ab53d058d0055f0c59b1fe31c06189bd636d0ae Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 16:15:14 -0500 Subject: [PATCH 
100/159] use better sqlite text factory --- bbot/modules/gowitness.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index a335bf021..c49b4d3ae 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -200,6 +200,7 @@ def new_screenshots(self): if self.db_path.is_file(): with sqlite3.connect(str(self.db_path)) as con: con.row_factory = sqlite3.Row + con.text_factory = self.helpers.smart_decode cur = con.cursor() res = self.cur_execute(cur, "SELECT * FROM urls") for row in res: From bfef4731f5b37220e4708e623e834cd5ed5f0b33 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 6 Feb 2024 18:01:56 -0500 Subject: [PATCH 101/159] fix trace --- bbot/scanner/scanner.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 311c2048c..1a74d41a8 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -934,10 +934,13 @@ def error(self, *args, trace=True, **kwargs): if trace: self.trace() - def trace(self): - e_type, e_val, e_traceback = exc_info() - if e_type is not None: - log.trace(traceback.format_exc()) + def trace(self, msg=None): + if msg is None: + e_type, e_val, e_traceback = exc_info() + if e_type is not None: + log.trace(traceback.format_exc()) + else: + log.trace(msg) def critical(self, *args, trace=True, **kwargs): log.critical(*args, extra={"scan_id": self.id}, **kwargs) From 003803046c8c5a4c1e61b71c904436b0db222131 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Wed, 7 Feb 2024 10:16:16 -0600 Subject: [PATCH 102/159] Slimmed down module and using [FINDING] now --- bbot/modules/newsletters.py | 32 +++++++++++++------------------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 4494bc4ce..94ff91a7e 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -1,4 +1,4 @@ -# Created a new module called 
'newsletters' that will scrap the websites (or recursive websites, +# Created a new module called 'newsletters' that will scrape the websites (or recursive websites, # thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also # contains a 'placeholder'. The combination of these two HTML items usually signify the presence # of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used @@ -16,16 +16,12 @@ # https://www.milkkarten.net/ # https://geekout.mattnavarra.com/ -deps_pip = ["requests", "beautifulsoup4"] - -headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36" -} +deps_pip = ["beautifulsoup4"] class newsletters(BaseModule): watched_events = ["HTTP_RESPONSE"] - produced_events = ["NEWSLETTER"] + produced_events = ["FINDING"] flags = ["passive", "safe"] meta = {"description": "Searches for Newsletter Submission Entry Fields on Websites"} @@ -37,10 +33,7 @@ def find_type(self, soup): regex = re.compile(r"placeholder") if regex.search(str(email_type)): return True - else: - return False - else: - return False + return False async def handle_event(self, event): if event.data["status_code"] == 200: @@ -48,12 +41,13 @@ async def handle_event(self, event): result = self.find_type(soup) if result: newsletter_result = self.make_event( - data=event.data["url"], event_type="NEWSLETTER", source=event, tags=event.tags + data=event.data["url"], + event_type="NEWSLETTER", + source=event, + tags=event.tags ) - # self.hugesuccess(f"Yippie! 
There is a Newsletter at {event.data}") - self.emit_event(newsletter_result) - return - else: - return - else: - return + # self.emit_event(newsletter_result) + description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" + data = {"host": str(event.host), "description": description, "url":event.data["url"]} + + self.emit_event(data, "FINDING", event) From 161ec0a77b80a8e1e0f2f97abf4e3e71d4f730e1 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Wed, 7 Feb 2024 10:23:54 -0600 Subject: [PATCH 103/159] black & remove unused variable --- bbot/modules/newsletters.py | 9 +-------- bbot/test/test_step_1/test_events.py | 8 +++----- .../test_step_2/module_tests/test_module_excavate.py | 1 - 3 files changed, 4 insertions(+), 14 deletions(-) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 94ff91a7e..edad58d35 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -40,14 +40,7 @@ async def handle_event(self, event): soup = BeautifulSoup(event.data["body"], "html.parser") result = self.find_type(soup) if result: - newsletter_result = self.make_event( - data=event.data["url"], - event_type="NEWSLETTER", - source=event, - tags=event.tags - ) - # self.emit_event(newsletter_result) description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" - data = {"host": str(event.host), "description": description, "url":event.data["url"]} + data = {"host": str(event.host), "description": description, "url": event.data["url"]} self.emit_event(data, "FINDING", event) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index ccbf17c1f..fc049a49b 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -325,10 +325,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert 
scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" - assert ( - scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data - == "http://xn--eckwd4c7c.xn--zckzah/テスト" - ) + assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/テスト" # thai assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" @@ -359,7 +356,8 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" assert ( - scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) assert ( scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 2be83574b..52e447a21 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -313,7 +313,6 @@ async def setup_before_prep(self, module_test): ) def check(self, module_test, events): - for serialize_type in ["Java", ".NET", "PHP (Array)", "PHP (String)", "PHP (Object)", "Possible Compressed"]: assert any( e.type == "FINDING" and serialize_type in e.data["description"] for e in events From 2f788d70fbf0cbdce65ebe66b92598490d8073b1 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 7 Feb 2024 11:30:35 -0500 Subject: [PATCH 104/159] default qsize --> 1000, unlimited qsize for speculate & excavate 
--- bbot/modules/base.py | 2 +- bbot/modules/internal/excavate.py | 1 - bbot/modules/internal/speculate.py | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index e5fb35500..3c94ebc57 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -104,7 +104,7 @@ class BaseModule: _preserve_graph = False _stats_exclude = False - _qsize = 100 + _qsize = 1000 _priority = 3 _name = "base" _type = "scan" diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index e4fe1c476..5ccc8d36a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -340,7 +340,6 @@ class excavate(BaseInternalModule): meta = {"description": "Passively extract juicy tidbits from scan data"} scope_distance_modifier = None - _qsize = 10000 async def setup(self): self.csp = CSPExtractor(self) diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index c039b3b1b..7aaf12d30 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -35,7 +35,6 @@ class speculate(BaseInternalModule): } scope_distance_modifier = 1 _priority = 4 - _qsize = 10000 async def setup(self): scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"] From b57e914a30041e4056abba5dbfcde86002b57b5d Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Wed, 7 Feb 2024 11:52:30 -0600 Subject: [PATCH 105/159] Added log to base.py, added await self.emit_event, fixed test to look for event.data['host'] --- bbot/core/event/base.py | 1 + bbot/modules/newsletters.py | 2 +- bbot/test/test_step_2/module_tests/base.py | 6 ++++++ .../module_tests/test_module_newsletters.py | 21 +++++++++++-------- 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 2c50b0b60..24606f890 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -28,6 +28,7 @@ smart_decode, split_host_port, 
tagify, + validators, truncate_string, ) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index edad58d35..9a64a7873 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -43,4 +43,4 @@ async def handle_event(self, event): description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" data = {"host": str(event.host), "description": description, "url": event.data["url"]} - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index a4562cfc7..aa8b2968f 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -47,6 +47,7 @@ class ModuleTestBase: module_name = None config_overrides = {} modules_overrides = [] + log = logging.getLogger("bbot") class ModuleTest: def __init__(self, module_test_base, httpx_mock, httpserver, httpserver_ssl, monkeypatch, request): @@ -127,6 +128,11 @@ def name(self): if self.module_name is not None: return self.module_name return self.__class__.__name__.split("Test")[-1].lower() + + # # YOU ARE HERE + # @property + # def log(self): + # return self.module_test.log @property def _scan_name(self): diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index 4bd552b12..324539c05 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -1,8 +1,9 @@ from .base import ModuleTestBase - +import logging class TestNewsletters(ModuleTestBase): - found_tgt = "http://127.0.0.1:8888/found" + # found_tgt = "http://127.0.0.1:8888/found" + found_tgt = "futureparty.com" missing_tgt = "http://127.0.0.1:8888/missing" targets = [found_tgt, missing_tgt] modules_overrides = ["speculate", "httpx", "newsletters"] @@ -39,13 +40,15 @@ 
async def setup_after_prep(self, module_test): module_test.set_expect_requests(request_args, respond_args) def check(self, module_test, events): - count = 0 + status = 0 for event in events: - if event.type == "NEWSLETTER": + self.log.info(f"event type: {event.type}") + if event.type == "FINDING": + self.log.info(f"event data: {event.data['host']}") # Verify Positive Result - if event.data == self.found_tgt: - count += 1 + if event.data['host'] == self.found_tgt: + status = 1 # Verify Negative Result (should skip this statement if correct) - elif event.data == self.missing_tgt: - count += -1 - assert count == 1, f"NEWSLETTER Error - Expect count of 1 but got {count}" + elif event.data['host'] == self.missing_tgt: + status = -2 + assert status == 1, f"NEWSLETTER Error - Expect status of 1 but got {status}" From a8f53a08ae0c1776b4e25a1fdd48eb7f55d1cc1b Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Wed, 7 Feb 2024 11:55:13 -0600 Subject: [PATCH 106/159] black and code cleanup --- bbot/test/test_step_2/module_tests/base.py | 5 ----- .../test_step_2/module_tests/test_module_newsletters.py | 8 ++++---- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index aa8b2968f..fa8d0a2d3 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -128,11 +128,6 @@ def name(self): if self.module_name is not None: return self.module_name return self.__class__.__name__.split("Test")[-1].lower() - - # # YOU ARE HERE - # @property - # def log(self): - # return self.module_test.log @property def _scan_name(self): diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index 324539c05..1dbe5026c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -1,9 +1,9 @@ from 
.base import ModuleTestBase import logging + class TestNewsletters(ModuleTestBase): - # found_tgt = "http://127.0.0.1:8888/found" - found_tgt = "futureparty.com" + found_tgt = "http://127.0.0.1:8888/found" missing_tgt = "http://127.0.0.1:8888/missing" targets = [found_tgt, missing_tgt] modules_overrides = ["speculate", "httpx", "newsletters"] @@ -46,9 +46,9 @@ def check(self, module_test, events): if event.type == "FINDING": self.log.info(f"event data: {event.data['host']}") # Verify Positive Result - if event.data['host'] == self.found_tgt: + if event.data["host"] == self.found_tgt: status = 1 # Verify Negative Result (should skip this statement if correct) - elif event.data['host'] == self.missing_tgt: + elif event.data["host"] == self.missing_tgt: status = -2 assert status == 1, f"NEWSLETTER Error - Expect status of 1 but got {status}" From eccb7a9737bbc7d4f16dbc59bc1e3488243f3e4a Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Wed, 7 Feb 2024 11:58:27 -0600 Subject: [PATCH 107/159] Test now compares to 'host' or 'url' --- .../test_step_2/module_tests/test_module_newsletters.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index 1dbe5026c..def440f54 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -44,11 +44,15 @@ def check(self, module_test, events): for event in events: self.log.info(f"event type: {event.type}") if event.type == "FINDING": - self.log.info(f"event data: {event.data['host']}") + self.log.info(f"event data: {event.data}") # Verify Positive Result if event.data["host"] == self.found_tgt: status = 1 + elif event.data["url"] == self.found_tgt: + status = 1 # Verify Negative Result (should skip this statement if correct) elif event.data["host"] == self.missing_tgt: status = -2 + elif event.data["url"] == 
self.missing_tgt: + status = -2 assert status == 1, f"NEWSLETTER Error - Expect status of 1 but got {status}" From 0ec10c74c0f4272469c8b2239d5d4b049ff78bb6 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 7 Feb 2024 17:11:41 -0500 Subject: [PATCH 108/159] make sure gowitness has a working chrome install --- bbot/modules/gowitness.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index a335bf021..614a9051d 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -94,6 +94,20 @@ async def setup(self): custom_chrome_path = self.helpers.tools_dir / "chrome-linux" / "chrome" if custom_chrome_path.is_file(): self.chrome_path = custom_chrome_path + + # make sure we have a working chrome install + chrome_test_pass = False + for binary in ("chrome", "chromium", custom_chrome_path): + binary_path = self.helpers.which(binary) + if binary_path and Path(binary_path).is_file(): + chrome_test_proc = await self.helpers.run([binary, "--version"]) + if getattr(chrome_test_proc, "returncode", 1) == 0: + self.verbose(f"Found chrome executable at {binary_path}") + chrome_test_pass = True + break + if not chrome_test_pass: + return False, "Failed to set up Google chrome. Please install manually use try again with --force-deps." 
+ self.db_path = self.base_path / "gowitness.sqlite3" self.screenshot_path = self.base_path / "screenshots" self.command = self.construct_command() From d9efdc60370a581895a1beefc4de55a4a4e02692 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 7 Feb 2024 17:16:26 -0500 Subject: [PATCH 109/159] fix typo --- bbot/modules/gowitness.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 614a9051d..4acaf30f4 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -106,7 +106,7 @@ async def setup(self): chrome_test_pass = True break if not chrome_test_pass: - return False, "Failed to set up Google chrome. Please install manually use try again with --force-deps." + return False, "Failed to set up Google chrome. Please install manually or try again with --force-deps." self.db_path = self.base_path / "gowitness.sqlite3" self.screenshot_path = self.base_path / "screenshots" From d37c0a3e2b8fc364b265d92d17ca7b90169527f8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 7 Feb 2024 17:19:35 -0500 Subject: [PATCH 110/159] use full binary path --- bbot/modules/gowitness.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 4acaf30f4..0734858f1 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -100,7 +100,7 @@ async def setup(self): for binary in ("chrome", "chromium", custom_chrome_path): binary_path = self.helpers.which(binary) if binary_path and Path(binary_path).is_file(): - chrome_test_proc = await self.helpers.run([binary, "--version"]) + chrome_test_proc = await self.helpers.run([binary_path, "--version"]) if getattr(chrome_test_proc, "returncode", 1) == 0: self.verbose(f"Found chrome executable at {binary_path}") chrome_test_pass = True From f498a97484c54a7e87d28ea85fdd6675fe5cbec5 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 8 Feb 2024 09:57:58 -0500 Subject: 
[PATCH 111/159] removing weird characters from docs --- docs/modules/nuclei.md | 22 +++++++++++----------- docs/scanning/events.md | 8 ++++---- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/modules/nuclei.md b/docs/modules/nuclei.md index 3f492b8ba..d94e6f7f0 100644 --- a/docs/modules/nuclei.md +++ b/docs/modules/nuclei.md @@ -8,7 +8,7 @@ BBOT integrates with [Nuclei](https://github.com/projectdiscovery/nuclei), an op * The BBOT Nuclei module ingests **[URL]** events and emits events of type **[VULNERABILITY]** or **[FINDING]** -* Vulnerabilities will inherit their severity from the Nuclei templates​ +* Vulnerabilities will inherit their severity from the Nuclei templates * Nuclei templates of severity INFO will be emitted as **[FINDINGS]** ## Default Behavior @@ -59,15 +59,15 @@ This is equivalent to the Nuclei '-as' scan option. It only use templates that m #### Budget -Budget mode is unique to BBOT. ​ +Budget mode is unique to BBOT. -For larger scans with thousands of targets, doing a FULL Nuclei scan (1000s of Requests) for each is not realistic. ​ -As an alternative to the other modes, you can take advantage of Nuclei's "collapsible" template feature. ​ +For larger scans with thousands of targets, doing a FULL Nuclei scan (1000s of Requests) for each is not realistic. +As an alternative to the other modes, you can take advantage of Nuclei's "collapsible" template feature. For only the cost of one (or more) "extra" request(s) per host, it can activate several hundred modules. These are modules which happen to look at a BaseUrl, and typically look for a specific string or other attribute. Nuclei is smart about reusing the request data when it can, and we can use this to our advantage. -The budget parameter is the # of extra requests per host you are willing to send to "feed" Nuclei templates​ (defaults to 1). 
-For those times when vulnerability scanning isn't the main focus, but you want to look for easy wins.​ +The budget parameter is the # of extra requests per host you are willing to send to "feed" Nuclei templates (defaults to 1). +For those times when vulnerability scanning isn't the main focus, but you want to look for easy wins. Of course, there is a rapidly diminishing return when you set he value to more than a handful. Eventually, this becomes 1 template per 1 budget value increase. However, in the 1-10 range there is a lot of value. This graphic should give you a rough visual idea of this concept. @@ -86,20 +86,20 @@ The **ratelimit** and **concurrency** settings default to the same defaults that ```bash # Scan a SINGLE target with a basic port scan and web modules -bbot -f web-basic -m nmap nuclei --allow-deadly -t app.evilcorp.com​ +bbot -f web-basic -m nmap nuclei --allow-deadly -t app.evilcorp.com ``` ```bash # Scanning MULTIPLE targets -bbot -f web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com​ +bbot -f web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` ```bash # Scanning MULTIPLE targets while performing subdomain enumeration -bbot -f subdomain-enum web-basic -m nmap nuclei –allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com​ +bbot -f subdomain-enum web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` ```bash -# Scanning MULTIPLE targets on a BUDGET​ -bbot -f subdomain-enum web-basic -m nmap nuclei –allow-deadly –c modules.nuclei.mode=Budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com​ +# Scanning MULTIPLE targets on a BUDGET +bbot -f subdomain-enum web-basic -m nmap nuclei --allow-deadly -c modules.nuclei.mode=budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 362a0b958..8f02125e7 100644 --- 
a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -87,12 +87,12 @@ BBOT has a sharp distinction between Findings and Vulnerabilities: **VULNERABILITY** -* There's a higher standard for what is allowed to be a vulnerability. They should be considered **confirmed** and **actionable​** - no additional confirmation required -* They are always assigned a severity. The possible severities are: LOW, MEDIUM, HIGH, or CRITICAL​ +* There's a higher standard for what is allowed to be a vulnerability. They should be considered **confirmed** and **actionable** - no additional confirmation required +* They are always assigned a severity. The possible severities are: LOW, MEDIUM, HIGH, or CRITICAL -**FINDING​** +**FINDING** -* Findings can range anywhere from "slightly interesting behavior" to "likely, but unconfirmed vulnerability"​ +* Findings can range anywhere from "slightly interesting behavior" to "likely, but unconfirmed vulnerability" * Are often false positives By making this separation, actionable vulnerabilities can be identified quickly in the midst of a large scan From 1a0ebf29da4eae88cbf44d8dc1201de37f3da677 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 8 Feb 2024 09:58:15 -0500 Subject: [PATCH 112/159] making nuclei mode case insensitive --- bbot/modules/deadly/nuclei.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index f3ef4ad67..be24599ac 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -70,7 +70,7 @@ async def setup(self): else: self.warning("Error running nuclei template update command") self.proxy = self.scan.config.get("http_proxy", "") - self.mode = self.config.get("mode", "severe") + self.mode = self.config.get("mode", "severe").lower() self.ratelimit = int(self.config.get("ratelimit", 150)) self.concurrency = int(self.config.get("concurrency", 25)) self.budget = int(self.config.get("budget", 1)) From 
bb7a55c144eba056f45ce105c50e444d675bd48e Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 8 Feb 2024 09:50:26 -0600 Subject: [PATCH 113/159] Refined Newsletter Test, Updated Poetry, Ran Black --- bbot/test/test_step_1/test_events.py | 8 +- .../module_tests/test_module_newsletters.py | 20 +- poetry.lock | 462 +++++++++--------- 3 files changed, 247 insertions(+), 243 deletions(-) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index fc049a49b..ccbf17c1f 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -325,7 +325,10 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" - assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/テスト" + assert ( + scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) # thai assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" @@ -356,8 +359,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" assert ( - scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data - == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) assert ( scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data diff --git 
a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index def440f54..8e499cfa8 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -40,19 +40,17 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(request_args, respond_args) def check(self, module_test, events): - status = 0 + found = False + missing = True for event in events: - self.log.info(f"event type: {event.type}") + # self.log.info(f"event type: {event.type}") if event.type == "FINDING": - self.log.info(f"event data: {event.data}") + # self.log.info(f"event data: {event.data}") # Verify Positive Result - if event.data["host"] == self.found_tgt: - status = 1 - elif event.data["url"] == self.found_tgt: - status = 1 + if event.data["url"] == self.found_tgt: + found = True # Verify Negative Result (should skip this statement if correct) - elif event.data["host"] == self.missing_tgt: - status = -2 elif event.data["url"] == self.missing_tgt: - status = -2 - assert status == 1, f"NEWSLETTER Error - Expect status of 1 but got {status}" + missing = False + assert found, f"NEWSLETTER 'Found' Error - Expect status of True but got False" + assert missing, f"NEWSLETTER 'Missing' Error - Expect status of True but got False" diff --git a/poetry.lock b/poetry.lock index d1204c670..676f41389 100644 --- a/poetry.lock +++ b/poetry.lock @@ -188,13 +188,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -480,43 +480,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.1" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, - {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, - {file = 
"cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, - {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, - {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, - {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, - {file = 
"cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, - {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, - {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, - {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = 
"cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, ] [package.dependencies] @@ -594,13 +594,13 @@ files = [ [[package]] name = "dunamai" -version = "1.19.0" +version = "1.19.1" description = "Dynamic version generation" optional = false -python-versions = ">=3.5,<4.0" +python-versions = ">=3.5" files = [ - {file = "dunamai-1.19.0-py3-none-any.whl", hash = "sha256:1ed948676bbf0812bfaafe315a134634f8d6eb67138513c75aa66e747404b9c6"}, - {file = "dunamai-1.19.0.tar.gz", hash = "sha256:6ad99ae34f7cd290550a2ef1305d2e0292e6e6b5b1b830dfc07ceb7fd35fec09"}, + {file = "dunamai-1.19.1-py3-none-any.whl", hash = "sha256:a6aa0ae3bdb01a12d6f219555d6e230ec02663afb43a7bd37933e6c4fecefc9b"}, + {file = "dunamai-1.19.1.tar.gz", hash = "sha256:c8112efec15cd1a8e0384d5d6f3be97a01bf7bc45b54069533856aaaa33a9b9e"}, ] [package.dependencies] @@ -671,13 +671,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.39.1" +version = "0.40.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, - {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, + {file = "griffe-0.40.1-py3-none-any.whl", hash = "sha256:5b8c023f366fe273e762131fe4bfd141ea56c09b3cb825aa92d06a82681cfd93"}, + {file = "griffe-0.40.1.tar.gz", hash = "sha256:66c48a62e2ce5784b6940e603300fcfb807b6f099b94e7f753f1841661fd5c7c"}, ] [package.dependencies] @@ -980,71 +980,71 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - {file = 
"MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - {file = 
"MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = 
"MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = 
"MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file 
= "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1133,13 +1133,13 @@ mkdocs = ">=1.1" [[package]] name = "mkdocs-material" -version = "9.5.6" +version = "9.5.8" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.6-py3-none-any.whl", hash = 
"sha256:e115b90fccf5cd7f5d15b0c2f8e6246b21041628b8f590630e7fca66ed7fcf6c"}, - {file = "mkdocs_material-9.5.6.tar.gz", hash = "sha256:5b24df36d8ac6cecd611241ce6f6423ccde3e1ad89f8360c3f76d5565fc2d82a"}, + {file = "mkdocs_material-9.5.8-py3-none-any.whl", hash = "sha256:14563314bbf97da4bfafc69053772341babfaeb3329cde01d3e63cec03997af8"}, + {file = "mkdocs_material-9.5.8.tar.gz", hash = "sha256:2a429213e83f84eda7a588e2b186316d806aac602b7f93990042f7a1f3d3cf65"}, ] [package.dependencies] @@ -1156,7 +1156,7 @@ regex = ">=2022.4" requests = ">=2.26,<3.0" [package.extras] -git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] @@ -1314,18 +1314,18 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" @@ -1483,18 +1483,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.0" +version = "2.6.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, - {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.1" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -1502,90 +1502,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] 
name = "pydantic-core" -version = "2.16.1" +version = "2.16.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, - {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, - {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = 
"sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, - {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, - {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = 
"sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, - {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, - {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = 
"sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, - {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, - {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, - {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, - {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, - 
{file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, - {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, - {file = 
"pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, - {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = 
"pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = 
"pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = 
"pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = 
"pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = 
"pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", 
hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -2027,6 +2027,7 @@ optional = false python-versions = "*" files = [ {file = "requests-file-2.0.0.tar.gz", hash = "sha256:20c5931629c558fda566cacc10cfe2cd502433e628f568c34c80d96a0cc95972"}, + {file = "requests_file-2.0.0-py2.py3-none-any.whl", hash = "sha256:3e493d390adb44aa102ebea827a48717336d5268968c370eaf19abaf5cae13bf"}, ] [package.dependencies] @@ -2187,17 +2188,18 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2223,38 +2225,40 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = 
"watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = 
"watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] From 0c134222970a1a0f96a8e1b5c87f01cf893f4371 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 8 Feb 2024 10:21:19 -0600 Subject: [PATCH 114/159] Ran Flake8 
& Created lint-tests.sh --- lint-tests.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100755 lint-tests.sh diff --git a/lint-tests.sh b/lint-tests.sh new file mode 100755 index 000000000..ee02b6290 --- /dev/null +++ b/lint-tests.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +# Locally runs the same commands that the Github Action does. +# Run this script before pushing to avoid Github Push issues. + +RED="\e[31m" +GREEN="\e[32m" +BLUE="\e[34m" +ENDCOLOR="\e[0m" + +# Run Black +echo -e "${GREEN}Running Black${ENDCOLOR}" +black . +echo \ + +# Run Flake8 +echo -e "${GREEN}Running Flake8${ENDCOLOR}" +flake8 --select F,E722 --ignore F403,F405,F541 --per-file-ignores="*/__init__.py:F401,F403" +echo -e "${BLUE}All Done!${ENDCOLOR}" + +echo \ + +# Show Changes +echo -e "${GREEN}Showing Changes Made from Black & Flake8 (using git status)${ENDCOLOR}" +git status +echo \ \ No newline at end of file From 040f72e157fa7e1c206f37da6949beec8a0b32ef Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 8 Feb 2024 10:23:28 -0600 Subject: [PATCH 115/159] Newline & Remove 'logging' --- bbot/test/test_step_2/module_tests/test_module_newsletters.py | 3 ++- lint-tests.sh | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index 8e499cfa8..d3712be5c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -1,5 +1,6 @@ from .base import ModuleTestBase -import logging + +# import logging class TestNewsletters(ModuleTestBase): diff --git a/lint-tests.sh b/lint-tests.sh index ee02b6290..cd910192f 100755 --- a/lint-tests.sh +++ b/lint-tests.sh @@ -23,4 +23,4 @@ echo \ # Show Changes echo -e "${GREEN}Showing Changes Made from Black & Flake8 (using git status)${ENDCOLOR}" git status -echo \ \ No newline at end of file +echo \ From 
1466be3b7b867dd039220df2325ede68c2bf419e Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Fri, 9 Feb 2024 10:13:52 -0600 Subject: [PATCH 116/159] Set Newsletter to 'active' and removed lint-tests --- bbot/modules/newsletters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 9a64a7873..62ef98463 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -22,7 +22,7 @@ class newsletters(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] - flags = ["passive", "safe"] + flags = ["active", "safe"] meta = {"description": "Searches for Newsletter Submission Entry Fields on Websites"} # Parse through Website to find a Text Entry Box of 'type = email' From 8411acbe236ff029f9d2e4433f51b47399cd9116 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Fri, 9 Feb 2024 10:14:26 -0600 Subject: [PATCH 117/159] Removed lint-tests --- lint-tests.sh | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100755 lint-tests.sh diff --git a/lint-tests.sh b/lint-tests.sh deleted file mode 100755 index cd910192f..000000000 --- a/lint-tests.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# Locally runs the same commands that the Github Action does. -# Run this script before pushing to avoid Github Push issues. - -RED="\e[31m" -GREEN="\e[32m" -BLUE="\e[34m" -ENDCOLOR="\e[0m" - -# Run Black -echo -e "${GREEN}Running Black${ENDCOLOR}" -black . 
-echo \ - -# Run Flake8 -echo -e "${GREEN}Running Flake8${ENDCOLOR}" -flake8 --select F,E722 --ignore F403,F405,F541 --per-file-ignores="*/__init__.py:F401,F403" -echo -e "${BLUE}All Done!${ENDCOLOR}" - -echo \ - -# Show Changes -echo -e "${GREEN}Showing Changes Made from Black & Flake8 (using git status)${ENDCOLOR}" -git status -echo \ From ae25d8a0d532fe31258005b77aca6a36bb289029 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 9 Feb 2024 11:23:17 -0500 Subject: [PATCH 118/159] fix aioconsole bug --- bbot/cli.py | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index 9a8ab51dd..c64d242ff 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -6,9 +6,9 @@ import asyncio import logging import traceback -from aioconsole import ainput from omegaconf import OmegaConf from contextlib import suppress +from aioconsole.stream import NonFileStreamReader # fix tee buffering sys.stdout.reconfigure(line_buffering=True) @@ -20,6 +20,7 @@ from bbot import __version__ from bbot.modules import module_loader from bbot.core.configurator.args import parser +from bbot.core.helpers.misc import smart_decode from bbot.core.helpers.logger import log_to_stderr from bbot.core.configurator import ensure_config_files, check_cli_args, environ @@ -321,21 +322,28 @@ def handle_keyboard_input(keyboard_input): toggle_log_level(logger=log) scanner.manager.modules_status(_log=True) + reader = NonFileStreamReader(sys.stdin) + async def akeyboard_listen(): - allowed_errors = 10 - while 1: - keyboard_input = "a" - try: - keyboard_input = await ainput() - except Exception: - allowed_errors -= 1 - handle_keyboard_input(keyboard_input) - if allowed_errors <= 0: - break + try: + allowed_errors = 10 + while 1: + keyboard_input = None + try: + keyboard_input = smart_decode((await reader.readline()).strip()) + allowed_errors = 0 + except Exception: + allowed_errors -= 1 + if keyboard_input is not None: + 
handle_keyboard_input(keyboard_input) + if allowed_errors <= 0: + break + except Exception as e: + log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR") + log_to_stderr(traceback.format_exc(), level="TRACE") try: keyboard_listen_task = asyncio.create_task(akeyboard_listen()) - await scanner.async_start_without_generator() finally: keyboard_listen_task.cancel() From c15141100bf67dda34e99c9772190a836a3f92fc Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 9 Feb 2024 11:52:37 -0500 Subject: [PATCH 119/159] more aioconsole bugfixing --- bbot/cli.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index c64d242ff..bc9e10039 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -8,7 +8,7 @@ import traceback from omegaconf import OmegaConf from contextlib import suppress -from aioconsole.stream import NonFileStreamReader +from aioconsole import stream # fix tee buffering sys.stdout.reconfigure(line_buffering=True) @@ -322,7 +322,13 @@ def handle_keyboard_input(keyboard_input): toggle_log_level(logger=log) scanner.manager.modules_status(_log=True) - reader = NonFileStreamReader(sys.stdin) + log.critical(f"is_pipe_transport_compatible: {stream.is_pipe_transport_compatible(sys.stdin)}") + reader = stream.NonFileStreamReader(sys.stdin) + + # Reader + reader = stream.StandardStreamReader() + protocol = stream.StandardStreamReaderProtocol(reader) + await asyncio.get_event_loop().connect_read_pipe(lambda: protocol, sys.stdin) async def akeyboard_listen(): try: @@ -331,8 +337,10 @@ async def akeyboard_listen(): keyboard_input = None try: keyboard_input = smart_decode((await reader.readline()).strip()) - allowed_errors = 0 - except Exception: + allowed_errors = 10 + except Exception as e: + log_to_stderr(f"Error in keyboard listen loop: {e}", level="TRACE") + log_to_stderr(traceback.format_exc(), level="TRACE") allowed_errors -= 1 if keyboard_input is not None: handle_keyboard_input(keyboard_input) From 
381f12a9d1dae6fab338e5fc91bd02b955f50fc2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 9 Feb 2024 11:56:10 -0500 Subject: [PATCH 120/159] fixed console logic --- bbot/cli.py | 100 ++++++++++++++++++++++++++-------------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index bc9e10039..679ca24fb 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -302,60 +302,60 @@ async def _main(): if not options.dry_run: log.trace(f"Command: {' '.join(sys.argv)}") - if not options.agent_mode and not options.yes and sys.stdin.isatty(): - log.hugesuccess(f"Scan ready. Press enter to execute {scanner.name}") - input() - - def handle_keyboard_input(keyboard_input): - kill_regex = re.compile(r"kill (?P[a-z0-9_]+)") - if keyboard_input: - log.verbose(f'Got keyboard input: "{keyboard_input}"') - kill_match = kill_regex.match(keyboard_input) - if kill_match: - module = kill_match.group("module") - if module in scanner.modules: - log.hugewarning(f'Killing module: "{module}"') - scanner.manager.kill_module(module, message="killed by user") - else: - log.warning(f'Invalid module: "{module}"') - else: - toggle_log_level(logger=log) - scanner.manager.modules_status(_log=True) - - log.critical(f"is_pipe_transport_compatible: {stream.is_pipe_transport_compatible(sys.stdin)}") - reader = stream.NonFileStreamReader(sys.stdin) - - # Reader - reader = stream.StandardStreamReader() - protocol = stream.StandardStreamReaderProtocol(reader) - await asyncio.get_event_loop().connect_read_pipe(lambda: protocol, sys.stdin) - - async def akeyboard_listen(): - try: - allowed_errors = 10 - while 1: - keyboard_input = None - try: - keyboard_input = smart_decode((await reader.readline()).strip()) - allowed_errors = 10 - except Exception as e: - log_to_stderr(f"Error in keyboard listen loop: {e}", level="TRACE") - log_to_stderr(traceback.format_exc(), level="TRACE") - allowed_errors -= 1 - if keyboard_input is not None: - 
handle_keyboard_input(keyboard_input) - if allowed_errors <= 0: - break - except Exception as e: - log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR") - log_to_stderr(traceback.format_exc(), level="TRACE") - try: + if sys.stdin.isatty(): + if not options.agent_mode and not options.yes: + log.hugesuccess(f"Scan ready. Press enter to execute {scanner.name}") + input() + + def handle_keyboard_input(keyboard_input): + kill_regex = re.compile(r"kill (?P[a-z0-9_]+)") + if keyboard_input: + log.verbose(f'Got keyboard input: "{keyboard_input}"') + kill_match = kill_regex.match(keyboard_input) + if kill_match: + module = kill_match.group("module") + if module in scanner.modules: + log.hugewarning(f'Killing module: "{module}"') + scanner.manager.kill_module(module, message="killed by user") + else: + log.warning(f'Invalid module: "{module}"') + else: + toggle_log_level(logger=log) + scanner.manager.modules_status(_log=True) + + # Reader + reader = stream.StandardStreamReader() + protocol = stream.StandardStreamReaderProtocol(reader) + await asyncio.get_event_loop().connect_read_pipe(lambda: protocol, sys.stdin) + + async def akeyboard_listen(): + try: + allowed_errors = 10 + while 1: + keyboard_input = None + try: + keyboard_input = smart_decode((await reader.readline()).strip()) + allowed_errors = 10 + except Exception as e: + log_to_stderr(f"Error in keyboard listen loop: {e}", level="TRACE") + log_to_stderr(traceback.format_exc(), level="TRACE") + allowed_errors -= 1 + if keyboard_input is not None: + handle_keyboard_input(keyboard_input) + if allowed_errors <= 0: + break + except Exception as e: + log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR") + log_to_stderr(traceback.format_exc(), level="TRACE") + keyboard_listen_task = asyncio.create_task(akeyboard_listen()) + + try: await scanner.async_start_without_generator() finally: - keyboard_listen_task.cancel() - with suppress(asyncio.CancelledError): + with suppress(Exception): + 
keyboard_listen_task.cancel() await keyboard_listen_task except bbot.core.errors.ScanError as e: From 94981175703a994bc5443cbe2a0cbdbce2ba1f0d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 9 Feb 2024 12:14:46 -0500 Subject: [PATCH 121/159] remove unneeded cancel logic --- bbot/cli.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index 679ca24fb..e480b3d4e 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -351,12 +351,7 @@ async def akeyboard_listen(): keyboard_listen_task = asyncio.create_task(akeyboard_listen()) - try: - await scanner.async_start_without_generator() - finally: - with suppress(Exception): - keyboard_listen_task.cancel() - await keyboard_listen_task + await scanner.async_start_without_generator() except bbot.core.errors.ScanError as e: log_to_stderr(str(e), level="ERROR") From a1d4f0d445abcdaa1942ecd770cbb42fbb246136 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 9 Feb 2024 13:34:08 -0500 Subject: [PATCH 122/159] flaked --- bbot/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/cli.py b/bbot/cli.py index e480b3d4e..e7e440092 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -349,7 +349,7 @@ async def akeyboard_listen(): log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR") log_to_stderr(traceback.format_exc(), level="TRACE") - keyboard_listen_task = asyncio.create_task(akeyboard_listen()) + asyncio.create_task(akeyboard_listen()) await scanner.async_start_without_generator() From 75d31cd33d8b91efb47cc89b4b3ab1dcc9a2d4ac Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Fri, 9 Feb 2024 19:16:47 +0000 Subject: [PATCH 123/159] Refresh module docs --- README.md | 6 +- docs/modules/list_of_modules.md | 226 ++++++++++++++++---------------- docs/scanning/advanced.md | 18 +-- docs/scanning/events.md | 5 +- docs/scanning/index.md | 6 +- 5 files changed, 130 insertions(+), 131 deletions(-) diff --git a/README.md b/README.md index 
14613258f..6b0cca3be 100644 --- a/README.md +++ b/README.md @@ -260,10 +260,10 @@ For a full list of modules, including the data types consumed and emitted by eac | Flag | # Modules | Description | Modules | |------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | safe | 76 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, 
zoomeye | -| passive | 58 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, newsletters, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | | subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, 
bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| active | 40 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, newsletters, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | | aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, 
paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | | web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | | cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 925283b9c..a3ffc76c6 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -1,119 +1,119 @@ # List of Modules -| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | -|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| -| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | -| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, 
web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | -| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | -| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | -| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic | HTTP_RESPONSE, URL_UNVERIFIED | | -| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. 
| active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | -| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | -| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | -| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | -| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | -| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | -| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | -| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | -| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME, URL_UNVERIFIED | DNS_NAME | -| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | -| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | -| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | -| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | -| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | -| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| 
url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | -| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | -| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | -| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | -| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | -| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | -| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | -| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | -| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | -| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | -| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | -| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | -| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | -| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | -| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | -| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | -| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | -| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | -| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | passive, safe | HTTP_RESPONSE | NEWSLETTER | -| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | 
DNS_NAME | DNS_NAME | -| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | -| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | -| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | -| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | -| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asset_inventory | output | No | Output to an asset inventory style flattened CSV file | | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY | IP_ADDRESS, OPEN_TCP_PORT | -| csv | output | No | Output to CSV | | * | | -| discord | output | No | Message a Discord channel when certain events are encountered | | * | | -| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | -| http | output | No | Send every event to a custom URL via a web request | | * | | -| human | output | No | Output to text | | * | | -| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | -| neo4j | output | No | Output to Neo4j | | * | | -| python | output | No | Output via Python API | | * | | -| slack | output | No | Message a Slack channel when certain events are encountered | | * | | -| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | -| teams | output | No | Message a Teams channel when certain events are encountered | | * | | -| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | -| websocket | output | No | Output to websockets | | * | | -| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | -| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | -| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, 
URL_UNVERIFIED | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | +| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| +| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | +| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bypass403 | scan | No | Check 403 pages for common bypasses | active, 
aggressive, web-thorough | URL | FINDING | +| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | +| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | +| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | +| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | +| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | +| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | +| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | +| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | +| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | +| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. 
| active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | active, safe | HTTP_RESPONSE | FINDING | +| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. | active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | +| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | +| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | +| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | +| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | +| subdomain_hijack | scan | No | Detect hijackable subdomains | 
active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | +| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | +| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | +| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | +| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | +| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | +| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | +| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | +| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | +| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | +| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | +| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | +| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | +| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | +| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | +| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | +| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | +| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | +| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | +| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | +| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | +| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | +| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, 
URL_UNVERIFIED | +| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | +| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | +| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | +| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | 
For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 9a035b806..0baaf35c8 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -34,15 +34,15 @@ asyncio.run(main()) ```text usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] - [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] - [--strict-scope] [-m MODULE [MODULE ...]] [-l] - [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] - [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] - [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] - [-o DIR] [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] - [--dry-run] [--current-config] - [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] - [-a] [--version] + [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] + [--strict-scope] [-m MODULE [MODULE ...]] [-l] + [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] + [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] + [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] + [-o DIR] [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] + [--dry-run] [--current-config] + [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] + [-a] [--version] Bighuge BLS OSINT Tool diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 5f3fbda1d..6628fac46 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -58,13 +58,12 @@ Below is a full list of event types along with which modules produce/consume the | DNS_NAME | 58 | 44 | anubisdb, asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, 
internetdb, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, internetdb, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | | DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | | EMAIL_ADDRESS | 1 | 6 | emails | credshed, emailformat, hunterio, pgp, skymem, sslcert | -| FINDING | 2 | 24 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | +| FINDING | 2 | 25 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | | GEOLOCATION | 0 | 2 | | ip2location, ipstack | | HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 16 | 1 | ajaxpro, badsecrets, dastardly, excavate, filedownload, host_header, hunt, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | +| 
HTTP_RESPONSE | 17 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, excavate, filedownload, host_header, hunt, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | | IP_ADDRESS | 9 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, masscan, nmap, speculate | asset_inventory, ipneighbor, speculate | | IP_RANGE | 3 | 0 | masscan, nmap, speculate | | -| NEWSLETTER | 0 | 1 | | newsletters | | OPEN_TCP_PORT | 4 | 5 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, masscan, nmap, speculate | | ORG_STUB | 1 | 1 | github_org | speculate | | PASSWORD | 0 | 2 | | credshed, dehashed | diff --git a/docs/scanning/index.md b/docs/scanning/index.md index ce2575a2b..1c93bedb3 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -110,10 +110,10 @@ A single module can have multiple flags. For example, the `securitytrails` modul | Flag | # Modules | Description | Modules | |------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | safe | 76 | Non-intrusive, safe to run | affiliates, 
aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 58 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, newsletters, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, 
securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | | subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| active | 40 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, newsletters, nmap, ntlm, nuclei, 
oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | | aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | | web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | | cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | From 7a6a4d51e27562169c9ecbe35ce554a7dd79b4bc Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Fri, 9 Feb 2024 17:30:11 -0600 Subject: [PATCH 124/159] Added BeautifulSoup Helper - Work in Progress --- bbot/core/helpers/web.py | 55 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index 983a88c96..d834f8955 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -566,6 +566,61 @@ def is_spider_danger(self, source_event, url): return True return False + def beautifulsoup( + self, + markup, + features="html.parser", + 
builder=None, + parse_only=None, + from_encoding=None, + exclude_encodings=None, + element_classes=None, + **kwargs, + ): + """ + Navigate, Search, Modify, Parse, or PrettyPrint HTML Content. + More information at https://beautiful-soup-4.readthedocs.io/en/latest/ + + Args: + markup: A string or a file-like object representing markup to be parsed. + features: Desirable features of the parser to be used. + This may be the name of a specific parser ("lxml", + "lxml-xml", "html.parser", or "html5lib") or it may be + the type of markup to be used ("html", "html5", "xml"). + Defaults to 'html.parser'. + builder: A TreeBuilder subclass to instantiate (or instance to use) + instead of looking one up based on `features`. + parse_only: A SoupStrainer. Only parts of the document + matching the SoupStrainer will be considered. + from_encoding: A string indicating the encoding of the + document to be parsed. + exclude_encodings: A list of strings indicating + encodings known to be wrong. + element_classes: A dictionary mapping BeautifulSoup + classes like Tag and NavigableString, to other classes you'd + like to be instantiated instead as the parse tree is + built. + **kwargs: For backwards compatibility purposes. 
+ + Returns: + soup: An instance of the BeautifulSoup class + + Todo: + - Write tests for this function + + Examples: + >>> soup = BeautifulSoup(event.data["body"], "html.parser") + Perform an html parse of the 'markup' argument and return a soup instance + + >>> email_type = soup.find(type="email") + Searches the soup instance for all occurrences of the passed in argument + """ + + soup = BeautifulSoup( + markup, features, builder, parse_only, from_encoding, exclude_encodings, element_classes, **kwargs + ) + return soup + def ssl_context_noverify(self): if self._ssl_context_noverify is None: ssl_context = ssl.create_default_context() From 9d24e30c8002234db46c8fd011dd10dc3276b290 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 12 Feb 2024 13:03:24 -0500 Subject: [PATCH 125/159] adding more possible ajax endpoints --- bbot/modules/ajaxpro.py | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index 3a0061315..afdb19b77 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -17,22 +17,24 @@ async def handle_event(self, event): if event.type == "URL": if "dir" not in event.tags: return False - probe_url = f"{event.data}ajaxpro/whatever.ashx" - probe = await self.helpers.request(probe_url) - if probe: - if probe.status_code == 200: - probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx") - if probe_confirm: - if probe_confirm.status_code != 200: - await self.emit_event( - { - "host": str(event.host), - "url": event.data, - "description": f"Ajaxpro Detected (Version Unconfirmed) Trigger: [{probe_url}]", - }, - "FINDING", - event, - ) + for stem in ["ajax","ajaxpro"]: + probe_url = f"{event.data}{stem}/whatever.ashx" + self.critical(probe_url) + probe = await self.helpers.request(probe_url) + if probe: + if probe.status_code == 200: + probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx") + if probe_confirm: + if 
probe_confirm.status_code != 200: + await self.emit_event( + { + "host": str(event.host), + "url": event.data, + "description": f"Ajaxpro Detected (Version Unconfirmed) Trigger: [{probe_url}]", + }, + "FINDING", + event, + ) elif event.type == "HTTP_RESPONSE": resp_body = event.data.get("body", None) From 9a0a058827f213aafd6e63a2a46058de1ecbb71f Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 12 Feb 2024 13:03:40 -0500 Subject: [PATCH 126/159] black --- bbot/modules/ajaxpro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index afdb19b77..4dad1729e 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -17,7 +17,7 @@ async def handle_event(self, event): if event.type == "URL": if "dir" not in event.tags: return False - for stem in ["ajax","ajaxpro"]: + for stem in ["ajax", "ajaxpro"]: probe_url = f"{event.data}{stem}/whatever.ashx" self.critical(probe_url) probe = await self.helpers.request(probe_url) From 36817d8fecc39c1e2fb6bc6040ad132c3cfd9f83 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Mon, 12 Feb 2024 16:59:28 -0500 Subject: [PATCH 127/159] removing debug statement --- bbot/modules/ajaxpro.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index 4dad1729e..46d475cca 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -19,7 +19,6 @@ async def handle_event(self, event): return False for stem in ["ajax", "ajaxpro"]: probe_url = f"{event.data}{stem}/whatever.ashx" - self.critical(probe_url) probe = await self.helpers.request(probe_url) if probe: if probe.status_code == 200: From 7ec6e9ff7776d35883f28f538ef44c31bed40a57 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 12 Feb 2024 17:07:24 -0500 Subject: [PATCH 128/159] debug --> verbose for batch event handling --- bbot/modules/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py 
index 3c94ebc57..d888bf699 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -358,12 +358,12 @@ async def _handle_batch(self): events, finish = await self._events_waiting() if events and not self.errored: counter.n = len(events) - self.debug(f"Handling batch of {len(events):,} events") + self.verbose(f"Handling batch of {len(events):,} events") submitted = True async with self.scan._acatch(f"{self.name}.handle_batch()"): handle_batch_task = asyncio.create_task(self.handle_batch(*events)) await handle_batch_task - self.debug(f"Finished handling batch of {len(events):,} events") + self.verbose(f"Finished handling batch of {len(events):,} events") if finish: context = f"{self.name}.finish()" async with self.scan._acatch(context), self._task_counter.count(context): From 07eea207443551656f6f2316c4fc04b6451b4a12 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 12 Feb 2024 17:08:56 -0500 Subject: [PATCH 129/159] keyboard listen logic fix --- bbot/cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index e7e440092..275d156ec 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -343,8 +343,8 @@ async def akeyboard_listen(): allowed_errors -= 1 if keyboard_input is not None: handle_keyboard_input(keyboard_input) - if allowed_errors <= 0: - break + if allowed_errors <= 0: + break except Exception as e: log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR") log_to_stderr(traceback.format_exc(), level="TRACE") From 29dce2f64021a25d66a225d94a7d1b652caa5a63 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 12 Feb 2024 17:31:45 -0500 Subject: [PATCH 130/159] debug massdns mutations --- bbot/modules/massdns.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 84e406628..1a19e9ac8 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -378,6 +378,9 @@ def add_mutation(_domain_hash, m): 
self.mutations_tried.add(h) mutations.add(m) + num_base_mutations = len(base_mutations) + self.critical(f"base mutations: {num_base_mutations}") + # try every subdomain everywhere else for _domain, _subdomains in found: if _domain == domain: @@ -393,6 +396,9 @@ def add_mutation(_domain_hash, m): ): add_mutation(domain_hash, word) + num_massdns_mutations = len(mutations) - num_base_mutations + self.critical(f"added by massdns: {num_massdns_mutations}") + # numbers + devops mutations for mutation in self.helpers.word_cloud.mutations( subdomains, cloud=False, numbers=3, number_padding=1 @@ -401,12 +407,19 @@ def add_mutation(_domain_hash, m): m = delimiter.join(mutation).lower() add_mutation(domain_hash, m) + num_devops = len(mutations) - num_massdns_mutations + self.critical(f"added by word cloud: {num_devops}") + # special dns mutator + self.critical(f"dns_mutator size: {len(self.helpers.word_cloud.dns_mutator)} (max: {self.max_mutations})") for subdomain in self.helpers.word_cloud.dns_mutator.mutations( subdomains, max_mutations=self.max_mutations ): add_mutation(domain_hash, subdomain) + num_mutations = len(mutations) - num_devops + self.critical(f"added by dns mutator: {num_mutations}") + if mutations: self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") results = list(await self.massdns(query, mutations)) From a91fc43c73b994f83693d0790bd7a415622fda0a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 13 Feb 2024 10:26:03 -0500 Subject: [PATCH 131/159] don't dedup DNS child events --- bbot/core/helpers/dns.py | 5 +++++ bbot/scanner/manager.py | 1 + 2 files changed, 6 insertions(+) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 9ad22116f..b81194403 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -334,6 +334,10 @@ async def _resolve_hostname(self, query, **kwargs): if results: self._last_dns_success = time.time() + self.debug(f"Answers for {query} with kwargs={kwargs}: 
{list(results)}") + + if errors: + self.debug(f"Errors for {query} with kwargs={kwargs}: {errors}") return results, errors @@ -1038,6 +1042,7 @@ def _get_dummy_module(self, name): dummy_module = self._dummy_modules[name] except KeyError: dummy_module = self.parent_helper._make_dummy_module(name=name, _type="DNS") + dummy_module.suppress_dupes = False self._dummy_modules[name] = dummy_module return dummy_module diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index c0f598230..6b89529a4 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -319,6 +319,7 @@ async def _emit_event(self, event, **kwargs): f'Event validation failed for DNS child of {source_event}: "{record}" ({rdtype}): {e}' ) for child_event in dns_child_events: + log.debug(f"Queueing DNS child for {event}: {child_event}") self.queue_event(child_event) except ValidationError as e: From ce95ce8f078bd5b674e86527b61bd6f48d8bee40 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 13 Feb 2024 13:17:09 -0500 Subject: [PATCH 132/159] add debug statements for mutations --- bbot/modules/massdns.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 1a19e9ac8..02ff1aa85 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -379,7 +379,7 @@ def add_mutation(_domain_hash, m): mutations.add(m) num_base_mutations = len(base_mutations) - self.critical(f"base mutations: {num_base_mutations}") + self.debug(f"Base mutations for {domain}: {num_base_mutations:,}") # try every subdomain everywhere else for _domain, _subdomains in found: @@ -397,7 +397,7 @@ def add_mutation(_domain_hash, m): add_mutation(domain_hash, word) num_massdns_mutations = len(mutations) - num_base_mutations - self.critical(f"added by massdns: {num_massdns_mutations}") + self.debug(f"Mutations from previous subdomains for {domain}: {num_massdns_mutations:,}") # numbers + devops mutations for mutation in 
self.helpers.word_cloud.mutations( @@ -407,18 +407,20 @@ def add_mutation(_domain_hash, m): m = delimiter.join(mutation).lower() add_mutation(domain_hash, m) - num_devops = len(mutations) - num_massdns_mutations - self.critical(f"added by word cloud: {num_devops}") + num_word_cloud_mutations = len(mutations) - num_massdns_mutations + self.debug(f"Mutations added by word cloud for {domain}: {num_word_cloud_mutations:,}") # special dns mutator - self.critical(f"dns_mutator size: {len(self.helpers.word_cloud.dns_mutator)} (max: {self.max_mutations})") + self.debug( + f"DNS Mutator size: {len(self.helpers.word_cloud.dns_mutator):,} (limited to {self.max_mutations:,})" + ) for subdomain in self.helpers.word_cloud.dns_mutator.mutations( subdomains, max_mutations=self.max_mutations ): add_mutation(domain_hash, subdomain) - num_mutations = len(mutations) - num_devops - self.critical(f"added by dns mutator: {num_mutations}") + num_mutations = len(mutations) - num_word_cloud_mutations + self.debug(f"Mutations added by DNS Mutator: {num_mutations:,}") if mutations: self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") From e4db934a0d4ad238a4f3c193ee6efe63f17dcde3 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 13 Feb 2024 15:17:59 -0500 Subject: [PATCH 133/159] don't follow redirects in wafw00f --- bbot/modules/wafw00f.py | 8 +++++++- .../module_tests/test_module_wafw00f.py | 16 ++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index 89c7ee1fa..8fd0bc3d4 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py @@ -26,9 +26,15 @@ class wafw00f(BaseModule): in_scope_only = True per_hostport_only = True + async def filter_event(self, event): + http_status = getattr(event, "http_status", 0) + if not http_status or str(http_status).startswith("3"): + return False, f"Invalid HTTP status code: {http_status}" + return True, "" + async def 
handle_event(self, event): url = f"{event.parsed.scheme}://{event.parsed.netloc}/" - WW = await self.scan.run_in_executor(wafw00f_main.WAFW00F, url) + WW = await self.scan.run_in_executor(wafw00f_main.WAFW00F, url, followredirect=False) waf_detections = await self.scan.run_in_executor(WW.identwaf) if waf_detections: for waf in waf_detections: diff --git a/bbot/test/test_step_2/module_tests/test_module_wafw00f.py b/bbot/test/test_step_2/module_tests/test_module_wafw00f.py index 38f1d5764..9955e584b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_wafw00f.py +++ b/bbot/test/test_step_2/module_tests/test_module_wafw00f.py @@ -12,3 +12,19 @@ async def setup_after_prep(self, module_test): def check(self, module_test, events): assert any(e.type == "WAF" and "LiteSpeed" in e.data["WAF"] for e in events) + + +class TestWafw00f_noredirect(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wafw00f"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"status": 301, "headers": {"Location": "/redirect"}} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + expect_args = {"method": "GET", "uri": "/redirect"} + respond_args = {"response_data": "Proudly powered by litespeed web server"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert not any(e.type == "WAF" for e in events) From 28f24ace780e1659ab4e05dc4f1a6242d79da963 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 14 Feb 2024 11:50:14 -0500 Subject: [PATCH 134/159] restore wildcard rdtype optimization --- bbot/core/helpers/dns.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 2d4016754..1c7e5187d 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -938,7 +938,7 @@ async def 
is_wildcard_domain(self, domain, log_info=False): # resolve a bunch of random subdomains of the same parent is_wildcard = False wildcard_results = dict() - for rdtype in rdtypes_to_check: + for rdtype in list(rdtypes_to_check): # continue if a wildcard was already found for this rdtype # if rdtype in self._wildcard_cache[host_hash]: # continue @@ -952,6 +952,8 @@ async def is_wildcard_domain(self, domain, log_info=False): wildcard_results[rdtype].update(results) # we know this rdtype is a wildcard # so we don't need to check it anymore + with suppress(KeyError): + rdtypes_to_check.remove(rdtype) self._wildcard_cache.update({host_hash: wildcard_results}) wildcard_domain_results.update({host: wildcard_results}) From 92b563b9712a0b1c66e2cd6cfc3d1e0e0a87842b Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Wed, 14 Feb 2024 23:28:24 +0000 Subject: [PATCH 135/159] Refresh module docs --- docs/scanning/configuration.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 2ed2d61de..21db76264 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -291,6 +291,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.vhost.lines | int | take only the first N lines from the wordlist when finding directories | 5000 | | modules.vhost.wordlist | str | Wordlist containing subdomains | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt | | modules.wafw00f.generic_detect | bool | When no specific WAF detections are made, try to perform a generic detect | True | +| modules.anubisdb.limit | int | Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API) | 1000 | | modules.bevigil.api_key | str | BeVigil OSINT API Key | | | modules.bevigil.urls | bool | Emit URLs in addition to DNS_NAMEs | False | | modules.binaryedge.api_key | str | BinaryEdge API key | | @@ -308,6 +309,8 @@ Many modules accept their own configuration options. These options have the abil | modules.credshed.username | str | Credshed username | | | modules.dehashed.api_key | str | DeHashed API Key | | | modules.dehashed.username | str | Email Address associated with your API key | | +| modules.dnscommonsrv.max_event_handlers | int | How many instances of the module to run concurrently | 10 | +| modules.dnscommonsrv.top | int | How many of the top SRV records to check | 50 | | modules.fullhunt.api_key | str | FullHunt API Key | | | modules.github_codesearch.api_key | str | Github token | | | modules.github_codesearch.limit | int | Limit code search to this many results | 100 | @@ -320,6 +323,7 @@ Many modules accept their own configuration options. These options have the abil | modules.ipneighbor.num_bits | int | Netmask size (in CIDR notation) to check. Default is 4 bits (16 hosts) | 4 | | modules.ipstack.api_key | str | IPStack GeoIP API Key | | | modules.leakix.api_key | str | LeakIX API Key | | +| modules.massdns.max_depth | int | How many subdomains deep to brute force, i.e. 
5.4.3.2.1.evilcorp.com | 5 | | modules.massdns.max_mutations | int | Max number of smart mutations per subdomain | 500 | | modules.massdns.max_resolvers | int | Number of concurrent massdns resolvers | 1000 | | modules.massdns.wordlist | str | Subdomain wordlist URL | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt | From 205697bacababa81d8412abb7e7c93f50f1d61fb Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 15 Feb 2024 12:04:48 -0600 Subject: [PATCH 136/159] Replace BeautifulSoup with the new 'helpers.beautifulsoup()' --- bbot/modules/dnsdumpster.py | 5 ++--- bbot/modules/newsletters.py | 3 +-- bbot/modules/viewdns.py | 5 ++--- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index 8bb1fa1ed..c119857be 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -1,5 +1,4 @@ import re -from bs4 import BeautifulSoup from bbot.modules.templates.subdomain_enum import subdomain_enum @@ -25,7 +24,7 @@ async def query(self, domain): return ret else: self.debug(f'Valid response code "{status_code}" from DNSDumpster') - html = BeautifulSoup(res1.content, "html.parser") + html = self.helpers.beautifulsoup(res1.content, "html.parser") csrftoken = None csrfmiddlewaretoken = None try: @@ -73,7 +72,7 @@ async def query(self, domain): self.verbose(f'Bad response code "{status_code}" from DNSDumpster') return ret - html = BeautifulSoup(res2.content, "html.parser") + html = self.helpers.beautifulsoup(res2.content, "html.parser") escaped_domain = re.escape(domain) match_pattern = re.compile(r"^[\w\.-]+\." 
+ escaped_domain + r"$") for subdomain in html.findAll(text=match_pattern): diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 62ef98463..d6c990650 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -7,7 +7,6 @@ from .base import BaseModule import re -from bs4 import BeautifulSoup # Known Websites with Newsletters # https://futureparty.com/ @@ -37,7 +36,7 @@ def find_type(self, soup): async def handle_event(self, event): if event.data["status_code"] == 200: - soup = BeautifulSoup(event.data["body"], "html.parser") + soup = self.helpers.beautifulsoup(event.data["body"], "html.parser") result = self.find_type(soup) if result: description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index d9a589845..bf55953a3 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -37,9 +37,8 @@ async def query(self, query): self.verbose(f"Error retrieving reverse whois results (status code: {status_code})") content = getattr(r, "content", b"") - from bs4 import BeautifulSoup - - html = BeautifulSoup(content, "html.parser") + + html = self.helpers.beautifulsoup(content, "html.parser") found = set() for table_row in html.findAll("tr"): table_cells = table_row.findAll("td") From d00a0f8e6c6eb3e6277e1cf92251e04788673b39 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 15 Feb 2024 12:06:24 -0600 Subject: [PATCH 137/159] black & flake8 --- bbot/modules/viewdns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index bf55953a3..9b154ab63 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -37,7 +37,7 @@ async def query(self, query): self.verbose(f"Error retrieving reverse whois results (status code: {status_code})") content = getattr(r, "content", b"") - + html = self.helpers.beautifulsoup(content, "html.parser") found = set() for 
table_row in html.findAll("tr"): From 625d2ca17afa412ebb24bb4d1cb9f53992a29f16 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 15 Feb 2024 14:24:04 -0600 Subject: [PATCH 138/159] Added BeautifulSoup Web Helper Test --- bbot/core/helpers/web.py | 17 ++++++++++------- bbot/test/test_step_1/test_web.py | 24 ++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 7 deletions(-) diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index d834f8955..c17cfb5a4 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -398,7 +398,7 @@ async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **re new_url = next_key(result) except Exception as e: log.debug(f"Failed to extract next page of results from {url}: {e}") - log.debug(traceback.formate_exc()) + log.debug(traceback.format_exc()) else: new_url = url.format(page=page, page_size=page_size, offset=offset) result = await self.request(new_url, **requests_kwargs) @@ -609,17 +609,20 @@ def beautifulsoup( - Write tests for this function Examples: - >>> soup = BeautifulSoup(event.data["body"], "html.parser") + >>> soup = self.helpers.beautifulsoup(event.data["body"], "html.parser") Perform an html parse of the 'markup' argument and return a soup instance >>> email_type = soup.find(type="email") Searches the soup instance for all occurances of the passed in argument """ - - soup = BeautifulSoup( - markup, features, builder, parse_only, from_encoding, exclude_encodings, element_classes, **kwargs - ) - return soup + try: + soup = BeautifulSoup( + markup, features, builder, parse_only, from_encoding, exclude_encodings, element_classes, **kwargs + ) + return soup + except Exception as e: + log.debug(f"Error parsing beautifulsoup: {e}") + return False def ssl_context_noverify(self): if self._ssl_context_noverify is None: diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 13edaf725..a8ea39ae5 100644 --- a/bbot/test/test_step_1/test_web.py +++ 
b/bbot/test/test_step_1/test_web.py @@ -45,6 +45,30 @@ async def test_web_helpers(bbot_scanner, bbot_config, bbot_httpserver): assert filename2.is_file() with open(filename2) as f: assert f.read() == download_content + + # beautifulsoup + download_content = """ +
+

Example Domain

+

This domain is for use in illustrative examples in documents. You may use this + domain in literature without prior coordination or asking for permission.

+

More information...

+
+ """ + + path = "/test_http_helpers_beautifulsoup" + url = bbot_httpserver.url_for(path) + bbot_httpserver.expect_request(uri=path).respond_with_data(download_content, status=200) + webpage = await scan1.helpers.request(url) + assert webpage, f"Webpage is False" + soup = scan1.helpers.beautifulsoup(webpage, "html.parser") + assert soup, f"Soup is False" + # pretty_print = soup.prettify() + # assert pretty_print, f"PrettyPrint is False" + # scan1.helpers.log.info(f"{pretty_print}") + html_text = soup.find(text="Example Domain") + assert html_text, f"Find HTML Text is False" + # 404 path = "/test_http_helpers_download_404" url = bbot_httpserver.url_for(path) From c51e059bdab236fee1d687438e873233e57a72a0 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 15 Feb 2024 16:44:20 -0500 Subject: [PATCH 139/159] add socksio --- poetry.lock | 14 ++++++++++++-- pyproject.toml | 1 + 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index f12b77dc7..ab85c1785 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2038,7 +2038,6 @@ optional = false python-versions = "*" files = [ {file = "requests-file-2.0.0.tar.gz", hash = "sha256:20c5931629c558fda566cacc10cfe2cd502433e628f568c34c80d96a0cc95972"}, - {file = "requests_file-2.0.0-py2.py3-none-any.whl", hash = "sha256:3e493d390adb44aa102ebea827a48717336d5268968c370eaf19abaf5cae13bf"}, ] [package.dependencies] @@ -2099,6 +2098,17 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] +[[package]] +name = "socksio" +version = "1.0.0" +description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"}, + {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"}, +] + [[package]] name = "soupsieve" version = "2.5" @@ -2424,4 +2434,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "4eb296ea314405bf39920f67d20eebb13cc8974254fd1643538bcb3a338976d2" +content-hash = "e9c476ba44a5968f7bd6c9759ac4c6f8e679384bd6b0dd4f128af873a68a34da" diff --git a/pyproject.toml b/pyproject.toml index 7d1b2fb32..f16540fb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,7 @@ httpx = "^0.26.0" cloudcheck = "^2.1.0.181" tldextract = "^5.1.1" cachetools = "^5.3.2" +socksio = "^1.0.0" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" From 497e5bb3932ab3c8adfc62284dd76b6ca2cf9855 Mon Sep 17 00:00:00 2001 From: Jack Ward Date: Thu, 15 Feb 2024 16:38:57 -0600 Subject: [PATCH 140/159] Merged Dev & removed 'deps_pip' --- bbot/modules/newsletters.py | 5 +---- docs/contribution.md | 1 - 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index d6c990650..a59cc30e3 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -2,8 +2,7 @@ # thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also # contains a 'placeholder'. The combination of these two HTML items usually signify the presence # of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used -# to find newsletters for a future email bombing attack and/or find user-input fields that could -# be be susceptible to overflows or injections. +# to find newsletters for a future email bombing attack. 
from .base import BaseModule import re @@ -15,8 +14,6 @@ # https://www.milkkarten.net/ # https://geekout.mattnavarra.com/ -deps_pip = ["beautifulsoup4"] - class newsletters(BaseModule): watched_events = ["HTTP_RESPONSE"] diff --git a/docs/contribution.md b/docs/contribution.md index 2d36cfe44..175c3e7af 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -134,7 +134,6 @@ BBOT automates module dependencies with **Ansible**. If your module relies on a ```python class MyModule(BaseModule): ... - deps_pip = ["beautifulsoup4"] deps_apt = ["chromium-browser"] deps_ansible = [ { From 7fa71403ed624516a3196e941b1c1ce647aafe03 Mon Sep 17 00:00:00 2001 From: ooooo <3164076421@qq.com> Date: Fri, 16 Feb 2024 15:39:23 +0800 Subject: [PATCH 141/159] Updated download count badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6b0cca3be..e99dcf3d6 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ### A Recursive Internet Scanner for Hackers. 
-[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Pypi Downloads](https://img.shields.io/pypi/dm/bbot)](https://pypistats.org/packages/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Static Badge](https://img.shields.io/badge/downloads%2Fmonth-23k-FF8400)](https://pypistats.org/packages/bbot) 
[![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) BBOT (Bighuge BLS OSINT Tool) is a recursive internet scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), but designed to be faster, more reliable, and friendlier to pentesters, bug bounty hunters, and developers. From 5682abd8a2788e5008ef9a772051d3234c37dfc5 Mon Sep 17 00:00:00 2001 From: ooooo <3164076421@qq.com> Date: Fri, 16 Feb 2024 22:26:36 +0800 Subject: [PATCH 142/159] use pepy for badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e99dcf3d6..fad621f98 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ### A Recursive Internet Scanner for Hackers. -[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Static Badge](https://img.shields.io/badge/downloads%2Fmonth-23k-FF8400)](https://pypistats.org/packages/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 
[![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) BBOT (Bighuge BLS OSINT Tool) is a recursive internet scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), but designed to be faster, more reliable, and friendlier to pentesters, bug bounty hunters, and developers. 
From 0bf795718e0633325ebba0ef184b5284705969d6 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 16 Feb 2024 11:39:31 -0500 Subject: [PATCH 143/159] handle SOCKS error --- bbot/core/helpers/web.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index 880565132..1a442c7e3 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -11,6 +11,7 @@ from contextlib import asynccontextmanager from httpx._models import Cookies +from socksio.exceptions import SOCKSError from bbot.core.errors import WordlistError, CurlError from bbot.core.helpers.ratelimiter import RateLimiter @@ -674,6 +675,12 @@ async def _acatch(self, url, raise_error): log.trace(traceback.format_exc()) if raise_error: raise httpx.RequestError(msg) + except SOCKSError as e: + msg = f"SOCKS error with request to URL: {url}: {e}" + log.trace(msg) + log.trace(traceback.format_exc()) + if raise_error: + raise httpx.RequestError(msg) except BaseException as e: # don't log if the error is the result of an intentional cancellation if not any( From dedb8ccc04f5da26c6f8accd5989be409581f3b2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 16 Feb 2024 16:23:13 -0500 Subject: [PATCH 144/159] fix interactsh bug with custom server --- bbot/core/helpers/interactsh.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 871ecb1c4..d349c80c0 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -127,7 +127,8 @@ async def register(self, callback=None): if self.custom_server: if not self.token: log.verbose("Interact.sh token is not set") - headers["Authorization"] = self.token + else: + headers["Authorization"] = self.token self.server_list = [self.custom_server] else: self.server_list = random.sample(server_list, k=len(server_list)) From e870cffb30ffeb7015503690943738eefb66383b Mon Sep 17 00:00:00 2001 From: 
TheTechromancer Date: Fri, 16 Feb 2024 16:31:50 -0500 Subject: [PATCH 145/159] stringify custom server --- bbot/core/helpers/interactsh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index d349c80c0..aad4a169f 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -129,7 +129,7 @@ async def register(self, callback=None): log.verbose("Interact.sh token is not set") else: headers["Authorization"] = self.token - self.server_list = [self.custom_server] + self.server_list = [str(self.custom_server)] else: self.server_list = random.sample(server_list, k=len(server_list)) for server in self.server_list: From 3c5f9e4ef4d909dadece3b7395c3f4c1d01eb63d Mon Sep 17 00:00:00 2001 From: ooooo <3164076421@qq.com> Date: Sat, 17 Feb 2024 09:09:16 +0800 Subject: [PATCH 146/159] separate different colors --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fad621f98..252c2028a 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ### A Recursive Internet Scanner for Hackers. 
-[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) 
[![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) BBOT (Bighuge BLS OSINT Tool) is a recursive internet scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), but designed to be faster, more reliable, and friendlier to pentesters, bug bounty hunters, and developers. From 51a01e2e9ad589006039050c536ca0451aa440ec Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 17 Feb 2024 11:49:45 -0500 Subject: [PATCH 147/159] add siem_friendly option to http output module --- bbot/core/event/base.py | 22 +++++++--- bbot/modules/output/http.py | 5 ++- bbot/modules/output/json.py | 4 +- bbot/test/test_step_1/test_events.py | 13 ++++++ .../module_tests/test_module_http.py | 41 ++++++++++++++++++- 5 files changed, 74 insertions(+), 11 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 24606f890..0e63b6291 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -565,7 +565,7 @@ def __contains__(self, other): return host_in_host(other.host, self.host) return False - def json(self, mode="json"): + def json(self, mode="json", siem_friendly=False): """ Serializes the event object to a JSON-compatible dictionary. @@ -574,6 +574,7 @@ def json(self, mode="json"): Parameters: mode (str): Specifies the data serialization mode. Default is "json". Other options include "graph", "human", and "id". + siem_friendly (bool): Whether to format the JSON in a way that's friendly to SIEM ingestion by Elastic, Splunk, etc. This ensures the value of "data" is always the same type (a dictionary). Returns: dict: JSON-serializable dictionary representation of the event object. 
@@ -585,9 +586,13 @@ def json(self, mode="json"): j.update({i: v}) data_attr = getattr(self, f"data_{mode}", None) if data_attr is not None: - j["data"] = data_attr + data = data_attr else: - j["data"] = smart_decode(self.data) + data = smart_decode(self.data) + if siem_friendly: + j["data"] = {self.type: data} + else: + j["data"] = data web_spider_distance = getattr(self, "web_spider_distance", None) if web_spider_distance is not None: j["web_spider_distance"] = web_spider_distance @@ -1312,7 +1317,7 @@ def make_event( ) -def event_from_json(j): +def event_from_json(j, siem_friendly=False): """ Creates an event object from a JSON dictionary. @@ -1335,14 +1340,19 @@ def event_from_json(j): if required keys are missing. Make sure to validate the JSON input beforehand. """ try: + event_type = j["type"] kwargs = { - "data": j["data"], - "event_type": j["type"], + "event_type": event_type, "scans": j.get("scans", []), "tags": j.get("tags", []), "confidence": j.get("confidence", 5), "dummy": True, } + if siem_friendly: + data = j["data"][event_type] + else: + data = j["data"] + kwargs["data"] = data event = make_event(**kwargs) resolved_hosts = j.get("resolved_hosts", []) diff --git a/bbot/modules/output/http.py b/bbot/modules/output/http.py index 10ca1c8df..014610736 100644 --- a/bbot/modules/output/http.py +++ b/bbot/modules/output/http.py @@ -13,6 +13,7 @@ class HTTP(BaseOutputModule): "username": "", "password": "", "timeout": 10, + "siem_friendly": False, } options_desc = { "url": "Web URL", @@ -21,12 +22,14 @@ class HTTP(BaseOutputModule): "username": "Username (basic auth)", "password": "Password (basic auth)", "timeout": "HTTP timeout", + "siem_friendly": "Format JSON in a SIEM-friendly way for ingestion into Elastic, Splunk, etc.", } async def setup(self): self.url = self.config.get("url", "") self.method = self.config.get("method", "POST") self.timeout = self.config.get("timeout", 10) + self.siem_friendly = self.config.get("siem_friendly", False) self.headers 
= {} bearer = self.config.get("bearer", "") if bearer: @@ -52,7 +55,7 @@ async def handle_event(self, event): method=self.method, auth=self.auth, headers=self.headers, - json=dict(event), + json=event.json(siem_friendly=self.siem_friendly), raise_error=True, ) break diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index a380ac9a1..bf8517db9 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -21,9 +21,7 @@ async def setup(self): return True async def handle_event(self, event): - event_json = dict(event) - if self.siem_friendly: - event_json["data"] = {event.type: event_json.pop("data", "")} + event_json = event.json(siem_friendly=self.siem_friendly) event_str = json.dumps(event_json) if self.file is not None: self.file.write(event_str + "\n") diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index ccbf17c1f..cadde29ad 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -385,6 +385,19 @@ async def test_events(events, scan, helpers, bbot_config): assert reconstituted_event.type == "DNS_NAME" assert "127.0.0.1" in reconstituted_event.resolved_hosts + # SIEM-friendly serialize/deserialize + json_event_siemfriendly = db_event.json(siem_friendly=True) + assert json_event_siemfriendly["scope_distance"] == 1 + assert json_event_siemfriendly["data"] == {"DNS_NAME": "evilcorp.com"} + assert json_event_siemfriendly["type"] == "DNS_NAME" + assert json_event_siemfriendly["timestamp"] == timestamp + reconstituted_event2 = event_from_json(json_event_siemfriendly, siem_friendly=True) + assert reconstituted_event2.scope_distance == 1 + assert reconstituted_event2.timestamp.timestamp() == timestamp + assert reconstituted_event2.data == "evilcorp.com" + assert reconstituted_event2.type == "DNS_NAME" + assert "127.0.0.1" in reconstituted_event2.resolved_hosts + http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", source=scan.root_event) assert 
http_response.source_id == scan.root_event.id assert http_response.data["input"] == "http://example.com:80" diff --git a/bbot/test/test_step_2/module_tests/test_module_http.py b/bbot/test/test_step_2/module_tests/test_module_http.py index 3b4e819b9..44a969370 100644 --- a/bbot/test/test_step_2/module_tests/test_module_http.py +++ b/bbot/test/test_step_2/module_tests/test_module_http.py @@ -1,3 +1,6 @@ +import json +import httpx + from .base import ModuleTestBase @@ -15,10 +18,46 @@ class TestHTTP(ModuleTestBase): } } + def verify_data(self, j): + return j["data"] == "blacklanternsecurity.com" and j["type"] == "DNS_NAME" + async def setup_after_prep(self, module_test): + self.got_event = False + self.headers_correct = False + self.method_correct = False + self.url_correct = False + + async def custom_callback(request): + j = json.loads(request.content) + if request.url == self.downstream_url: + self.url_correct = True + if request.method == "PUT": + self.method_correct = True + if "Authorization" in request.headers: + self.headers_correct = True + if self.verify_data(j): + self.got_event = True + return httpx.Response( + status_code=200, + ) + + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_callback(custom_callback) module_test.httpx_mock.add_response( method="PUT", headers={"Authorization": "bearer auth_token"}, url=self.downstream_url ) def check(self, module_test, events): - pass + assert self.got_event == True + assert self.headers_correct == True + assert self.method_correct == True + assert self.url_correct == True + + +class TestHTTPSIEMFriendly(TestHTTP): + modules_overrides = ["http"] + config_overrides = TestHTTP.config_overrides + config_overrides["output_modules"]["http"]["siem_friendly"] = True + + def verify_data(self, j): + return j["data"] == {"DNS_NAME": "blacklanternsecurity.com"} and j["type"] == "DNS_NAME" From 6d41eead733b8dd1ad33dc0b627649f5eaac6ca6 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 17 
Feb 2024 12:17:40 -0500 Subject: [PATCH 148/159] fix tests --- bbot/test/test_step_2/module_tests/test_module_http.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_http.py b/bbot/test/test_step_2/module_tests/test_module_http.py index 44a969370..d0afcefb2 100644 --- a/bbot/test/test_step_2/module_tests/test_module_http.py +++ b/bbot/test/test_step_2/module_tests/test_module_http.py @@ -56,7 +56,7 @@ def check(self, module_test, events): class TestHTTPSIEMFriendly(TestHTTP): modules_overrides = ["http"] - config_overrides = TestHTTP.config_overrides + config_overrides = {"output_modules": {"http": dict(TestHTTP.config_overrides["output_modules"]["http"])}} config_overrides["output_modules"]["http"]["siem_friendly"] = True def verify_data(self, j): From f7bdabaef477dcc1de9e3445a38ec4d91a0dabf4 Mon Sep 17 00:00:00 2001 From: w0Tx <98209373+w0Tx@users.noreply.github.com> Date: Wed, 14 Feb 2024 16:43:39 +0100 Subject: [PATCH 149/159] Add splunk.py output module Add support to send JSON data to splunk instance via HTTP Event Collector --- bbot/modules/output/splunk.py | 59 +++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 bbot/modules/output/splunk.py diff --git a/bbot/modules/output/splunk.py b/bbot/modules/output/splunk.py new file mode 100644 index 000000000..704aff542 --- /dev/null +++ b/bbot/modules/output/splunk.py @@ -0,0 +1,59 @@ +from bbot.core.errors import RequestError + +from bbot.modules.output.base import BaseOutputModule + +class SPLUNK(BaseOutputModule): + watched_events = ["*"] + meta = {"description": "Send every event to a splunk instance through HTTP Event Collector"} + options = { + "url": "", + "hectoken": "", + "index": "", + "source": "", + "timeout": 10, + } + options_desc = { + "url": "Web URL", + "hectoken": "HEC Token", + "index": "Index to send data to", + "source": "Source path to be added to the metadata", + "timeout": "HTTP timeout", + 
} + + async def setup(self): + self.url = self.config.get("url", "") + self.source = self.config.get("source", "") + self.index = self.config.get("index", "main") + self.timeout = self.config.get("timeout", 10) + self.headers = {} + + hectoken = self.config.get("hectoken", "") + if hectoken: + self.headers["Authorization"] = f"Splunk {hectoken}" + if not self.url: + self.warning("Must set URL") + return False + if not self.source: + self.warning("Please provide a source") + return True + + async def handle_event(self, event): + while 1: + try: + data = { + "index": self.index, + "source": self.source, + "sourcetype": "_json", + "event": dict(event), + } + await self.helpers.request( + url=self.url, + method="POST", + headers=self.headers, + json=data, + raise_error=True, + ) + break + except RequestError as e: + self.warning(f"Error sending {event}: {e}, retrying...") + await self.helpers.sleep(1) From 5f7596e9d897b16ec90e6bbac13ab290dee0e310 Mon Sep 17 00:00:00 2001 From: w0Tx <98209373+w0Tx@users.noreply.github.com> Date: Wed, 14 Feb 2024 16:45:01 +0100 Subject: [PATCH 150/159] Add doc for splunk output module Add documentation for splunk output module --- docs/scanning/output.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/docs/scanning/output.md b/docs/scanning/output.md index 81b4b8ede..16c317336 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -135,6 +135,25 @@ output_modules: password: P@ssw0rd ``` +### Splunk + +The `splunk` output module sends [events](events.md) in JSON format to a desired splunk instance via [HEC](https://docs.splunk.com/Documentation/Splunk/9.2.0/Data/UsetheHTTPEventCollector) + +You can customize this output with the following variables : + +```yaml title="~/.bbot/config/bbot.yml" +output_modules: + splunk: + #The full URL with the URI `/services/collector/event` + url: https://localhost:8088/services/collector/event + #Has to be generated from splunk webui + hectoken: 
xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + #Defaults to `main` if not set + index: my-specific-index + #Defaults to `bbot` if not set + source: /my/source.json +``` + ### Asset Inventory The `asset_inventory` module produces a CSV like this: From a4f030d949efcaa57fd820b8a9af1af4d8d6a6b3 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 17 Feb 2024 17:56:46 -0500 Subject: [PATCH 151/159] splunk tests --- bbot/modules/output/splunk.py | 8 +-- .../module_tests/test_module_splunk.py | 58 +++++++++++++++++++ 2 files changed, 62 insertions(+), 4 deletions(-) create mode 100644 bbot/test/test_step_2/module_tests/test_module_splunk.py diff --git a/bbot/modules/output/splunk.py b/bbot/modules/output/splunk.py index 704aff542..25ddfef3d 100644 --- a/bbot/modules/output/splunk.py +++ b/bbot/modules/output/splunk.py @@ -2,7 +2,8 @@ from bbot.modules.output.base import BaseOutputModule -class SPLUNK(BaseOutputModule): + +class Splunk(BaseOutputModule): watched_events = ["*"] meta = {"description": "Send every event to a splunk instance through HTTP Event Collector"} options = { @@ -31,8 +32,7 @@ async def setup(self): if hectoken: self.headers["Authorization"] = f"Splunk {hectoken}" if not self.url: - self.warning("Must set URL") - return False + return False, "Must set URL" if not self.source: self.warning("Please provide a source") return True @@ -44,7 +44,7 @@ async def handle_event(self, event): "index": self.index, "source": self.source, "sourcetype": "_json", - "event": dict(event), + "event": event.json(), } await self.helpers.request( url=self.url, diff --git a/bbot/test/test_step_2/module_tests/test_module_splunk.py b/bbot/test/test_step_2/module_tests/test_module_splunk.py new file mode 100644 index 000000000..67d67a4ef --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_splunk.py @@ -0,0 +1,58 @@ +import json +import httpx + +from .base import ModuleTestBase + + +class TestSplunk(ModuleTestBase): + downstream_url = 
"https://splunk.blacklanternsecurity.fakedomain:1234/services/collector" + config_overrides = { + "output_modules": { + "splunk": { + "url": downstream_url, + "hectoken": "HECTOKEN", + "index": "bbot_index", + "source": "bbot_source", + } + } + } + + def verify_data(self, j): + if not j["source"] == "bbot_source": + return False + if not j["index"] == "bbot_index": + return False + data = j["event"] + if not data["data"] == "blacklanternsecurity.com" and data["type"] == "DNS_NAME": + return False + return True + + async def setup_after_prep(self, module_test): + self.url_correct = False + self.method_correct = False + self.got_event = False + self.headers_correct = False + + async def custom_callback(request): + j = json.loads(request.content) + if request.url == self.downstream_url: + self.url_correct = True + if request.method == "POST": + self.method_correct = True + if "Authorization" in request.headers: + self.headers_correct = True + if self.verify_data(j): + self.got_event = True + return httpx.Response( + status_code=200, + ) + + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_response() + + def check(self, module_test, events): + assert self.got_event == True + assert self.headers_correct == True + assert self.method_correct == True + assert self.url_correct == True From 4b94b75db0a8f88fdd073646ab4d2cf20697093f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 17 Feb 2024 18:01:44 -0500 Subject: [PATCH 152/159] updated docs --- bbot/modules/output/splunk.py | 2 +- docs/scanning/output.md | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/bbot/modules/output/splunk.py b/bbot/modules/output/splunk.py index 25ddfef3d..242f1759e 100644 --- a/bbot/modules/output/splunk.py +++ b/bbot/modules/output/splunk.py @@ -23,7 +23,7 @@ class Splunk(BaseOutputModule): async def setup(self): self.url = self.config.get("url", "") - self.source = 
self.config.get("source", "") + self.source = self.config.get("source", "bbot") self.index = self.config.get("index", "main") self.timeout = self.config.get("timeout", 10) self.headers = {} diff --git a/docs/scanning/output.md b/docs/scanning/output.md index 16c317336..376585ebe 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -137,20 +137,20 @@ output_modules: ### Splunk -The `splunk` output module sends [events](events.md) in JSON format to a desired splunk instance via [HEC](https://docs.splunk.com/Documentation/Splunk/9.2.0/Data/UsetheHTTPEventCollector) +The `splunk` output module sends [events](events.md) in JSON format to a desired splunk instance via [HEC](https://docs.splunk.com/Documentation/Splunk/9.2.0/Data/UsetheHTTPEventCollector). -You can customize this output with the following variables : +You can customize this output with the following config options: ```yaml title="~/.bbot/config/bbot.yml" output_modules: splunk: - #The full URL with the URI `/services/collector/event` + # The full URL with the URI `/services/collector/event` url: https://localhost:8088/services/collector/event - #Has to be generated from splunk webui + # Generated from splunk webui hectoken: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - #Defaults to `main` if not set + # Defaults to `main` if not set index: my-specific-index - #Defaults to `bbot` if not set + # Defaults to `bbot` if not set source: /my/source.json ``` From 2e705228f0768fa1828c173a08673782fe3f25b6 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 17 Feb 2024 20:38:39 -0500 Subject: [PATCH 153/159] add output module list in README --- README.md | 24 +++++++++++++++++++++++- bbot/scripts/docs.py | 5 +++++ docs/modules/list_of_modules.md | 1 + docs/scanning/advanced.md | 16 +++++----------- docs/scanning/configuration.md | 5 +++++ docs/scanning/events.md | 2 +- 6 files changed, 40 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 6b0cca3be..7d19fa630 100644 --- 
a/README.md +++ b/README.md @@ -280,4 +280,26 @@ For a full list of modules, including the data types consumed and emitted by eac | subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | | web-screenshots | 1 | Takes screenshots of web pages | gowitness | - + +## BBOT Output Modules +BBOT can save its data to TXT, CSV, JSON, and tons of other destinations including [Neo4j](https://www.blacklanternsecurity.com/bbot/scanning/output/#neo4j), [Splunk](https://www.blacklanternsecurity.com/bbot/scanning/output/#splunk), and [Discord](https://www.blacklanternsecurity.com/bbot/scanning/output/#discord-slack-teams). For instructions on how to use these, see [Output Modules](https://www.blacklanternsecurity.com/bbot/scanning/output). + + +| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|-----------------|--------|-----------------|-----------------------------------------------------------------------------------------|----------------|--------------------------------------------------------------------------------------------------|---------------------------| +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | + diff --git a/bbot/scripts/docs.py b/bbot/scripts/docs.py index dcf9cd710..8e6d045f3 100755 --- a/bbot/scripts/docs.py +++ b/bbot/scripts/docs.py @@ -94,6 +94,11 @@ def update_individual_module_options(): assert len(bbot_module_table.splitlines()) > 50 update_md_files("BBOT MODULES", bbot_module_table) + # BBOT output modules + bbot_output_module_table = module_loader.modules_table(mod_type="output") + assert len(bbot_output_module_table.splitlines()) > 10 + update_md_files("BBOT OUTPUT MODULES", bbot_output_module_table) + # BBOT module options bbot_module_options_table = module_loader.modules_options_table() assert 
len(bbot_module_options_table.splitlines()) > 100 diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index a3ffc76c6..ebf4f182f 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -107,6 +107,7 @@ | neo4j | output | No | Output to Neo4j | | * | | | python | output | No | Output via Python API | | * | | | slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | | subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | | teams | output | No | Message a Teams channel when certain events are encountered | | * | | | web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 0baaf35c8..8207b7ce7 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -33,16 +33,10 @@ asyncio.run(main()) ```text -usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] - [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] - [--strict-scope] [-m MODULE [MODULE ...]] [-l] - [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] - [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] - [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] - [-o DIR] [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] - [--dry-run] [--current-config] - [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] - [-a] [--version] +usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] [--strict-scope] [-m MODULE [MODULE ...]] [-l] + [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] [-o DIR] + [-c [CONFIG ...]] 
[-v] [-d] [-s] [--force] [-y] [--dry-run] [--current-config] [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] [-a] + [--version] Bighuge BLS OSINT Tool @@ -73,7 +67,7 @@ Modules: -ef FLAG [FLAG ...], --exclude-flags FLAG [FLAG ...] Disable modules with these flags. (e.g. -ef aggressive) -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: asset_inventory,csv,discord,emails,http,human,json,neo4j,python,slack,subdomains,teams,web_report,websocket + Output module(s). Choices: asset_inventory,csv,discord,emails,http,human,json,neo4j,python,slack,splunk,subdomains,teams,web_report,websocket --allow-deadly Enable the use of highly aggressive modules Scan: diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 21db76264..babe891b1 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -364,6 +364,11 @@ Many modules accept their own configuration options. These options have the abil | output_modules.slack.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] | | output_modules.slack.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW | | output_modules.slack.webhook_url | str | Discord webhook URL | | +| output_modules.splunk.hectoken | str | HEC Token | | +| output_modules.splunk.index | str | Index to send data to | | +| output_modules.splunk.source | str | Source path to be added to the metadata | | +| output_modules.splunk.timeout | int | HTTP timeout | 10 | +| output_modules.splunk.url | str | Web URL | | | output_modules.subdomains.include_unresolved | bool | Include unresolved subdomains in output | False | | output_modules.subdomains.output_file | str | Output to file | | | output_modules.teams.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 6628fac46..d2aaa4595 100644 --- 
a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -51,7 +51,7 @@ Below is a full list of event types along with which modules produce/consume the | Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | |---------------------|-----------------------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | +| * | 12 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, splunk, teams, websocket | | | ASN | 0 | 1 | | asn | | AZURE_TENANT | 1 | 0 | speculate | | | CODE_REPOSITORY | 0 | 2 | | github_codesearch, github_org | From 6967bce5b6d68ee549ff440756058fdb5f930d1c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 17 Feb 2024 20:43:09 -0500 Subject: [PATCH 154/159] update output module docs --- docs/scanning/output.md | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/output.md b/docs/scanning/output.md index 376585ebe..af1db4737 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -1,6 +1,6 @@ # Output -By default, BBOT saves its output in TXT, JSON, and CSV formats: +By default, BBOT saves its output in TXT, JSON, and CSV formats. The filenames are logged at the end of each scan: ![bbot output](https://github.com/blacklanternsecurity/bbot/assets/20261699/bb3da441-2682-408f-b955-19b268823b82) Every BBOT scan gets a unique and mildly-entertaining name like **`demonic_jimmy`**. Output for that scan, including scan stats and any web screenshots, etc., are saved to a folder by that name in `~/.bbot/scans`. The most recent 20 scans are kept, and older ones are removed. You can change the location of BBOT's output with `--output`, and you can also pick a custom scan name with `--name`. From cb085d4020ec4415632af73ea32c58aad60d3a95 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 19 Feb 2024 13:44:29 -0500 Subject: [PATCH 155/159] clarify targets in docs --- README.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 252c2028a..06af79047 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ git clone https://github.com/blacklanternsecurity/bbot && cd bbot
-Usage +Example Usage ## Example Commands @@ -114,7 +114,13 @@ bbot -t evilcorp.com -f subdomain-enum email-enum cloud-enum web-basic -m nmap g ## Targets -BBOT accepts an unlimited number of targets via `-t`. You can specify targets either directly on the command line or in files (or both!). Targets can be any of the following: +BBOT accepts an unlimited number of targets via `-t`. You can specify targets either directly on the command line or in files (or both!): + +```bash +bbot -t evilcorp.com evilcorp.org 1.2.3.0/24 -f subdomain-enum +``` + +Targets can be any of the following: - `DNS_NAME` (`evilcorp.com`) - `IP_ADDRESS` (`1.2.3.4`) From 307f913ca66c078b38be77254c40537402fe92df Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Tue, 20 Feb 2024 16:23:32 +0000 Subject: [PATCH 156/159] Refresh module docs --- docs/scanning/configuration.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 21db76264..51a704a1d 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -350,6 +350,7 @@ Many modules accept their own configuration options. These options have the abil | output_modules.http.bearer | str | Authorization Bearer token | | | output_modules.http.method | str | HTTP method | POST | | output_modules.http.password | str | Password (basic auth) | | +| output_modules.http.siem_friendly | bool | Format JSON in a SIEM-friendly way for ingestion into Elastic, Splunk, etc. 
| False | | output_modules.http.timeout | int | HTTP timeout | 10 | | output_modules.http.url | str | Web URL | | | output_modules.http.username | str | Username (basic auth) | | From 65907a3a1c4a3d81c9e72ed5d3a67151447ef030 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Tue, 20 Feb 2024 21:18:39 +0000 Subject: [PATCH 157/159] Refresh module docs --- docs/scanning/advanced.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 8207b7ce7..628cb8847 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -33,10 +33,16 @@ asyncio.run(main()) ```text -usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] [--strict-scope] [-m MODULE [MODULE ...]] [-l] - [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] [-o DIR] - [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] [--dry-run] [--current-config] [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] [-a] - [--version] +usage: bbot [-h] [--help-all] [-t TARGET [TARGET ...]] + [-w WHITELIST [WHITELIST ...]] [-b BLACKLIST [BLACKLIST ...]] + [--strict-scope] [-m MODULE [MODULE ...]] [-l] + [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] + [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] + [-om MODULE [MODULE ...]] [--allow-deadly] [-n SCAN_NAME] + [-o DIR] [-c [CONFIG ...]] [-v] [-d] [-s] [--force] [-y] + [--dry-run] [--current-config] + [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] + [-a] [--version] Bighuge BLS OSINT Tool From 7a9428a0d7547a953227c96561692d95d646c2a9 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 21 Feb 2024 10:24:56 -0500 Subject: [PATCH 158/159] 1.1.6 release notes --- docs/release_history.md | 46 +++++++++++++++++++++++++++++++++++++++++ 1 file 
changed, 46 insertions(+) diff --git a/docs/release_history.md b/docs/release_history.md index eef23ba39..0730376d5 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,4 +1,50 @@ + + ## v1.1.6 +February 21, 2024 + +## Improvements +- https://github.com/blacklanternsecurity/bbot/pull/1001 +- https://github.com/blacklanternsecurity/bbot/pull/1006 +- https://github.com/blacklanternsecurity/bbot/pull/1010 +- https://github.com/blacklanternsecurity/bbot/pull/1013 +- https://github.com/blacklanternsecurity/bbot/pull/1014 +- https://github.com/blacklanternsecurity/bbot/pull/1015 +- https://github.com/blacklanternsecurity/bbot/pull/1032 +- https://github.com/blacklanternsecurity/bbot/pull/1043 +- https://github.com/blacklanternsecurity/bbot/pull/1047 +- https://github.com/blacklanternsecurity/bbot/pull/1048 +- https://github.com/blacklanternsecurity/bbot/pull/1049 +- https://github.com/blacklanternsecurity/bbot/pull/1051 +- https://github.com/blacklanternsecurity/bbot/pull/1065 +- https://github.com/blacklanternsecurity/bbot/pull/1070 +- https://github.com/blacklanternsecurity/bbot/pull/1076 +- https://github.com/blacklanternsecurity/bbot/pull/1077 +- https://github.com/blacklanternsecurity/bbot/pull/1095 +- https://github.com/blacklanternsecurity/bbot/pull/1101 +- https://github.com/blacklanternsecurity/bbot/pull/1103 + +## Bugfixes +- https://github.com/blacklanternsecurity/bbot/pull/1005 +- https://github.com/blacklanternsecurity/bbot/pull/1022 +- https://github.com/blacklanternsecurity/bbot/pull/1030 +- https://github.com/blacklanternsecurity/bbot/pull/1033 +- https://github.com/blacklanternsecurity/bbot/pull/1034 +- https://github.com/blacklanternsecurity/bbot/pull/1042 +- https://github.com/blacklanternsecurity/bbot/pull/1066 +- https://github.com/blacklanternsecurity/bbot/pull/1067 +- https://github.com/blacklanternsecurity/bbot/pull/1073 +- https://github.com/blacklanternsecurity/bbot/pull/1086 +- 
https://github.com/blacklanternsecurity/bbot/pull/1089 +- https://github.com/blacklanternsecurity/bbot/pull/1094 +- https://github.com/blacklanternsecurity/bbot/pull/1098 + +## New Modules +- https://github.com/blacklanternsecurity/bbot/pull/1072 +- https://github.com/blacklanternsecurity/bbot/pull/1091 + + +## v1.1.5 January 29, 2024 ## Improvements From 28cb6e334b216b9e70556fab7f14969dd43586e4 Mon Sep 17 00:00:00 2001 From: liquidsec Date: Wed, 21 Feb 2024 10:27:56 -0500 Subject: [PATCH 159/159] whitespace --- docs/release_history.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/release_history.md b/docs/release_history.md index 0730376d5..4b5b15bac 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,5 +1,3 @@ - - ## v1.1.6 February 21, 2024