From 8b1bd58cb0788fa08f86d2b3bd39f3dc6dcf9474 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 18 Nov 2024 17:50:02 -0500 Subject: [PATCH 01/64] fix speculate conflict --- bbot/cli.py | 14 +- bbot/modules/base.py | 2 +- bbot/modules/internal/speculate.py | 2 +- bbot/modules/output/nmap_xml.py | 161 ++++++++++++++++++ bbot/modules/output/stdout.py | 2 +- bbot/scanner/preset/args.py | 18 +- bbot/test/test_step_1/test_modules_basic.py | 20 +-- .../module_tests/test_module_nmap_xml.py | 85 +++++++++ 8 files changed, 282 insertions(+), 22 deletions(-) create mode 100644 bbot/modules/output/nmap_xml.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_nmap_xml.py diff --git a/bbot/cli.py b/bbot/cli.py index 877f2bcaa5..4ffd3398b2 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -80,7 +80,7 @@ async def _main(): return # if we're listing modules or their options - if options.list_modules or options.list_module_options: + if options.list_modules or options.list_output_modules or options.list_module_options: # if no modules or flags are specified, enable everything if not (options.modules or options.output_modules or options.flags): @@ -99,7 +99,17 @@ async def _main(): print("") print("### MODULES ###") print("") - for row in preset.module_loader.modules_table(preset.modules).splitlines(): + modules = sorted(set(preset.scan_modules + preset.internal_modules)) + for row in preset.module_loader.modules_table(modules).splitlines(): + print(row) + return + + # --list-output-modules + if options.list_output_modules: + print("") + print("### OUTPUT MODULES ###") + print("") + for row in preset.module_loader.modules_table(preset.output_modules).splitlines(): print(row) return diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 956d59c98c..93593b9a34 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -51,7 +51,7 @@ class BaseModule: target_only (bool): Accept only the initial target event(s). Default is False. - in_scope_only (bool): Accept only explicitly in-scope events. Default is False. + in_scope_only (bool): Accept only explicitly in-scope events, regardless of the scan's search distance. Default is False. options (Dict): Customizable options for the module, e.g., {"api_key": ""}. Empty dict by default. 
diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index e52e4e1bb2..ef0f0ea46f 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -104,7 +104,7 @@ async def handle_event(self, event): # don't act on unresolved DNS_NAMEs usable_dns = False if event.type == "DNS_NAME": - if self.dns_disable or ("a-record" in event.tags or "aaaa-record" in event.tags): + if self.dns_disable or event.resolved_hosts: usable_dns = True if event.type == "IP_ADDRESS" or usable_dns: diff --git a/bbot/modules/output/nmap_xml.py b/bbot/modules/output/nmap_xml.py new file mode 100644 index 0000000000..5279a5a3c8 --- /dev/null +++ b/bbot/modules/output/nmap_xml.py @@ -0,0 +1,161 @@ +import sys +from xml.dom import minidom +from datetime import datetime +from xml.etree.ElementTree import Element, SubElement, tostring + +from bbot import __version__ +from bbot.modules.output.base import BaseOutputModule + + +class NmapHost: + __slots__ = ["hostnames", "open_ports"] + + def __init__(self): + self.hostnames = set() + # a dict of {port: {protocol: banner}} + self.open_ports = dict() + + +class Nmap_XML(BaseOutputModule): + watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "IP_ADDRESS", "PROTOCOL"] + meta = {"description": "Output to Nmap XML", "created_date": "2024-11-16", "author": "@TheTechromancer"} + output_filename = "output.nmap.xml" + in_scope_only = True + + async def setup(self): + self.hosts = {} + self._prep_output_dir(self.output_filename) + return True + + async def handle_event(self, event): + event_host = event.host + + # we always record by IP + ips = [] + for ip in event.resolved_hosts: + try: + ips.append(self.helpers.make_ip_type(ip)) + except ValueError: + continue + if not ips and self.helpers.is_ip(event_host): + ips = [event_host] + + for ip in ips: + try: + nmap_host = self.hosts[ip] + except KeyError: + nmap_host = NmapHost() + self.hosts[ip] = nmap_host + + event_port = getattr(event, "port", None) + if event.type == "OPEN_TCP_PORT": + if event_port not in nmap_host.open_ports: + nmap_host.open_ports[event.port] = {} + elif event.type == "PROTOCOL": + if event_port is not None: + try: + existing_services = nmap_host.open_ports[event.port] + except KeyError: + existing_services = {} + nmap_host.open_ports[event.port] = existing_services + protocol = event.data["protocol"].lower() + if protocol not in existing_services: + existing_services[protocol] = event.data.get("banner", None) + + if self.helpers.is_ip(event_host): + if str(event.module) == "PTR": + nmap_host.hostnames.add(event.parent.data) + else: + nmap_host.hostnames.add(event_host) + + async def report(self): + scan_start_time = str(int(self.scan.start_time.timestamp())) + scan_start_time_str = self.scan.start_time.strftime("%a %b %d %H:%M:%S %Y") + scan_end_time = datetime.now() + scan_end_time_str = scan_end_time.strftime("%a %b %d %H:%M:%S %Y") + scan_end_time_timestamp = str(scan_end_time.timestamp()) + scan_duration = scan_end_time - self.scan.start_time + num_hosts_up = len(self.hosts) + + # Create the root element + nmaprun = Element( + "nmaprun", + { + "scanner": "bbot", + "args": " ".join(sys.argv), + "start": scan_start_time, + "startstr": scan_start_time_str, + "version": str(__version__), + "xmloutputversion": "1.05", + }, + ) + + ports_scanned = [] + speculate_module = self.scan.modules.get("speculate", None) + if speculate_module is not None: + ports_scanned = speculate_module.ports + portscan_module = self.scan.modules.get("portscan", None) + if 
portscan_module is not None: + ports_scanned = self.helpers.parse_port_string(str(portscan_module.ports)) + num_ports_scanned = len(sorted(ports_scanned)) + ports_scanned = ",".join(str(x) for x in sorted(ports_scanned)) + + # Add scaninfo + SubElement( + nmaprun, + "scaninfo", + {"type": "syn", "protocol": "tcp", "numservices": str(num_ports_scanned), "services": ports_scanned}, + ) + + # Add host information + for ip, nmap_host in self.hosts.items(): + hostnames = sorted(nmap_host.hostnames) + ports = sorted(nmap_host.open_ports) + + host_elem = SubElement(nmaprun, "host") + SubElement(host_elem, "status", {"state": "up", "reason": "user-set", "reason_ttl": "0"}) + SubElement(host_elem, "address", {"addr": str(ip), "addrtype": f"ipv{ip.version}"}) + + if hostnames: + hostnames_elem = SubElement(host_elem, "hostnames") + for hostname in hostnames: + SubElement(hostnames_elem, "hostname", {"name": hostname, "type": "user"}) + + ports = SubElement(host_elem, "ports") + for port, protocols in nmap_host.open_ports.items(): + port_elem = SubElement(ports, "port", {"protocol": "tcp", "portid": str(port)}) + SubElement(port_elem, "state", {"state": "open", "reason": "syn-ack", "reason_ttl": "0"}) + # + for protocol, banner in protocols.items(): + attrs = {"name": protocol, "method": "probed", "conf": "10"} + if banner is not None: + attrs["product"] = banner + attrs["extrainfo"] = banner + SubElement(port_elem, "service", attrs) + + # Add runstats + runstats = SubElement(nmaprun, "runstats") + SubElement( + runstats, + "finished", + { + "time": scan_end_time_timestamp, + "timestr": scan_end_time_str, + "summary": f"BBOT done at {scan_end_time_str}; {num_hosts_up} scanned in {scan_duration} seconds", + "elapsed": str(scan_duration.total_seconds()), + "exit": "success", + }, + ) + SubElement(runstats, "hosts", {"up": str(num_hosts_up), "down": "0", "total": str(num_hosts_up)}) + + # make backup of the file + self.helpers.backup_file(self.output_file) + + # Pretty-format the XML + rough_string = tostring(nmaprun, encoding="utf-8") + reparsed = minidom.parseString(rough_string) + pretty_xml = reparsed.toprettyxml(indent=" ") + + with open(self.output_file, "w") as f: + f.write(pretty_xml) + self.info(f"Saved Nmap XML output to {self.output_file}") diff --git a/bbot/modules/output/stdout.py b/bbot/modules/output/stdout.py index 6e4ccf5bea..33ffad5da2 100644 --- a/bbot/modules/output/stdout.py +++ b/bbot/modules/output/stdout.py @@ -6,7 +6,7 @@ class Stdout(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to text"} + meta = {"description": "Output to text", "created_date": "2024-04-03", "author": "@TheTechromancer"} options = {"format": "text", "event_types": [], "event_fields": [], "in_scope_only": False, "accept_dupes": True} options_desc = { "format": "Which text format to display, choices: text,json", diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index cf48dd4b9f..9f9dc61ed1 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -53,6 +53,11 @@ class BBOTArgs: "", "bbot -l", ), + ( + "List output modules", + "", + "bbot -lo", + ), ( "List presets", "", @@ -289,12 +294,6 @@ def create_parser(self, *args, **kwargs): ) output = p.add_argument_group(title="Output") - output.add_argument( - "-o", - "--output-dir", - help="Directory to output scan results", - metavar="DIR", - ) output.add_argument( "-om", "--output-modules", @@ -303,6 +302,13 @@ def create_parser(self, *args, **kwargs): help=f'Output module(s). 
Choices: {",".join(self.preset.module_loader.output_module_choices)}', metavar="MODULE", ) + output.add_argument("-lo", "--list-output-modules", action="store_true", help="List available output modules") + output.add_argument( + "-o", + "--output-dir", + help="Directory to output scan results", + metavar="DIR", + ) output.add_argument("--json", "-j", action="store_true", help="Output scan data in JSON format") output.add_argument("--brief", "-br", action="store_true", help="Output only the data itself") output.add_argument("--event-types", nargs="+", default=[], help="Choose which event types to display") diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index 4212340695..863fdff05f 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -156,17 +156,15 @@ async def test_modules_basic_checks(events, httpx_mock): assert not ( "web-basic" in flags and "web-thorough" in flags ), f'module "{module_name}" should have either "web-basic" or "web-thorough" flags, not both' - meta = preloaded.get("meta", {}) - # make sure every module has a description - assert meta.get("description", ""), f"{module_name} must have a description" - # make sure every module has an author - assert meta.get("author", ""), f"{module_name} must have an author" - # make sure every module has a created date - created_date = meta.get("created_date", "") - assert created_date, f"{module_name} must have a created date" - assert created_date_regex.match( - created_date - ), f"{module_name}'s created_date must match the format YYYY-MM-DD" + meta = preloaded.get("meta", {}) + # make sure every module has a description + assert meta.get("description", ""), f"{module_name} must have a description" + # make sure every module has an author + assert meta.get("author", ""), f"{module_name} must have an author" + # make sure every module has a created date + created_date = meta.get("created_date", "") + assert created_date, f"{module_name} must have a created date" + assert created_date_regex.match(created_date), f"{module_name}'s created_date must match the format YYYY-MM-DD" # attribute checks watched_events = preloaded.get("watched_events") diff --git a/bbot/test/test_step_2/module_tests/test_module_nmap_xml.py b/bbot/test/test_step_2/module_tests/test_module_nmap_xml.py new file mode 100644 index 0000000000..b88595be01 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_nmap_xml.py @@ -0,0 +1,85 @@ +import xml.etree.ElementTree as ET + +from bbot.modules.base import BaseModule +from .base import ModuleTestBase + + +class TestNmap_XML(ModuleTestBase): + modules_overrides = ["nmap_xml", "speculate"] + targets = ["blacklanternsecurity.com", "127.0.0.3"] + config_overrides = {"dns": {"minimal": False}} + + class DummyModule(BaseModule): + watched_events = ["OPEN_TCP_PORT"] + _name = "dummy_module" + + async def handle_event(self, event): + if event.port == 80: + await self.emit_event( + {"host": str(event.host), "port": event.port, "protocol": "http", "banner": "Apache"}, + "PROTOCOL", + parent=event, + ) + elif event.port == 443: + await self.emit_event( + {"host": str(event.host), "port": event.port, "protocol": "https"}, "PROTOCOL", parent=event + ) + + async def setup_before_prep(self, module_test): + self.dummy_module = self.DummyModule(module_test.scan) + module_test.scan.modules["dummy_module"] = self.dummy_module + await module_test.mock_dns( + { + "blacklanternsecurity.com": {"A": ["127.0.0.1", 
"127.0.0.2"]}, + "3.0.0.127.in-addr.arpa": {"PTR": ["www.blacklanternsecurity.com"]}, + "www.blacklanternsecurity.com": {"A": ["127.0.0.1"]}, + } + ) + + def check(self, module_test, events): + nmap_xml_file = module_test.scan.modules["nmap_xml"].output_file + nmap_xml = open(nmap_xml_file).read() + + # Parse the XML + root = ET.fromstring(nmap_xml) + + # Expected IP addresses + expected_ips = {"127.0.0.1", "127.0.0.2", "127.0.0.3"} + found_ips = set() + + # Iterate over each host in the XML + for host in root.findall("host"): + # Get the IP address + address = host.find("address").get("addr") + found_ips.add(address) + + # Get hostnames if available + hostnames = sorted([hostname.get("name") for hostname in host.findall(".//hostname")]) + + # Get open ports and services + ports = [] + for port in host.findall(".//port"): + port_id = port.get("portid") + state = port.find("state").get("state") + if state == "open": + service_name = port.find("service").get("name") + service_product = port.find("service").get("product", "") + service_extrainfo = port.find("service").get("extrainfo", "") + ports.append((port_id, service_name, service_product, service_extrainfo)) + + # Sort ports for consistency + ports.sort() + + # Assertions + if address == "127.0.0.1": + assert hostnames == ["blacklanternsecurity.com", "www.blacklanternsecurity.com"] + assert ports == sorted([("80", "http", "Apache", "Apache"), ("443", "https", "", "")]) + elif address == "127.0.0.2": + assert hostnames == sorted(["blacklanternsecurity.com"]) + assert ports == sorted([("80", "http", "Apache", "Apache"), ("443", "https", "", "")]) + elif address == "127.0.0.3": + assert hostnames == [] # No hostnames for this IP + assert ports == sorted([("80", "http", "Apache", "Apache"), ("443", "https", "", "")]) + + # Assert that all expected IPs were found + assert found_ips == expected_ips From a410392ff0e893e34e4c70abffd302361f6acab1 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 17 Nov 2024 00:03:05 -0500 Subject: [PATCH 02/64] fix cli tests --- bbot/modules/output/nmap_xml.py | 5 +++++ bbot/test/test_step_1/test_cli.py | 16 ++++++++++++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/bbot/modules/output/nmap_xml.py b/bbot/modules/output/nmap_xml.py index 5279a5a3c8..38a5d8a035 100644 --- a/bbot/modules/output/nmap_xml.py +++ b/bbot/modules/output/nmap_xml.py @@ -154,6 +154,11 @@ async def report(self): # Pretty-format the XML rough_string = tostring(nmaprun, encoding="utf-8") reparsed = minidom.parseString(rough_string) + + # Create a new document with the doctype + doctype = minidom.DocumentType("nmaprun") + reparsed.insertBefore(doctype, reparsed.documentElement) + pretty_xml = reparsed.toprettyxml(indent=" ") with open(self.output_file, "w") as f: diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index 47db12d2ab..0841216f09 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -150,11 +150,23 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): out, err = capsys.readouterr() # internal modules assert "| excavate " in out - # output modules - assert "| csv " in out + # no output modules + assert not "| csv " in out # scan modules assert "| wayback " in out + # list output modules + monkeypatch.setattr("sys.argv", ["bbot", "--list-output-modules"]) + result = await cli._main() + assert result == None + out, err = capsys.readouterr() + # no internal modules + assert not "| excavate " in out + # output modules 
+ assert "| csv " in out + # no scan modules + assert not "| wayback " in out + # output dir and scan name output_dir = bbot_test_dir / "bbot_cli_args_output" scan_name = "bbot_cli_args_scan_name" From fa044f10b836d602a1e00b5663a0c4002fc40525 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 17 Nov 2024 01:25:58 -0500 Subject: [PATCH 03/64] update module descriptions --- bbot/modules/internal/cloudcheck.py | 6 +++++- bbot/modules/internal/dnsresolve.py | 2 ++ bbot/modules/output/sqlite.py | 6 +++++- bbot/modules/output/txt.py | 2 +- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 392c8e0c5a..2cd748601e 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -3,7 +3,11 @@ class CloudCheck(BaseInterceptModule): watched_events = ["*"] - meta = {"description": "Tag events by cloud provider, identify cloud resources like storage buckets"} + meta = { + "description": "Tag events by cloud provider, identify cloud resources like storage buckets", + "created_date": "2024-07-07", + "author": "@TheTechromancer", + } scope_distance_modifier = 1 _priority = 3 diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 9b68b7bb9d..f6a08b60c5 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -9,6 +9,8 @@ class DNSResolve(BaseInterceptModule): watched_events = ["*"] + produced_events = ["DNS_NAME", "IP_ADDRESS", "RAW_DNS_RECORD"] + meta = {"description": "Perform DNS resolution", "created_date": "2022-04-08", "author": "@TheTechromancer"} _priority = 1 scope_distance_modifier = None diff --git a/bbot/modules/output/sqlite.py b/bbot/modules/output/sqlite.py index 68ac60dafd..0377caaa3d 100644 --- a/bbot/modules/output/sqlite.py +++ b/bbot/modules/output/sqlite.py @@ -5,7 +5,11 @@ class SQLite(SQLTemplate): watched_events = ["*"] - meta = {"description": "Output scan data to a SQLite database"} + meta = { + "description": "Output scan data to a SQLite database", + "created_date": "2024-11-07", + "author": "@TheTechromancer", + } options = { "database": "", } diff --git a/bbot/modules/output/txt.py b/bbot/modules/output/txt.py index 68f86864da..2dfb14c106 100644 --- a/bbot/modules/output/txt.py +++ b/bbot/modules/output/txt.py @@ -5,7 +5,7 @@ class TXT(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to text"} + meta = {"description": "Output to text", "created_date": "2024-04-03", "author": "@TheTechromancer"} options = {"output_file": ""} options_desc = {"output_file": "Output to file"} From 211d651ed017a20705e363967173782a5b560b7f Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 18 Nov 2024 17:49:02 -0500 Subject: [PATCH 04/64] support http_response --- bbot/modules/output/nmap_xml.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/bbot/modules/output/nmap_xml.py b/bbot/modules/output/nmap_xml.py index 38a5d8a035..9e540f4242 100644 --- a/bbot/modules/output/nmap_xml.py +++ b/bbot/modules/output/nmap_xml.py @@ -17,7 +17,7 @@ def __init__(self): class Nmap_XML(BaseOutputModule): - watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "IP_ADDRESS", "PROTOCOL"] + watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "IP_ADDRESS", "PROTOCOL", "HTTP_RESPONSE"] meta = {"description": "Output to Nmap XML", "created_date": "2024-11-16", "author": "@TheTechromancer"} output_filename = "output.nmap.xml" in_scope_only = True @@ -28,6 +28,7 @@ async 
def setup(self): return True async def handle_event(self, event): + self.hugesuccess(event) event_host = event.host # we always record by IP @@ -51,16 +52,21 @@ async def handle_event(self, event): if event.type == "OPEN_TCP_PORT": if event_port not in nmap_host.open_ports: nmap_host.open_ports[event.port] = {} - elif event.type == "PROTOCOL": + elif event.type in ("PROTOCOL", "HTTP_RESPONSE"): if event_port is not None: try: existing_services = nmap_host.open_ports[event.port] except KeyError: existing_services = {} nmap_host.open_ports[event.port] = existing_services - protocol = event.data["protocol"].lower() + if event.type == "PROTOCOL": + protocol = event.data["protocol"].lower() + banner = event.data.get("banner", None) + elif event.type == "HTTP_RESPONSE": + protocol = event.parsed_url.scheme.lower() + banner = event.http_title if protocol not in existing_services: - existing_services[protocol] = event.data.get("banner", None) + existing_services[protocol] = banner if self.helpers.is_ip(event_host): if str(event.module) == "PTR": From 820c6e41ea03ed11aa600dc9506a89abc431366f Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 19:44:51 -0500 Subject: [PATCH 05/64] fix tests, remove debug statement --- bbot/modules/output/nmap_xml.py | 1 - bbot/modules/output/postgres.py | 6 +++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/bbot/modules/output/nmap_xml.py b/bbot/modules/output/nmap_xml.py index 9e540f4242..52698e0de8 100644 --- a/bbot/modules/output/nmap_xml.py +++ b/bbot/modules/output/nmap_xml.py @@ -28,7 +28,6 @@ async def setup(self): return True async def handle_event(self, event): - self.hugesuccess(event) event_host = event.host # we always record by IP diff --git a/bbot/modules/output/postgres.py b/bbot/modules/output/postgres.py index b1c8c26598..45beb7c7bc 100644 --- a/bbot/modules/output/postgres.py +++ b/bbot/modules/output/postgres.py @@ -3,7 +3,11 @@ class Postgres(SQLTemplate): watched_events = ["*"] - meta = {"description": "Output scan data to a SQLite database"} + meta = { + "description": "Output scan data to a SQLite database", + "created_date": "2024-11-08", + "author": "@TheTechromancer", + } options = { "username": "postgres", "password": "bbotislife", From cb1b96789748be2fa9b92caf296c71c5f884a015 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 20 Nov 2024 11:48:24 -0500 Subject: [PATCH 06/64] fix portscan test --- .../test_step_2/module_tests/test_module_portscan.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_portscan.py b/bbot/test/test_step_2/module_tests/test_module_portscan.py index 56536cb5dd..d9f55c27f9 100644 --- a/bbot/test/test_step_2/module_tests/test_module_portscan.py +++ b/bbot/test/test_step_2/module_tests/test_module_portscan.py @@ -109,10 +109,12 @@ def check(self, module_test, events): if e.type == "DNS_NAME" and e.data == "dummy.asdf.evilcorp.net" and str(e.module) == "dummy_module" ] ) - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 3 + # the reason these numbers aren't exactly predictable is because we can't predict which one arrives first + # to the portscan module. 
Sometimes, one that would normally be deduped is force-emitted because it led to a new open port. + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 4 assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.8.8:443"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.5:80"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.6:631"]) From 9c55d9ea7bed5c117469fc80cd6a5cac7b968290 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Thu, 21 Nov 2024 11:17:01 +0100 Subject: [PATCH 07/64] ruff check --fix --unsafe-fixes --- bbot/cli.py | 4 +- bbot/core/config/logger.py | 2 +- bbot/core/core.py | 2 +- bbot/core/event/base.py | 18 +- bbot/core/helpers/command.py | 8 +- bbot/core/helpers/depsinstaller/installer.py | 8 +- bbot/core/helpers/diff.py | 10 +- bbot/core/helpers/dns/dns.py | 2 +- bbot/core/helpers/dns/engine.py | 2 +- bbot/core/helpers/files.py | 2 +- bbot/core/helpers/helper.py | 4 +- bbot/core/helpers/interactsh.py | 6 +- bbot/core/helpers/misc.py | 4 +- bbot/core/helpers/validators.py | 2 +- bbot/core/helpers/web/client.py | 2 +- bbot/core/helpers/web/engine.py | 2 +- bbot/core/helpers/web/web.py | 4 +- bbot/core/helpers/wordcloud.py | 8 +- bbot/core/modules.py | 10 +- bbot/modules/base.py | 32 +- bbot/modules/bypass403.py | 10 +- bbot/modules/c99.py | 2 +- bbot/modules/deadly/ffuf.py | 6 +- bbot/modules/deadly/nuclei.py | 2 +- bbot/modules/deadly/vhost.py | 4 +- bbot/modules/dnsbimi.py | 2 +- bbot/modules/dnsdumpster.py | 4 +- bbot/modules/generic_ssrf.py | 6 +- bbot/modules/github_workflows.py | 2 +- bbot/modules/gowitness.py | 6 +- bbot/modules/host_header.py | 8 +- bbot/modules/httpx.py | 2 +- bbot/modules/iis_shortnames.py | 6 +- bbot/modules/internal/cloudcheck.py | 2 +- bbot/modules/internal/dnsresolve.py | 12 +- bbot/modules/internal/speculate.py | 4 +- bbot/modules/ipneighbor.py | 2 +- bbot/modules/jadx.py | 2 +- bbot/modules/newsletters.py | 4 +- bbot/modules/output/asset_inventory.py | 2 +- bbot/modules/output/base.py | 2 +- bbot/modules/output/stdout.py | 4 +- bbot/modules/portscan.py | 4 +- bbot/modules/report/asn.py | 20 +- bbot/modules/robots.py | 6 +- bbot/modules/securitytxt.py | 2 +- bbot/modules/sitedossier.py | 2 +- bbot/modules/subdomainradar.py | 2 +- bbot/modules/telerik.py | 14 +- bbot/modules/templates/bucket.py | 2 +- bbot/modules/templates/github.py | 2 +- bbot/modules/templates/shodan.py | 2 +- bbot/modules/templates/webhook.py | 2 +- bbot/modules/trufflehog.py | 2 +- bbot/modules/url_manipulation.py | 6 +- bbot/modules/urlscan.py | 2 +- bbot/modules/viewdns.py | 2 +- bbot/modules/wafw00f.py | 2 +- bbot/scanner/preset/args.py | 14 +- bbot/scanner/preset/preset.py | 12 +- bbot/scanner/scanner.py | 22 +- bbot/scanner/target.py | 2 +- bbot/scripts/docs.py | 2 +- bbot/test/bbot_fixtures.py | 2 +- bbot/test/conftest.py | 2 +- bbot/test/test_step_1/test_cli.py | 98 +-- bbot/test/test_step_1/test_dns.py | 28 +- bbot/test/test_step_1/test_engine.py | 34 +- bbot/test/test_step_1/test_events.py | 42 +- bbot/test/test_step_1/test_helpers.py | 50 +- .../test_manager_scope_accuracy.py | 608 +++++++++--------- bbot/test/test_step_1/test_modules_basic.py | 
101 ++- bbot/test/test_step_1/test_presets.py | 114 ++-- bbot/test/test_step_1/test_scan.py | 6 +- bbot/test/test_step_1/test_target.py | 26 +- bbot/test/test_step_1/test_web.py | 8 +- bbot/test/test_step_2/module_tests/base.py | 12 +- .../module_tests/test_module_anubisdb.py | 2 +- .../module_tests/test_module_azure_realm.py | 2 +- .../module_tests/test_module_bevigil.py | 8 +- .../module_tests/test_module_binaryedge.py | 4 +- .../module_tests/test_module_bucket_amazon.py | 4 +- .../module_tests/test_module_bucket_azure.py | 2 +- .../module_tests/test_module_builtwith.py | 4 +- .../module_tests/test_module_c99.py | 8 +- .../module_tests/test_module_columbus.py | 2 +- .../module_tests/test_module_credshed.py | 4 +- .../module_tests/test_module_dehashed.py | 2 +- .../module_tests/test_module_digitorus.py | 2 +- .../module_tests/test_module_dnsbrute.py | 16 +- .../module_tests/test_module_dnsdumpster.py | 4 +- .../module_tests/test_module_excavate.py | 12 +- .../module_tests/test_module_host_header.py | 2 +- .../module_tests/test_module_http.py | 8 +- .../module_tests/test_module_httpx.py | 2 +- .../module_tests/test_module_leakix.py | 4 +- .../module_tests/test_module_myssl.py | 2 +- .../module_tests/test_module_neo4j.py | 2 +- .../module_tests/test_module_newsletters.py | 4 +- .../module_tests/test_module_oauth.py | 2 +- .../module_tests/test_module_otx.py | 2 +- .../module_tests/test_module_postgres.py | 2 +- .../module_tests/test_module_rapiddns.py | 10 +- .../module_tests/test_module_sitedossier.py | 4 +- .../module_tests/test_module_smuggler.py | 2 +- .../module_tests/test_module_splunk.py | 8 +- .../test_module_subdomaincenter.py | 2 +- .../module_tests/test_module_subdomains.py | 2 +- .../module_tests/test_module_wayback.py | 2 +- 109 files changed, 809 insertions(+), 808 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index c1ec117b22..97d7488088 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -130,8 +130,8 @@ async def _main(): ] if deadly_modules and not options.allow_deadly: log.hugewarning(f"You enabled the following deadly modules: {','.join(deadly_modules)}") - log.hugewarning(f"Deadly modules are highly intrusive") - log.hugewarning(f"Please specify --allow-deadly to continue") + log.hugewarning("Deadly modules are highly intrusive") + log.hugewarning("Please specify --allow-deadly to continue") return False # --current-preset diff --git a/bbot/core/config/logger.py b/bbot/core/config/logger.py index 505ad2fab3..54866a63b6 100644 --- a/bbot/core/config/logger.py +++ b/bbot/core/config/logger.py @@ -68,7 +68,7 @@ def __init__(self, core): self.listener = None # if we haven't set up logging yet, do it now - if not "_BBOT_LOGGING_SETUP" in os.environ: + if "_BBOT_LOGGING_SETUP" not in os.environ: os.environ["_BBOT_LOGGING_SETUP"] = "1" self.queue = multiprocessing.Queue() self.setup_queue_handler() diff --git a/bbot/core/core.py b/bbot/core/core.py index 23b1a9d620..5814052771 100644 --- a/bbot/core/core.py +++ b/bbot/core/core.py @@ -106,7 +106,7 @@ def default_config(self): if DEFAULT_CONFIG is None: self.default_config = self.files_config.get_default_config() # ensure bbot home dir - if not "home" in self.default_config: + if "home" not in self.default_config: self.default_config["home"] = "~/.bbot" return DEFAULT_CONFIG diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb6d92e91f..48fcce7753 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -203,7 +203,7 @@ def __init__( # self.scan holds the instantiated scan object (for helpers, 
etc.) self.scan = scan if (not self.scan) and (not self._dummy): - raise ValidationError(f"Must specify scan") + raise ValidationError("Must specify scan") # self.scans holds a list of scan IDs from scans that encountered this event self.scans = [] if scans is not None: @@ -222,7 +222,7 @@ def __init__( self.parent = parent if (not self.parent) and (not self._dummy): - raise ValidationError(f"Must specify event parent") + raise ValidationError("Must specify event parent") if tags is not None: for tag in tags: @@ -301,9 +301,9 @@ def internal(self, value): The purpose of internal events is to enable speculative/explorative discovery without cluttering the console with irrelevant or uninteresting events. """ - if not value in (True, False): + if value not in (True, False): raise ValueError(f'"internal" must be boolean, not {type(value)}') - if value == True: + if value is True: self.add_tag("internal") else: self.remove_tag("internal") @@ -1013,12 +1013,12 @@ def __init__(self, *args, **kwargs): if not self.host: for parent in self.get_parents(include_self=True): # inherit closest URL - if not "url" in self.data: + if "url" not in self.data: parent_url = getattr(parent, "parsed_url", None) if parent_url is not None: self.data["url"] = parent_url.geturl() # inherit closest path - if not "path" in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE": + if "path" not in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE": parent_path = parent.data.get("path", None) if parent_path is not None: self.data["path"] = parent_path @@ -1227,7 +1227,7 @@ def sanitize_data(self, data): def add_tag(self, tag): host_same_as_parent = self.parent and self.host == self.parent.host - if tag == "spider-danger" and host_same_as_parent and not "spider-danger" in self.tags: + if tag == "spider-danger" and host_same_as_parent and "spider-danger" not in self.tags: # increment the web spider distance if self.type == "URL_UNVERIFIED": self.web_spider_distance += 1 @@ -1249,7 +1249,7 @@ def with_port(self): def _words(self): first_elem = self.parsed_url.path.lstrip("/").split("/")[0] - if not "." in first_elem: + if "." 
not in first_elem: return extract_words(first_elem) return set() @@ -1665,7 +1665,7 @@ def make_event( event.parent = parent if context is not None: event.discovery_context = context - if internal == True: + if internal is True: event.internal = True if tags: event.tags = tags.union(event.tags) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index db05a05e1b..d4f017b330 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -269,11 +269,11 @@ def _prepare_command_kwargs(self, command, kwargs): (['sudo', '-E', '-A', 'LD_LIBRARY_PATH=...', 'PATH=...', 'ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1, 'env': environ(...)}) """ # limit = 100MB (this is needed for cases like httpx that are sending large JSON blobs over stdout) - if not "limit" in kwargs: + if "limit" not in kwargs: kwargs["limit"] = 1024 * 1024 * 100 - if not "stdout" in kwargs: + if "stdout" not in kwargs: kwargs["stdout"] = asyncio.subprocess.PIPE - if not "stderr" in kwargs: + if "stderr" not in kwargs: kwargs["stderr"] = asyncio.subprocess.PIPE sudo = kwargs.pop("sudo", False) @@ -286,7 +286,7 @@ def _prepare_command_kwargs(self, command, kwargs): # use full path of binary, if not already specified binary = command[0] - if not "/" in binary: + if "/" not in binary: binary_full_path = which(binary) if binary_full_path is None: raise SubprocessError(f'Command "{binary}" was not found') diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index fce5f077dd..a65c57574a 100644 --- a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -97,11 +97,11 @@ async def install(self, *modules): or self.deps_behavior == "force_install" ): if not notified: - log.hugeinfo(f"Installing module dependencies. Please be patient, this may take a while.") + log.hugeinfo("Installing module dependencies. 
Please be patient, this may take a while.") notified = True log.verbose(f'Installing dependencies for module "{m}"') # get sudo access if we need it - if preloaded.get("sudo", False) == True: + if preloaded.get("sudo", False) is True: self.ensure_root(f'Module "{m}" needs root privileges to install its dependencies.') success = await self.install_module(m) self.setup_status[module_hash] = success @@ -159,7 +159,7 @@ async def install_module(self, module): deps_common = preloaded["deps"]["common"] if deps_common: for dep_common in deps_common: - if self.setup_status.get(dep_common, False) == True: + if self.setup_status.get(dep_common, False) is True: log.debug( f'Skipping installation of dependency "{dep_common}" for module "{module}" since it is already installed' ) @@ -244,7 +244,7 @@ def shell(self, module, commands): if success: log.info(f"Successfully ran {len(commands):,} shell commands") else: - log.warning(f"Failed to run shell dependencies") + log.warning("Failed to run shell dependencies") return success def tasks(self, module, tasks): diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index 59ee96567c..795390f6fa 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -183,7 +183,7 @@ async def compare( await self._baseline() - if timeout == None: + if timeout is None: timeout = self.timeout reflection = False @@ -238,11 +238,11 @@ async def compare( different_headers = self.compare_headers(self.baseline.headers, subject_response.headers) if different_headers: - log.debug(f"headers were different, no match") + log.debug("headers were different, no match") diff_reasons.append("header") - if self.compare_body(self.baseline_json, subject_json) == False: - log.debug(f"difference in HTML body, no match") + if self.compare_body(self.baseline_json, subject_json) is False: + log.debug("difference in HTML body, no match") diff_reasons.append("body") @@ -275,6 +275,6 @@ async def canary_check(self, url, mode, rounds=3): ) # if a nonsense header "caused" a difference, we need to abort. We also need to abort if our canary was reflected - if match == False or reflection == True: + if match is False or reflection is True: return False return True diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py index 4526f084ae..89758b35ff 100644 --- a/bbot/core/helpers/dns/dns.py +++ b/bbot/core/helpers/dns/dns.py @@ -178,7 +178,7 @@ def _wildcard_prevalidation(self, host): host = clean_dns_record(host) # skip check if it's an IP or a plain hostname - if is_ip(host) or not "." in host: + if is_ip(host) or "." 
not in host: return False # skip if query isn't a dns name diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index 8f3c5a0b77..9ca94b23aa 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -638,7 +638,7 @@ async def _connectivity_check(self, interval=5): self._last_dns_success = time.time() return True if time.time() - self._last_connectivity_warning > interval: - self.log.warning(f"DNS queries are failing, please check your internet connection") + self.log.warning("DNS queries are failing, please check your internet connection") self._last_connectivity_warning = time.time() self._errors.clear() return False diff --git a/bbot/core/helpers/files.py b/bbot/core/helpers/files.py index fb92d1c8b8..5e7d2d88d4 100644 --- a/bbot/core/helpers/files.py +++ b/bbot/core/helpers/files.py @@ -83,7 +83,7 @@ def _feed_pipe(self, pipe, content, text=True): for c in content: p.write(decode_fn(c) + newline) except BrokenPipeError: - log.debug(f"Broken pipe in _feed_pipe()") + log.debug("Broken pipe in _feed_pipe()") except ValueError: log.debug(f"Error _feed_pipe(): {traceback.format_exc()}") except KeyboardInterrupt: diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 6db4b6921a..78ccf67155 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -153,7 +153,9 @@ def temp_filename(self, extension=None): return self.temp_dir / filename def clean_old_scans(self): - _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name) + def _filter(x): + return x.is_dir() and self.regexes.scan_name_regex.match(x.name) + self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter) def make_target(self, *targets, **kwargs): diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index f707fac93a..c809999a3b 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -155,7 +155,7 @@ async def register(self, callback=None): break if not self.server: - raise InteractshError(f"Failed to register with an interactsh server") + raise InteractshError("Failed to register with an interactsh server") log.info( f"Successfully registered to interactsh server {self.server} with correlation_id {self.correlation_id} [{self.domain}]" @@ -181,7 +181,7 @@ async def deregister(self): >>> await interactsh_client.deregister() """ if not self.server or not self.correlation_id or not self.secret: - raise InteractshError(f"Missing required information to deregister") + raise InteractshError("Missing required information to deregister") headers = {} if self.token: @@ -226,7 +226,7 @@ async def poll(self): ] """ if not self.server or not self.correlation_id or not self.secret: - raise InteractshError(f"Missing required information to poll") + raise InteractshError("Missing required information to poll") headers = {} if self.token: diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 1a56932963..75eaa72a70 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -391,7 +391,7 @@ def url_parents(u): parent_list = [] while 1: parent = parent_url(u) - if parent == None: + if parent is None: return parent_list elif parent not in parent_list: parent_list.append(parent) @@ -512,7 +512,7 @@ def domain_stem(domain): - Utilizes the `tldextract` function for domain parsing. 
""" parsed = tldextract(str(domain)) - return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".") + return ".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".") def ip_network_parents(i, include_self=False): diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index bc6ca13725..88acb01462 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -132,7 +132,7 @@ def validate_host(host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address] @validator def validate_severity(severity: str): severity = str(severity).strip().upper() - if not severity in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"): + if severity not in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"): raise ValueError(f"Invalid severity: {severity}") return severity diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py index c09a0e4856..28788e04d9 100644 --- a/bbot/core/helpers/web/client.py +++ b/bbot/core/helpers/web/client.py @@ -56,7 +56,7 @@ def __init__(self, *args, **kwargs): # timeout http_timeout = self._web_config.get("http_timeout", 20) - if not "timeout" in kwargs: + if "timeout" not in kwargs: kwargs["timeout"] = http_timeout # headers diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 85805b2b71..4401a219fd 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -137,7 +137,7 @@ async def stream_request(self, url, **kwargs): if max_size is not None: max_size = human_to_bytes(max_size) kwargs["follow_redirects"] = follow_redirects - if not "method" in kwargs: + if "method" not in kwargs: kwargs["method"] = "GET" try: total_size = 0 diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index 33aa2d0352..2865732ce5 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -261,7 +261,7 @@ async def wordlist(self, path, lines=None, **kwargs): """ if not path: raise WordlistError(f"Invalid wordlist: {path}") - if not "cache_hrs" in kwargs: + if "cache_hrs" not in kwargs: kwargs["cache_hrs"] = 720 if self.parent_helper.is_url(path): filename = await self.download(str(path), **kwargs) @@ -350,7 +350,7 @@ async def curl(self, *args, **kwargs): headers[hk] = hv # add the timeout - if not "timeout" in kwargs: + if "timeout" not in kwargs: timeout = http_timeout curl_command.append("-m") diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index fbd4e75930..824f9ea36e 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -111,7 +111,7 @@ def mutations( results = set() for word in words: h = hash(word) - if not h in results: + if h not in results: results.add(h) yield (word,) if numbers > 0: @@ -119,7 +119,7 @@ def mutations( for word in words: for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding): h = hash(number_mutation) - if not h in results: + if h not in results: results.add(h) yield (number_mutation,) for word in words: @@ -322,7 +322,7 @@ def json(self, limit=None): @property def default_filename(self): - return self.parent_helper.preset.scan.home / f"wordcloud.tsv" + return self.parent_helper.preset.scan.home / "wordcloud.tsv" def save(self, filename=None, limit=None): """ @@ -357,7 +357,7 @@ def save(self, filename=None, limit=None): log.debug(f"Saved word cloud ({len(self):,} words) to {filename}") return True, filename else: - log.debug(f"No words to save") + log.debug("No 
words to save") except Exception as e: import traceback diff --git a/bbot/core/modules.py b/bbot/core/modules.py index a5f4b30eb5..66cd3caa75 100644 --- a/bbot/core/modules.py +++ b/bbot/core/modules.py @@ -153,7 +153,7 @@ def preload(self, module_dirs=None): else: log.debug(f"Preloading {module_name} from disk") if module_dir.name == "modules": - namespace = f"bbot.modules" + namespace = "bbot.modules" else: namespace = f"bbot.modules.{module_dir.name}" try: @@ -400,10 +400,10 @@ def preload_module(self, module_file): deps_common.append(dep_common.value) for task in ansible_tasks: - if not "become" in task: + if "become" not in task: task["become"] = False # don't sudo brew - elif os_platform() == "darwin" and ("package" in task and task.get("become", False) == True): + elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True): task["become"] = False preloaded_data = { @@ -436,8 +436,8 @@ def preload_module(self, module_file): f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})' ) for ansible_task in ansible_task_list: - if any(x == True for x in search_dict_by_key("become", ansible_task)) or any( - x == True for x in search_dict_by_key("ansible_become", ansible_tasks) + if any(x is True for x in search_dict_by_key("become", ansible_task)) or any( + x is True for x in search_dict_by_key("ansible_become", ansible_tasks) ): preloaded_data["sudo"] = True return preloaded_data diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 956d59c98c..1fa151c33b 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -311,7 +311,7 @@ async def require_api_key(self): if self.auth_secret: try: await self.ping() - self.hugesuccess(f"API is ready") + self.hugesuccess("API is ready") return True, "" except Exception as e: self.trace(traceback.format_exc()) @@ -332,10 +332,10 @@ def api_key(self, api_keys): def cycle_api_key(self): if len(self._api_keys) > 1: - self.verbose(f"Cycling API key") + self.verbose("Cycling API key") self._api_keys.insert(0, self._api_keys.pop()) else: - self.debug(f"No extra API keys to cycle") + self.debug("No extra API keys to cycle") @property def api_retries(self): @@ -669,7 +669,7 @@ async def _worker(self): if self.incoming_event_queue is not False: event = await self.incoming_event_queue.get() else: - self.debug(f"Event queue is in bad state") + self.debug("Event queue is in bad state") break except asyncio.queues.QueueEmpty: continue @@ -700,7 +700,7 @@ async def _worker(self): else: self.error(f"Critical failure in module {self.name}: {e}") self.error(traceback.format_exc()) - self.log.trace(f"Worker stopped") + self.log.trace("Worker stopped") @property def max_scope_distance(self): @@ -743,7 +743,7 @@ def _event_precheck(self, event): if event.type in ("FINISHED",): return True, "its type is FINISHED" if self.errored: - return False, f"module is in error state" + return False, "module is in error state" # exclude non-watched types if not any(t in self.get_watched_events() for t in ("*", event.type)): return False, "its type is not in watched_events" @@ -770,7 +770,7 @@ async def _event_postcheck(self, event): # check duplicates is_incoming_duplicate, reason = self.is_incoming_duplicate(event, add=True) if is_incoming_duplicate and not self.accept_dupes: - return False, f"module has already seen it" + (f" ({reason})" if reason else "") + return False, "module has already seen it" + (f" ({reason})" if reason else "") return acceptable, reason @@ -863,7 
+863,7 @@ async def queue_event(self, event): """ async with self._task_counter.count("queue_event()", _log=False): if self.incoming_event_queue is False: - self.debug(f"Not in an acceptable state to queue incoming event") + self.debug("Not in an acceptable state to queue incoming event") return acceptable, reason = self._event_precheck(event) if not acceptable: @@ -879,7 +879,7 @@ async def queue_event(self, event): if event.type != "FINISHED": self.scan._new_activity = True except AttributeError: - self.debug(f"Not in an acceptable state to queue incoming event") + self.debug("Not in an acceptable state to queue incoming event") async def queue_outgoing_event(self, event, **kwargs): """ @@ -904,7 +904,7 @@ async def queue_outgoing_event(self, event, **kwargs): try: await self.outgoing_event_queue.put((event, kwargs)) except AttributeError: - self.debug(f"Not in an acceptable state to queue outgoing event") + self.debug("Not in an acceptable state to queue outgoing event") def set_error_state(self, message=None, clear_outgoing_queue=False, critical=False): """ @@ -939,7 +939,7 @@ def set_error_state(self, message=None, clear_outgoing_queue=False, critical=Fal self.errored = True # clear incoming queue if self.incoming_event_queue is not False: - self.debug(f"Emptying event_queue") + self.debug("Emptying event_queue") with suppress(asyncio.queues.QueueEmpty): while 1: self.incoming_event_queue.get_nowait() @@ -1126,7 +1126,7 @@ def prepare_api_request(self, url, kwargs): """ if self.api_key: url = url.format(api_key=self.api_key) - if not "headers" in kwargs: + if "headers" not in kwargs: kwargs["headers"] = {} kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}" return url, kwargs @@ -1142,7 +1142,7 @@ async def api_request(self, *args, **kwargs): # loop until we have a successful request for _ in range(self.api_retries): - if not "headers" in kwargs: + if "headers" not in kwargs: kwargs["headers"] = {} new_url, kwargs = self.prepare_api_request(url, kwargs) kwargs["url"] = new_url @@ -1589,7 +1589,7 @@ async def _worker(self): event = incoming kwargs = {} else: - self.debug(f"Event queue is in bad state") + self.debug("Event queue is in bad state") break except asyncio.queues.QueueEmpty: await asyncio.sleep(0.1) @@ -1644,7 +1644,7 @@ async def _worker(self): else: self.critical(f"Critical failure in intercept module {self.name}: {e}") self.critical(traceback.format_exc()) - self.log.trace(f"Worker stopped") + self.log.trace("Worker stopped") async def get_incoming_event(self): """ @@ -1675,7 +1675,7 @@ async def queue_event(self, event, kwargs=None): try: self.incoming_event_queue.put_nowait((event, kwargs)) except AttributeError: - self.debug(f"Not in an acceptable state to queue incoming event") + self.debug("Not in an acceptable state to queue incoming event") async def _event_postcheck(self, event): return await self._event_postcheck_inner(event) diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 4f3b51789b..61fb510775 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -92,7 +92,7 @@ async def do_checks(self, compare_helper, event, collapse_threshold): return None sig = self.format_signature(sig, event) - if sig[2] != None: + if sig[2] is not None: headers = dict(sig[2]) else: headers = None @@ -106,13 +106,13 @@ async def do_checks(self, compare_helper, event, collapse_threshold): continue # In some cases WAFs will respond with a 200 code which causes a false positive - if subject_response != None: + if subject_response is not 
None: for ws in waf_strings: if ws in subject_response.text: self.debug("Rejecting result based on presence of WAF string") return - if match == False: + if match is False: if str(subject_response.status_code)[0] != "4": if sig[2]: added_header_tuple = next(iter(sig[2].items())) @@ -165,13 +165,13 @@ async def filter_event(self, event): return False def format_signature(self, sig, event): - if sig[3] == True: + if sig[3] is True: cleaned_path = event.parsed_url.path.strip("/") else: cleaned_path = event.parsed_url.path.lstrip("/") kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path} formatted_url = sig[1].format(**kwargs) - if sig[2] != None: + if sig[2] is not None: formatted_headers = {k: v.format(**kwargs) for k, v in sig[2].items()} else: formatted_headers = None diff --git a/bbot/modules/c99.py b/bbot/modules/c99.py index 7bb395fa18..17fea87a13 100644 --- a/bbot/modules/c99.py +++ b/bbot/modules/c99.py @@ -20,7 +20,7 @@ class c99(subdomain_enum_apikey): async def ping(self): url = f"{self.base_url}/randomnumber?key={{api_key}}&between=1,100&json" response = await self.api_request(url) - assert response.json()["success"] == True, getattr(response, "text", "no response from server") + assert response.json()["success"] is True, getattr(response, "text", "no response from server") async def request_url(self, query): url = f"{self.base_url}/subdomainfinder?key={{api_key}}&domain={self.helpers.quote(query)}&json" diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index 6144d0b13e..e0a45e12b0 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -52,7 +52,7 @@ async def setup(self): async def handle_event(self, event): if self.helpers.url_depth(event.data) > self.config.get("max_depth"): - self.debug(f"Exceeded max depth, aborting event") + self.debug("Exceeded max depth, aborting event") return # only FFUF against a directory @@ -252,7 +252,7 @@ async def execute_ffuf( self.warning(f"Exiting from FFUF run early, received an ABORT filter: [{filters[ext][1]}]") continue - elif filters[ext] == None: + elif filters[ext] is None: pass else: @@ -282,7 +282,7 @@ async def execute_ffuf( else: if mode == "normal": # before emitting, we are going to send another baseline. 
This will immediately catch things like a WAF flipping blocking on us mid-scan - if baseline == False: + if baseline is False: pre_emit_temp_canary = [ f async for f in self.execute_ffuf( diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index f254f9bb07..c525b5a8f8 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -226,7 +226,7 @@ async def execute_nuclei(self, nuclei_input): command.append(f"-{cli_option}") command.append(option) - if self.scan.config.get("interactsh_disable") == True: + if self.scan.config.get("interactsh_disable") is True: self.info("Disbling interactsh in accordance with global settings") command.append("-no-interactsh") diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index 66c1c516c4..e37be8210a 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -73,7 +73,7 @@ async def handle_event(self, event): async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=False): filters = await self.baseline_ffuf(f"{host}/", exts=[""], suffix=basehost, mode="hostheader") - self.debug(f"Baseline completed and returned these filters:") + self.debug("Baseline completed and returned these filters:") self.debug(filters) if not wordlist: wordlist = self.tempfile @@ -90,7 +90,7 @@ async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=F parent=event, context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {vhost_str}", ) - if skip_dns_host == False: + if skip_dns_host is False: await self.emit_event( f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", diff --git a/bbot/modules/dnsbimi.py b/bbot/modules/dnsbimi.py index d974b1183e..f780b15517 100644 --- a/bbot/modules/dnsbimi.py +++ b/bbot/modules/dnsbimi.py @@ -80,7 +80,7 @@ async def filter_event(self, event): return False, "event is wildcard" # there's no value in inspecting service records - if service_record(event.host) == True: + if service_record(event.host) is True: return False, "service record detected" return True diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index ab36b493e8..5c0ae29041 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -31,7 +31,7 @@ async def query(self, domain): html = self.helpers.beautifulsoup(res1.content, "html.parser") if html is False: - self.verbose(f"BeautifulSoup returned False") + self.verbose("BeautifulSoup returned False") return ret csrftoken = None @@ -82,7 +82,7 @@ async def query(self, domain): return ret html = self.helpers.beautifulsoup(res2.content, "html.parser") if html is False: - self.verbose(f"BeautifulSoup returned False") + self.verbose("BeautifulSoup returned False") return ret escaped_domain = re.escape(domain) match_pattern = re.compile(r"^[\w\.-]+\." 
+ escaped_domain + r"$") diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index c6bd38544f..f486c7d978 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -163,7 +163,7 @@ async def setup(self): self.severity = None self.generic_only = self.config.get("generic_only", False) - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: self.interactsh_instance = self.helpers.interactsh() self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback) @@ -216,7 +216,7 @@ async def interactsh_callback(self, r): self.debug("skipping result because subdomain tag was missing") async def cleanup(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: await self.interactsh_instance.deregister() self.debug( @@ -226,7 +226,7 @@ async def cleanup(self): self.warning(f"Interactsh failure: {e}") async def finish(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index 369b337420..9ba34d155d 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -166,7 +166,7 @@ async def download_run_logs(self, owner, repo, run_id): main_logs = [] with zipfile.ZipFile(file_destination, "r") as logzip: for name in logzip.namelist(): - if fnmatch.fnmatch(name, "*.txt") and not "/" in name: + if fnmatch.fnmatch(name, "*.txt") and "/" not in name: logzip.extract(name, folder) main_logs.append(folder / name) return main_logs diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 08edfaaf31..375817c1a4 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -264,8 +264,8 @@ async def cur_execute(self, cur, query): async def report(self): if self.screenshots_taken: self.success(f"{len(self.screenshots_taken):,} web screenshots captured. 
To view:") - self.success(f" - Start gowitness") + self.success(" - Start gowitness") self.success(f" - cd {self.base_path} && ./gowitness server") - self.success(f" - Browse to http://localhost:7171") + self.success(" - Browse to http://localhost:7171") else: - self.info(f"No web screenshots captured") + self.info("No web screenshots captured") diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index 00dd640baf..50a6290cf1 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -19,7 +19,7 @@ class host_header(BaseModule): async def setup(self): self.subdomain_tags = {} - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: self.interactsh_instance = self.helpers.interactsh() self.domain = await self.interactsh_instance.register(callback=self.interactsh_callback) @@ -60,7 +60,7 @@ async def interactsh_callback(self, r): self.debug("skipping results because subdomain tag was missing") async def finish(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): @@ -69,7 +69,7 @@ async def finish(self): self.debug(f"Error in interact.sh: {e}") async def cleanup(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: await self.interactsh_instance.deregister() self.debug( @@ -136,7 +136,7 @@ async def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: - description = f"Duplicate Host Header Tolerated" + description = "Duplicate Host Header Tolerated" await self.emit_event( { "host": str(event.host), diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 2cd2c0504c..059bc2461f 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -90,7 +90,7 @@ def make_url_metadata(self, event): else: url = str(event.data) url_hash = hash((event.host, event.port, has_spider_max)) - if url_hash == None: + if url_hash is None: url_hash = hash((url, has_spider_max)) return url, url_hash diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index 6a173a929a..60de156268 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -160,7 +160,7 @@ async def solve_shortname_recursive( url_hint_list = [] found_results = False - cl = ext_char_list if extension_mode == True else char_list + cl = ext_char_list if extension_mode is True else char_list urls_and_kwargs = [] @@ -209,7 +209,7 @@ async def solve_shortname_recursive( extension_mode, node_count=node_count, ) - if len(prefix) > 0 and found_results == False: + if len(prefix) > 0 and found_results is False: url_hint_list.append(f"{prefix}") self.verbose(f"Found new (possibly partial) URL_HINT: {prefix} from node {target}") return url_hint_list @@ -234,7 +234,7 @@ class safety_counter_obj: {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, - context=f"{{module}} detected low {{event.type}}: IIS shortname enumeration", + context="{module} detected low {event.type}: IIS shortname enumeration", ) if not self.config.get("detect_only"): for detection in detections: diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 42c51ec033..fa743360e6 100644 --- a/bbot/modules/internal/cloudcheck.py +++ 
b/bbot/modules/internal/cloudcheck.py @@ -74,7 +74,7 @@ async def handle_event(self, event, **kwargs): if match: matches.append(match.groups()) for match in matches: - if not match in found: + if match not in found: found.add(match) _kwargs = dict(base_kwargs) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 9b68b7bb9d..3367ce7f95 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -131,9 +131,9 @@ async def handle_wildcard_event(self, event): event.host, rdtypes=rdtypes, raw_dns_records=event.raw_dns_records ) for rdtype, (is_wildcard, wildcard_host) in wildcard_rdtypes.items(): - if is_wildcard == False: + if is_wildcard is False: continue - elif is_wildcard == True: + elif is_wildcard is True: event.add_tag("wildcard") wildcard_tag = "wildcard" else: @@ -142,16 +142,16 @@ async def handle_wildcard_event(self, event): event.add_tag(f"{rdtype}-{wildcard_tag}") # wildcard event modification (www.evilcorp.com --> _wildcard.evilcorp.com) - if wildcard_rdtypes and not "target" in event.tags: + if wildcard_rdtypes and "target" not in event.tags: # these are the rdtypes that have wildcards wildcard_rdtypes_set = set(wildcard_rdtypes) # consider the event a full wildcard if all its records are wildcards event_is_wildcard = False if wildcard_rdtypes_set: - event_is_wildcard = all(r[0] == True for r in wildcard_rdtypes.values()) + event_is_wildcard = all(r[0] is True for r in wildcard_rdtypes.values()) if event_is_wildcard: - if event.type in ("DNS_NAME",) and not "_wildcard" in event.data.split("."): + if event.type in ("DNS_NAME",) and "_wildcard" not in event.data.split("."): wildcard_parent = self.helpers.parent_domain(event.host) for rdtype, (_is_wildcard, _parent_domain) in wildcard_rdtypes.items(): if _is_wildcard: @@ -273,7 +273,7 @@ async def resolve_event(self, event, types): # tag event with errors for rdtype, errors in dns_errors.items(): # only consider it an error if there weren't any results for that rdtype - if errors and not rdtype in event.dns_children: + if errors and rdtype not in event.dns_children: event.add_tag(f"{rdtype}-error") def get_dns_parent(self, event): diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 84e9726bb7..958723739e 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -71,7 +71,7 @@ async def setup(self): self.hugewarning( f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation" ) - self.hugewarning(f'Enabling the "portscan" module is highly recommended') + self.hugewarning('Enabling the "portscan" module is highly recommended') self.range_to_ip = False return True @@ -126,7 +126,7 @@ async def handle_event(self, event): parent = self.helpers.parent_domain(event.host_original) if parent != event.data: await self.emit_event( - parent, "DNS_NAME", parent=event, context=f"speculated parent {{event.type}}: {{event.data}}" + parent, "DNS_NAME", parent=event, context="speculated parent {event.type}: {event.data}" ) # URL --> OPEN_TCP_PORT diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index 6583832583..3bae28a37f 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -31,7 +31,7 @@ async def handle_event(self, event): netmask = main_ip.max_prefixlen - min(main_ip.max_prefixlen, self.num_bits) network = ipaddress.ip_network(f"{main_ip}/{netmask}", strict=False) subnet_hash = hash(network) - if not subnet_hash in 
self.processed: + if subnet_hash not in self.processed: self.processed.add(subnet_hash) for ip in network: if ip != main_ip: diff --git a/bbot/modules/jadx.py b/bbot/modules/jadx.py index d065310aed..d081171d1e 100644 --- a/bbot/modules/jadx.py +++ b/bbot/modules/jadx.py @@ -43,7 +43,7 @@ async def setup(self): async def filter_event(self, event): if "file" in event.tags: - if not event.data["magic_description"].lower() in self.allowed_file_types: + if event.data["magic_description"].lower() not in self.allowed_file_types: return False, f"Jadx is not able to decompile this file type: {event.data['magic_description']}" else: return False, "Event is not a file" diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 5f2bac729e..114f7d66fd 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -46,11 +46,11 @@ async def handle_event(self, event): body = _event.data["body"] soup = self.helpers.beautifulsoup(body, "html.parser") if soup is False: - self.debug(f"BeautifulSoup returned False") + self.debug("BeautifulSoup returned False") return result = self.find_type(soup) if result: - description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" + description = "Found a Newsletter Submission Form that could be used for email bombing attacks" data = {"host": str(_event.host), "description": description, "url": _event.data["url"]} await self.emit_event( data, diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index a150c029d3..aaf97f80bb 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -99,7 +99,7 @@ def increment_stat(stat, value): totals[stat] += 1 except KeyError: totals[stat] = 1 - if not stat in stats: + if stat not in stats: stats[stat] = {} try: stats[stat][value] += 1 diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index 0f6e7ac783..16fa4443bc 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -24,7 +24,7 @@ def _event_precheck(self, event): if event.type in ("FINISHED",): return True, "its type is FINISHED" if self.errored: - return False, f"module is in error state" + return False, "module is in error state" # exclude non-watched types if not any(t in self.get_watched_events() for t in ("*", event.type)): return False, "its type is not in watched_events" diff --git a/bbot/modules/output/stdout.py b/bbot/modules/output/stdout.py index 6e4ccf5bea..520a90204b 100644 --- a/bbot/modules/output/stdout.py +++ b/bbot/modules/output/stdout.py @@ -20,7 +20,7 @@ class Stdout(BaseOutputModule): async def setup(self): self.text_format = self.config.get("format", "text").strip().lower() - if not self.text_format in self.format_choices: + if self.text_format not in self.format_choices: return ( False, f'Invalid text format choice, "{self.text_format}" (choices: {",".join(self.format_choices)})', @@ -33,7 +33,7 @@ async def setup(self): async def filter_event(self, event): if self.accept_event_types: - if not event.type in self.accept_event_types: + if event.type not in self.accept_event_types: return False, f'Event type "{event.type}" is not in the allowed event_types' return True diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py index 6742421691..ab3e54c1b5 100644 --- a/bbot/modules/portscan.py +++ b/bbot/modules/portscan.py @@ -99,7 +99,7 @@ async def setup(self): return False, "Masscan failed to run" returncode = getattr(ipv6_result, "returncode", 0) if returncode 
and "failed to detect IPv6 address" in ipv6_result.stderr: - self.warning(f"It looks like you are not set up for IPv6. IPv6 targets will not be scanned.") + self.warning("It looks like you are not set up for IPv6. IPv6 targets will not be scanned.") self.ipv6_support = False return True @@ -334,7 +334,7 @@ def log_masscan_status(self, s): if "FAIL" in s: self.warning(s) self.warning( - f'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.' + 'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.' ) else: self.verbose(s) diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index ba5e1e39a4..b3a80fc581 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -38,7 +38,7 @@ async def filter_event(self, event): async def handle_event(self, event): host = event.host - if self.cache_get(host) == False: + if self.cache_get(host) is False: asns, source = await self.get_asn(host) if not asns: self.cache_put(self.unknown_asn) @@ -96,7 +96,7 @@ def cache_get(self, ip): for p in self.helpers.ip_network_parents(ip): try: self.asn_counts[p] += 1 - if ret == False: + if ret is False: ret = p except KeyError: continue @@ -112,7 +112,7 @@ async def get_asn(self, ip, retries=1): for i, source in enumerate(list(self.sources)): get_asn_fn = getattr(self, f"get_asn_{source}") res = await get_asn_fn(ip) - if res == False: + if res is False: # demote the current source to lowest priority since it just failed self.sources.append(self.sources.pop(i)) self.verbose(f"Failed to contact {source}, retrying") @@ -125,7 +125,7 @@ async def get_asn_ripe(self, ip): url = f"https://stat.ripe.net/data/network-info/data.json?resource={ip}" response = await self.get_url(url, "ASN") asns = [] - if response == False: + if response is False: return False data = response.get("data", {}) if not data: @@ -138,7 +138,7 @@ async def get_asn_ripe(self, ip): asn_numbers = [] for number in asn_numbers: asn = await self.get_asn_metadata_ripe(number) - if asn == False: + if asn is False: return False asn["subnet"] = prefix asns.append(asn) @@ -155,7 +155,7 @@ async def get_asn_metadata_ripe(self, asn_number): } url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}" response = await self.get_url(url, "ASN Metadata", cache=True) - if response == False: + if response is False: return False data = response.get("data", {}) if not data: @@ -187,7 +187,7 @@ async def get_asn_bgpview(self, ip): data = await self.get_url(url, "ASN") asns = [] asns_tried = set() - if data == False: + if data is False: return False data = data.get("data", {}) prefixes = data.get("prefixes", []) @@ -201,9 +201,9 @@ async def get_asn_bgpview(self, ip): description = details.get("description") or prefix.get("description") or "" country = details.get("country_code") or prefix.get("country_code") or "" emails = [] - if not asn in asns_tried: + if asn not in asns_tried: emails = await self.get_emails_bgpview(asn) - if emails == False: + if emails is False: return False asns_tried.add(asn) asns.append( @@ -217,7 +217,7 @@ async def get_emails_bgpview(self, asn): contacts = [] url = f"https://api.bgpview.io/asn/{asn}" data = await self.get_url(url, "ASN metadata", cache=True) - if data == False: + if data is False: return False data = data.get("data", {}) if not data: diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index fc76920517..e41b3119fb 
100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -33,14 +33,14 @@ async def handle_event(self, event): for l in lines: if len(l) > 0: split_l = l.split(": ") - if (split_l[0].lower() == "allow" and self.config.get("include_allow") == True) or ( - split_l[0].lower() == "disallow" and self.config.get("include_disallow") == True + if (split_l[0].lower() == "allow" and self.config.get("include_allow") is True) or ( + split_l[0].lower() == "disallow" and self.config.get("include_disallow") is True ): unverified_url = f"{host}{split_l[1].lstrip('/')}".replace( "*", self.helpers.rand_string(4) ) - elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") == True: + elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") is True: unverified_url = split_l[1] else: continue diff --git a/bbot/modules/securitytxt.py b/bbot/modules/securitytxt.py index 681519e37b..880865c9b8 100644 --- a/bbot/modules/securitytxt.py +++ b/bbot/modules/securitytxt.py @@ -121,7 +121,7 @@ async def handle_event(self, event): start, end = match.span() found_url = v[start:end] - if found_url != url and self._urls == True: + if found_url != url and self._urls is True: await self.emit_event(found_url, "URL_UNVERIFIED", parent=event, tags=tags) diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index fe90150271..659f159608 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -52,5 +52,5 @@ async def query(self, query, parse_fn=None, request_fn=None): results.add(hostname) yield hostname if ' 1 # list modules monkeypatch.setattr("sys.argv", ["bbot", "--list-modules"]) result = await cli._main() - assert result == None + assert result is None out, err = capsys.readouterr() # internal modules assert "| excavate " in out @@ -162,7 +162,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not output_dir.exists() monkeypatch.setattr("sys.argv", ["bbot", "-o", str(output_dir), "-n", scan_name, "-y"]) result = await cli._main() - assert result == True + assert result is True assert output_dir.is_dir() assert scan_dir.is_dir() assert "[SCAN]" in open(scan_dir / "output.txt").read() @@ -173,7 +173,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "--list-module-options"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| modules.wayback.urls" in out assert "| bool" in out assert "| emit URLs in addition to DNS_NAMEs" in out @@ -185,36 +185,36 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "--list-module-options"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| modules.wayback.urls" in out assert "| bool" in out assert "| emit URLs in addition to DNS_NAMEs" in out assert "| False" in out assert "| modules.dnsbrute.wordlist" in out - assert not "| modules.robots.include_allow" in out + assert "| modules.robots.include_allow" not in out # list module options by module monkeypatch.setattr("sys.argv", ["bbot", "-m", "dnsbrute", "-lmo"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert out.count("modules.") == out.count("modules.dnsbrute.") - assert not "| modules.wayback.urls" in out + assert "| modules.wayback.urls" not in out assert "| 
modules.dnsbrute.wordlist" in out - assert not "| modules.robots.include_allow" in out + assert "| modules.robots.include_allow" not in out # list output module options by module monkeypatch.setattr("sys.argv", ["bbot", "-om", "stdout", "-lmo"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert out.count("modules.") == out.count("modules.stdout.") # list flags monkeypatch.setattr("sys.argv", ["bbot", "--list-flags"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| safe " in out assert "| Non-intrusive, safe to run " in out assert "| active " in out @@ -224,32 +224,32 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "--list-flags"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None - assert not "| safe " in out + assert result is None + assert "| safe " not in out assert "| active " in out - assert not "| passive " in out + assert "| passive " not in out # list multiple flags monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "safe", "--list-flags"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| safe " in out assert "| active " in out - assert not "| passive " in out + assert "| passive " not in out # no args monkeypatch.setattr("sys.argv", ["bbot"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "Target:\n -t TARGET [TARGET ...]" in out # list modules monkeypatch.setattr("sys.argv", ["bbot", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| dnsbrute " in out assert "| httpx " in out assert "| robots " in out @@ -258,33 +258,33 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| dnsbrute " in out assert "| httpx " in out - assert not "| robots " in out + assert "| robots " not in out # list modules by flag + required flag monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-rf", "passive", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| chaos " in out - assert not "| httpx " in out + assert "| httpx " not in out # list modules by flag + excluded flag monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-ef", "active", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| chaos " in out - assert not "| httpx " in out + assert "| httpx " not in out # list modules by flag + excluded module monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-em", "dnsbrute", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None - assert not "| dnsbrute " in out + assert result is None + assert "| dnsbrute " not in out assert "| httpx " in out # output modules override @@ -292,12 +292,12 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-om", "csv,json", "-y"]) result = await cli._main() - assert result == True + assert result is True 
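
# Illustration (not part of the patch): the sweep above rewrites equality
# tests against singletons ("== True", "== False", "== None") into identity
# tests ("is True" / "is None"), the kind of cleanup flake8/ruff flag as
# E711/E712. The two spellings are not always interchangeable: "==" goes
# through __eq__ (which a class can override, and which 1 satisfies for
# True), while "is" compares against the singleton object itself. A minimal,
# hypothetical sketch:

class AlwaysEqual:
    """Hypothetical class whose __eq__ claims equality with everything."""

    def __eq__(self, other):
        return True

obj = AlwaysEqual()
assert (obj == True) is True   # __eq__ hijacks the equality comparison
assert (obj is True) is False  # identity cannot be hijacked

one = 1
assert one == True             # bool is an int subclass, so 1 == True
assert one is not True         # ...but 1 is not the True singleton

# The same patch also drops f-strings that contain no placeholders
# (pyflakes F541), e.g. self.debug(f"...") -> self.debug("..."), which is
# purely cosmetic.
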
assert "Loaded 2/2 output modules, (csv,json)" in caplog.text caplog.clear() monkeypatch.setattr("sys.argv", ["bbot", "-em", "csv,json", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 3/3 output modules, (python,stdout,txt)" in caplog.text # output modules override @@ -305,7 +305,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-om", "subdomains", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 6/6 output modules, (csv,json,python,stdout,subdomains,txt)" in caplog.text # internal modules override @@ -313,17 +313,17 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 5/5 internal modules (aggregate,cloudcheck,dnsresolve,excavate,speculate)" in caplog.text caplog.clear() monkeypatch.setattr("sys.argv", ["bbot", "-em", "excavate", "speculate", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 3/3 internal modules (aggregate,cloudcheck,dnsresolve)" in caplog.text caplog.clear() monkeypatch.setattr("sys.argv", ["bbot", "-c", "speculate=false", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 4/4 internal modules (aggregate,cloudcheck,dnsresolve,excavate)" in caplog.text # custom target type @@ -331,7 +331,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-t", "ORG:evilcorp", "-y"]) result = await cli._main() out, err = capsys.readouterr() - assert result == True + assert result is True assert "[ORG_STUB] evilcorp TARGET" in out # activate modules by flag @@ -339,50 +339,50 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-f", "passive"]) result = await cli._main() - assert result == True + assert result is True # unconsoleable output module monkeypatch.setattr("sys.argv", ["bbot", "-om", "web_report"]) result = await cli._main() - assert result == True + assert result is True # unresolved dependency monkeypatch.setattr("sys.argv", ["bbot", "-m", "wappalyzer"]) result = await cli._main() - assert result == True + assert result is True # require flags monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "-rf", "passive"]) result = await cli._main() - assert result == True + assert result is True # excluded flags monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "-ef", "active"]) result = await cli._main() - assert result == True + assert result is True # slow modules monkeypatch.setattr("sys.argv", ["bbot", "-m", "bucket_digitalocean"]) result = await cli._main() - assert result == True + assert result is True # deadly modules caplog.clear() assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei"]) result = await cli._main() - assert result == False, "-m nuclei ran without --allow-deadly" + assert result is False, "-m nuclei ran without --allow-deadly" assert "Please specify --allow-deadly to continue" in caplog.text # --allow-deadly monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei", "--allow-deadly"]) result = await cli._main() - assert result == True, "-m nuclei failed to run with --allow-deadly" + assert result is True, 
"-m nuclei failed to run with --allow-deadly" # install all deps monkeypatch.setattr("sys.argv", ["bbot", "--install-all-deps"]) success = await cli._main() - assert success == True, "--install-all-deps failed for at least one module" + assert success is True, "--install-all-deps failed for at least one module" @pytest.mark.asyncio @@ -396,7 +396,7 @@ async def test_cli_customheaders(monkeypatch, caplog, capsys): "sys.argv", ["bbot", "--custom-headers", "foo=bar", "foo2=bar2", "foo3=bar=3", "--current-preset"] ) success = await cli._main() - assert success == None, "setting custom headers on command line failed" + assert success is None, "setting custom headers on command line failed" captured = capsys.readouterr() stdout_preset = yaml.safe_load(captured.out) assert stdout_preset["config"]["web"]["http_headers"] == {"foo": "bar", "foo2": "bar2", "foo3": "bar=3"} @@ -404,21 +404,21 @@ async def test_cli_customheaders(monkeypatch, caplog, capsys): # test custom headers invalid (no "=") monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "justastring", "--current-preset"]) result = await cli._main() - assert result == None + assert result is None assert "Custom headers not formatted correctly (missing '=')" in caplog.text caplog.clear() # test custom headers invalid (missing key) monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "=nokey", "--current-preset"]) result = await cli._main() - assert result == None + assert result is None assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text caplog.clear() # test custom headers invalid (missing value) monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "missingvalue=", "--current-preset"]) result = await cli._main() - assert result == None + assert result is None assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index d0bfb68330..cd75b5fff7 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -23,7 +23,7 @@ async def test_dns_engine(bbot_scanner): ) result = await scan.helpers.resolve("one.one.one.one") assert "1.1.1.1" in result - assert not "2606:4700:4700::1111" in result + assert "2606:4700:4700::1111" not in result results = [_ async for _ in scan.helpers.resolve_batch(("one.one.one.one", "1.1.1.1"))] pass_1 = False @@ -85,12 +85,12 @@ async def test_dns_resolution(bbot_scanner): for answer in answers: responses += list(extract_targets(answer)) assert ("A", "1.1.1.1") in responses - assert not ("AAAA", "2606:4700:4700::1111") in responses + assert ("AAAA", "2606:4700:4700::1111") not in responses answers, errors = await dnsengine.resolve_raw("one.one.one.one", type="AAAA") responses = [] for answer in answers: responses += list(extract_targets(answer)) - assert not ("A", "1.1.1.1") in responses + assert ("A", "1.1.1.1") not in responses assert ("AAAA", "2606:4700:4700::1111") in responses answers, errors = await dnsengine.resolve_raw("1.1.1.1") responses = [] @@ -141,11 +141,11 @@ async def test_dns_resolution(bbot_scanner): assert hash(("1.1.1.1", "PTR")) in dnsengine._dns_cache await dnsengine.resolve("one.one.one.one", type="A") assert hash(("one.one.one.one", "A")) in dnsengine._dns_cache - assert not hash(("one.one.one.one", "AAAA")) in dnsengine._dns_cache + assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache dnsengine._dns_cache.clear() await dnsengine.resolve("one.one.one.one", type="AAAA") 
assert hash(("one.one.one.one", "AAAA")) in dnsengine._dns_cache - assert not hash(("one.one.one.one", "A")) in dnsengine._dns_cache + assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache await dnsengine._shutdown() @@ -165,7 +165,7 @@ async def test_dns_resolution(bbot_scanner): assert "A" in resolved_hosts_event1.raw_dns_records assert "AAAA" in resolved_hosts_event1.raw_dns_records assert "a-record" in resolved_hosts_event1.tags - assert not "a-record" in resolved_hosts_event2.tags + assert "a-record" not in resolved_hosts_event2.tags scan2 = bbot_scanner("evilcorp.com", config={"dns": {"minimal": False}}) await scan2.helpers.dns._mock_dns( @@ -198,7 +198,7 @@ async def test_wildcards(bbot_scanner): assert len(dnsengine._wildcard_cache) == len(all_rdtypes) + (len(all_rdtypes) - 2) for rdtype in all_rdtypes: assert hash(("github.io", rdtype)) in dnsengine._wildcard_cache - if not rdtype in ("A", "AAAA"): + if rdtype not in ("A", "AAAA"): assert hash(("asdf.github.io", rdtype)) in dnsengine._wildcard_cache assert "github.io" in wildcard_domains assert "A" in wildcard_domains["github.io"] @@ -781,16 +781,16 @@ async def test_dns_graph_structure(bbot_scanner): @pytest.mark.asyncio async def test_dns_helpers(bbot_scanner): - assert service_record("") == False - assert service_record("localhost") == False - assert service_record("www.example.com") == False - assert service_record("www.example.com", "SRV") == True - assert service_record("_custom._service.example.com", "SRV") == True - assert service_record("_custom._service.example.com", "A") == False + assert service_record("") is False + assert service_record("localhost") is False + assert service_record("www.example.com") is False + assert service_record("www.example.com", "SRV") is True + assert service_record("_custom._service.example.com", "SRV") is True + assert service_record("_custom._service.example.com", "A") is False # top 100 most common SRV records for srv_record in common_srvs[:100]: hostname = f"{srv_record}.example.com" - assert service_record(hostname) == True + assert service_record(hostname) is True # make sure system nameservers are excluded from use by DNS brute force brute_nameservers = tempwordlist(["1.2.3.4", "8.8.4.4", "4.3.2.1", "8.8.8.8"]) diff --git a/bbot/test/test_step_1/test_engine.py b/bbot/test/test_step_1/test_engine.py index 1b44049c9b..dbb21246f2 100644 --- a/bbot/test/test_step_1/test_engine.py +++ b/bbot/test/test_step_1/test_engine.py @@ -72,7 +72,7 @@ async def yield_stuff(self, n): # test async generator assert counter == 0 - assert yield_cancelled == False + assert yield_cancelled is False yield_res = [r async for r in test_engine.yield_stuff(13)] assert yield_res == [f"thing{i}" for i in range(13)] assert len(yield_res) == 13 @@ -88,8 +88,8 @@ async def yield_stuff(self, n): await agen.aclose() break await asyncio.sleep(5) - assert yield_cancelled == True - assert yield_errored == False + assert yield_cancelled is True + assert yield_errored is False assert counter < 15 # test async generator with error @@ -99,8 +99,8 @@ async def yield_stuff(self, n): with pytest.raises(BBOTEngineError): async for _ in agen: pass - assert yield_cancelled == False - assert yield_errored == True + assert yield_cancelled is False + assert yield_errored is True # test return with cancellation return_started = False @@ -113,10 +113,10 @@ async def yield_stuff(self, n): with pytest.raises(asyncio.CancelledError): await task await asyncio.sleep(0.1) - assert return_started == True - assert return_finished == 
False - assert return_cancelled == True - assert return_errored == False + assert return_started is True + assert return_finished is False + assert return_cancelled is True + assert return_errored is False # test return with late cancellation return_started = False @@ -128,10 +128,10 @@ async def yield_stuff(self, n): task.cancel() result = await task assert result == "thing1" - assert return_started == True - assert return_finished == True - assert return_cancelled == False - assert return_errored == False + assert return_started is True + assert return_finished is True + assert return_cancelled is False + assert return_errored is False # test return with error return_started = False @@ -140,9 +140,9 @@ async def yield_stuff(self, n): return_errored = False with pytest.raises(BBOTEngineError): result = await test_engine.return_thing(None) - assert return_started == True - assert return_finished == False - assert return_cancelled == False - assert return_errored == True + assert return_started is True + assert return_finished is False + assert return_cancelled is False + assert return_errored is True await test_engine.shutdown() diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 8156fc7969..6add85ccfe 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -81,8 +81,8 @@ async def test_events(events, helpers): assert "fsocie.ty" not in events.subdomain assert events.subdomain in events.domain assert events.domain not in events.subdomain - assert not events.ipv4 in events.domain - assert not events.netv6 in events.domain + assert events.ipv4 not in events.domain + assert events.netv6 not in events.domain assert events.emoji not in events.domain assert events.domain not in events.emoji open_port_event = scan.make_event(" eViLcorp.COM.:88", "DNS_NAME", dummy=True) @@ -207,7 +207,7 @@ async def test_events(events, helpers): # scope distance event1 = scan.make_event("1.2.3.4", dummy=True) - assert event1._scope_distance == None + assert event1._scope_distance is None event1.scope_distance = 0 assert event1._scope_distance == 0 event2 = scan.make_event("2.3.4.5", parent=event1) @@ -228,7 +228,7 @@ async def test_events(events, helpers): org_stub_1 = scan.make_event("STUB1", "ORG_STUB", parent=scan.root_event) org_stub_1.scope_distance == 1 - assert org_stub_1.netloc == None + assert org_stub_1.netloc is None assert "netloc" not in org_stub_1.json() org_stub_2 = scan.make_event("STUB2", "ORG_STUB", parent=org_stub_1) org_stub_2.scope_distance == 2 @@ -237,7 +237,7 @@ async def test_events(events, helpers): root_event = scan.make_event("0.0.0.0", dummy=True) root_event.scope_distance = 0 internal_event1 = scan.make_event("1.2.3.4", parent=root_event, internal=True) - assert internal_event1._internal == True + assert internal_event1._internal is True assert "internal" in internal_event1.tags # tag inheritance @@ -269,8 +269,8 @@ async def test_events(events, helpers): # updating module event3 = scan.make_event("127.0.0.1", parent=scan.root_event) updated_event = scan.make_event(event3, internal=True) - assert event3.internal == False - assert updated_event.internal == True + assert event3.internal is False + assert updated_event.internal is True # event sorting parent1 = scan.make_event("127.0.0.1", parent=scan.root_event) @@ -527,7 +527,7 @@ async def test_events(events, helpers): hostless_event_json = hostless_event.json() assert hostless_event_json["type"] == "ASDF" assert hostless_event_json["data"] == "asdf" - 
assert not "host" in hostless_event_json + assert "host" not in hostless_event_json # SIEM-friendly serialize/deserialize json_event_siemfriendly = db_event.json(siem_friendly=True) @@ -805,7 +805,7 @@ async def test_event_web_spider_distance(bbot_scanner): ) assert url_event_3.web_spider_distance == 1 assert "spider-danger" in url_event_3.tags - assert not "spider-max" in url_event_3.tags + assert "spider-max" not in url_event_3.tags social_event = scan.make_event( {"platform": "github", "url": "http://www.evilcorp.com/test4"}, "SOCIAL", parent=url_event_3 ) @@ -828,42 +828,42 @@ async def test_event_web_spider_distance(bbot_scanner): url_event = scan.make_event("http://www.evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event) assert url_event.web_spider_distance == 0 - assert not "spider-danger" in url_event.tags - assert not "spider-max" in url_event.tags + assert "spider-danger" not in url_event.tags + assert "spider-max" not in url_event.tags url_event_2 = scan.make_event( "http://www.evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event, tags="spider-danger" ) # spider distance shouldn't increment because it's not the same host assert url_event_2.web_spider_distance == 0 assert "spider-danger" in url_event_2.tags - assert not "spider-max" in url_event_2.tags + assert "spider-max" not in url_event_2.tags url_event_3 = scan.make_event( "http://www.evilcorp.com/3", "URL_UNVERIFIED", parent=url_event_2, tags="spider-danger" ) assert url_event_3.web_spider_distance == 1 assert "spider-danger" in url_event_3.tags - assert not "spider-max" in url_event_3.tags + assert "spider-max" not in url_event_3.tags url_event_4 = scan.make_event("http://evilcorp.com", "URL_UNVERIFIED", parent=url_event_3) assert url_event_4.web_spider_distance == 0 - assert not "spider-danger" in url_event_4.tags - assert not "spider-max" in url_event_4.tags + assert "spider-danger" not in url_event_4.tags + assert "spider-max" not in url_event_4.tags url_event_4.add_tag("spider-danger") assert url_event_4.web_spider_distance == 0 assert "spider-danger" in url_event_4.tags - assert not "spider-max" in url_event_4.tags + assert "spider-max" not in url_event_4.tags url_event_4.remove_tag("spider-danger") assert url_event_4.web_spider_distance == 0 - assert not "spider-danger" in url_event_4.tags - assert not "spider-max" in url_event_4.tags + assert "spider-danger" not in url_event_4.tags + assert "spider-max" not in url_event_4.tags url_event_5 = scan.make_event("http://evilcorp.com/5", "URL_UNVERIFIED", parent=url_event_4) assert url_event_5.web_spider_distance == 0 - assert not "spider-danger" in url_event_5.tags - assert not "spider-max" in url_event_5.tags + assert "spider-danger" not in url_event_5.tags + assert "spider-max" not in url_event_5.tags url_event_5.add_tag("spider-danger") # if host is the same as parent, web spider distance should auto-increment after adding spider-danger tag assert url_event_5.web_spider_distance == 1 assert "spider-danger" in url_event_5.tags - assert not "spider-max" in url_event_5.tags + assert "spider-max" not in url_event_5.tags def test_event_confidence(): diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 76cf635175..a115e6f26b 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -64,8 +64,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert not helpers.is_subdomain("notreal") assert helpers.is_url("http://evilcorp.co.uk/asdf?a=b&c=d#asdf") assert 
helpers.is_url("https://evilcorp.co.uk/asdf?a=b&c=d#asdf") - assert helpers.is_uri("ftp://evilcorp.co.uk") == True - assert helpers.is_uri("http://evilcorp.co.uk") == True + assert helpers.is_uri("ftp://evilcorp.co.uk") is True + assert helpers.is_uri("http://evilcorp.co.uk") is True assert helpers.is_uri("evilcorp.co.uk", return_scheme=True) == "" assert helpers.is_uri("ftp://evilcorp.co.uk", return_scheme=True) == "ftp" assert helpers.is_uri("FTP://evilcorp.co.uk", return_scheme=True) == "ftp" @@ -283,7 +283,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): replaced = helpers.search_format_dict( {"asdf": [{"wat": {"here": "#{replaceme}!"}}, {500: True}]}, replaceme="asdf" ) - assert replaced["asdf"][1][500] == True + assert replaced["asdf"][1][500] is True assert replaced["asdf"][0]["wat"]["here"] == "asdf!" filtered_dict = helpers.filter_dict( @@ -315,7 +315,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): fuzzy=True, exclude_keys="modules", ) - assert not "secrets_db" in filtered_dict4["modules"] + assert "secrets_db" not in filtered_dict4["modules"] assert "ipneighbor" in filtered_dict4["modules"] assert "secret" in filtered_dict4["modules"]["ipneighbor"] assert "asdf" not in filtered_dict4["modules"]["ipneighbor"] @@ -408,15 +408,15 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert helpers.validators.validate_host("LOCALHOST ") == "localhost" assert helpers.validators.validate_host(" 192.168.1.1") == "192.168.1.1" assert helpers.validators.validate_host(" Dead::c0dE ") == "dead::c0de" - assert helpers.validators.soft_validate(" evilCorp.COM", "host") == True - assert helpers.validators.soft_validate("!@#$", "host") == False + assert helpers.validators.soft_validate(" evilCorp.COM", "host") is True + assert helpers.validators.soft_validate("!@#$", "host") is False with pytest.raises(ValueError): assert helpers.validators.validate_host("!@#$") # ports assert helpers.validators.validate_port(666) == 666 assert helpers.validators.validate_port(666666) == 65535 - assert helpers.validators.soft_validate(666, "port") == True - assert helpers.validators.soft_validate("!@#$", "port") == False + assert helpers.validators.soft_validate(666, "port") is True + assert helpers.validators.soft_validate("!@#$", "port") is False with pytest.raises(ValueError): helpers.validators.validate_port("asdf") # top tcp ports @@ -437,20 +437,20 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): helpers.validators.validate_url_parsed(" httP://evilcorP.com/asdf?a=b&c=d#e").geturl() == "http://evilcorp.com/asdf" ) - assert helpers.validators.soft_validate(" httP://evilcorP.com/asdf?a=b&c=d#e", "url") == True - assert helpers.validators.soft_validate("!@#$", "url") == False + assert helpers.validators.soft_validate(" httP://evilcorP.com/asdf?a=b&c=d#e", "url") is True + assert helpers.validators.soft_validate("!@#$", "url") is False with pytest.raises(ValueError): helpers.validators.validate_url("!@#$") # severities assert helpers.validators.validate_severity(" iNfo") == "INFO" - assert helpers.validators.soft_validate(" iNfo", "severity") == True - assert helpers.validators.soft_validate("NOPE", "severity") == False + assert helpers.validators.soft_validate(" iNfo", "severity") is True + assert helpers.validators.soft_validate("NOPE", "severity") is False with pytest.raises(ValueError): helpers.validators.validate_severity("NOPE") # emails assert helpers.validators.validate_email(" 
bOb@eViLcorp.COM") == "bob@evilcorp.com" - assert helpers.validators.soft_validate(" bOb@eViLcorp.COM", "email") == True - assert helpers.validators.soft_validate("!@#$", "email") == False + assert helpers.validators.soft_validate(" bOb@eViLcorp.COM", "email") is True + assert helpers.validators.soft_validate("!@#$", "email") is False with pytest.raises(ValueError): helpers.validators.validate_email("!@#$") @@ -533,9 +533,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): truncated_filename.unlink() # misc DNS helpers - assert helpers.is_ptr("wsc-11-22-33-44-wat.evilcorp.com") == True - assert helpers.is_ptr("wsc-11-22-33-wat.evilcorp.com") == False - assert helpers.is_ptr("11wat.evilcorp.com") == False + assert helpers.is_ptr("wsc-11-22-33-44-wat.evilcorp.com") is True + assert helpers.is_ptr("wsc-11-22-33-wat.evilcorp.com") is False + assert helpers.is_ptr("11wat.evilcorp.com") is False ## NTLM testheader = "TlRMTVNTUAACAAAAHgAeADgAAAAVgorilwL+bvnVipUAAAAAAAAAAJgAmABWAAAACgBjRQAAAA9XAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAACAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgAAQAeAFcASQBOAC0AUwA0ADIATgBPAEIARABWAFQASwA4AAQAHgBXAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAADAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgABwAIAHUwOZlfoNgBAAAAAA==" @@ -613,8 +613,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert len(helpers.get_exception_chain(e)) == 2 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, KeyboardInterrupt)]) == 1 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 - assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) == True - assert helpers.in_exception_chain(e, (TypeError, OSError)) == False + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) is True + assert helpers.in_exception_chain(e, (TypeError, OSError)) is False test_ran = True assert test_ran test_ran = False @@ -627,9 +627,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert len(helpers.get_exception_chain(e)) == 2 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, AttributeError)]) == 1 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 - assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) == False - assert helpers.in_exception_chain(e, (KeyboardInterrupt, AttributeError)) == True - assert helpers.in_exception_chain(e, (AttributeError,)) == True + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) is False + assert helpers.in_exception_chain(e, (KeyboardInterrupt, AttributeError)) is True + assert helpers.in_exception_chain(e, (AttributeError,)) is True test_ran = True assert test_ran @@ -886,7 +886,7 @@ async def test_parameter_validation(helpers): if helpers.validate_parameter(p, "getparam"): assert p in getparam_valid_params and p not in getparam_invalid_params else: - assert p in getparam_invalid_params and not p in getparam_valid_params + assert p in getparam_invalid_params and p not in getparam_valid_params header_valid_params = { "name", @@ -917,7 +917,7 @@ async def test_parameter_validation(helpers): if helpers.validate_parameter(p, "header"): assert p in header_valid_params and p not in header_invalid_params else: - assert p in header_invalid_params and not p in header_valid_params + assert p in header_invalid_params and p not in header_valid_params cookie_valid_params = { "name", @@ -947,4 
+947,4 @@ async def test_parameter_validation(helpers): if helpers.validate_parameter(p, "cookie"): assert p in cookie_valid_params and p not in cookie_invalid_params else: - assert p in cookie_invalid_params and not p in cookie_valid_params + assert p in cookie_invalid_params and p not in cookie_valid_params diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index 62a03c0ef1..0fb8bb6b25 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ -140,7 +140,7 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) ) assert len(events) == 3 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal"]) @@ -148,14 +148,14 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) for _all_events in (all_events, all_events_nodups): assert len(_all_events) == 3 - assert 1 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) assert 0 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal"]) assert 0 == len([e for e in _all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) assert len(graph_output_events) == 3 - assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal"]) @@ -169,38 +169,38 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) ) assert len(events) == 4 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66"]) assert 0 == len([e for e in events if e.type == 
"DNS_NAME" and e.data == "test.notrealzies"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events) == 9 - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) - assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) + assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events_nodups) == 7 - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == 
"test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 6 - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) @@ -213,39 +213,39 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) ) assert len(events) == 7 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data 
== "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events) == 8 - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events_nodups) == 7 - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == 
len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) @@ -284,33 +284,33 @@ def custom_setup(scan): ) assert len(events) == 5 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and 
e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) - assert 1 == len([e for e in events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) assert len(all_events) == 8 - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 2]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 3]) - assert 1 == len([e for e in all_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 2]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 3]) + assert 1 == len([e for e in all_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) assert len(all_events_nodups) == 6 - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 3]) - assert 1 == len([e for e in all_events_nodups if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and 
e.internal is True and e.scope_distance == 3]) + assert 1 == len([e for e in all_events_nodups if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 3]) - assert 1 == len([e for e in _graph_output_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 3]) + assert 1 == len([e for e in _graph_output_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) # httpx/speculate IP_RANGE --> IP_ADDRESS --> OPEN_TCP_PORT --> URL, search distance = 0 events, all_events, all_events_nodups, graph_output_events, graph_output_batch_events = await do_scan( @@ -328,56 +328,56 @@ def custom_setup(scan): ) assert len(events) == 7 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in 
events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) assert len(all_events) == 14 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == 
"http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) assert len(all_events_nodups) == 12 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == 
"URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) # httpx/speculate IP_RANGE --> IP_ADDRESS --> OPEN_TCP_PORT --> URL, search distance = 0, in_scope_only = False @@ -400,70 +400,70 @@ def custom_setup(scan): # before, this event was speculated off the URL_UNVERIFIED, and that's what was used by httpx to generate the URL. it was graph-important. # now for whatever reason, httpx is visiting the url directly and the open port isn't being used # I don't know what changed exactly, but it doesn't matter, either way is equally valid and bbot is meant to be flexible this way. 
-    assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"])
-    assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.77:8888"])
     assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"])
     assert len(all_events) == 18
-    assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0])
-    assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0])
-    assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0])
+    assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0])
+    assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
     assert len(all_events_nodups) == 16
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
     for _graph_output_events in (graph_output_events, graph_output_batch_events):
         assert len(_graph_output_events) == 8
-        assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
         assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
         assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
         assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"])
         assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"])
         assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and "spider-danger" in e.tags])
-        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
+        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
         assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
+        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
         assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/"])
         assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"])
         assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/"])
@@ -483,78 +483,78 @@ def custom_setup(scan):
     )
     assert len(events) == 8
-    assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"])
-    assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/"])
     assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"])
     assert len(all_events) == 22
-    assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0])
-    assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0])
-    assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal == True and e.scope_distance == 3])
+    assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0])
+    assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0])
+    assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal is True and e.scope_distance == 3])
     assert len(all_events_nodups) == 20
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2])
-    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal == True and e.scope_distance == 3])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2])
+    assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal is True and e.scope_distance == 3])
     for _graph_output_events in (graph_output_events, graph_output_batch_events):
         assert len(_graph_output_events) == 8
-        assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0])
         assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0])
         assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0])
-        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0])
+        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0])
         assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"])
         assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"])
         assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1])
+        assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1])
         assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"])
-        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1])
+        assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1])
         assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/"])
         assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"])
         assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/"])
@@ -576,25 +576,25 @@ def custom_setup(scan):
     )
     assert len(events) == 12
-    assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888"])
-    assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/"])
-    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888"])
     assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889"])
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888"])
     assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889"])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.222:8889"])
-    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0])
+    assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0])
     assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.33:8889"])
     assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/"])
     assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44"])
@@ -604,71 +604,71 @@ def custom_setup(scan):
     assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888"])
     assert len(all_events) == 31
-    assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal == True and e.scope_distance == 0])
-    assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal == True and e.scope_distance == 0])
-    assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal == True and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal == True and e.scope_distance == 1])
-    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal == True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal is True and e.scope_distance == 0])
+    assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal is True and e.scope_distance == 0])
+    assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal is True and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal is True and e.scope_distance == 1])
+    assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal is True and e.scope_distance == 1])
     assert len(all_events_nodups) == 27
-    assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0])
-
assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal == True 
and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and 
e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal is True and e.scope_distance == 1]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 12 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/"]) @@ -679,9 +679,9 @@ def custom_setup(scan): assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888"]) assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889"]) - assert 1 == len([e for 
e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.222:8889"]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.33:8889"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44"]) @@ -699,49 +699,49 @@ def custom_setup(scan): ) assert len(events) == 7 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert" and "affiliate" in e.tags]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert" and "affiliate" in e.tags]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) assert 0 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) assert len(all_events) == 13 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == False and e.scope_distance == 0]) - assert 
1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal is True and e.scope_distance == 1 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal is True and e.scope_distance == 2 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) assert len(all_events_nodups) == 11 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and 
e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal is True and e.scope_distance == 1 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal is True and e.scope_distance == 2 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) - assert 
1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999"]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) @@ -756,43 +756,43 @@ def custom_setup(scan): ) assert len(events) == 4 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) assert len(all_events) == 11 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 2]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 2]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == 
"test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 3 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 2]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 2]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is True and e.scope_distance == 3 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) assert len(all_events_nodups) == 9 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 3 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal 
is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is True and e.scope_distance == 3 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 6 - assert 1 == len([e for e in graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) - assert 1 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index 4212340695..83ba778e40 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -23,27 +23,27 @@ async def test_modules_basic_checks(events, httpx_mock): localhost = scan.make_event("127.0.0.1", parent=scan.root_event) # ip addresses should be accepted result, reason = base_output_module_1._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # internal events should be rejected localhost._internal = True result, reason = base_output_module_1._event_precheck(localhost) - assert result == False + assert result is False assert reason == 
"_internal is True" localhost._internal = False result, reason = base_output_module_1._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # unwatched events should be rejected dns_name = scan.make_event("evilcorp.com", parent=scan.root_event) result, reason = base_output_module_1._event_precheck(dns_name) - assert result == False + assert result is False assert reason == "its type is not in watched_events" # omitted events matching watched types should be accepted url_unverified = scan.make_event("http://127.0.0.1", "URL_UNVERIFIED", parent=scan.root_event) url_unverified._omit = True result, reason = base_output_module_1._event_precheck(url_unverified) - assert result == True + assert result is True assert reason == "its type is explicitly in watched_events" base_output_module_2 = BaseOutputModule(scan) @@ -51,42 +51,42 @@ async def test_modules_basic_checks(events, httpx_mock): # normal events should be accepted localhost = scan.make_event("127.0.0.1", parent=scan.root_event) result, reason = base_output_module_2._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # internal events should be rejected localhost._internal = True result, reason = base_output_module_2._event_precheck(localhost) - assert result == False + assert result is False assert reason == "_internal is True" localhost._internal = False result, reason = base_output_module_2._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # omitted events should be rejected localhost._omit = True result, reason = base_output_module_2._event_precheck(localhost) - assert result == False + assert result is False assert reason == "_omit is True" # normal event should be accepted url_unverified = scan.make_event("http://127.0.0.1", "URL_UNVERIFIED", parent=scan.root_event) result, reason = base_output_module_2._event_precheck(url_unverified) - assert result == True + assert result is True assert reason == "precheck succeeded" # omitted event types should be marked during scan egress await scan.egress_module.handle_event(url_unverified) result, reason = base_output_module_2._event_precheck(url_unverified) - assert result == False + assert result is False assert reason == "_omit is True" # omitted events that are targets should be accepted dns_name = scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) dns_name._omit = True result, reason = base_output_module_2._event_precheck(dns_name) - assert result == False + assert result is False assert reason == "_omit is True" # omitted results that are targets should be accepted dns_name.add_tag("target") result, reason = base_output_module_2._event_precheck(dns_name) - assert result == True + assert result is True assert reason == "it's a target" # common event filtering tests @@ -97,18 +97,18 @@ async def test_modules_basic_checks(events, httpx_mock): # base cases base_module._watched_events = None base_module.watched_events = ["*"] - assert base_module._event_precheck(events.emoji)[0] == True + assert base_module._event_precheck(events.emoji)[0] is True base_module._watched_events = None base_module.watched_events = ["IP_ADDRESS"] - assert base_module._event_precheck(events.ipv4)[0] == True - assert base_module._event_precheck(events.domain)[0] == False - assert base_module._event_precheck(events.localhost)[0] == True - assert base_module._event_precheck(localhost2)[0] == True + assert 
base_module._event_precheck(events.ipv4)[0] is True + assert base_module._event_precheck(events.domain)[0] is False + assert base_module._event_precheck(events.localhost)[0] is True + assert base_module._event_precheck(localhost2)[0] is True # target only base_module.target_only = True - assert base_module._event_precheck(localhost2)[0] == False + assert base_module._event_precheck(localhost2)[0] is False localhost2.add_tag("target") - assert base_module._event_precheck(localhost2)[0] == True + assert base_module._event_precheck(localhost2)[0] is True base_module.target_only = False # in scope only @@ -147,11 +147,11 @@ async def test_modules_basic_checks(events, httpx_mock): for flag in flags: all_flags.add(flag) if preloaded["type"] == "scan": - assert ("active" in flags and not "passive" in flags) or ( - not "active" in flags and "passive" in flags + assert ("active" in flags and "passive" not in flags) or ( + "active" not in flags and "passive" in flags ), f'module "{module_name}" must have either "active" or "passive" flag' - assert ("safe" in flags and not "aggressive" in flags) or ( - not "safe" in flags and "aggressive" in flags + assert ("safe" in flags and "aggressive" not in flags) or ( + "safe" not in flags and "aggressive" in flags ), f'module "{module_name}" must have either "safe" or "aggressive" flag' assert not ( "web-basic" in flags and "web-thorough" in flags @@ -174,7 +174,7 @@ async def test_modules_basic_checks(events, httpx_mock): assert type(watched_events) == list assert type(produced_events) == list - if not preloaded.get("type", "") in ("internal",): + if preloaded.get("type", "") not in ("internal",): assert watched_events, f"{module_name}.watched_events must not be empty" assert type(watched_events) == list, f"{module_name}.watched_events must be of type list" assert type(produced_events) == list, f"{module_name}.produced_events must be of type list" @@ -268,35 +268,35 @@ class mod_domain_only(BaseModule): valid_5, reason_5 = await module._event_postcheck(url_5) if mod_name == "mod_normal": - assert valid_1 == True - assert valid_2 == True - assert valid_3 == True - assert valid_4 == True - assert valid_5 == True + assert valid_1 is True + assert valid_2 is True + assert valid_3 is True + assert valid_4 is True + assert valid_5 is True elif mod_name == "mod_host_only": - assert valid_1 == True - assert valid_2 == False + assert valid_1 is True + assert valid_2 is False assert "per_host_only=True" in reason_2 - assert valid_3 == False + assert valid_3 is False assert "per_host_only=True" in reason_3 - assert valid_4 == True - assert valid_5 == True + assert valid_4 is True + assert valid_5 is True elif mod_name == "mod_hostport_only": - assert valid_1 == True - assert valid_2 == False + assert valid_1 is True + assert valid_2 is False assert "per_hostport_only=True" in reason_2 - assert valid_3 == True - assert valid_4 == True - assert valid_5 == True + assert valid_3 is True + assert valid_4 is True + assert valid_5 is True elif mod_name == "mod_domain_only": - assert valid_1 == True - assert valid_2 == False + assert valid_1 is True + assert valid_2 is False assert "per_domain_only=True" in reason_2 - assert valid_3 == False + assert valid_3 is False assert "per_domain_only=True" in reason_3 - assert valid_4 == False + assert valid_4 is False assert "per_domain_only=True" in reason_4 - assert valid_5 == True + assert valid_5 is True await scan._cleanup() @@ -331,15 +331,15 @@ async def test_modules_basic_perdomainonly(bbot_scanner, monkeypatch): valid_1, 
reason_1 = await module._event_postcheck(url_1) valid_2, reason_2 = await module._event_postcheck(url_2) - if module.per_domain_only == True: - assert valid_1 == True - assert valid_2 == False + if module.per_domain_only is True: + assert valid_1 is True + assert valid_2 is False assert hash("evilcorp.com") in module._per_host_tracker assert reason_2 == "per_domain_only enabled and already seen domain" else: - assert valid_1 == True - assert valid_2 == True + assert valid_1 is True + assert valid_2 is True await per_domain_scan._cleanup() @@ -397,7 +397,6 @@ async def handle_event(self, event): "ORG_STUB": 1, "URL_UNVERIFIED": 1, "FINDING": 1, - "ORG_STUB": 1, } assert set(scan.stats.module_stats) == {"speculate", "host", "TARGET", "python", "dummy", "dnsresolve"} diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py index 1b11529eae..c818026783 100644 --- a/bbot/test/test_step_1/test_presets.py +++ b/bbot/test/test_step_1/test_presets.py @@ -182,7 +182,7 @@ def test_preset_scope(): blank_preset = blank_preset.bake() assert not blank_preset.target.seeds assert not blank_preset.target.whitelist - assert blank_preset.strict_scope == False + assert blank_preset.strict_scope is False preset1 = Preset( "evilcorp.com", @@ -194,13 +194,13 @@ def test_preset_scope(): # make sure target logic works as expected assert "evilcorp.com" in preset1_baked.target.seeds - assert not "evilcorp.com" in preset1_baked.target.whitelist + assert "evilcorp.com" not in preset1_baked.target.whitelist assert "asdf.evilcorp.com" in preset1_baked.target.seeds - assert not "asdf.evilcorp.com" in preset1_baked.target.whitelist + assert "asdf.evilcorp.com" not in preset1_baked.target.whitelist assert "asdf.evilcorp.ce" in preset1_baked.whitelist assert "evilcorp.ce" in preset1_baked.whitelist assert "test.www.evilcorp.ce" in preset1_baked.blacklist - assert not "evilcorp.ce" in preset1_baked.blacklist + assert "evilcorp.ce" not in preset1_baked.blacklist assert preset1_baked.in_scope("www.evilcorp.ce") assert not preset1_baked.in_scope("evilcorp.com") assert not preset1_baked.in_scope("asdf.test.www.evilcorp.ce") @@ -228,20 +228,20 @@ def test_preset_scope(): assert "www.evilcorp.ce" in preset1_baked.target.seeds assert "evilcorp.org" in preset1_baked.target.seeds # strict scope is enabled - assert not "asdf.www.evilcorp.ce" in preset1_baked.target.seeds - assert not "asdf.evilcorp.org" in preset1_baked.target.seeds - assert not "asdf.evilcorp.com" in preset1_baked.target.seeds - assert not "asdf.www.evilcorp.ce" in preset1_baked.target.seeds + assert "asdf.www.evilcorp.ce" not in preset1_baked.target.seeds + assert "asdf.evilcorp.org" not in preset1_baked.target.seeds + assert "asdf.evilcorp.com" not in preset1_baked.target.seeds + assert "asdf.www.evilcorp.ce" not in preset1_baked.target.seeds assert "evilcorp.ce" in preset1_baked.whitelist assert "evilcorp.de" in preset1_baked.whitelist - assert not "asdf.evilcorp.de" in preset1_baked.whitelist - assert not "asdf.evilcorp.ce" in preset1_baked.whitelist + assert "asdf.evilcorp.de" not in preset1_baked.whitelist + assert "asdf.evilcorp.ce" not in preset1_baked.whitelist # blacklist should be merged, strict scope does not apply assert "test.www.evilcorp.ce" in preset1_baked.blacklist assert "test.www.evilcorp.de" in preset1_baked.blacklist assert "asdf.test.www.evilcorp.ce" in preset1_baked.blacklist assert "asdf.test.www.evilcorp.de" in preset1_baked.blacklist - assert not "asdf.test.www.evilcorp.org" in preset1_baked.blacklist + 
assert "asdf.test.www.evilcorp.org" not in preset1_baked.blacklist # only the base domain of evilcorp.de should be in scope assert not preset1_baked.in_scope("evilcorp.com") assert not preset1_baked.in_scope("evilcorp.org") @@ -290,7 +290,7 @@ def test_preset_scope(): assert not preset_nowhitelist_baked.in_scope("1.2.3.4/24") assert "www.evilcorp.org" in preset_whitelist_baked.target.seeds - assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist + assert "www.evilcorp.org" not in preset_whitelist_baked.target.whitelist assert "1.2.3.4" in preset_whitelist_baked.whitelist assert not preset_whitelist_baked.in_scope("www.evilcorp.org") assert not preset_whitelist_baked.in_scope("www.evilcorp.de") @@ -329,8 +329,8 @@ def test_preset_scope(): assert set([e.data for e in preset_whitelist_baked.whitelist]) == {"1.2.3.0/24"} assert "www.evilcorp.org" in preset_whitelist_baked.seeds assert "www.evilcorp.com" in preset_whitelist_baked.seeds - assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist - assert not "www.evilcorp.com" in preset_whitelist_baked.target.whitelist + assert "www.evilcorp.org" not in preset_whitelist_baked.target.whitelist + assert "www.evilcorp.com" not in preset_whitelist_baked.target.whitelist assert "1.2.3.4" in preset_whitelist_baked.whitelist assert not preset_whitelist_baked.in_scope("www.evilcorp.org") assert not preset_whitelist_baked.in_scope("www.evilcorp.com") @@ -387,30 +387,30 @@ async def test_preset_logging(): try: silent_preset = Preset(silent=True) - assert silent_preset.silent == True - assert silent_preset.debug == False - assert silent_preset.verbose == False + assert silent_preset.silent is True + assert silent_preset.debug is False + assert silent_preset.verbose is False assert original_log_level == CORE.logger.log_level debug_preset = Preset(debug=True) - assert debug_preset.silent == False - assert debug_preset.debug == True - assert debug_preset.verbose == False + assert debug_preset.silent is False + assert debug_preset.debug is True + assert debug_preset.verbose is False assert original_log_level == CORE.logger.log_level verbose_preset = Preset(verbose=True) - assert verbose_preset.silent == False - assert verbose_preset.debug == False - assert verbose_preset.verbose == True + assert verbose_preset.silent is False + assert verbose_preset.debug is False + assert verbose_preset.verbose is True assert original_log_level == CORE.logger.log_level # test conflicting verbosity levels silent_and_verbose = Preset(silent=True, verbose=True) - assert silent_and_verbose.silent == True - assert silent_and_verbose.debug == False - assert silent_and_verbose.verbose == True + assert silent_and_verbose.silent is True + assert silent_and_verbose.debug is False + assert silent_and_verbose.verbose is True baked = silent_and_verbose.bake() - assert baked.silent == True - assert baked.debug == False - assert baked.verbose == False + assert baked.silent is True + assert baked.debug is False + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = silent_and_verbose.bake(scan=scan) assert baked.core.logger.log_level == logging.CRITICAL @@ -420,13 +420,13 @@ async def test_preset_logging(): assert CORE.logger.log_level == original_log_level silent_and_debug = Preset(silent=True, debug=True) - assert silent_and_debug.silent == True - assert silent_and_debug.debug == True - assert silent_and_debug.verbose == False + assert silent_and_debug.silent is True + assert silent_and_debug.debug is True + assert 
silent_and_debug.verbose is False baked = silent_and_debug.bake() - assert baked.silent == True - assert baked.debug == False - assert baked.verbose == False + assert baked.silent is True + assert baked.debug is False + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = silent_and_debug.bake(scan=scan) assert baked.core.logger.log_level == logging.CRITICAL @@ -436,13 +436,13 @@ async def test_preset_logging(): assert CORE.logger.log_level == original_log_level debug_and_verbose = Preset(verbose=True, debug=True) - assert debug_and_verbose.silent == False - assert debug_and_verbose.debug == True - assert debug_and_verbose.verbose == True + assert debug_and_verbose.silent is False + assert debug_and_verbose.debug is True + assert debug_and_verbose.verbose is True baked = debug_and_verbose.bake() - assert baked.silent == False - assert baked.debug == True - assert baked.verbose == False + assert baked.silent is False + assert baked.debug is True + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = debug_and_verbose.bake(scan=scan) assert baked.core.logger.log_level == logging.DEBUG @@ -452,13 +452,13 @@ async def test_preset_logging(): assert CORE.logger.log_level == original_log_level all_preset = Preset(verbose=True, debug=True, silent=True) - assert all_preset.silent == True - assert all_preset.debug == True - assert all_preset.verbose == True + assert all_preset.silent is True + assert all_preset.debug is True + assert all_preset.verbose is True baked = all_preset.bake() - assert baked.silent == True - assert baked.debug == False - assert baked.verbose == False + assert baked.silent is True + assert baked.debug is False + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = all_preset.bake(scan=scan) assert baked.core.logger.log_level == logging.CRITICAL @@ -688,7 +688,7 @@ class TestModule5(BaseModule): ) preset = Preset.from_yaml_string( - f""" + """ modules: - testmodule5 """ @@ -896,9 +896,9 @@ def get_module_flags(p): dnsbrute_flags = preset.preloaded_module("dnsbrute").get("flags", []) assert "subdomain-enum" in dnsbrute_flags assert "active" in dnsbrute_flags - assert not "passive" in dnsbrute_flags + assert "passive" not in dnsbrute_flags assert "aggressive" in dnsbrute_flags - assert not "safe" in dnsbrute_flags + assert "safe" not in dnsbrute_flags assert "dnsbrute" in [x[0] for x in module_flags] assert "certspotter" in [x[0] for x in module_flags] assert "c99" in [x[0] for x in module_flags] @@ -912,7 +912,7 @@ def get_module_flags(p): assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) assert "chaos" in [x[0] for x in module_flags] - assert not "httpx" in [x[0] for x in module_flags] + assert "httpx" not in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -923,7 +923,7 @@ def get_module_flags(p): assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) assert "chaos" in [x[0] for x in module_flags] - assert not "httpx" in [x[0] for x in module_flags] + assert "httpx" not in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -933,7 +933,7 @@ def 
get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_modules=["dnsbrute"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert "httpx" in [x[0] for x in module_flags] assert any("passive" in flags for module, flags in module_flags) assert any("active" in flags for module, flags in module_flags) @@ -944,7 +944,7 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], require_flags=["safe", "passive"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert all("passive" in flags and "safe" in flags for module, flags in module_flags) assert all("active" not in flags and "aggressive" not in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) @@ -954,7 +954,7 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_flags=["aggressive", "active"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert all("passive" in flags and "safe" in flags for module, flags in module_flags) assert all("active" not in flags and "aggressive" not in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) @@ -964,9 +964,9 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_modules=["dnsbrute", "c99"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert "certspotter" in [x[0] for x in module_flags] - assert not "c99" in [x[0] for x in module_flags] + assert "c99" not in [x[0] for x in module_flags] assert any("passive" in flags for module, flags in module_flags) assert any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) diff --git a/bbot/test/test_step_1/test_scan.py b/bbot/test/test_step_1/test_scan.py index f5f8458262..b0f908a7b9 100644 --- a/bbot/test/test_step_1/test_scan.py +++ b/bbot/test/test_step_1/test_scan.py @@ -100,13 +100,13 @@ async def test_url_extension_handling(bbot_scanner): assert "blacklisted" not in bad_event.tags assert "httpx-only" not in httpx_event.tags result = await scan.ingress_module.handle_event(good_event) - assert result == None + assert result is None result, reason = await scan.ingress_module.handle_event(bad_event) - assert result == False + assert result is False assert reason == "event is blacklisted" assert "blacklisted" in bad_event.tags result = await scan.ingress_module.handle_event(httpx_event) - assert result == None + assert result is None assert "httpx-only" in httpx_event.tags await scan._cleanup() diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 0513d6abed..51343db85a 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -106,24 +106,24 @@ async def test_target(bbot_scanner): assert scan1.target.whitelist.get("publicapis.org") is None target = RadixTarget("evilcorp.com") - assert not "com" in target + assert "com" not in target assert 
"evilcorp.com" in target assert "www.evilcorp.com" in target strict_target = RadixTarget("evilcorp.com", strict_dns_scope=True) - assert not "com" in strict_target + assert "com" not in strict_target assert "evilcorp.com" in strict_target - assert not "www.evilcorp.com" in strict_target + assert "www.evilcorp.com" not in strict_target target = RadixTarget() target.add("evilcorp.com") - assert not "com" in target + assert "com" not in target assert "evilcorp.com" in target assert "www.evilcorp.com" in target strict_target = RadixTarget(strict_dns_scope=True) strict_target.add("evilcorp.com") - assert not "com" in strict_target + assert "com" not in strict_target assert "evilcorp.com" in strict_target - assert not "www.evilcorp.com" in strict_target + assert "www.evilcorp.com" not in strict_target # test target hashing @@ -293,7 +293,7 @@ async def test_target(bbot_scanner): assert target_dict["seeds"] == ["1.2.3.0/24", "bob@fdsa.evilcorp.net", "http://www.evilcorp.net/"] assert target_dict["whitelist"] == ["bob@www.evilcorp.com", "evilcorp.com", "evilcorp.net"] assert target_dict["blacklist"] == ["1.2.3.4", "4.3.2.0/24", "bob@asdf.evilcorp.net", "http://1.2.3.4/"] - assert target_dict["strict_scope"] == False + assert target_dict["strict_scope"] is False assert target_dict["hash"] == "b36955a8238a71842fc5f23b11110c26ea07d451" assert target_dict["seed_hash"] == "560af51d1f3d69bc5c156fc270b28497fe52dec1" assert target_dict["whitelist_hash"] == "8ed0a7368e6d34630e1cfd419d2a73767debc4c4" @@ -321,8 +321,8 @@ async def test_target(bbot_scanner): assert set(target.hosts) == {"evilcorp.co.uk", "www.evilcorp.co.uk"} assert "evilcorp.co.uk" in target assert "www.evilcorp.co.uk" in target - assert not "api.evilcorp.co.uk" in target - assert not "api.www.evilcorp.co.uk" in target + assert "api.evilcorp.co.uk" not in target + assert "api.www.evilcorp.co.uk" not in target # test 'single' boolean argument target = ScanSeeds("http://evilcorp.com", "evilcorp.com:443") @@ -361,13 +361,13 @@ async def test_blacklist_regex(bbot_scanner, bbot_httpserver): blacklist.get("www.evil.com", raise_error=True) assert "test.com" in blacklist assert "http://evilcorp.com/test.aspx" in blacklist - assert not "http://tes.com" in blacklist + assert "http://tes.com" not in blacklist blacklist = ScanBlacklist("evilcorp.com", r"RE:[0-9]{6}\.aspx$") assert "http://evilcorp.com" in blacklist - assert not "http://test.com/123456" in blacklist - assert not "http://test.com/12345.aspx?a=asdf" in blacklist - assert not "http://test.com/asdf/123456.aspx/asdf" in blacklist + assert "http://test.com/123456" not in blacklist + assert "http://test.com/12345.aspx?a=asdf" not in blacklist + assert "http://test.com/asdf/123456.aspx/asdf" not in blacklist assert "http://test.com/asdf/123456.aspx?a=asdf" in blacklist assert "http://test.com/asdf/123456.aspx" in blacklist diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 0b3011d572..351a17318a 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -211,14 +211,14 @@ async def test_web_helpers(bbot_scanner, bbot_httpserver, httpx_mock): url = bbot_httpserver.url_for(path) bbot_httpserver.expect_request(uri=path).respond_with_data(download_content, status=200) webpage = await scan1.helpers.request(url) - assert webpage, f"Webpage is False" + assert webpage, "Webpage is False" soup = scan1.helpers.beautifulsoup(webpage, "html.parser") - assert soup, f"Soup is False" + assert soup, "Soup is False" # pretty_print = 
soup.prettify() # assert pretty_print, f"PrettyPrint is False" # scan1.helpers.log.info(f"{pretty_print}") html_text = soup.find(text="Example Domain") - assert html_text, f"Find HTML Text is False" + assert html_text, "Find HTML Text is False" # 404 path = "/test_http_helpers_download_404" @@ -389,7 +389,7 @@ async def test_web_http_compare(httpx_mock, bbot_scanner): await compare_helper.compare("http://www.example.com", check_reflection=True) compare_helper.compare_body({"asdf": "fdsa"}, {"fdsa": "asdf"}) for mode in ("getparam", "header", "cookie"): - assert await compare_helper.canary_check("http://www.example.com", mode=mode) == True + assert await compare_helper.canary_check("http://www.example.com", mode=mode) is True await scan._cleanup() diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index 47038e9aef..3f6b5dd768 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -99,15 +99,15 @@ async def module_test( module_test = self.ModuleTest( self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys ) - self.log.debug(f"Mocking DNS") + self.log.debug("Mocking DNS") await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.88"]}}) - self.log.debug(f"Executing setup_before_prep()") + self.log.debug("Executing setup_before_prep()") await self.setup_before_prep(module_test) - self.log.debug(f"Executing scan._prep()") + self.log.debug("Executing scan._prep()") await module_test.scan._prep() - self.log.debug(f"Executing setup_after_prep()") + self.log.debug("Executing setup_after_prep()") await self.setup_after_prep(module_test) - self.log.debug(f"Starting scan") + self.log.debug("Starting scan") module_test.events = [e async for e in module_test.scan.async_start()] self.log.debug(f"Finished {module_test.name} module test") yield module_test @@ -123,7 +123,7 @@ async def test_module_run(self, module_test): if len(tasks): module_test.log.info(f"Unfinished tasks detected: {tasks}") else: - module_test.log.info(f"No unfinished tasks detected") + module_test.log.info("No unfinished tasks detected") def check(self, module_test, events): assert False, f"Must override {self.name}.check()" diff --git a/bbot/test/test_step_2/module_tests/test_module_anubisdb.py b/bbot/test/test_step_2/module_tests/test_module_anubisdb.py index dbebf86212..7b1bc6659d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_anubisdb.py +++ b/bbot/test/test_step_2/module_tests/test_module_anubisdb.py @@ -5,7 +5,7 @@ class TestAnubisdb(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.module.abort_if = lambda e: False module_test.httpx_mock.add_response( - url=f"https://jldc.me/anubis/subdomains/blacklanternsecurity.com", + url="https://jldc.me/anubis/subdomains/blacklanternsecurity.com", json=["asdf.blacklanternsecurity.com", "zzzz.blacklanternsecurity.com"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_azure_realm.py b/bbot/test/test_step_2/module_tests/test_module_azure_realm.py index 557c362ee8..fa06726158 100644 --- a/bbot/test/test_step_2/module_tests/test_module_azure_realm.py +++ b/bbot/test/test_step_2/module_tests/test_module_azure_realm.py @@ -22,7 +22,7 @@ class TestAzure_Realm(ModuleTestBase): async def setup_after_prep(self, module_test): await module_test.mock_dns({"evilcorp.com": {"A": ["127.0.0.5"]}}) module_test.httpx_mock.add_response( - 
url=f"https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", + url="https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", json=self.response_json, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_bevigil.py b/bbot/test/test_step_2/module_tests/test_module_bevigil.py index 328e213d2c..7e616752fa 100644 --- a/bbot/test/test_step_2/module_tests/test_module_bevigil.py +++ b/bbot/test/test_step_2/module_tests/test_module_bevigil.py @@ -9,7 +9,7 @@ class TestBeVigil(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", match_headers={"X-Access-Token": "asdf"}, json={ "domain": "blacklanternsecurity.com", @@ -19,7 +19,7 @@ async def setup_after_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", json={"domain": "blacklanternsecurity.com", "urls": ["https://asdf.blacklanternsecurity.com"]}, ) @@ -35,7 +35,7 @@ class TestBeVigilMultiKey(TestBeVigil): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", match_headers={"X-Access-Token": "fdsa"}, json={ "domain": "blacklanternsecurity.com", @@ -46,6 +46,6 @@ async def setup_after_prep(self, module_test): ) module_test.httpx_mock.add_response( match_headers={"X-Access-Token": "asdf"}, - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", json={"domain": "blacklanternsecurity.com", "urls": ["https://asdf.blacklanternsecurity.com"]}, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_binaryedge.py b/bbot/test/test_step_2/module_tests/test_module_binaryedge.py index 95b4ae7a71..348e2efb24 100644 --- a/bbot/test/test_step_2/module_tests/test_module_binaryedge.py +++ b/bbot/test/test_step_2/module_tests/test_module_binaryedge.py @@ -6,7 +6,7 @@ class TestBinaryEdge(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.binaryedge.io/v2/query/domains/subdomain/blacklanternsecurity.com", + url="https://api.binaryedge.io/v2/query/domains/subdomain/blacklanternsecurity.com", match_headers={"X-Key": "asdf"}, json={ "query": "blacklanternsecurity.com", @@ -19,7 +19,7 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url=f"https://api.binaryedge.io/v2/user/subscription", + url="https://api.binaryedge.io/v2/user/subscription", match_headers={"X-Key": "asdf"}, json={ "subscription": {"name": "Free"}, diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py b/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py index b566e9a826..7a5499b2e0 100644 --- a/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py @@ -82,8 +82,8 @@ def check(self, module_test, events): if e.type == "FINDING" and str(e.module) == self.module_name: url = e.data.get("url", "") assert self.random_bucket_2 in url - assert not self.random_bucket_1 in url - assert not self.random_bucket_3 in url + 
assert self.random_bucket_1 not in url + assert self.random_bucket_3 not in url # make sure bucket mutations were found assert any( e.type == "STORAGE_BUCKET" diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py b/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py index a3c866c08e..3b172eaaba 100644 --- a/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py @@ -21,7 +21,7 @@ class TestBucket_Azure_NoDup(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://tesla.blob.core.windows.net/tesla?restype=container", + url="https://tesla.blob.core.windows.net/tesla?restype=container", text="", ) await module_test.mock_dns( diff --git a/bbot/test/test_step_2/module_tests/test_module_builtwith.py b/bbot/test/test_step_2/module_tests/test_module_builtwith.py index 0fc4de9d56..d11c8940d2 100644 --- a/bbot/test/test_step_2/module_tests/test_module_builtwith.py +++ b/bbot/test/test_step_2/module_tests/test_module_builtwith.py @@ -6,7 +6,7 @@ class TestBuiltWith(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.builtwith.com/v20/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes", + url="https://api.builtwith.com/v20/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes", json={ "Results": [ { @@ -91,7 +91,7 @@ async def setup_after_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url=f"https://api.builtwith.com/redirect1/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com", + url="https://api.builtwith.com/redirect1/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com", json={ "Lookup": "blacklanternsecurity.com", "Inbound": [ diff --git a/bbot/test/test_step_2/module_tests/test_module_c99.py b/bbot/test/test_step_2/module_tests/test_module_c99.py index 284b76e1e2..5a0bd6e8de 100644 --- a/bbot/test/test_step_2/module_tests/test_module_c99.py +++ b/bbot/test/test_step_2/module_tests/test_module_c99.py @@ -51,7 +51,7 @@ async def custom_callback(request): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == False + assert module_test.module.errored is False # assert module_test.module._api_request_failures == 4 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com"} @@ -82,7 +82,7 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 8 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com", "evilcorp.com"} @@ -106,7 +106,7 @@ class TestC99AbortThreshold3(TestC99AbortThreshold2): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 12 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { @@ -138,7 +138,7 @@ class 
TestC99AbortThreshold4(TestC99AbortThreshold3): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == True + assert module_test.module.errored is True assert module_test.module._api_request_failures == 13 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { diff --git a/bbot/test/test_step_2/module_tests/test_module_columbus.py b/bbot/test/test_step_2/module_tests/test_module_columbus.py index 55d456ce31..b91b532d71 100644 --- a/bbot/test/test_step_2/module_tests/test_module_columbus.py +++ b/bbot/test/test_step_2/module_tests/test_module_columbus.py @@ -4,7 +4,7 @@ class TestColumbus(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://columbus.elmasy.com/api/lookup/blacklanternsecurity.com?days=365", + url="https://columbus.elmasy.com/api/lookup/blacklanternsecurity.com?days=365", json=["asdf", "zzzz"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_credshed.py b/bbot/test/test_step_2/module_tests/test_module_credshed.py index 44b9133c97..a6b1e65c51 100644 --- a/bbot/test/test_step_2/module_tests/test_module_credshed.py +++ b/bbot/test/test_step_2/module_tests/test_module_credshed.py @@ -58,12 +58,12 @@ class TestCredshed(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://credshed.com/api/auth", + url="https://credshed.com/api/auth", json=credshed_auth_response, method="POST", ) module_test.httpx_mock.add_response( - url=f"https://credshed.com/api/search", + url="https://credshed.com/api/search", json=credshed_response, method="POST", ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dehashed.py b/bbot/test/test_step_2/module_tests/test_module_dehashed.py index 0ac91c3b85..f642a444b6 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dehashed.py +++ b/bbot/test/test_step_2/module_tests/test_module_dehashed.py @@ -45,7 +45,7 @@ class TestDehashed(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.dehashed.com/search?query=domain:blacklanternsecurity.com&size=10000&page=1", + url="https://api.dehashed.com/search?query=domain:blacklanternsecurity.com&size=10000&page=1", json=dehashed_domain_response, ) await module_test.mock_dns( diff --git a/bbot/test/test_step_2/module_tests/test_module_digitorus.py b/bbot/test/test_step_2/module_tests/test_module_digitorus.py index fc95a82c76..a683a17d8f 100644 --- a/bbot/test/test_step_2/module_tests/test_module_digitorus.py +++ b/bbot/test/test_step_2/module_tests/test_module_digitorus.py @@ -11,7 +11,7 @@ class TestDigitorus(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://certificatedetails.com/blacklanternsecurity.com", + url="https://certificatedetails.com/blacklanternsecurity.com", text=self.web_response, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py index a5ba1eba73..0cbee8440e 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py @@ -56,39 +56,39 @@ async def new_run_live(*command, check=False, text=True, **kwargs): event = module_test.scan.make_event("blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event) 
event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "asdf.test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "wat.asdf.test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == False - assert reason == f"subdomain depth of *.asdf.test.www.blacklanternsecurity.com (4) > max_depth (3)" + assert result is False + assert reason == "subdomain depth of *.asdf.test.www.blacklanternsecurity.com (4) > max_depth (3)" event = module_test.scan.make_event( "hmmm.ptr1234.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == False - assert reason == f'"ptr1234.blacklanternsecurity.com" looks like an autogenerated PTR' + assert result is False + assert reason == '"ptr1234.blacklanternsecurity.com" looks like an autogenerated PTR' def check(self, module_test, events): assert len(events) == 4 diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py index 6bf045d5c4..447f8baba1 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py @@ -4,12 +4,12 @@ class TestDNSDumpster(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://dnsdumpster.com", + url="https://dnsdumpster.com", headers={"Set-Cookie": "csrftoken=asdf"}, content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
[... mocked DNSdumpster landing-page HTML omitted: page shell, marketing copy ("Open Source Intelligence for Networks", Attack/Defend/Learn blurbs), FAQ text, and mailing-list boilerplate with no test-relevant data ...]
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n\n\n\n\n\n\n \n\n', ) module_test.httpx_mock.add_response( - url=f"https://dnsdumpster.com/", + url="https://dnsdumpster.com/", method="POST", content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
[... mocked DNSdumpster results-page HTML omitted: "Showing results for blacklanternsecurity.com" with DNS servers ns01/ns02.domaincontrol.com (GODADDY-DNS), MX asdf.blacklanternsecurity.com.mail.protection.outlook.com (104.47.55.138), SPF and google-site-verification TXT records, and A records pointing blacklanternsecurity.com at 185.199.108.153 (GitHub Pages/FASTLY) and asdf.blacklanternsecurity.com at DIGITALOCEAN-ASN IPs 143.244.156.80, 64.227.8.231, 192.34.56.157, 192.241.216.208, 167.71.95.71, and 157.245.247.197 ...]
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n \n \n \n\n \n\n\n\n\n\n\n\n\n\n\n\n\n \n\n', ) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index bccbe3d730..cd709893a1 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -60,8 +60,8 @@ def check(self, module_test, events): assert "www6.test.notreal" in event_data assert "www7.test.notreal" in event_data assert "www8.test.notreal" in event_data - assert not "http://127.0.0.1:8888/a_relative.js" in event_data - assert not "http://127.0.0.1:8888/link_relative.js" in event_data + assert "http://127.0.0.1:8888/a_relative.js" not in event_data + assert "http://127.0.0.1:8888/link_relative.js" not in event_data assert "http://127.0.0.1:8888/a_relative.txt" in event_data assert "http://127.0.0.1:8888/link_relative.txt" in event_data @@ -220,7 +220,7 @@ def check(self, module_test, events): [e for e in events if e.type == "FINDING" and e.data["description"] == "Non-HTTP URI: smb://127.0.0.1"] ) assert 1 == len( - [e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SMB" and not "port" in e.data] + [e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SMB" and "port" not in e.data] ) assert 0 == len([e for e in events if e.type == "FINDING" and "ssh://127.0.0.1" in e.data["description"]]) assert 0 == len([e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SSH"]) @@ -711,7 +711,7 @@ def check(self, module_test, events): if ( str(e.module) == "dummy_module" and "spider-danger" not in e.tags - and not "spider-max" in e.tags + and "spider-max" not in e.tags ): found_url_unverified_dummy = True if e.type == "URL" and e.data == "http://127.0.0.1:8888/spider": @@ -868,8 +868,8 @@ def check(self, module_test, events): if e.data["name"] == "COOKIE2": found_second_cookie = True - assert found_first_cookie == True - assert found_second_cookie == True + assert found_first_cookie is True + assert found_second_cookie is True class TestExcavateRAWTEXT(ModuleTestBase): diff --git a/bbot/test/test_step_2/module_tests/test_module_host_header.py b/bbot/test/test_step_2/module_tests/test_module_host_header.py index b71a31b1d4..a2d69e9b57 100644 --- a/bbot/test/test_step_2/module_tests/test_module_host_header.py +++ b/bbot/test/test_step_2/module_tests/test_module_host_header.py @@ -31,7 +31,7 @@ def request_handler(self, request): if subdomain_tag_overrides: return Response(f"Alive, host is: {subdomain_tag}.{self.fake_host}", status=200) - return Response(f"Alive, host is: defaulthost.com", status=200) + return Response("Alive, host is: defaulthost.com", status=200) async def setup_before_prep(self, module_test): self.interactsh_mock_instance = module_test.mock_interactsh("host_header") diff --git a/bbot/test/test_step_2/module_tests/test_module_http.py b/bbot/test/test_step_2/module_tests/test_module_http.py index 43b7189adf..2bc99f5ddf 100644 --- a/bbot/test/test_step_2/module_tests/test_module_http.py +++ b/bbot/test/test_step_2/module_tests/test_module_http.py @@ -48,10 +48,10 @@ async def custom_callback(request): ) def check(self, module_test, events): - assert self.got_event == True - assert self.headers_correct == True - assert self.method_correct == True - assert self.url_correct == True + assert self.got_event is True + assert self.headers_correct is True + assert self.method_correct is True + assert self.url_correct is True class 
TestHTTPSIEMFriendly(TestHTTP): diff --git a/bbot/test/test_step_2/module_tests/test_module_httpx.py b/bbot/test/test_step_2/module_tests/test_module_httpx.py index c05b6842d6..a8d7cfe03d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_httpx.py +++ b/bbot/test/test_step_2/module_tests/test_module_httpx.py @@ -44,7 +44,7 @@ def check(self, module_test, events): for e in events: if e.type == "HTTP_RESPONSE": if e.data["path"] == "/": - assert not "login-page" in e.tags + assert "login-page" not in e.tags open_port = True elif e.data["path"] == "/url": assert "login-page" in e.tags diff --git a/bbot/test/test_step_2/module_tests/test_module_leakix.py b/bbot/test/test_step_2/module_tests/test_module_leakix.py index 13b9221594..f87dba6b50 100644 --- a/bbot/test/test_step_2/module_tests/test_module_leakix.py +++ b/bbot/test/test_step_2/module_tests/test_module_leakix.py @@ -11,7 +11,7 @@ async def setup_before_prep(self, module_test): json={"title": "Not Found", "description": "Host not found"}, ) module_test.httpx_mock.add_response( - url=f"https://leakix.net/api/subdomains/blacklanternsecurity.com", + url="https://leakix.net/api/subdomains/blacklanternsecurity.com", match_headers={"api-key": "asdf"}, json=[ { @@ -35,7 +35,7 @@ async def setup_before_prep(self, module_test): json={"title": "Not Found", "description": "Host not found"}, ) module_test.httpx_mock.add_response( - url=f"https://leakix.net/api/subdomains/blacklanternsecurity.com", + url="https://leakix.net/api/subdomains/blacklanternsecurity.com", json=[ { "subdomain": "asdf.blacklanternsecurity.com", diff --git a/bbot/test/test_step_2/module_tests/test_module_myssl.py b/bbot/test/test_step_2/module_tests/test_module_myssl.py index 34b9b9972e..b39f2711d5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_myssl.py +++ b/bbot/test/test_step_2/module_tests/test_module_myssl.py @@ -5,7 +5,7 @@ class TestMySSL(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.module.abort_if = lambda e: False module_test.httpx_mock.add_response( - url=f"https://myssl.com/api/v1/discover_sub_domain?domain=blacklanternsecurity.com", + url="https://myssl.com/api/v1/discover_sub_domain?domain=blacklanternsecurity.com", json={ "code": 0, "data": [ diff --git a/bbot/test/test_step_2/module_tests/test_module_neo4j.py b/bbot/test/test_step_2/module_tests/test_module_neo4j.py index 98107481ad..c5df1e4748 100644 --- a/bbot/test/test_step_2/module_tests/test_module_neo4j.py +++ b/bbot/test/test_step_2/module_tests/test_module_neo4j.py @@ -41,4 +41,4 @@ async def close(self): module_test.monkeypatch.setattr("neo4j.AsyncGraphDatabase.driver", MockDriver) def check(self, module_test, events): - assert self.neo4j_used == True + assert self.neo4j_used is True diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index d3712be5c0..135b67b327 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -53,5 +53,5 @@ def check(self, module_test, events): # Verify Negative Result (should skip this statement if correct) elif event.data["url"] == self.missing_tgt: missing = False - assert found, f"NEWSLETTER 'Found' Error - Expect status of True but got False" - assert missing, f"NEWSLETTER 'Missing' Error - Expect status of True but got False" + assert found, "NEWSLETTER 'Found' Error - Expect status of True but got False" + assert missing, "NEWSLETTER 'Missing' 
Error - Expect status of True but got False" diff --git a/bbot/test/test_step_2/module_tests/test_module_oauth.py b/bbot/test/test_step_2/module_tests/test_module_oauth.py index 85fe4f9172..1e7078e840 100644 --- a/bbot/test/test_step_2/module_tests/test_module_oauth.py +++ b/bbot/test/test_step_2/module_tests/test_module_oauth.py @@ -167,7 +167,7 @@ class TestOAUTH(ModuleTestBase): async def setup_after_prep(self, module_test): await module_test.mock_dns({"evilcorp.com": {"A": ["127.0.0.1"]}}) module_test.httpx_mock.add_response( - url=f"https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", + url="https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", json=Azure_Realm.response_json, ) module_test.httpx_mock.add_response( diff --git a/bbot/test/test_step_2/module_tests/test_module_otx.py b/bbot/test/test_step_2/module_tests/test_module_otx.py index 1c41cd962d..9c533ca96e 100644 --- a/bbot/test/test_step_2/module_tests/test_module_otx.py +++ b/bbot/test/test_step_2/module_tests/test_module_otx.py @@ -4,7 +4,7 @@ class TestOTX(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://otx.alienvault.com/api/v1/indicators/domain/blacklanternsecurity.com/passive_dns", + url="https://otx.alienvault.com/api/v1/indicators/domain/blacklanternsecurity.com/passive_dns", json={ "passive_dns": [ { diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py index 874acdb195..ea6c00210c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postgres.py +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -48,7 +48,7 @@ async def setup_before_prep(self, module_test): await asyncio.sleep(1) if process.returncode != 0: - self.log.error(f"Failed to start PostgreSQL server") + self.log.error("Failed to start PostgreSQL server") async def check(self, module_test, events): import asyncpg diff --git a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py index 2b3d3aaf0a..d48f4dae79 100644 --- a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py +++ b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py @@ -11,7 +11,7 @@ class TestRapidDNS(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.module.abort_if = lambda e: False module_test.httpx_mock.add_response( - url=f"https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result", text=self.web_body + url="https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result", text=self.web_body ) def check(self, module_test, events): @@ -45,7 +45,7 @@ async def custom_callback(request): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 3 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com"} @@ -59,7 +59,7 @@ class TestRapidDNSAbortThreshold2(TestRapidDNSAbortThreshold1): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 6 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type 
== "DNS_NAME"]) == {"blacklanternsecurity.com", "evilcorp.com"} @@ -74,7 +74,7 @@ class TestRapidDNSAbortThreshold3(TestRapidDNSAbortThreshold1): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 9 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { @@ -94,7 +94,7 @@ class TestRapidDNSAbortThreshold4(TestRapidDNSAbortThreshold1): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == True + assert module_test.module.errored is True assert module_test.module._api_request_failures == 10 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { diff --git a/bbot/test/test_step_2/module_tests/test_module_sitedossier.py b/bbot/test/test_step_2/module_tests/test_module_sitedossier.py index a5b57b8001..ed93307664 100644 --- a/bbot/test/test_step_2/module_tests/test_module_sitedossier.py +++ b/bbot/test/test_step_2/module_tests/test_module_sitedossier.py @@ -136,11 +136,11 @@ async def setup_after_prep(self, module_test): } ) module_test.httpx_mock.add_response( - url=f"http://www.sitedossier.com/parentdomain/evilcorp.com", + url="http://www.sitedossier.com/parentdomain/evilcorp.com", text=page1, ) module_test.httpx_mock.add_response( - url=f"http://www.sitedossier.com/parentdomain/evilcorp.com/101", + url="http://www.sitedossier.com/parentdomain/evilcorp.com/101", text=page2, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_smuggler.py b/bbot/test/test_step_2/module_tests/test_module_smuggler.py index 7e076cf07e..dcbb9fd3b5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_smuggler.py +++ b/bbot/test/test_step_2/module_tests/test_module_smuggler.py @@ -39,7 +39,7 @@ async def setup_after_prep(self, module_test): old_run_live = module_test.scan.helpers.run_live async def smuggler_mock_run_live(*command, **kwargs): - if not "smuggler" in command[0][1]: + if "smuggler" not in command[0][1]: async for l in old_run_live(*command, **kwargs): yield l else: diff --git a/bbot/test/test_step_2/module_tests/test_module_splunk.py b/bbot/test/test_step_2/module_tests/test_module_splunk.py index d55ed17c27..8366a6289b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_splunk.py +++ b/bbot/test/test_step_2/module_tests/test_module_splunk.py @@ -52,7 +52,7 @@ async def custom_callback(request): module_test.httpx_mock.add_response() def check(self, module_test, events): - assert self.got_event == True - assert self.headers_correct == True - assert self.method_correct == True - assert self.url_correct == True + assert self.got_event is True + assert self.headers_correct is True + assert self.method_correct is True + assert self.url_correct is True diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py b/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py index 2ec5e03612..aa95473a48 100644 --- a/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +++ b/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py @@ -4,7 +4,7 @@ class TestSubdomainCenter(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.subdomain.center/?domain=blacklanternsecurity.com", + 
url="https://api.subdomain.center/?domain=blacklanternsecurity.com", json=["asdf.blacklanternsecurity.com", "zzzz.blacklanternsecurity.com"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomains.py b/bbot/test/test_step_2/module_tests/test_module_subdomains.py index 65b9a8a031..e7fb494591 100644 --- a/bbot/test/test_step_2/module_tests/test_module_subdomains.py +++ b/bbot/test/test_step_2/module_tests/test_module_subdomains.py @@ -6,7 +6,7 @@ class TestSubdomains(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.subdomain.center/?domain=blacklanternsecurity.com", + url="https://api.subdomain.center/?domain=blacklanternsecurity.com", json=["asdfasdf.blacklanternsecurity.com", "zzzzzzzz.blacklanternsecurity.com"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_wayback.py b/bbot/test/test_step_2/module_tests/test_module_wayback.py index cf09d8e2c5..7582e54173 100644 --- a/bbot/test/test_step_2/module_tests/test_module_wayback.py +++ b/bbot/test/test_step_2/module_tests/test_module_wayback.py @@ -4,7 +4,7 @@ class TestWayback(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"http://web.archive.org/cdx/search/cdx?url=blacklanternsecurity.com&matchType=domain&output=json&fl=original&collapse=original", + url="http://web.archive.org/cdx/search/cdx?url=blacklanternsecurity.com&matchType=domain&output=json&fl=original&collapse=original", json=[["original"], ["http://asdf.blacklanternsecurity.com"]], ) From 1e4ed43fbf5d120d99c53841af14a62cc3ce8a68 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 23 Nov 2024 08:19:46 +0100 Subject: [PATCH 08/64] ruff check && ruff format --- .flake8 | 5 -- README.md | 2 +- bbot/test/run_tests.sh | 8 +-- docs/dev/dev_environment.md | 2 +- docs/dev/tests.md | 12 ++-- poetry.lock | 137 ++++++++---------------------------- pyproject.toml | 3 +- 7 files changed, 42 insertions(+), 127 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index a6f057338b..0000000000 --- a/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -select = F,E722 -ignore = F403,F405,F541 -per-file-ignores = - */__init__.py:F401,F403 diff --git a/README.md b/README.md index ee3a130b3a..f029b9432d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ [![bbot_banner](https://github.com/user-attachments/assets/f02804ce-9478-4f1e-ac4d-9cf5620a3214)](https://github.com/blacklanternsecurity/bbot) -[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Recon Village 2024](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://www.reconvillage.org/talks) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) 
[![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Recon Village 2024](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://www.reconvillage.org/talks) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) ### **BEE·bot** is a multipurpose scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), built to automate your **Recon**, **Bug Bounties**, and **ASM**! diff --git a/bbot/test/run_tests.sh b/bbot/test/run_tests.sh index 39458dbf9f..55e7b430bb 100755 --- a/bbot/test/run_tests.sh +++ b/bbot/test/run_tests.sh @@ -3,14 +3,14 @@ bbot_dir="$( realpath "$(dirname "$(dirname "${BASH_SOURCE[0]}")")")" echo -e "[+] BBOT dir: $bbot_dir\n" -echo "[+] Checking code formatting with black" +echo "[+] Checking code formatting with ruff" echo "=======================================" -black --check "$bbot_dir" || exit 1 +ruff format "$bbot_dir" || exit 1 echo -echo "[+] Linting with flake8" +echo "[+] Linting with ruff" echo "=======================" -flake8 "$bbot_dir" || exit 1 +ruff check "$bbot_dir" || exit 1 echo if [ "${1}x" != "x" ] ; then diff --git a/docs/dev/dev_environment.md b/docs/dev/dev_environment.md index d3fdee3cf6..b73f660574 100644 --- a/docs/dev/dev_environment.md +++ b/docs/dev/dev_environment.md @@ -33,7 +33,7 @@ bbot --help ```bash # auto-format code indentation, etc. -black . +ruff format # run tests ./bbot/test/run_tests.sh diff --git a/docs/dev/tests.md b/docs/dev/tests.md index f5d05fcf99..1bcf2970a3 100644 --- a/docs/dev/tests.md +++ b/docs/dev/tests.md @@ -2,18 +2,18 @@ BBOT takes tests seriously. Every module *must* have a custom-written test that *actually tests* its functionality. Don't worry if you want to contribute but you aren't used to writing tests. If you open a draft PR, we will help write them :) -We use [black](https://github.com/psf/black) and [flake8](https://flake8.pycqa.org/en/latest/) for linting, and [pytest](https://docs.pytest.org/en/8.2.x/) for tests. +We use [ruff](https://docs.astral.sh/ruff/) for linting, and [pytest](https://docs.pytest.org/en/8.2.x/) for tests. ## Running tests locally -We have Github actions that automatically run tests whenever you open a Pull Request. However, you can also run the tests locally with `pytest`: +We have GitHub Actions that automatically run tests whenever you open a Pull Request. However, you can also run the tests locally with `pytest`: ```bash -# format code with black -poetry run black . 
+# lint with ruff +poetry run ruff check -# lint with flake8 -poetry run flake8 +# format code with ruff +poetry run ruff format # run all tests with pytest (takes rougly 30 minutes) poetry run pytest diff --git a/poetry.lock b/poetry.lock index 2c880c15d8..9a3597dc42 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "annotated-types" @@ -129,52 +129,6 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "black" -version = "24.10.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "cachetools" version = "5.5.0" @@ -682,22 +636,6 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] -[[package]] -name = "flake8" -version = "7.1.1" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, - {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.12.0,<2.13.0" -pyflakes = ">=3.2.0,<3.3.0" - [[package]] name = "ghp-import" version = "2.1.0" @@ -1158,17 +1096,6 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mergedeep" version = "1.3.4" @@ -1482,17 +1409,6 @@ plot = ["matplotlib (==3.9.2)", "pandas (==2.2.2)"] test = ["pytest (==8.3.3)", "pytest-sugar (==1.0.0)"] type = ["mypy (==1.11.2)"] -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - [[package]] name = "nodeenv" version = "1.9.1" @@ -1704,17 +1620,6 @@ files = [ {file = "puremagic-1.28.tar.gz", hash = "sha256:195893fc129657f611b86b959aab337207d6df7f25372209269ed9e303c1a8c0"}, ] -[[package]] -name = "pycodestyle" -version = "2.12.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, - {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, -] - [[package]] name = "pycparser" version = "2.22" @@ -1891,17 +1796,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pyflakes" -version = "3.2.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, -] - [[package]] name = "pygments" version = "2.18.0" @@ -2502,6 +2396,33 @@ lint = ["black", "flake8", "isort", "mypy", "types-requests"] release = ["build", "towncrier", "twine"] test = ["commentjson", "packaging", "pytest"] +[[package]] +name = "ruff" +version = "0.8.0" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"}, + {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"}, + {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"}, + {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"}, + {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"}, + {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"}, + {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"}, +] + [[package]] name = "setproctitle" version = "1.3.4" @@ -3136,4 +3057,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "2a8751a5918309b0eb2fda2de92b363ae31cbf4ee97317999636d12ae04a06c9" +content-hash = "25e796bbef89b1f888b32a1adfbea0d7d4c4855b9b211c42529669ac3f66b5be" diff --git a/pyproject.toml b/pyproject.toml index d333855f28..d3136b8506 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,15 +56,14 @@ httpx = "^0.27.0" puremagic = "^1.28" cloudcheck = "^6.0.0.602" radixtarget = "^2.0.0.50" +ruff = "^0.8.0" [tool.poetry.group.dev.dependencies] -flake8 = ">=6,<8" poetry-dynamic-versioning = ">=0.21.4,<1.5.0" urllib3 = "^2.0.2" werkzeug = ">=2.3.4,<4.0.0" pytest-env = ">=0.8.2,<1.2.0" pre-commit = ">=3.4,<5.0" -black = "^24.1.1" pytest-cov = ">=5,<7" pytest-rerunfailures = "^14.0" pytest-timeout = "^2.3.1" From 
1bf5036f7418ca5c454718113bc77e5289a1bbf9 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 23 Nov 2024 08:28:53 +0100 Subject: [PATCH 09/64] ruff: Fix comprehension and other performance issues --- bbot/core/event/base.py | 8 ++-- bbot/core/helpers/depsinstaller/installer.py | 2 +- bbot/core/helpers/diff.py | 4 +- bbot/core/helpers/dns/brute.py | 2 +- bbot/core/helpers/dns/engine.py | 6 +-- bbot/core/helpers/misc.py | 18 +++---- bbot/core/helpers/regex.py | 2 +- bbot/core/helpers/regexes.py | 6 +-- bbot/core/helpers/wordcloud.py | 2 +- bbot/core/modules.py | 32 ++++++------- bbot/modules/azure_tenant.py | 4 +- bbot/modules/columbus.py | 2 +- bbot/modules/deadly/ffuf.py | 10 ++-- bbot/modules/deadly/vhost.py | 2 +- bbot/modules/dockerhub.py | 2 +- bbot/modules/extractous.py | 2 +- bbot/modules/filedownload.py | 2 +- bbot/modules/gowitness.py | 8 ++-- bbot/modules/host_header.py | 2 +- bbot/modules/iis_shortnames.py | 6 +-- bbot/modules/internal/cloudcheck.py | 8 ++-- bbot/modules/internal/dnsresolve.py | 2 +- bbot/modules/internal/excavate.py | 10 ++-- bbot/modules/internal/speculate.py | 4 +- bbot/modules/output/asset_inventory.py | 10 ++-- bbot/modules/output/csv.py | 2 +- bbot/modules/paramminer_headers.py | 6 +-- bbot/modules/portscan.py | 2 +- bbot/modules/report/asn.py | 2 +- bbot/modules/social.py | 2 +- bbot/modules/templates/subdomain_enum.py | 2 +- bbot/scanner/preset/args.py | 6 +-- bbot/scanner/preset/preset.py | 6 +-- bbot/scanner/scanner.py | 12 ++--- bbot/test/test_step_1/test_bbot_fastapi.py | 4 +- bbot/test/test_step_1/test_cli.py | 14 +++--- bbot/test/test_step_1/test_dns.py | 2 +- bbot/test/test_step_1/test_events.py | 2 +- bbot/test/test_step_1/test_helpers.py | 2 +- .../test_manager_scope_accuracy.py | 4 +- bbot/test/test_step_1/test_modules_basic.py | 4 +- bbot/test/test_step_1/test_presets.py | 48 +++++++++---------- bbot/test/test_step_1/test_regexes.py | 10 ++-- bbot/test/test_step_1/test_scan.py | 2 +- bbot/test/test_step_1/test_target.py | 24 +++++----- bbot/test/test_step_1/test_web.py | 2 +- .../module_tests/test_module_baddns.py | 12 ++--- .../module_tests/test_module_baddns_direct.py | 6 +-- .../module_tests/test_module_c99.py | 10 ++-- .../module_tests/test_module_excavate.py | 8 ++-- .../module_tests/test_module_extractous.py | 18 +++---- .../module_tests/test_module_filedownload.py | 28 +++++------ .../module_tests/test_module_git_clone.py | 4 +- .../module_tests/test_module_gowitness.py | 8 ++-- .../module_tests/test_module_httpx.py | 12 ++--- .../module_tests/test_module_newsletters.py | 8 ++-- .../module_tests/test_module_ntlm.py | 14 +++--- .../test_module_paramminer_cookies.py | 2 +- .../test_module_paramminer_headers.py | 4 +- .../module_tests/test_module_portscan.py | 6 +-- .../module_tests/test_module_rapiddns.py | 8 ++-- .../module_tests/test_module_speculate.py | 8 +--- .../module_tests/test_module_trufflehog.py | 4 +- 63 files changed, 234 insertions(+), 240 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb6d92e91f..4927648fcb 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -175,8 +175,8 @@ def __init__( self._scope_distance = None self._module_priority = None self._resolved_hosts = set() - self.dns_children = dict() - self.raw_dns_records = dict() + self.dns_children = {} + self.raw_dns_records = {} self._discovery_context = "" self._discovery_context_regex = re.compile(r"\{(?:event|module)[^}]*\}") self.web_spider_distance = 0 @@ -769,7 +769,7 @@ def json(self, mode="json", 
siem_friendly=False): Returns: dict: JSON-serializable dictionary representation of the event object. """ - j = dict() + j = {} # type, ID, scope description for i in ("type", "id", "uuid", "scope_description", "netloc"): v = getattr(self, i, "") @@ -1277,7 +1277,7 @@ def __init__(self, *args, **kwargs): @property def resolved_hosts(self): # TODO: remove this when we rip out httpx - return set(".".join(i.split("-")[1:]) for i in self.tags if i.startswith("ip-")) + return {".".join(i.split("-")[1:]) for i in self.tags if i.startswith("ip-")} @property def pretty_string(self): diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index fce5f077dd..efeceaf4aa 100644 --- a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -310,7 +310,7 @@ def ansible_run(self, tasks=None, module=None, args=None, ansible_args=None): return success, err def read_setup_status(self): - setup_status = dict() + setup_status = {} if self.setup_status_cache.is_file(): with open(self.setup_status_cache) as f: with suppress(Exception): diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index 59ee96567c..2d07d65110 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -101,7 +101,7 @@ async def _baseline(self): ddiff = DeepDiff(baseline_1_json, baseline_2_json, ignore_order=True, view="tree") self.ddiff_filters = [] - for k, v in ddiff.items(): + for k in ddiff.keys(): for x in list(ddiff[k]): log.debug(f"Added {k} filter for path: {x.path()}") self.ddiff_filters.append(x.path()) @@ -140,7 +140,7 @@ def compare_headers(self, headers_1, headers_2): ddiff = DeepDiff(headers_1, headers_2, ignore_order=True, view="tree") - for k, v in ddiff.items(): + for k in ddiff.keys(): for x in list(ddiff[k]): try: header_value = str(x).split("'")[1] diff --git a/bbot/core/helpers/dns/brute.py b/bbot/core/helpers/dns/brute.py index dd17ec5cc4..9bf796d243 100644 --- a/bbot/core/helpers/dns/brute.py +++ b/bbot/core/helpers/dns/brute.py @@ -164,7 +164,7 @@ def gen_random_subdomains(self, n=50): for i in range(0, max(0, n - 5)): d = delimiters[i % len(delimiters)] l = lengths[i % len(lengths)] - segments = list(random.choice(self.devops_mutations) for _ in range(l)) + segments = [random.choice(self.devops_mutations) for _ in range(l)] segments.append(self.parent_helper.rand_string(length=8, digits=False)) subdomain = d.join(segments) yield subdomain diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index 8f3c5a0b77..15e64917b3 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -54,7 +54,7 @@ def __init__(self, socket_path, config={}, debug=False): dns_omit_queries = self.dns_config.get("omit_queries", None) if not dns_omit_queries: dns_omit_queries = [] - self.dns_omit_queries = dict() + self.dns_omit_queries = {} for d in dns_omit_queries: d = d.split(":") if len(d) == 2: @@ -72,7 +72,7 @@ def __init__(self, socket_path, config={}, debug=False): self.wildcard_ignore = [] self.wildcard_ignore = tuple([str(d).strip().lower() for d in self.wildcard_ignore]) self.wildcard_tests = self.dns_config.get("wildcard_tests", 5) - self._wildcard_cache = dict() + self._wildcard_cache = {} # since wildcard detection takes some time, This is to prevent multiple # modules from kicking off wildcard detection for the same domain at the same time self._wildcard_lock = NamedLock() @@ -82,7 +82,7 @@ def __init__(self, socket_path, config={}, debug=False): 
self._last_connectivity_warning = time.time() # keeps track of warnings issued for wildcard detection to prevent duplicate warnings self._dns_warnings = set() - self._errors = dict() + self._errors = {} self._debug = self.dns_config.get("debug", False) self._dns_cache = LRUCache(maxsize=10000) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 1a56932963..27892e44f4 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -921,12 +921,12 @@ def extract_params_xml(xml_data, compare_mode="getparam"): # Define valid characters for each mode based on RFCs valid_chars_dict = { - "header": set( + "header": { chr(c) for c in range(33, 127) if chr(c) in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_" - ), - "getparam": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="), - "postparam": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="), - "cookie": set(chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'), + }, + "getparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="}, + "postparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="}, + "cookie": {chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'}, } @@ -1148,7 +1148,7 @@ def chain_lists( """ if isinstance(l, str): l = [l] - final_list = dict() + final_list = {} for entry in l: for s in split_regex.split(entry): f = s.strip() @@ -1345,7 +1345,7 @@ def search_dict_by_key(key, d): if isinstance(d, dict): if key in d: yield d[key] - for k, v in d.items(): + for v in d.values(): yield from search_dict_by_key(key, v) elif isinstance(d, list): for v in d: @@ -1412,7 +1412,7 @@ def search_dict_values(d, *regexes): results.add(h) yield result elif isinstance(d, dict): - for _, v in d.items(): + for v in d.values(): yield from search_dict_values(v, *regexes) elif isinstance(d, list): for v in d: @@ -2397,7 +2397,7 @@ def in_exception_chain(e, exc_types): ... if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): ... raise """ - return any([isinstance(_, exc_types) for _ in get_exception_chain(e)]) + return any(isinstance(_, exc_types) for _ in get_exception_chain(e)) def get_traceback_details(e): diff --git a/bbot/core/helpers/regex.py b/bbot/core/helpers/regex.py index f0bee1fc0a..97d8cbe6ec 100644 --- a/bbot/core/helpers/regex.py +++ b/bbot/core/helpers/regex.py @@ -41,7 +41,7 @@ async def findall_multi(self, compiled_regexes, *args, threads=10, **kwargs): """ if not isinstance(compiled_regexes, dict): raise ValueError('compiled_regexes must be a dictionary like this: {"regex_name": }') - for k, v in compiled_regexes.items(): + for v in compiled_regexes.values(): self.ensure_compiled_regex(v) tasks = {} diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 8d5d23b3a4..85f9dcac7e 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -36,7 +36,7 @@ _ipv4_regex + r"\/[0-9]{1,2}", _ipv6_regex + r"\/[0-9]{1,3}", ) -ip_range_regexes = list(re.compile(r, re.I) for r in _ip_range_regexes) +ip_range_regexes = [re.compile(r, re.I) for r in _ip_range_regexes] # dns names with periods _dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?" 
@@ -64,14 +64,14 @@ _hostname_regex + r":[0-9]{1,5}", r"\[" + _ipv6_regex + r"\]:[0-9]{1,5}", ) -open_port_regexes = list(re.compile(r, re.I) for r in _open_port_regexes) +open_port_regexes = [re.compile(r, re.I) for r in _open_port_regexes] _url_regexes = ( r"https?://" + _dns_name_regex + r"(?::[0-9]{1,5})?(?:(?:/|\?).*)?", r"https?://" + _hostname_regex + r"(?::[0-9]{1,5})?(?:(?:/|\?).*)?", r"https?://\[" + _ipv6_regex + r"\](?::[0-9]{1,5})?(?:(?:/|\?).*)?", ) -url_regexes = list(re.compile(r, re.I) for r in _url_regexes) +url_regexes = [re.compile(r, re.I) for r in _url_regexes] _double_slash_regex = r"/{2,}" double_slash_regex = re.compile(_double_slash_regex) diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index fbd4e75930..8dc8e3a647 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -421,7 +421,7 @@ def mutations(self, words, max_mutations=None): def mutate(self, word, max_mutations=None, mutations=None): if mutations is None: mutations = self.top_mutations(max_mutations) - for mutation, count in mutations.items(): + for mutation in mutations.keys(): ret = [] for s in mutation: if s is not None: diff --git a/bbot/core/modules.py b/bbot/core/modules.py index a5f4b30eb5..b3f041411b 100644 --- a/bbot/core/modules.py +++ b/bbot/core/modules.py @@ -235,7 +235,7 @@ def _preloaded(self): return self.__preloaded def get_recursive_dirs(self, *dirs): - dirs = set(Path(d).resolve() for d in dirs) + dirs = {Path(d).resolve() for d in dirs} for d in list(dirs): if not d.is_dir(): continue @@ -340,61 +340,61 @@ def preload_module(self, module_file): # class attributes that are dictionaries if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict: # module options - if any([target.id == "options" for target in class_attr.targets]): + if any(target.id == "options" for target in class_attr.targets): config.update(ast.literal_eval(class_attr.value)) # module options - elif any([target.id == "options_desc" for target in class_attr.targets]): + elif any(target.id == "options_desc" for target in class_attr.targets): options_desc.update(ast.literal_eval(class_attr.value)) # module metadata - elif any([target.id == "meta" for target in class_attr.targets]): + elif any(target.id == "meta" for target in class_attr.targets): meta = ast.literal_eval(class_attr.value) # class attributes that are lists if type(class_attr) == ast.Assign and type(class_attr.value) == ast.List: # flags - if any([target.id == "flags" for target in class_attr.targets]): + if any(target.id == "flags" for target in class_attr.targets): for flag in class_attr.value.elts: if type(flag.value) == str: flags.add(flag.value) # watched events - elif any([target.id == "watched_events" for target in class_attr.targets]): + elif any(target.id == "watched_events" for target in class_attr.targets): for event_type in class_attr.value.elts: if type(event_type.value) == str: watched_events.add(event_type.value) # produced events - elif any([target.id == "produced_events" for target in class_attr.targets]): + elif any(target.id == "produced_events" for target in class_attr.targets): for event_type in class_attr.value.elts: if type(event_type.value) == str: produced_events.add(event_type.value) # bbot module dependencies - elif any([target.id == "deps_modules" for target in class_attr.targets]): + elif any(target.id == "deps_modules" for target in class_attr.targets): for dep_module in class_attr.value.elts: if type(dep_module.value) == str: 
deps_modules.add(dep_module.value) # python dependencies - elif any([target.id == "deps_pip" for target in class_attr.targets]): + elif any(target.id == "deps_pip" for target in class_attr.targets): for dep_pip in class_attr.value.elts: if type(dep_pip.value) == str: deps_pip.append(dep_pip.value) - elif any([target.id == "deps_pip_constraints" for target in class_attr.targets]): + elif any(target.id == "deps_pip_constraints" for target in class_attr.targets): for dep_pip in class_attr.value.elts: if type(dep_pip.value) == str: deps_pip_constraints.append(dep_pip.value) # apt dependencies - elif any([target.id == "deps_apt" for target in class_attr.targets]): + elif any(target.id == "deps_apt" for target in class_attr.targets): for dep_apt in class_attr.value.elts: if type(dep_apt.value) == str: deps_apt.append(dep_apt.value) # bash dependencies - elif any([target.id == "deps_shell" for target in class_attr.targets]): + elif any(target.id == "deps_shell" for target in class_attr.targets): for dep_shell in class_attr.value.elts: deps_shell.append(ast.literal_eval(dep_shell)) # ansible playbook - elif any([target.id == "deps_ansible" for target in class_attr.targets]): + elif any(target.id == "deps_ansible" for target in class_attr.targets): ansible_tasks = ast.literal_eval(class_attr.value) # shared/common module dependencies - elif any([target.id == "deps_common" for target in class_attr.targets]): + elif any(target.id == "deps_common" for target in class_attr.targets): for dep_common in class_attr.value.elts: if type(dep_common.value) == str: deps_common.append(dep_common.value) @@ -540,7 +540,7 @@ def recommend_dependencies(self, modules): with suppress(KeyError): choices.remove(modname) if event_type not in resolve_choices: - resolve_choices[event_type] = dict() + resolve_choices[event_type] = {} deps = resolve_choices[event_type] self.add_or_create(deps, "required_by", modname) for c in choices: @@ -639,7 +639,7 @@ def modules_options(self, modules=None, mod_type=None): def modules_options_table(self, modules=None, mod_type=None): table = [] header = ["Config Option", "Type", "Description", "Default"] - for module_name, module_options in self.modules_options(modules, mod_type).items(): + for module_options in self.modules_options(modules, mod_type).values(): table += module_options return make_table(table, header) diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index e4be4380d2..f17911c86a 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -102,7 +102,7 @@ async def query(self, domain): status_code = getattr(r, "status_code", 0) if status_code not in (200, 421): self.verbose(f'Error retrieving azure_tenant domains for "{domain}" (status code: {status_code})') - return set(), dict() + return set(), {} found_domains = list(set(await self.helpers.re.findall(self.d_xml_regex, r.text))) domains = set() @@ -116,7 +116,7 @@ async def query(self, domain): self.scan.word_cloud.absorb_word(d) r = await openid_task - openid_config = dict() + openid_config = {} with suppress(Exception): openid_config = r.json() diff --git a/bbot/modules/columbus.py b/bbot/modules/columbus.py index 781c3c94b3..0a33ee12a6 100644 --- a/bbot/modules/columbus.py +++ b/bbot/modules/columbus.py @@ -21,5 +21,5 @@ async def parse_results(self, r, query): results = set() json = r.json() if json and isinstance(json, list): - return set([f"{s.lower()}.{query}" for s in json]) + return {f"{s.lower()}.{query}" for s in json} return results diff --git 
a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index 6144d0b13e..135d61221c 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -28,7 +28,7 @@ class ffuf(BaseModule): deps_common = ["ffuf"] - banned_characters = set([" "]) + banned_characters = {" "} blacklist = ["images", "css", "image"] in_scope_only = True @@ -122,7 +122,7 @@ async def baseline_ffuf(self, url, exts=[""], prefix="", suffix="", mode="normal continue # if the codes are different, we should abort, this should also be a warning, as it is highly unusual behavior - if len(set(d["status"] for d in canary_results)) != 1: + if len({d["status"] for d in canary_results}) != 1: self.warning("Got different codes for each baseline. This could indicate load balancing") filters[ext] = ["ABORT", "BASELINE_CHANGED_CODES"] continue @@ -148,7 +148,7 @@ async def baseline_ffuf(self, url, exts=[""], prefix="", suffix="", mode="normal continue # we start by seeing if all of the baselines have the same character count - if len(set(d["length"] for d in canary_results)) == 1: + if len({d["length"] for d in canary_results}) == 1: self.debug("All baseline results had the same char count, we can make a filter on that") filters[ext] = [ "-fc", @@ -161,7 +161,7 @@ async def baseline_ffuf(self, url, exts=[""], prefix="", suffix="", mode="normal continue # if that doesn't work we can try words - if len(set(d["words"] for d in canary_results)) == 1: + if len({d["words"] for d in canary_results}) == 1: self.debug("All baseline results had the same word count, we can make a filter on that") filters[ext] = [ "-fc", @@ -174,7 +174,7 @@ async def baseline_ffuf(self, url, exts=[""], prefix="", suffix="", mode="normal continue # as a last resort we will try lines - if len(set(d["lines"] for d in canary_results)) == 1: + if len({d["lines"] for d in canary_results}) == 1: self.debug("All baseline results had the same word count, we can make a filter on that") filters[ext] = [ "-fc", diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index 66c1c516c4..161a4015dd 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -23,7 +23,7 @@ class vhost(ffuf): } deps_common = ["ffuf"] - banned_characters = set([" ", "."]) + banned_characters = {" ", "."} in_scope_only = True diff --git a/bbot/modules/dockerhub.py b/bbot/modules/dockerhub.py index b64b88705e..23f45ab756 100644 --- a/bbot/modules/dockerhub.py +++ b/bbot/modules/dockerhub.py @@ -31,7 +31,7 @@ async def handle_event(self, event): async def handle_org_stub(self, event): profile_name = event.data # docker usernames are case sensitive, so if there are capitalizations we also try a lowercase variation - profiles_to_check = set([profile_name, profile_name.lower()]) + profiles_to_check = {profile_name, profile_name.lower()} for p in profiles_to_check: api_url = f"{self.api_url}/users/{p}" api_result = await self.helpers.request(api_url, follow_redirects=True) diff --git a/bbot/modules/extractous.py b/bbot/modules/extractous.py index 471e2c07e2..5b2e2cdec7 100644 --- a/bbot/modules/extractous.py +++ b/bbot/modules/extractous.py @@ -67,7 +67,7 @@ class extractous(BaseModule): scope_distance_modifier = 1 async def setup(self): - self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) + self.extensions = list({e.lower().strip(".") for e in self.config.get("extensions", [])}) return True async def filter_event(self, event): diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py 
index 872a447a1f..35287252bf 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -87,7 +87,7 @@ class filedownload(BaseModule): scope_distance_modifier = 3 async def setup(self): - self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) + self.extensions = list({e.lower().strip(".") for e in self.config.get("extensions", [])}) self.max_filesize = self.config.get("max_filesize", "10MB") self.download_dir = self.scan.home / "filedownload" self.helpers.mkdir(self.download_dir) diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 08edfaaf31..1fdbf37a72 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -88,7 +88,7 @@ async def setup(self): self.screenshot_path = self.base_path / "screenshots" self.command = self.construct_command() self.prepped = False - self.screenshots_taken = dict() + self.screenshots_taken = {} self.connections_logged = set() self.technologies_found = set() return True @@ -172,7 +172,7 @@ async def handle_batch(self, *events): # emit technologies new_technologies = await self.get_new_technologies() - for _, row in new_technologies.items(): + for row in new_technologies.values(): parent_id = row["url_id"] parent_url = self.screenshots_taken[parent_id] parent_event = event_dict[parent_url] @@ -227,7 +227,7 @@ async def get_new_screenshots(self): return screenshots async def get_new_network_logs(self): - network_logs = dict() + network_logs = {} if self.db_path.is_file(): async with aiosqlite.connect(str(self.db_path)) as con: con.row_factory = aiosqlite.Row @@ -241,7 +241,7 @@ async def get_new_network_logs(self): return network_logs async def get_new_technologies(self): - technologies = dict() + technologies = {} if self.db_path.is_file(): async with aiosqlite.connect(str(self.db_path)) as con: con.row_factory = aiosqlite.Row diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index 00dd640baf..f3460799bb 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -84,7 +84,7 @@ async def handle_event(self, event): added_cookies = {} - for header, header_values in event.data["header-dict"].items(): + for header_values in event.data["header-dict"].values(): for header_value in header_values: if header_value.lower() == "set-cookie": header_split = header_value.split("=") diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index 6a173a929a..afda521347 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -39,7 +39,7 @@ async def detect(self, target): test_url = f"{target}*~1*/a.aspx" for method in ["GET", "POST", "OPTIONS", "DEBUG", "HEAD", "TRACE"]: - kwargs = dict(method=method, allow_redirects=False, timeout=10) + kwargs = {"method": method, "allow_redirects": False, "timeout": 10} confirmations = 0 iterations = 5 # one failed detection is tolerated, as long as its not the first run while iterations > 0: @@ -128,7 +128,7 @@ async def solve_valid_chars(self, method, target, affirmative_status_code): suffix = "/a.aspx" urls_and_kwargs = [] - kwargs = dict(method=method, allow_redirects=False, retries=2, timeout=10) + kwargs = {"method": method, "allow_redirects": False, "retries": 2, "timeout": 10} for c in valid_chars: for file_part in ("stem", "ext"): payload = encode_all(f"*{c}*~1*") @@ -169,7 +169,7 @@ async def solve_shortname_recursive( wildcard = "*" if extension_mode else "*~1*" payload = encode_all(f"{prefix}{c}{wildcard}") url = f"{target}{payload}{suffix}" - kwargs = 
dict(method=method) + kwargs = {"method": method} urls_and_kwargs.append((url, kwargs, c)) async for url, kwargs, c, response in self.helpers.request_custom_batch(urls_and_kwargs): diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 42c51ec033..ce6d0b1756 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -15,7 +15,7 @@ async def setup(self): def make_dummy_modules(self): self.dummy_modules = {} - for provider_name, provider in self.helpers.cloud.providers.items(): + for provider_name in self.helpers.cloud.providers.keys(): module = self.scan._make_dummy_module(f"cloud_{provider_name}", _type="scan") module.default_discovery_context = "{module} derived {event.type}: {event.host}" self.dummy_modules[provider_name] = module @@ -56,9 +56,9 @@ async def handle_event(self, event, **kwargs): # loop through each provider for provider in self.helpers.cloud.providers.values(): provider_name = provider.name.lower() - base_kwargs = dict( - parent=event, tags=[f"{provider.provider_type}-{provider_name}"], _provider=provider_name - ) + base_kwargs = { + "parent": event, "tags": [f"{provider.provider_type}-{provider_name}"], "_provider": provider_name + } # loop through the provider's regex signatures, if any for event_type, sigs in provider.signatures.items(): if event_type != "STORAGE_BUCKET": diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 9b68b7bb9d..575091f313 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -307,7 +307,7 @@ def get_dns_parent(self, event): def emit_raw_records(self): if self._emit_raw_records is None: watching_raw_records = any( - ["RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values()] + "RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values() ) omitted_event_types = self.scan.config.get("omit_event_types", []) omit_raw_records = "RAW_DNS_RECORD" in omitted_event_types diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 3e3df8643b..354b279022 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -153,7 +153,7 @@ async def preprocess(self, r, event, discovery_context): yara_results = {} for h in r.strings: yara_results[h.identifier.lstrip("$")] = sorted( - set([i.matched_data.decode("utf-8", errors="ignore") for i in h.instances]) + {i.matched_data.decode("utf-8", errors="ignore") for i in h.instances} ) await self.process(yara_results, event, yara_rule_settings, discovery_context) @@ -180,7 +180,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte Returns: None """ - for identifier, results in yara_results.items(): + for results in yara_results.values(): for result in results: event_data = {"description": f"{discovery_context} {yara_rule_settings.description}"} if yara_rule_settings.emit_match: @@ -314,7 +314,7 @@ class excavateTestRule(ExcavateRule): _module_threads = 8 - parameter_blacklist = set( + parameter_blacklist = { p.lower() for p in [ "__VIEWSTATE", @@ -329,7 +329,7 @@ class excavateTestRule(ExcavateRule): "JSESSIONID", "PHPSESSID", ] - ) + } yara_rule_name_regex = re.compile(r"rule\s(\w+)\s{") yara_rule_regex = re.compile(r"(?s)((?:rule\s+\w+\s*{[^{}]*(?:{[^{}]*}[^{}]*)*[^{}]*(?:/\S*?}[^/]*?/)*)*})") @@ -634,7 +634,7 @@ class NonHttpSchemeExtractor(ExcavateRule): scheme_blacklist = ["javascript", "mailto", "tel", "data", "vbscript", "about", 
"file"] async def process(self, yara_results, event, yara_rule_settings, discovery_context): - for identifier, results in yara_results.items(): + for results in yara_results.values(): for url_str in results: scheme = url_str.split("://")[0] if scheme in self.scheme_blacklist: diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 84e9726bb7..9fb02a5922 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -45,10 +45,10 @@ class speculate(BaseInternalModule): async def setup(self): scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"] - self.open_port_consumers = any(["OPEN_TCP_PORT" in m.watched_events for m in scan_modules]) + self.open_port_consumers = any("OPEN_TCP_PORT" in m.watched_events for m in scan_modules) # only consider active portscanners (still speculate if only passive ones are enabled) self.portscanner_enabled = any( - ["portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values()] + "portscan" in m.flags and "active" in m.flags for m in self.scan.modules.values() ) self.emit_open_ports = self.open_port_consumers and not self.portscanner_enabled self.range_to_ip = True diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index a150c029d3..a3a0ab431a 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -91,8 +91,8 @@ async def handle_event(self, event): self.assets[hostkey].absorb_event(event) async def report(self): - stats = dict() - totals = dict() + stats = {} + totals = {} def increment_stat(stat, value): try: @@ -263,13 +263,13 @@ def absorb_csv_row(self, row): if not self.recheck: # ports ports = [i.strip() for i in row.get("Open Ports", "").split(",")] - self.ports.update(set(i for i in ports if i and is_port(i))) + self.ports.update({i for i in ports if i and is_port(i)}) # findings findings = [i.strip() for i in row.get("Findings", "").splitlines()] - self.findings.update(set(i for i in findings if i)) + self.findings.update({i for i in findings if i}) # technologies technologies = [i.strip() for i in row.get("Technologies", "").splitlines()] - self.technologies.update(set(i for i in technologies if i)) + self.technologies.update({i for i in technologies if i}) # risk rating risk_rating = row.get("Risk Rating", "").strip() if risk_rating and risk_rating.isdigit() and int(risk_rating) > self.risk_rating: diff --git a/bbot/modules/output/csv.py b/bbot/modules/output/csv.py index 3141713fa5..9b7d4b4bd9 100644 --- a/bbot/modules/output/csv.py +++ b/bbot/modules/output/csv.py @@ -64,7 +64,7 @@ async def handle_event(self, event): ), "Source Module": str(getattr(event, "module_sequence", "")), "Scope Distance": str(getattr(event, "scope_distance", "")), - "Event Tags": ",".join(sorted(list(getattr(event, "tags", [])))), + "Event Tags": ",".join(sorted(getattr(event, "tags", []))), "Discovery Path": " --> ".join(discovery_path), } ) diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index c8e78de092..07a3c7e0dc 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -89,11 +89,11 @@ async def setup(self): if not wordlist: wordlist = f"{self.helpers.wordlist_dir}/{self.default_wordlist}" self.debug(f"Using wordlist: [{wordlist}]") - self.wl = set( + self.wl = { h.strip().lower() for h in self.helpers.read_file(await self.helpers.wordlist(wordlist)) if len(h) > 0 and "%" not in h - ) + } # check against 
the boring list (if the option is set) if self.config.get("skip_boring_words", True): @@ -238,7 +238,7 @@ async def check_batch(self, compare_helper, url, header_list): return await compare_helper.compare(url, headers=test_headers, check_reflection=(len(header_list) == 1)) async def finish(self): - untested_matches = sorted(list(self.extracted_words_master.copy())) + untested_matches = sorted(self.extracted_words_master.copy()) for url, (event, batch_size) in list(self.event_dict.items()): try: compare_helper = self.helpers.http_compare(url) diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py index 6742421691..737c74ff0a 100644 --- a/bbot/modules/portscan.py +++ b/bbot/modules/portscan.py @@ -109,7 +109,7 @@ async def handle_batch(self, *events): self.scanned_initial_targets = True events = set(events) events.update( - set([e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")]) + {e for e in self.scan.target.seeds.events if e.type in ("DNS_NAME", "IP_ADDRESS", "IP_RANGE")} ) # ping scan diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index ba5e1e39a4..cd823f59e7 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -207,7 +207,7 @@ async def get_asn_bgpview(self, ip): return False asns_tried.add(asn) asns.append( - dict(asn=asn, subnet=subnet, name=name, description=description, country=country, emails=emails) + {"asn": asn, "subnet": subnet, "name": name, "description": description, "country": country, "emails": emails} ) if not asns: self.debug(f'No results for "{ip}"') diff --git a/bbot/modules/social.py b/bbot/modules/social.py index 5833228a04..fb46dd3870 100644 --- a/bbot/modules/social.py +++ b/bbot/modules/social.py @@ -45,7 +45,7 @@ async def handle_event(self, event): url = f"https://{url}" event_data = {"platform": platform, "url": url, "profile_name": profile_name} # only emit if the same event isn't already in the parent chain - if not any([e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()]): + if not any(e.type == "SOCIAL" and e.data == event_data for e in event.get_parents()): social_event = self.make_event( event_data, "SOCIAL", diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 0d5f347e90..764ecdd1fe 100644 --- a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -155,7 +155,7 @@ async def query_paginated(self, query): async def _is_wildcard(self, query): rdtypes = ("A", "AAAA", "CNAME") if self.helpers.is_dns_name(query): - for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).items(): + for wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query, rdtypes=rdtypes)).values(): if any(t in wildcard_rdtypes for t in rdtypes): return True return False diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index ad0267780f..b346fd9554 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -174,9 +174,9 @@ def preset_from_args(self): def create_parser(self, *args, **kwargs): kwargs.update( - dict( - description="Bighuge BLS OSINT Tool", formatter_class=argparse.RawTextHelpFormatter, epilog=self.epilog - ) + { + "description": "Bighuge BLS OSINT Tool", "formatter_class": argparse.RawTextHelpFormatter, "epilog": self.epilog + } ) p = argparse.ArgumentParser(*args, **kwargs) diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py index 0388fbcfad..841b4c7ee8 100644 
--- a/bbot/scanner/preset/preset.py +++ b/bbot/scanner/preset/preset.py @@ -17,7 +17,7 @@ log = logging.getLogger("bbot.presets") -_preset_cache = dict() +_preset_cache = {} # cache default presets to prevent having to reload from disk @@ -916,7 +916,7 @@ def all_presets(self): global DEFAULT_PRESETS if DEFAULT_PRESETS is None: - presets = dict() + presets = {} for ext in ("yml", "yaml"): for preset_path in PRESET_PATH: # for every yaml file @@ -967,7 +967,7 @@ def presets_table(self, include_modules=True): header = ["Preset", "Category", "Description", "# Modules"] if include_modules: header.append("Modules") - for yaml_file, (loaded_preset, category, preset_path, original_file) in self.all_presets.items(): + for (loaded_preset, category, preset_path, original_file) in self.all_presets.values(): loaded_preset = loaded_preset.bake() num_modules = f"{len(loaded_preset.scan_modules):,}" row = [loaded_preset.name, category, loaded_preset.description, num_modules] diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 49114a5b5d..8ef0ac014a 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -214,8 +214,8 @@ def __init__( ) # url file extensions - self.url_extension_blacklist = set(e.lower() for e in self.config.get("url_extension_blacklist", [])) - self.url_extension_httpx_only = set(e.lower() for e in self.config.get("url_extension_httpx_only", [])) + self.url_extension_blacklist = {e.lower() for e in self.config.get("url_extension_blacklist", [])} + self.url_extension_httpx_only = {e.lower() for e in self.config.get("url_extension_httpx_only", [])} # url querystring behavior self.url_querystring_remove = self.config.get("url_querystring_remove", True) @@ -451,7 +451,7 @@ async def _mark_finished(self): await m.queue_event(scan_finish_event) # wait until output modules are flushed while 1: - modules_finished = all([m.finished for m in output_modules]) + modules_finished = all(m.finished for m in output_modules) if modules_finished: break await asyncio.sleep(0.05) @@ -685,7 +685,7 @@ def modules_status(self, _log=False): if modules_errored: self.verbose( - f'{self.name}: Modules errored: {len(modules_errored):,} ({", ".join([m for m in modules_errored])})' + f'{self.name}: Modules errored: {len(modules_errored):,} ({", ".join(list(modules_errored))})' ) num_queued_events = self.num_queued_events @@ -1024,7 +1024,7 @@ def dns_strings(self): A list of DNS hostname strings generated from the scan target """ if self._dns_strings is None: - dns_whitelist = set(t.host for t in self.whitelist if t.host and isinstance(t.host, str)) + dns_whitelist = {t.host for t in self.whitelist if t.host and isinstance(t.host, str)} dns_whitelist = sorted(dns_whitelist, key=len) dns_whitelist_set = set() dns_strings = [] @@ -1121,7 +1121,7 @@ def json(self): """ A dictionary representation of the scan including its name, ID, targets, whitelist, blacklist, and modules """ - j = dict() + j = {} for i in ("id", "name"): v = getattr(self, i, "") if v: diff --git a/bbot/test/test_step_1/test_bbot_fastapi.py b/bbot/test/test_step_1/test_bbot_fastapi.py index bad4020712..add7ad099a 100644 --- a/bbot/test/test_step_1/test_bbot_fastapi.py +++ b/bbot/test/test_step_1/test_bbot_fastapi.py @@ -28,7 +28,7 @@ def test_bbot_multiprocess(bbot_httpserver): assert len(events) >= 3 scan_events = [e for e in events if e["type"] == "SCAN"] assert len(scan_events) == 2 - assert any([e["data"] == "test@blacklanternsecurity.com" for e in events]) + assert any(e["data"] == "test@blacklanternsecurity.com" 
for e in events) def test_bbot_fastapi(bbot_httpserver): @@ -61,7 +61,7 @@ def test_bbot_fastapi(bbot_httpserver): assert len(events) >= 3 scan_events = [e for e in events if e["type"] == "SCAN"] assert len(scan_events) == 2 - assert any([e["data"] == "test@blacklanternsecurity.com" for e in events]) + assert any(e["data"] == "test@blacklanternsecurity.com" for e in events) finally: with suppress(Exception): diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index acdd4011b6..669de90cec 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -21,7 +21,7 @@ async def test_cli_scope(monkeypatch, capsys): lines = [json.loads(l) for l in out.splitlines()] dns_events = [l for l in lines if l["type"] == "DNS_NAME" and l["data"] == "one.one.one.one"] assert dns_events - assert all([l["scope_distance"] == 0 and "in-scope" in l["tags"] for l in dns_events]) + assert all(l["scope_distance"] == 0 and "in-scope" in l["tags"] for l in dns_events) assert 1 == len( [ l @@ -34,10 +34,10 @@ async def test_cli_scope(monkeypatch, capsys): ) ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.1.1.1"] assert ip_events - assert all([l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events]) + assert all(l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events) ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.0.0.1"] assert ip_events - assert all([l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events]) + assert all(l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events) # with whitelist monkeypatch.setattr( @@ -61,10 +61,10 @@ async def test_cli_scope(monkeypatch, capsys): lines = [json.loads(l) for l in out.splitlines()] lines = [l for l in lines if l["type"] != "SCAN"] assert lines - assert not any([l["scope_distance"] == 0 for l in lines]) + assert not any(l["scope_distance"] == 0 for l in lines) dns_events = [l for l in lines if l["type"] == "DNS_NAME" and l["data"] == "one.one.one.one"] assert dns_events - assert all([l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in dns_events]) + assert all(l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in dns_events) assert 1 == len( [ l @@ -77,10 +77,10 @@ async def test_cli_scope(monkeypatch, capsys): ) ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.1.1.1"] assert ip_events - assert all([l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events]) + assert all(l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events) ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.0.0.1"] assert ip_events - assert all([l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events]) + assert all(l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events) @pytest.mark.asyncio diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index d0bfb68330..001c8fa51c 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -113,7 +113,7 @@ async def test_dns_resolution(bbot_scanner): batch_results = [r async for r in dnsengine.resolve_batch(["1.1.1.1", "one.one.one.one"])] assert len(batch_results) == 2 batch_results = dict(batch_results) - assert any([x in batch_results["one.one.one.one"] for x in ("1.1.1.1", "1.0.0.1")]) + assert any(x in batch_results["one.one.one.one"] for x in ("1.1.1.1", 
"1.0.0.1")) assert "one.one.one.one" in batch_results["1.1.1.1"] # custom batch resolution diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 8156fc7969..1b02f36a78 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -490,7 +490,7 @@ async def test_events(events, helpers): assert db_event.discovery_context == "test context" assert db_event.discovery_path == ["test context"] assert len(db_event.parent_chain) == 1 - assert all([event_uuid_regex.match(u) for u in db_event.parent_chain]) + assert all(event_uuid_regex.match(u) for u in db_event.parent_chain) assert db_event.parent_chain[0] == str(db_event.uuid) assert db_event.parent.uuid == scan.root_event.uuid assert db_event.parent_uuid == scan.root_event.uuid diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 76cf635175..5776f774c0 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -428,7 +428,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert top_tcp_ports[-10:] == [65526, 65527, 65528, 65529, 65530, 65531, 65532, 65533, 65534, 65535] assert len(top_tcp_ports) == 65535 assert len(set(top_tcp_ports)) == 65535 - assert all([isinstance(i, int) for i in top_tcp_ports]) + assert all(isinstance(i, int) for i in top_tcp_ports) top_tcp_ports = helpers.top_tcp_ports(10, as_string=True) assert top_tcp_ports == "80,23,443,21,22,25,3389,110,445,139" # urls diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index 62a03c0ef1..7e3ef2dc14 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ -817,9 +817,9 @@ async def test_manager_blacklist(bbot_scanner, bbot_httpserver, caplog): events = [e async for e in scan.async_start()] - assert any([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://www-dev.test.notreal:8888/"]) + assert any(e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://www-dev.test.notreal:8888/") # the hostname is in-scope, but its IP is blacklisted, therefore we shouldn't see it - assert not any([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://www-prod.test.notreal:8888/"]) + assert not any(e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://www-prod.test.notreal:8888/") assert 'Not forwarding DNS_NAME("www-prod.test.notreal", module=excavate' in caplog.text and 'because it has a blacklisted DNS record' in caplog.text diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index 4212340695..5fc172e78a 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -179,10 +179,10 @@ async def test_modules_basic_checks(events, httpx_mock): assert type(watched_events) == list, f"{module_name}.watched_events must be of type list" assert type(produced_events) == list, f"{module_name}.produced_events must be of type list" assert all( - [type(t) == str for t in watched_events] + type(t) == str for t in watched_events ), f"{module_name}.watched_events entries must be of type string" assert all( - [type(t) == str for t in produced_events] + type(t) == str for t in produced_events ), f"{module_name}.produced_events entries must be of type string" assert type(preloaded.get("deps_pip", [])) == list, f"{module_name}.deps_pip must 
be of type list"
diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py
index 1b11529eae..42c42481f1 100644
--- a/bbot/test/test_step_1/test_presets.py
+++ b/bbot/test/test_step_1/test_presets.py
@@ -16,7 +16,7 @@ def test_preset_descriptions():
# ensure every preset has a description
preset = Preset()
- for yaml_file, (loaded_preset, category, preset_path, original_filename) in preset.all_presets.items():
+ for (loaded_preset, category, preset_path, original_filename) in preset.all_presets.values():
assert (
loaded_preset.description
), f'Preset "{loaded_preset.name}" at {original_filename} does not have a description.'
@@ -174,9 +174,9 @@ def test_preset_scope():
# test target merging
scan = Scanner("1.2.3.4", preset=Preset.from_dict({"target": ["evilcorp.com"]}))
- assert set([str(h) for h in scan.preset.target.seeds.hosts]) == {"1.2.3.4/32", "evilcorp.com"}
- assert set([e.data for e in scan.target.seeds]) == {"1.2.3.4", "evilcorp.com"}
- assert set([e.data for e in scan.target.whitelist]) == {"1.2.3.4", "evilcorp.com"}
+ assert {str(h) for h in scan.preset.target.seeds.hosts} == {"1.2.3.4/32", "evilcorp.com"}
+ assert {e.data for e in scan.target.seeds} == {"1.2.3.4", "evilcorp.com"}
+ assert {e.data for e in scan.target.whitelist} == {"1.2.3.4", "evilcorp.com"}
blank_preset = Preset()
blank_preset = blank_preset.bake()
@@ -303,15 +303,15 @@ def test_preset_scope():
assert preset_whitelist_baked.whitelisted("1.2.3.4/28")
assert preset_whitelist_baked.whitelisted("1.2.3.4/24")
- assert set([e.data for e in preset_nowhitelist_baked.seeds]) == {"evilcorp.com"}
- assert set([e.data for e in preset_nowhitelist_baked.whitelist]) == {"evilcorp.com"}
- assert set([e.data for e in preset_whitelist_baked.seeds]) == {"evilcorp.org"}
- assert set([e.data for e in preset_whitelist_baked.whitelist]) == {"1.2.3.0/24", "http://evilcorp.net/"}
+ assert {e.data for e in preset_nowhitelist_baked.seeds} == {"evilcorp.com"}
+ assert {e.data for e in preset_nowhitelist_baked.whitelist} == {"evilcorp.com"}
+ assert {e.data for e in preset_whitelist_baked.seeds} == {"evilcorp.org"}
+ assert {e.data for e in preset_whitelist_baked.whitelist} == {"1.2.3.0/24", "http://evilcorp.net/"}
preset_nowhitelist.merge(preset_whitelist)
preset_nowhitelist_baked = preset_nowhitelist.bake()
- assert set([e.data for e in preset_nowhitelist_baked.seeds]) == {"evilcorp.com", "evilcorp.org"}
- assert set([e.data for e in preset_nowhitelist_baked.whitelist]) == {"1.2.3.0/24", "http://evilcorp.net/"}
+ assert {e.data for e in preset_nowhitelist_baked.seeds} == {"evilcorp.com", "evilcorp.org"}
+ assert {e.data for e in preset_nowhitelist_baked.whitelist} == {"1.2.3.0/24", "http://evilcorp.net/"}
assert "www.evilcorp.org" in preset_nowhitelist_baked.seeds
assert "www.evilcorp.com" in preset_nowhitelist_baked.seeds
assert "1.2.3.4" in preset_nowhitelist_baked.whitelist
@@ -325,8 +325,8 @@ def test_preset_scope():
preset_whitelist = Preset("evilcorp.org", whitelist=["1.2.3.4/24"])
preset_whitelist.merge(preset_nowhitelist)
preset_whitelist_baked = preset_whitelist.bake()
- assert set([e.data for e in preset_whitelist_baked.seeds]) == {"evilcorp.com", "evilcorp.org"}
- assert set([e.data for e in preset_whitelist_baked.whitelist]) == {"1.2.3.0/24"}
+ assert {e.data for e in preset_whitelist_baked.seeds} == {"evilcorp.com", "evilcorp.org"}
+ assert {e.data for e in preset_whitelist_baked.whitelist} == {"1.2.3.0/24"}
assert "www.evilcorp.org" in preset_whitelist_baked.seeds
assert "www.evilcorp.com"
in preset_whitelist_baked.seeds assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist @@ -342,16 +342,16 @@ def test_preset_scope(): preset_nowhitelist2 = Preset("evilcorp.de") preset_nowhitelist1_baked = preset_nowhitelist1.bake() preset_nowhitelist2_baked = preset_nowhitelist2.bake() - assert set([e.data for e in preset_nowhitelist1_baked.seeds]) == {"evilcorp.com"} - assert set([e.data for e in preset_nowhitelist2_baked.seeds]) == {"evilcorp.de"} - assert set([e.data for e in preset_nowhitelist1_baked.whitelist]) == {"evilcorp.com"} - assert set([e.data for e in preset_nowhitelist2_baked.whitelist]) == {"evilcorp.de"} + assert {e.data for e in preset_nowhitelist1_baked.seeds} == {"evilcorp.com"} + assert {e.data for e in preset_nowhitelist2_baked.seeds} == {"evilcorp.de"} + assert {e.data for e in preset_nowhitelist1_baked.whitelist} == {"evilcorp.com"} + assert {e.data for e in preset_nowhitelist2_baked.whitelist} == {"evilcorp.de"} preset_nowhitelist1.merge(preset_nowhitelist2) preset_nowhitelist1_baked = preset_nowhitelist1.bake() - assert set([e.data for e in preset_nowhitelist1_baked.seeds]) == {"evilcorp.com", "evilcorp.de"} - assert set([e.data for e in preset_nowhitelist2_baked.seeds]) == {"evilcorp.de"} - assert set([e.data for e in preset_nowhitelist1_baked.whitelist]) == {"evilcorp.com", "evilcorp.de"} - assert set([e.data for e in preset_nowhitelist2_baked.whitelist]) == {"evilcorp.de"} + assert {e.data for e in preset_nowhitelist1_baked.seeds} == {"evilcorp.com", "evilcorp.de"} + assert {e.data for e in preset_nowhitelist2_baked.seeds} == {"evilcorp.de"} + assert {e.data for e in preset_nowhitelist1_baked.whitelist} == {"evilcorp.com", "evilcorp.de"} + assert {e.data for e in preset_nowhitelist2_baked.whitelist} == {"evilcorp.de"} assert "www.evilcorp.com" in preset_nowhitelist1_baked.seeds assert "www.evilcorp.de" in preset_nowhitelist1_baked.seeds assert "www.evilcorp.com" in preset_nowhitelist1_baked.target.seeds @@ -370,10 +370,10 @@ def test_preset_scope(): preset_nowhitelist2.merge(preset_nowhitelist1) preset_nowhitelist1_baked = preset_nowhitelist1.bake() preset_nowhitelist2_baked = preset_nowhitelist2.bake() - assert set([e.data for e in preset_nowhitelist1_baked.seeds]) == {"evilcorp.com"} - assert set([e.data for e in preset_nowhitelist2_baked.seeds]) == {"evilcorp.com", "evilcorp.de"} - assert set([e.data for e in preset_nowhitelist1_baked.whitelist]) == {"evilcorp.com"} - assert set([e.data for e in preset_nowhitelist2_baked.whitelist]) == {"evilcorp.com", "evilcorp.de"} + assert {e.data for e in preset_nowhitelist1_baked.seeds} == {"evilcorp.com"} + assert {e.data for e in preset_nowhitelist2_baked.seeds} == {"evilcorp.com", "evilcorp.de"} + assert {e.data for e in preset_nowhitelist1_baked.whitelist} == {"evilcorp.com"} + assert {e.data for e in preset_nowhitelist2_baked.whitelist} == {"evilcorp.com", "evilcorp.de"} @pytest.mark.asyncio diff --git a/bbot/test/test_step_1/test_regexes.py b/bbot/test/test_step_1/test_regexes.py index 77ccc987e2..c8cb6d845d 100644 --- a/bbot/test/test_step_1/test_regexes.py +++ b/bbot/test/test_step_1/test_regexes.py @@ -91,7 +91,7 @@ def test_ip_regexes(): ip == "2001:db8::1/128" and event_type == "IP_RANGE" ), f"Event type for IP_ADDRESS {ip} was not properly detected" else: - matches = list(r.match(ip) for r in ip_address_regexes) + matches = [r.match(ip) for r in ip_address_regexes] assert any(matches), f"Good IP ADDRESS {ip} did not match regexes" @@ -138,7 +138,7 @@ def test_ip_range_regexes(): 
pytest.fail(f"BAD IP_RANGE: {bad_ip_range} raised unknown error: {e}: {traceback.format_exc()}") for good_ip_range in good_ip_ranges: - matches = list(r.match(good_ip_range) for r in ip_range_regexes) + matches = [r.match(good_ip_range) for r in ip_range_regexes] assert any(matches), f"Good IP_RANGE {good_ip_range} did not match regexes" @@ -191,7 +191,7 @@ def test_dns_name_regexes(): pytest.fail(f"BAD DNS NAME: {dns} raised unknown error: {e}") for dns in good_dns: - matches = list(r.match(dns) for r in dns_name_regexes) + matches = [r.match(dns) for r in dns_name_regexes] assert any(matches), f"Good DNS_NAME {dns} did not match regexes" event_type, _ = get_event_type(dns) if not event_type == "DNS_NAME": @@ -253,7 +253,7 @@ def test_open_port_regexes(): pytest.fail(f"BAD OPEN_TCP_PORT: {open_port} raised unknown error: {e}") for open_port in good_ports: - matches = list(r.match(open_port) for r in open_port_regexes) + matches = [r.match(open_port) for r in open_port_regexes] assert any(matches), f"Good OPEN_TCP_PORT {open_port} did not match regexes" event_type, _ = get_event_type(open_port) assert event_type == "OPEN_TCP_PORT" @@ -318,7 +318,7 @@ def test_url_regexes(): pytest.fail(f"BAD URL: {bad_url} raised unknown error: {e}: {traceback.format_exc()}") for good_url in good_urls: - matches = list(r.match(good_url) for r in url_regexes) + matches = [r.match(good_url) for r in url_regexes] assert any(matches), f"Good URL {good_url} did not match regexes" assert ( get_event_type(good_url)[0] == "URL_UNVERIFIED" diff --git a/bbot/test/test_step_1/test_scan.py b/bbot/test/test_step_1/test_scan.py index f5f8458262..5509914fdc 100644 --- a/bbot/test/test_step_1/test_scan.py +++ b/bbot/test/test_step_1/test_scan.py @@ -144,7 +144,7 @@ async def test_python_output_matches_json(bbot_scanner): assert len(events) == 5 scan_events = [e for e in events if e["type"] == "SCAN"] assert len(scan_events) == 2 - assert all([isinstance(e["data"]["status"], str) for e in scan_events]) + assert all(isinstance(e["data"]["status"], str) for e in scan_events) assert len([e for e in events if e["type"] == "DNS_NAME"]) == 1 assert len([e for e in events if e["type"] == "ORG_STUB"]) == 1 assert len([e for e in events if e["type"] == "IP_ADDRESS"]) == 1 diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 0513d6abed..26a2f74ff2 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -164,8 +164,8 @@ async def test_target(bbot_scanner): bbottarget = BBOTTarget("http://1.2.3.4:8443", "bob@evilcorp.com") assert bbottarget.seeds.hosts == {ip_network("1.2.3.4"), "evilcorp.com"} assert bbottarget.whitelist.hosts == {ip_network("1.2.3.4"), "evilcorp.com"} - assert set([e.data for e in bbottarget.seeds.events]) == {"http://1.2.3.4:8443/", "bob@evilcorp.com"} - assert set([e.data for e in bbottarget.whitelist.events]) == {"1.2.3.4", "evilcorp.com"} + assert {e.data for e in bbottarget.seeds.events} == {"http://1.2.3.4:8443/", "bob@evilcorp.com"} + assert {e.data for e in bbottarget.whitelist.events} == {"1.2.3.4", "evilcorp.com"} bbottarget1 = BBOTTarget("evilcorp.com", "evilcorp.net", whitelist=["1.2.3.4/24"], blacklist=["1.2.3.4"]) bbottarget2 = BBOTTarget("evilcorp.com", "evilcorp.net", whitelist=["1.2.3.0/24"], blacklist=["1.2.3.4"]) @@ -228,22 +228,22 @@ async def test_target(bbot_scanner): with pytest.raises(TypeError): assert list(bbottarget) == ["http://evilcorp.com:8080"] assert list(bbottarget.seeds) == ["http://evilcorp.com:8080"] 
- assert set([e.data for e in bbottarget.whitelist]) == {"evilcorp.net:443", "http://evilcorp.net:8080/"} - assert set([e.data for e in bbottarget.blacklist]) == {"http://evilcorp.org:8080/", "evilcorp.org:443"} + assert {e.data for e in bbottarget.whitelist} == {"evilcorp.net:443", "http://evilcorp.net:8080/"} + assert {e.data for e in bbottarget.blacklist} == {"http://evilcorp.org:8080/", "evilcorp.org:443"} # test org stub as target for org_target in ("ORG:evilcorp", "ORG_STUB:evilcorp"): scan = bbot_scanner(org_target) events = [e async for e in scan.async_start()] assert len(events) == 3 - assert set([e.type for e in events]) == {"SCAN", "ORG_STUB"} + assert {e.type for e in events} == {"SCAN", "ORG_STUB"} # test username as target for user_target in ("USER:vancerefrigeration", "USERNAME:vancerefrigeration"): scan = bbot_scanner(user_target) events = [e async for e in scan.async_start()] assert len(events) == 3 - assert set([e.type for e in events]) == {"SCAN", "USERNAME"} + assert {e.type for e in events} == {"SCAN", "USERNAME"} # verify hash values bbottarget = BBOTTarget( @@ -253,17 +253,17 @@ async def test_target(bbot_scanner): whitelist=["evilcorp.com", "bob@www.evilcorp.com", "evilcorp.net"], blacklist=["1.2.3.4", "4.3.2.1/24", "http://1.2.3.4", "bob@asdf.evilcorp.net"], ) - assert set([e.data for e in bbottarget.seeds.events]) == { + assert {e.data for e in bbottarget.seeds.events} == { "1.2.3.0/24", "http://www.evilcorp.net/", "bob@fdsa.evilcorp.net", } - assert set([e.data for e in bbottarget.whitelist.events]) == { + assert {e.data for e in bbottarget.whitelist.events} == { "evilcorp.com", "evilcorp.net", "bob@www.evilcorp.com", } - assert set([e.data for e in bbottarget.blacklist.events]) == { + assert {e.data for e in bbottarget.blacklist.events} == { "1.2.3.4", "4.3.2.0/24", "http://1.2.3.4/", @@ -332,7 +332,7 @@ async def test_target(bbot_scanner): assert event.host == "evilcorp.com" events = target.get("www.evilcorp.com", single=False) assert len(events) == 2 - assert set([e.data for e in events]) == {"http://evilcorp.com/", "evilcorp.com:443"} + assert {e.data for e in events} == {"http://evilcorp.com/", "evilcorp.com:443"} @pytest.mark.asyncio @@ -382,7 +382,7 @@ async def test_blacklist_regex(bbot_scanner, bbot_httpserver): # make sure URL is detected normally scan = bbot_scanner("http://127.0.0.1:8888/", presets=["spider"], config={"excavate": True}, debug=True) - assert set([r.pattern for r in scan.target.blacklist.blacklist_regexes]) == {r"/.*(sign|log)[_-]?out"} + assert {r.pattern for r in scan.target.blacklist.blacklist_regexes} == {r"/.*(sign|log)[_-]?out"} events = [e async for e in scan.async_start()] urls = [e.data for e in events if e.type == "URL"] assert len(urls) == 2 @@ -397,7 +397,7 @@ async def test_blacklist_regex(bbot_scanner, bbot_httpserver): debug=True, ) assert scan.target.blacklist.blacklist_regexes - assert set([r.pattern for r in scan.target.blacklist.blacklist_regexes]) == { + assert {r.pattern for r in scan.target.blacklist.blacklist_regexes} == { r"evil[0-9]{3}", r"/.*(sign|log)[_-]?out", } diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 0b3011d572..ee6494fbe9 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -29,7 +29,7 @@ def server_handler(request): urls = [f"{base_url}{i}" for i in range(num_urls)] responses = [r async for r in scan.helpers.request_batch(urls)] assert len(responses) == 100 - assert all([r[1].status_code == 200 and 
r[1].text.startswith(f"{r[0]}: ") for r in responses]) + assert all(r[1].status_code == 200 and r[1].text.startswith(f"{r[0]}: ") for r in responses) # request_batch w/ cancellation agen = scan.helpers.request_batch(urls) diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns.py b/bbot/test/test_step_2/module_tests/test_module_baddns.py index b21e0d1146..288af7ae79 100644 --- a/bbot/test/test_step_2/module_tests/test_module_baddns.py +++ b/bbot/test/test_step_2/module_tests/test_module_baddns.py @@ -31,9 +31,9 @@ async def setup_after_prep(self, module_test): module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) def check(self, module_test, events): - assert any([e.data == "baddns.azurewebsites.net" for e in events]), "CNAME detection failed" - assert any([e.type == "VULNERABILITY" for e in events]), "Failed to emit VULNERABILITY" - assert any(["baddns-cname" in e.tags for e in events]), "Failed to add baddns tag" + assert any(e.data == "baddns.azurewebsites.net" for e in events), "CNAME detection failed" + assert any(e.type == "VULNERABILITY" for e in events), "Failed to emit VULNERABILITY" + assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" class TestBaddns_cname_signature(BaseTestBaddns): @@ -60,8 +60,8 @@ def set_target(self, target): module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) def check(self, module_test, events): - assert any([e for e in events]) + assert any(e for e in events) assert any( - [e.type == "VULNERABILITY" and "bigcartel.com" in e.data["description"] for e in events] + e.type == "VULNERABILITY" and "bigcartel.com" in e.data["description"] for e in events ), "Failed to emit VULNERABILITY" - assert any(["baddns-cname" in e.tags for e in events]), "Failed to add baddns tag" + assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py b/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py index 596d3c89e7..77a86153c7 100644 --- a/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +++ b/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py @@ -54,11 +54,9 @@ def set_target(self, target): def check(self, module_test, events): assert any( - [ - e.type == "FINDING" + e.type == "FINDING" and "Possible [AWS Bucket Takeover Detection] via direct BadDNS analysis. 
Indicator: [[Words: The specified bucket does not exist | Condition: and | Part: body] Matchers-Condition: and] Trigger: [self] baddns Module: [CNAME]" in e.data["description"] for e in events - ] ), "Failed to emit FINDING" - assert any(["baddns-cname" in e.tags for e in events]), "Failed to add baddns tag" + assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" diff --git a/bbot/test/test_step_2/module_tests/test_module_c99.py b/bbot/test/test_step_2/module_tests/test_module_c99.py index 284b76e1e2..297337bfde 100644 --- a/bbot/test/test_step_2/module_tests/test_module_c99.py +++ b/bbot/test/test_step_2/module_tests/test_module_c99.py @@ -54,7 +54,7 @@ def check(self, module_test, events): assert module_test.module.errored == False # assert module_test.module._api_request_failures == 4 assert module_test.module.api_retries == 4 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com"} + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com"} assert self.url_count == { "https://api.c99.nl/randomnumber?key=6789&between=1,100&json": 1, "https://api.c99.nl/randomnumber?key=4321&between=1,100&json": 1, @@ -85,7 +85,7 @@ def check(self, module_test, events): assert module_test.module.errored == False assert module_test.module._api_request_failures == 8 assert module_test.module.api_retries == 4 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com", "evilcorp.com"} + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com", "evilcorp.com"} assert self.url_count == { "https://api.c99.nl/randomnumber?key=6789&between=1,100&json": 1, "https://api.c99.nl/randomnumber?key=4321&between=1,100&json": 1, @@ -109,7 +109,7 @@ def check(self, module_test, events): assert module_test.module.errored == False assert module_test.module._api_request_failures == 12 assert module_test.module.api_retries == 4 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == { + assert {e.data for e in events if e.type == "DNS_NAME"} == { "blacklanternsecurity.com", "evilcorp.com", "evilcorp.net", @@ -141,11 +141,11 @@ def check(self, module_test, events): assert module_test.module.errored == True assert module_test.module._api_request_failures == 13 assert module_test.module.api_retries == 4 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == { + assert {e.data for e in events if e.type == "DNS_NAME"} == { "blacklanternsecurity.com", "evilcorp.com", "evilcorp.net", "evilcorp.co.uk", } assert len(self.url_count) == 16 - assert all([v == 1 for v in self.url_count.values()]) + assert all(v == 1 for v in self.url_count.values()) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index bccbe3d730..364c8f9b5e 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -957,12 +957,12 @@ class TestExcavateRAWTEXT(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.set_expect_requests( - dict(uri="/"), - dict(response_data=''), + {"uri": "/"}, + {"response_data": ''}, ) module_test.set_expect_requests( - dict(uri="/Test_PDF"), - dict(response_data=self.pdf_data, headers={"Content-Type": "application/pdf"}), + {"uri": "/Test_PDF"}, + {"response_data": self.pdf_data, "headers": {"Content-Type": "application/pdf"}}, ) def check(self, module_test, events): diff --git 
a/bbot/test/test_step_2/module_tests/test_module_extractous.py b/bbot/test/test_step_2/module_tests/test_module_extractous.py index 9c47945e4a..27f3c95bf7 100644 --- a/bbot/test/test_step_2/module_tests/test_module_extractous.py +++ b/bbot/test/test_step_2/module_tests/test_module_extractous.py @@ -21,19 +21,19 @@ class TestExtractous(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.set_expect_requests( - dict(uri="/"), - dict(response_data=''), + {"uri": "/"}, + {"response_data": ''}, ) module_test.set_expect_requests( - dict(uri="/Test_PDF"), - dict(response_data=self.pdf_data, headers={"Content-Type": "application/pdf"}), + {"uri": "/Test_PDF"}, + {"response_data": self.pdf_data, "headers": {"Content-Type": "application/pdf"}}, ) module_test.set_expect_requests( - dict(uri="/Test_DOCX"), - dict( - response_data=self.docx_data, - headers={"Content-Type": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"}, - ), + {"uri": "/Test_DOCX"}, + { + "response_data": self.docx_data, + "headers": {"Content-Type": "application/vnd.openxmlformats-officedocument.wordprocessingml.document"}, + }, ) def check(self, module_test, events): diff --git a/bbot/test/test_step_2/module_tests/test_module_filedownload.py b/bbot/test/test_step_2/module_tests/test_module_filedownload.py index 0b949e9616..51d25988de 100644 --- a/bbot/test/test_step_2/module_tests/test_module_filedownload.py +++ b/bbot/test/test_step_2/module_tests/test_module_filedownload.py @@ -15,28 +15,28 @@ class TestFileDownload(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.set_expect_requests( - dict(uri="/"), - dict( - response_data='' - ), + {"uri": "/"}, + { + "response_data": '' + }, ) module_test.set_expect_requests( - dict(uri="/Test_File.txt"), - dict( - response_data="juicy stuff", - ), + {"uri": "/Test_File.txt"}, + { + "response_data": "juicy stuff", + }, ) module_test.set_expect_requests( - dict(uri="/Test_PDF"), - dict(response_data=self.pdf_data, headers={"Content-Type": "application/pdf"}), + {"uri": "/Test_PDF"}, + {"response_data": self.pdf_data, "headers": {"Content-Type": "application/pdf"}}, ) module_test.set_expect_requests( - dict(uri="/test.html"), - dict(response_data="", headers={"Content-Type": "text/html"}), + {"uri": "/test.html"}, + {"response_data": "", "headers": {"Content-Type": "text/html"}}, ) module_test.set_expect_requests( - dict(uri="/test2"), - dict(response_data="", headers={"Content-Type": "text/html"}), + {"uri": "/test2"}, + {"response_data": "", "headers": {"Content-Type": "text/html"}}, ) def check(self, module_test, events): diff --git a/bbot/test/test_step_2/module_tests/test_module_git_clone.py b/bbot/test/test_step_2/module_tests/test_module_git_clone.py index 15bc54fb37..d6a994402a 100644 --- a/bbot/test/test_step_2/module_tests/test_module_git_clone.py +++ b/bbot/test/test_step_2/module_tests/test_module_git_clone.py @@ -202,7 +202,7 @@ def check(self, module_test, events): ] assert 1 == len(filesystem_events), "Failed to git clone CODE_REPOSITORY" # make sure the binary blob isn't here - assert not any(["blob" in e.data for e in [e for e in events if e.type == "FILESYSTEM"]]) + assert not any("blob" in e.data for e in [e for e in events if e.type == "FILESYSTEM"]) filesystem_event = filesystem_events[0] folder = Path(filesystem_event.data["path"]) assert folder.is_dir(), "Destination folder doesn't exist" @@ -217,7 +217,7 @@ class TestGit_CloneWithBlob(TestGit_Clone): def check(self, module_test, events): 
filesystem_events = [e for e in events if e.type == "FILESYSTEM"] assert len(filesystem_events) == 1 - assert all(["blob" in e.data for e in filesystem_events]) + assert all("blob" in e.data for e in filesystem_events) filesystem_event = filesystem_events[0] blob = filesystem_event.data["blob"] tar_bytes = base64.b64decode(blob) diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index b6439dbbb1..2d6dc2cd8f 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -27,8 +27,8 @@ async def setup_after_prep(self, module_test): "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, } module_test.set_expect_requests(respond_args=respond_args) - request_args = dict(uri="/blacklanternsecurity") - respond_args = dict(response_data="""blacklanternsecurity github """ async def setup_after_prep(self, module_test): - request_args = dict(uri="/", headers={"test": "header"}) - respond_args = dict(response_data=self.html_without_login) + request_args = {"uri": "/", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_without_login} module_test.set_expect_requests(request_args, respond_args) - request_args = dict(uri="/url", headers={"test": "header"}) - respond_args = dict(response_data=self.html_with_login) + request_args = {"uri": "/url", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_with_login} module_test.set_expect_requests(request_args, respond_args) def check(self, module_test, events): @@ -124,8 +124,8 @@ def check(self, module_test, events): assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/test.aspx"]) assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/test.txt"]) - assert not any([e for e in events if "URL" in e.type and ".svg" in e.data]) - assert not any([e for e in events if "URL" in e.type and ".woff" in e.data]) + assert not any(e for e in events if "URL" in e.type and ".svg" in e.data) + assert not any(e for e in events if "URL" in e.type and ".woff" in e.data) class TestHTTPX_querystring_removed(ModuleTestBase): diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index d3712be5c0..37bfa3620f 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -33,11 +33,11 @@ class TestNewsletters(ModuleTestBase): """ async def setup_after_prep(self, module_test): - request_args = dict(uri="/found", headers={"test": "header"}) - respond_args = dict(response_data=self.html_with_newsletter) + request_args = {"uri": "/found", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_with_newsletter} module_test.set_expect_requests(request_args, respond_args) - request_args = dict(uri="/missing", headers={"test": "header"}) - respond_args = dict(response_data=self.html_without_newsletter) + request_args = {"uri": "/missing", "headers": {"test": "header"}} + respond_args = {"response_data": self.html_without_newsletter} module_test.set_expect_requests(request_args, respond_args) def check(self, module_test, events): diff --git a/bbot/test/test_step_2/module_tests/test_module_ntlm.py 
b/bbot/test/test_step_2/module_tests/test_module_ntlm.py index 790f2e0d22..1e79be7705 100644 --- a/bbot/test/test_step_2/module_tests/test_module_ntlm.py +++ b/bbot/test/test_step_2/module_tests/test_module_ntlm.py @@ -7,16 +7,16 @@ class TestNTLM(ModuleTestBase): config_overrides = {"modules": {"ntlm": {"try_all": True}}} async def setup_after_prep(self, module_test): - request_args = dict(uri="/", headers={"test": "header"}) + request_args = {"uri": "/", "headers": {"test": "header"}} module_test.set_expect_requests(request_args, {}) - request_args = dict( - uri="/oab/", headers={"Authorization": "NTLM TlRMTVNTUAABAAAAl4II4gAAAAAAAAAAAAAAAAAAAAAKAGFKAAAADw=="} - ) - respond_args = dict( - headers={ + request_args = { + "uri": "/oab/", "headers": {"Authorization": "NTLM TlRMTVNTUAABAAAAl4II4gAAAAAAAAAAAAAAAAAAAAAKAGFKAAAADw=="} + } + respond_args = { + "headers": { "WWW-Authenticate": "NTLM TlRMTVNTUAACAAAABgAGADgAAAAVgoni89aZT4Q0mH0AAAAAAAAAAHYAdgA+AAAABgGxHQAAAA9WAE4ATwACAAYAVgBOAE8AAQAKAEUAWABDADAAMQAEABIAdgBuAG8ALgBsAG8AYwBhAGwAAwAeAEUAWABDADAAMQAuAHYAbgBvAC4AbABvAGMAYQBsAAUAEgB2AG4AbwAuAGwAbwBjAGEAbAAHAAgAXxo0p/6L2QEAAAAA" } - ) + } module_test.set_expect_requests(request_args, respond_args) def check(self, module_test, events): diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py index 0e099e63d5..6c4ecda526 100644 --- a/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py @@ -28,7 +28,7 @@ async def setup_after_prep(self, module_test): module_test.monkeypatch.setattr( helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} ) - expect_args = dict(headers={"Cookie": "admincookie=AAAAAAAAAAAAAA"}) + expect_args = {"headers": {"Cookie": "admincookie=AAAAAAAAAAAAAA"}} respond_args = {"response_data": self.cookies_body_match} module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py index c2cdddffd7..9d04b9d22a 100644 --- a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py @@ -31,7 +31,7 @@ async def setup_after_prep(self, module_test): module_test.monkeypatch.setattr( helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} ) - expect_args = dict(headers={"tracestate": "AAAAAAAAAAAAAA"}) + expect_args = {"headers": {"tracestate": "AAAAAAAAAAAAAA"}} respond_args = {"response_data": self.headers_body_match} module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) @@ -112,7 +112,7 @@ async def setup_after_prep(self, module_test): module_test.monkeypatch.setattr( helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} ) - expect_args = dict(headers={"foo": "AAAAAAAAAAAAAA"}) + expect_args = {"headers": {"foo": "AAAAAAAAAAAAAA"}} respond_args = {"response_data": self.headers_body_match} module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) diff --git a/bbot/test/test_step_2/module_tests/test_module_portscan.py b/bbot/test/test_step_2/module_tests/test_module_portscan.py index f6f7a62a89..06a2fcef40 100644 --- a/bbot/test/test_step_2/module_tests/test_module_portscan.py +++ 
b/bbot/test/test_step_2/module_tests/test_module_portscan.py @@ -122,7 +122,7 @@ def check(self, module_test, events): assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "asdf.evilcorp.net:80"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "dummy.asdf.evilcorp.net:80"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "dummy.evilcorp.com:631"]) - assert not any([e for e in events if e.type == "OPEN_TCP_PORT" and e.host == "dummy.www.evilcorp.com"]) + assert not any(e for e in events if e.type == "OPEN_TCP_PORT" and e.host == "dummy.www.evilcorp.com") class TestPortscanPingFirst(TestPortscan): @@ -136,7 +136,7 @@ def check(self, module_test, events): assert self.ping_runs == 1 open_port_events = [e for e in events if e.type == "OPEN_TCP_PORT"] assert len(open_port_events) == 3 - assert set([e.data for e in open_port_events]) == {"8.8.8.8:443", "evilcorp.com:443", "www.evilcorp.com:443"} + assert {e.data for e in open_port_events} == {"8.8.8.8:443", "evilcorp.com:443", "www.evilcorp.com:443"} class TestPortscanPingOnly(TestPortscan): @@ -154,4 +154,4 @@ def check(self, module_test, events): assert len(open_port_events) == 0 ip_events = [e for e in events if e.type == "IP_ADDRESS"] assert len(ip_events) == 1 - assert set([e.data for e in ip_events]) == {"8.8.8.8"} + assert {e.data for e in ip_events} == {"8.8.8.8"} diff --git a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py index 2b3d3aaf0a..8b2a4e1ffa 100644 --- a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py +++ b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py @@ -48,7 +48,7 @@ def check(self, module_test, events): assert module_test.module.errored == False assert module_test.module._api_request_failures == 3 assert module_test.module.api_retries == 3 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com"} + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com"} assert self.url_count == { "https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result": 3, } @@ -62,7 +62,7 @@ def check(self, module_test, events): assert module_test.module.errored == False assert module_test.module._api_request_failures == 6 assert module_test.module.api_retries == 3 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com", "evilcorp.com"} + assert {e.data for e in events if e.type == "DNS_NAME"} == {"blacklanternsecurity.com", "evilcorp.com"} assert self.url_count == { "https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result": 3, "https://rapiddns.io/subdomain/evilcorp.com?full=1#result": 3, @@ -77,7 +77,7 @@ def check(self, module_test, events): assert module_test.module.errored == False assert module_test.module._api_request_failures == 9 assert module_test.module.api_retries == 3 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == { + assert {e.data for e in events if e.type == "DNS_NAME"} == { "blacklanternsecurity.com", "evilcorp.com", "evilcorp.net", @@ -97,7 +97,7 @@ def check(self, module_test, events): assert module_test.module.errored == True assert module_test.module._api_request_failures == 10 assert module_test.module.api_retries == 3 - assert set([e.data for e in events if e.type == "DNS_NAME"]) == { + assert {e.data for e in events if e.type == "DNS_NAME"} == { "blacklanternsecurity.com", "evilcorp.com", "evilcorp.net", 
diff --git a/bbot/test/test_step_2/module_tests/test_module_speculate.py b/bbot/test/test_step_2/module_tests/test_module_speculate.py index 8b6150919f..e407470346 100644 --- a/bbot/test/test_step_2/module_tests/test_module_speculate.py +++ b/bbot/test/test_step_2/module_tests/test_module_speculate.py @@ -62,10 +62,8 @@ def check(self, module_test, events): for e in module_test.scan.modules["dummy"].events: events_data.add(e.data) assert all( - [ - x in events_data + x in events_data for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") - ] ) @@ -79,8 +77,6 @@ def check(self, module_test, events): for e in module_test.scan.modules["dummy"].events: events_data.add(e.data) assert not any( - [ - x in events_data + x in events_data for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") - ] ) diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index 46798dd94e..ba923bc08a 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -1144,7 +1144,7 @@ def check(self, module_test, events): assert content == self.file_content, "File content doesn't match" filesystem_events = [e.parent for e in vuln_events] assert len(filesystem_events) == 4 - assert all([e.type == "FILESYSTEM" for e in filesystem_events]) + assert all(e.type == "FILESYSTEM" for e in filesystem_events) assert 1 == len( [ e @@ -1206,7 +1206,7 @@ def check(self, module_test, events): assert content == self.file_content, "File content doesn't match" filesystem_events = [e.parent for e in finding_events] assert len(filesystem_events) == 4 - assert all([e.type == "FILESYSTEM" for e in filesystem_events]) + assert all(e.type == "FILESYSTEM" for e in filesystem_events) assert 1 == len( [ e From 3c9117b85949b6e0f5fcc9612ef4a91a0f082dc3 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 23 Nov 2024 11:02:42 +0100 Subject: [PATCH 10/64] poetry remove ruff && poetry add --group=dev ruff --- poetry.lock | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9a3597dc42..509937d90a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3057,4 +3057,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "25e796bbef89b1f888b32a1adfbea0d7d4c4855b9b211c42529669ac3f66b5be" +content-hash = "b196b7db964114dcb29d17bdd833c24759a09a02404d2a385d677b807ec865f8" diff --git a/pyproject.toml b/pyproject.toml index d3136b8506..02a6d12d23 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,6 @@ httpx = "^0.27.0" puremagic = "^1.28" cloudcheck = "^6.0.0.602" radixtarget = "^2.0.0.50" -ruff = "^0.8.0" [tool.poetry.group.dev.dependencies] poetry-dynamic-versioning = ">=0.21.4,<1.5.0" @@ -73,6 +72,7 @@ pytest-asyncio = "0.24.0" uvicorn = "^0.32.0" fastapi = "^0.115.5" pytest-httpx = ">=0.33,<0.35" +ruff = "^0.8.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" From 1a4488f1a30a426b4515eefe90405612daf1350e Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Sun, 24 Nov 2024 02:52:51 +0000 Subject: [PATCH 11/64] [create-pull-request] automated change --- docs/scanning/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 
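The cleanup running through the patch above matches what ruff's flake8-comprehensions rules (e.g. C401, C408, C419) enforce: set() over a list comprehension becomes a set comprehension, dict() calls become dict literals, and list arguments to any()/all() become generator expressions, which short-circuit instead of building the whole list first. A minimal self-contained illustration (the data is made up for the example, not taken from the test suite):

    events_data = {"evilcorp.com:80", "evilcorp.com:443"}
    # generator expression: any() stops at the first match, no temporary list
    assert any(d.endswith(":443") for d in events_data)
    # set comprehension: builds the set directly instead of set([...])
    assert {d.split(":")[0] for d in events_data} == {"evilcorp.com"}
    # dict literal instead of a dict() call
    request_args = {"uri": "/", "headers": {"test": "header"}}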
From 3c9117b85949b6e0f5fcc9612ef4a91a0f082dc3 Mon Sep 17 00:00:00 2001
From: Christian Clauss
Date: Sat, 23 Nov 2024 11:02:42 +0100
Subject: [PATCH 10/64] poetry remove ruff && poetry add --group=dev ruff

---
 poetry.lock    | 2 +-
 pyproject.toml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 9a3597dc42..509937d90a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -3057,4 +3057,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "25e796bbef89b1f888b32a1adfbea0d7d4c4855b9b211c42529669ac3f66b5be"
+content-hash = "b196b7db964114dcb29d17bdd833c24759a09a02404d2a385d677b807ec865f8"
diff --git a/pyproject.toml b/pyproject.toml
index d3136b8506..02a6d12d23 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,7 +56,6 @@ httpx = "^0.27.0"
 puremagic = "^1.28"
 cloudcheck = "^6.0.0.602"
 radixtarget = "^2.0.0.50"
-ruff = "^0.8.0"

 [tool.poetry.group.dev.dependencies]
 poetry-dynamic-versioning = ">=0.21.4,<1.5.0"
@@ -73,6 +72,7 @@ pytest-asyncio = "0.24.0"
 uvicorn = "^0.32.0"
 fastapi = "^0.115.5"
 pytest-httpx = ">=0.33,<0.35"
+ruff = "^0.8.0"

 [tool.poetry.group.docs.dependencies]
 mkdocs = "^1.5.2"

From 1a4488f1a30a426b4515eefe90405612daf1350e Mon Sep 17 00:00:00 2001
From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com>
Date: Sun, 24 Nov 2024 02:52:51 +0000
Subject: [PATCH 11/64] [create-pull-request] automated change

---
 docs/scanning/configuration.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md
index 14c2465d9f..c95df5785b 100644
--- a/docs/scanning/configuration.md
+++ b/docs/scanning/configuration.md
@@ -436,7 +436,7 @@ Many modules accept their own configuration options. These options have the abil
 | modules.trufflehog.config | str | File path or URL to YAML trufflehog config | |
 | modules.trufflehog.deleted_forks | bool | Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours. | False |
 | modules.trufflehog.only_verified | bool | Only report credentials that have been verified | True |
-| modules.trufflehog.version | str | trufflehog version | 3.84.0 |
+| modules.trufflehog.version | str | trufflehog version | 3.84.1 |
 | modules.urlscan.urls | bool | Emit URLs in addition to DNS_NAMEs | False |
 | modules.virustotal.api_key | str | VirusTotal API Key | |
 | modules.wayback.garbage_threshold | int | Dedupe similar urls if they are in a group of this size or higher (lower values == less garbage data) | 10 |

From cbd046a23679316ee3bb19ce850bb37d1089c805 Mon Sep 17 00:00:00 2001
From: Christian Clauss
Date: Sun, 24 Nov 2024 16:47:29 +0100
Subject: [PATCH 12/64] Fix typos discovered by codespell

---
 bbot/core/helpers/bloom.py                                   | 4 ++--
 bbot/core/helpers/diff.py                                    | 6 +++---
 bbot/core/helpers/regexes.py                                 | 4 ++--
 bbot/core/helpers/web/web.py                                 | 2 +-
 bbot/modules/deadly/nuclei.py                                | 2 +-
 bbot/modules/dnsbimi.py                                      | 2 +-
 bbot/modules/docker_pull.py                                  | 2 +-
 bbot/modules/output/asset_inventory.py                       | 2 +-
 bbot/modules/portscan.py                                     | 2 +-
 bbot/modules/telerik.py                                      | 2 +-
 bbot/test/test_step_1/test_dns.py                            | 2 +-
 bbot/test/test_step_2/module_tests/test_module_bypass403.py  | 2 +-
 .../test_step_2/module_tests/test_module_dnsdumpster.py     | 4 ++--
 bbot/test/test_step_2/module_tests/test_module_hunterio.py   | 2 +-
 bbot/test/test_step_2/module_tests/test_module_telerik.py    | 2 +-
 docs/dev/tests.md                                            | 2 +-
 docs/modules/internal_modules.md                             | 6 +++---
 17 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/bbot/core/helpers/bloom.py b/bbot/core/helpers/bloom.py
index 4a3508edff..62d2caa38f 100644
--- a/bbot/core/helpers/bloom.py
+++ b/bbot/core/helpers/bloom.py
@@ -5,14 +5,14 @@

 class BloomFilter:
     """
-    Simple bloom filter implementation capable of rougly 400K lookups/s.
+    Simple bloom filter implementation capable of roughly 400K lookups/s.

     BBOT uses bloom filters in scenarios like DNS brute-forcing, where it's useful to keep track
     of which mutations have been tried so far.

     A 100-megabyte bloom filter (800M bits) can store 10M entries with a .01% false-positive rate.
     A python hash is 36 bytes. So if you wanted to store these in a set, this would take up
-    36 * 10M * 2 (key+value) == 720 megabytes. So we save rougly 7 times the space.
+    36 * 10M * 2 (key+value) == 720 megabytes. So we save roughly 7 times the space.
     """

     def __init__(self, size=8000000):
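The figures in this docstring can be sanity-checked against the standard bloom-filter false-positive formula, p ~ (1 - e^(-kn/m))^k for m bits, n entries, and k hash functions, with the optimum at k = (m/n) * ln 2. A quick check of the 800M-bit / 10M-entry case; the k values below are assumptions for illustration, since the module's actual hash count isn't shown in this hunk:

    import math

    m = 800_000_000  # bits (100 megabytes)
    n = 10_000_000   # stored entries
    for k in (1, 5, 13):  # hypothetical hash-function counts
        p = (1 - math.exp(-k * n / m)) ** k
        print(f"k={k}: false-positive rate ~ {p:.2e}")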
diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py
index cb79c6039b..ea7ca3a864 100644
--- a/bbot/core/helpers/diff.py
+++ b/bbot/core/helpers/diff.py
@@ -94,7 +94,7 @@ async def _baseline(self):
                 baseline_1_json = xmltodict.parse(baseline_1.text)
                 baseline_2_json = xmltodict.parse(baseline_2.text)
             except ExpatError:
-                log.debug(f"Cant HTML parse for {self.baseline_url}. Switching to text parsing as a backup")
+                log.debug(f"Can't HTML parse for {self.baseline_url}. Switching to text parsing as a backup")
                 baseline_1_json = baseline_1.text.split("\n")
                 baseline_2_json = baseline_2.text.split("\n")

@@ -203,7 +203,7 @@ async def compare(
         )

         if subject_response is None:
-            # this can be caused by a WAF not liking the header, so we really arent interested in it
+            # this can be caused by a WAF not liking the header, so we really aren't interested in it
             return (True, "403", reflection, subject_response)

         if check_reflection:
@@ -225,7 +225,7 @@ async def compare(
                 subject_json = xmltodict.parse(subject_response.text)

             except ExpatError:
-                log.debug(f"Cant HTML parse for {subject.split('?')[0]}. Switching to text parsing as a backup")
+                log.debug(f"Can't HTML parse for {subject.split('?')[0]}. Switching to text parsing as a backup")
                 subject_json = subject_response.text.split("\n")

         diff_reasons = []
diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py
index 85f9dcac7e..21dcabe545 100644
--- a/bbot/core/helpers/regexes.py
+++ b/bbot/core/helpers/regexes.py
@@ -23,7 +23,7 @@
 _ipv4_regex = r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}"
 ipv4_regex = re.compile(_ipv4_regex, re.I)

-# IPv6 is complicated, so we have accomodate alternative patterns,
+# IPv6 is complicated, so we have accommodate alternative patterns,
 # :(:[A-F0-9]{1,4}){1,7} == ::1, ::ffff:1
 # ([A-F0-9]{1,4}:){1,7}: == 2001::, 2001:db8::, 2001:db8:0:1:2:3::
 # ([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4}) == 2001::1, 2001:db8::1, 2001:db8:0:1:2:3::1
@@ -118,7 +118,7 @@

 scan_name_regex = re.compile(r"[a-z]{3,20}_[a-z]{3,20}")

-# For use with excavate paramaters extractor
+# For use with excavate parameters extractor
 input_tag_regex = re.compile(
     r"<input[^>]+?name=[\"\']?([\.$\w]+)[\"\']?(?:[^>]*?value=[\"\']([=+\/\w]*)[\"\'])?[^>]*>"
 )
diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py
index 2865732ce5..6c712e1e3e 100644
--- a/bbot/core/helpers/web/web.py
+++ b/bbot/core/helpers/web/web.py
@@ -451,7 +451,7 @@ def beautifulsoup(
         Perform an html parse of the 'markup' argument and return a soup instance

         >>> email_type = soup.find(type="email")
-        Searches the soup instance for all occurances of the passed in argument
+        Searches the soup instance for all occurrences of the passed in argument
         """
         try:
             soup = BeautifulSoup(
diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py
index c525b5a8f8..5de7aa465d 100644
--- a/bbot/modules/deadly/nuclei.py
+++ b/bbot/modules/deadly/nuclei.py
@@ -227,7 +227,7 @@ async def execute_nuclei(self, nuclei_input):
                 command.append(option)

         if self.scan.config.get("interactsh_disable") is True:
-            self.info("Disbling interactsh in accordance with global settings")
+            self.info("Disabling interactsh in accordance with global settings")
             command.append("-no-interactsh")

         if self.mode == "technology":
diff --git a/bbot/modules/dnsbimi.py b/bbot/modules/dnsbimi.py
index f780b15517..00c69f8c20 100644
--- a/bbot/modules/dnsbimi.py
+++ b/bbot/modules/dnsbimi.py
@@ -19,7 +19,7 @@
 #
 # NOTE: .svg file extensions are filtered from inclusion by default, modify "url_extension_blacklist" appropriately if you want the .svg image to be considered for download.
 #
-# NOTE: use the "filedownload" module if you to download .svg and .pem files. .pem will be downloaded by defaut, .svg will require a customised configuration for that module.
+# NOTE: use the "filedownload" module if you to download .svg and .pem files. .pem will be downloaded by default, .svg will require a customised configuration for that module.
 #
 # The domain portion of any URL_UNVERIFIED's will be extracted by the various internal modules if .svg is not filtered.
 #
diff --git a/bbot/modules/docker_pull.py b/bbot/modules/docker_pull.py
index 65594736b4..22888bce3c 100644
--- a/bbot/modules/docker_pull.py
+++ b/bbot/modules/docker_pull.py
@@ -102,7 +102,7 @@ async def get_tags(self, registry, repository):
         url = f"{registry}/v2/{repository}/tags/list"
         r = await self.docker_api_request(url)
         if r is None or r.status_code != 200:
-            self.log.warning(f"Could not retrieve all tags for {repository} asuming tag:latest only.")
+            self.log.warning(f"Could not retrieve all tags for {repository} assuming tag:latest only.")
             self.log.debug(f"Response: {r}")
             return ["latest"]
         try:
diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py
index fa520e125c..ce94a56ea9 100644
--- a/bbot/modules/output/asset_inventory.py
+++ b/bbot/modules/output/asset_inventory.py
@@ -259,7 +259,7 @@ def absorb_csv_row(self, row):
             # ips
             self.ip_addresses = set(_make_ip_list(row.get("IP (External)", "")))
             self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", ""))))
-            # If user reqests a recheck dont import the following fields to force them to be rechecked
+            # If user requests a recheck dont import the following fields to force them to be rechecked
             if not self.recheck:
                 # ports
                 ports = [i.strip() for i in row.get("Open Ports", "").split(",")]
diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py
index 37124ce879..a04bec2fdb 100644
--- a/bbot/modules/portscan.py
+++ b/bbot/modules/portscan.py
@@ -104,7 +104,7 @@ async def setup(self):
         return True

     async def handle_batch(self, *events):
-        # on our first run, we automatically include all our intial scan targets
+        # on our first run, we automatically include all our initial scan targets
         if not self.scanned_initial_targets:
             self.scanned_initial_targets = True
             events = set(events)
diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py
index 478eb305e1..c11510d6cc 100644
--- a/bbot/modules/telerik.py
+++ b/bbot/modules/telerik.py
@@ -173,7 +173,7 @@ async def handle_event(self, event):
         webresource = "Telerik.Web.UI.WebResource.axd?type=rau"
         result, _ = await self.test_detector(event.data, webresource)
         if result:
-            if "RadAsyncUpload handler is registered succesfully" in result.text:
+            if "RadAsyncUpload handler is registered successfully" in result.text:
                 self.debug("Detected Telerik instance (Telerik.Web.UI.WebResource.axd?type=rau)")

                 probe_data = {
diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py
index fa239c01fc..dbbfe68d65 100644
--- a/bbot/test/test_step_1/test_dns.py
+++ b/bbot/test/test_step_1/test_dns.py
@@ -733,7 +733,7 @@ async def handle_event(self, event):
     dummy_module = DummyModule(scan)
     scan.modules["dummy_module"] = dummy_module
     events = [e async for e in scan.async_start()]
-    # no raw records should be ouptut
+    # no raw records should be output
     assert 0 == len([e for e in events if e.type == "RAW_DNS_RECORD"])
     # but they should still make it to the module
     assert 1 == len(
diff --git a/bbot/test/test_step_2/module_tests/test_module_bypass403.py b/bbot/test/test_step_2/module_tests/test_module_bypass403.py
index 8990f0d5ec..57c1a8bedf 100644
--- a/bbot/test/test_step_2/module_tests/test_module_bypass403.py
+++ b/bbot/test/test_step_2/module_tests/test_module_bypass403.py
@@ -26,7 +26,7 @@ class TestBypass403_collapsethreshold(ModuleTestBase):

     async def setup_after_prep(self, module_test):
         respond_args = {"response_data": "alive"}
{"response_data": "alive"} - # some of these wont work outside of the module because of the complex logic. This doesn't matter, we just need to get more alerts than the threshold. + # some of these won't work outside of the module because of the complex logic. This doesn't matter, we just need to get more alerts than the threshold. query_payloads = [ "%09", diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py index 447f8baba1..714a610c0c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py @@ -6,12 +6,12 @@ async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( url="https://dnsdumpster.com", headers={"Set-Cookie": "csrftoken=asdf"}, - content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
\n
\n\n
\n
\n\n
\n
\n \n
\n
\n\n\n\n\n
\n
\n

dns recon & research, find & lookup dns records

\n

\n

\n
\n
\n
\n\n\n\n\n
\n
\n
\n
\n
Loading...
\n
\n
\n
\n

\n\n
\n\n
\n\n

DNSdumpster.com is a FREE domain research tool that can discover hosts related to a domain. Finding visible hosts from the attackers perspective is an important part of the security assessment process.

\n\n
\n\n

this is a project

\n\n\n
\n
\n
\n

\n

Open Source Intelligence for Networks

\n
\n
\n
\n
\n \n \n \n

Attack

\n

The ability to quickly identify the attack surface is essential. Whether you are penetration testing or chasing bug bounties.

\n
\n
\n \n \n \n

Defend

\n

Network defenders benefit from passive reconnaissance in a number of ways. With analysis informing information security strategy.

\n
\n
\n \n \n \n

Learn

\n

Understanding network based OSINT helps information technologists to better operate, assess and manage the network.

\n
\n
\n
\n\n\n\n\n
\n\n \n

Map an organizations attack surface with a virtual dumpster dive* of the DNS records associated with the target organization.

\n

*DUMPSTER DIVING: The practice of sifting refuse from an office or technical installation to extract confidential data, especially security-compromising information.

\n
\n\n\n
\n\n

Frequently Asked Questions

\n\n

How can I take my security assessments to the next level?

\n\n

The company behind DNSDumpster is hackertarget.com where we provide online hosted access to trusted open source security vulnerability scanners and network intelligence tools.

Save time and headaches by incorporating our attack surface discovery into your vulnerability assessment process.

HackerTarget.com | Online Security Testing and Open Source Intelligence

\n\n

What data does DNSDumpster use?

\n\n

No brute force subdomain enumeration is used as is common in dns recon tools that enumerate subdomains. We use open source intelligence resources to query for related domain data. It is then compiled into an actionable resource for both attackers and defenders of Internet facing systems.

\n

More than a simple DNS lookup this tool will discover those hard to find sub-domains and web hosts. The search relies on data from our crawls of the Alexa Top 1 Million sites, Search Engines, Common Crawl, Certificate Transparency, Max Mind, Team Cymru, Shodan and scans.io.

\n\n

I have hit the host limit, do you have a PRO option?

\n\n

Over at hackertarget.com there\'s a tool we call domain profiler. This compiles data similiar to DNSDumpster; with additional data discovery. Queries available are based on the membership plan with the number of results (subdomains) being unlimited. With a STARTER membership you have access to the domain profiler tool for 12 months. Once the years membership expires you will revert to BASIC member status, however access to Domain Profiler and Basic Nmap scans continue. The BASIC access does not expire.

\n\n

What are some other resources and tools for learning more?

\n\n

There are some great open source recon frameworks that have been developed over the past couple of years. In addition tools such as Metasploit and Nmap include various modules for enumerating DNS. Check our Getting Started with Footprinting for more information.

\n\n
\n\n\n
\n\n\n
\n
\n
\n\n
\n
\n
\n\n\n
\n

dnsdumpster@gmail.com

\n
\n\n\n\n\n
\n
\n
\n\n \n \n
\n
Low volume Updates and News
\n
\n
\n
\n\n \n\n
\n
\n
\n
\n\n
\n\n\n
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n\n\n\n\n\n\n \n\n', + content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
\n
\n\n
\n
\n\n
\n
\n \n
\n
\n\n\n\n\n
\n
\n

dns recon & research, find & lookup dns records

\n

\n

\n
\n
\n
\n\n\n\n\n
\n
\n
\n
\n
Loading...
\n
\n
\n
\n

\n\n
\n\n
\n\n

DNSdumpster.com is a FREE domain research tool that can discover hosts related to a domain. Finding visible hosts from the attackers perspective is an important part of the security assessment process.

\n\n
\n\n

this is a project

\n\n\n
\n
\n
\n

\n

Open Source Intelligence for Networks

\n
\n
\n
\n
\n \n \n \n

Attack

\n

The ability to quickly identify the attack surface is essential. Whether you are penetration testing or chasing bug bounties.

\n
\n
\n \n \n \n

Defend

\n

Network defenders benefit from passive reconnaissance in a number of ways. With analysis informing information security strategy.

\n
\n
\n \n \n \n

Learn

\n

Understanding network based OSINT helps information technologists to better operate, assess and manage the network.

\n
\n
\n
\n\n\n\n\n
\n\n \n

Map an organizations attack surface with a virtual dumpster dive* of the DNS records associated with the target organization.

\n

*DUMPSTER DIVING: The practice of sifting refuse from an office or technical installation to extract confidential data, especially security-compromising information.

\n
\n\n\n
\n\n

Frequently Asked Questions

\n\n

How can I take my security assessments to the next level?

\n\n

The company behind DNSDumpster is hackertarget.com where we provide online hosted access to trusted open source security vulnerability scanners and network intelligence tools.

Save time and headaches by incorporating our attack surface discovery into your vulnerability assessment process.

HackerTarget.com | Online Security Testing and Open Source Intelligence

\n\n

What data does DNSDumpster use?

\n\n

No brute force subdomain enumeration is used as is common in dns recon tools that enumerate subdomains. We use open source intelligence resources to query for related domain data. It is then compiled into an actionable resource for both attackers and defenders of Internet facing systems.

\n

More than a simple DNS lookup this tool will discover those hard to find sub-domains and web hosts. The search relies on data from our crawls of the Alexa Top 1 Million sites, Search Engines, Common Crawl, Certificate Transparency, Max Mind, Team Cymru, Shodan and scans.io.

\n\n

I have hit the host limit, do you have a PRO option?

\n\n

Over at hackertarget.com there\'s a tool we call domain profiler. This compiles data similar to DNSDumpster; with additional data discovery. Queries available are based on the membership plan with the number of results (subdomains) being unlimited. With a STARTER membership you have access to the domain profiler tool for 12 months. Once the years membership expires you will revert to BASIC member status, however access to Domain Profiler and Basic Nmap scans continue. The BASIC access does not expire.

\n\n

What are some other resources and tools for learning more?

\n\n

There are some great open source recon frameworks that have been developed over the past couple of years. In addition tools such as Metasploit and Nmap include various modules for enumerating DNS. Check our Getting Started with Footprinting for more information.

\n\n
\n\n\n
\n\n\n
\n
\n
\n\n
\n
\n
\n\n\n
\n

dnsdumpster@gmail.com

\n
\n\n\n\n\n
\n
\n
\n\n \n \n
\n
Low volume Updates and News
\n
\n
\n
\n\n \n\n
\n
\n
\n
\n\n
\n\n\n
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n\n\n\n\n\n\n \n\n', ) module_test.httpx_mock.add_response( url="https://dnsdumpster.com/", method="POST", - content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
\n
\n\n
\n
\n\n
\n
\n \n
\n
\n\n\n\n\n
\n
\n

dns recon & research, find & lookup dns records

\n

\n

\n
\n
\n
\n\n\n\n\n
\n
\n
\n
\n
Loading...
\n
\n
\n
\n

\n\n
\n\n

Showing results for blacklanternsecurity.com

\n
\n
\n
\n
\n

Hosting (IP block owners)

\n
\n
\n

GeoIP of Host Locations

\n
\n
\n
\n\n

DNS Servers

\n
\n \n \n \n \n \n \n
ns01.domaincontrol.com.
\n\n\n \n
\n
\n
97.74.100.1
ns01.domaincontrol.com
GODADDY-DNS
United States
ns02.domaincontrol.com.
\n\n\n \n
\n
\n
173.201.68.1
ns02.domaincontrol.com
GODADDY-DNS
United States
\n
\n\n

MX Records ** This is where email for the domain goes...

\n
\n \n \n \n \n
asdf.blacklanternsecurity.com.mail.protection.outlook.com.
\n\n\n
\n
\n
104.47.55.138
mail-bn8nam120138.inbound.protection.outlook.com
MICROSOFT-CORP-MSN-AS-BLOCK
United States
\n
\n\n

TXT Records ** Find more hosts in Sender Policy Framework (SPF) configurations

\n
\n \n\n\n\n\n\n\n\n\n\n
"MS=ms26206678"
"v=spf1 ip4:50.240.76.25 include:spf.protection.outlook.com -all"
"google-site-verification=O_PoQFTGJ_hZ9LqfNT9OEc0KPFERKHQ_1t1m0YTx_1E"
"google-site-verification=7XKUMxJSTHBSzdvT7gH47jLRjNAS76nrEfXmzhR_DO4"
\n
\n\n\n

Host Records (A) ** this data may not be current as it uses a static database (updated monthly)

\n
\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n
HTTP: \n GitHub.com\n\n\n\n\n\n\n\n\n
HTTP TECH: \n varnish\n\n\n\n
185.199.108.153
cdn-185-199-108-153.github.com
FASTLY
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n
SSH: \n SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.3\n\n\n\n\n\n\n\n
143.244.156.80
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n
HTTP: \n Apache/2.4.29 (Ubuntu)\n\n\n\n\n\n\n\n\n
HTTP TECH: \n Ubuntu
Apache,2.4.29
\n\n\n\n
64.227.8.231
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
192.34.56.157
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
192.241.216.208
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
167.71.95.71
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
157.245.247.197
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
\n
\n\n\n\n\n\n
\n

Mapping the domain ** click for full size image

\n

\n\n

\n
\n\n
\n\n

DNSdumpster.com is a FREE domain research tool that can discover hosts related to a domain. Finding visible hosts from the attackers perspective is an important part of the security assessment process.

\n\n
\n\n

this is a project

\n\n\n
\n
\n
\n

\n

Open Source Intelligence for Networks

\n
\n
\n
\n
\n \n \n \n

Attack

\n

The ability to quickly identify the attack surface is essential. Whether you are penetration testing or chasing bug bounties.

\n
\n
\n \n \n \n

Defend

\n

Network defenders benefit from passive reconnaissance in a number of ways. With analysis informing information security strategy.

\n
\n
\n \n \n \n

Learn

\n

Understanding network based OSINT helps information technologists to better operate, assess and manage the network.

\n
\n
\n
\n\n\n\n\n
\n\n \n

Map an organizations attack surface with a virtual dumpster dive* of the DNS records associated with the target organization.

\n

*DUMPSTER DIVING: The practice of sifting refuse from an office or technical installation to extract confidential data, especially security-compromising information.

\n
\n\n\n
\n\n

Frequently Asked Questions

\n\n

How can I take my security assessments to the next level?

\n\n

The company behind DNSDumpster is hackertarget.com where we provide online hosted access to trusted open source security vulnerability scanners and network intelligence tools.

Save time and headaches by incorporating our attack surface discovery into your vulnerability assessment process.

HackerTarget.com | Online Security Testing and Open Source Intelligence

\n\n

What data does DNSDumpster use?

\n\n

No brute force subdomain enumeration is used as is common in dns recon tools that enumerate subdomains. We use open source intelligence resources to query for related domain data. It is then compiled into an actionable resource for both attackers and defenders of Internet facing systems.

\n

More than a simple DNS lookup this tool will discover those hard to find sub-domains and web hosts. The search relies on data from our crawls of the Alexa Top 1 Million sites, Search Engines, Common Crawl, Certificate Transparency, Max Mind, Team Cymru, Shodan and scans.io.

\n\n

I have hit the host limit, do you have a PRO option?

\n\n

Over at hackertarget.com there\'s a tool we call domain profiler. This compiles data similiar to DNSDumpster; with additional data discovery. Queries available are based on the membership plan with the number of results (subdomains) being unlimited. With a STARTER membership you have access to the domain profiler tool for 12 months. Once the years membership expires you will revert to BASIC member status, however access to Domain Profiler and Basic Nmap scans continue. The BASIC access does not expire.

\n\n

What are some other resources and tools for learning more?

\n\n

There are some great open source recon frameworks that have been developed over the past couple of years. In addition tools such as Metasploit and Nmap include various modules for enumerating DNS. Check our Getting Started with Footprinting for more information.

\n\n
\n\n\n\n\n\n\n
\n\n\n
\n
\n
\n\n
\n
\n
\n\n\n
\n

dnsdumpster@gmail.com

\n
\n\n\n\n\n
\n
\n
\n\n \n \n
\n
Low volume Updates and News
\n
\n
\n
\n\n \n\n
\n
\n
\n
\n\n
\n\n\n
\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n \n \n \n\n \n\n\n\n\n\n\n\n\n\n\n\n\n \n\n', + content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
\n
\n\n
\n
\n\n
\n
\n \n
\n
\n\n\n\n\n
\n
\n

dns recon & research, find & lookup dns records

\n

\n

\n
\n
\n
\n\n\n\n\n
\n
\n
\n
\n
Loading...
\n
\n
\n
\n

\n\n
\n\n

Showing results for blacklanternsecurity.com

\n
\n
\n
\n
\n

Hosting (IP block owners)

\n
\n
\n

GeoIP of Host Locations

\n
\n
\n
\n\n

DNS Servers

\n
\n \n \n \n \n \n \n
ns01.domaincontrol.com.
\n\n\n \n
\n
\n
97.74.100.1
ns01.domaincontrol.com
GODADDY-DNS
United States
ns02.domaincontrol.com.
\n\n\n \n
\n
\n
173.201.68.1
ns02.domaincontrol.com
GODADDY-DNS
United States
\n
\n\n

MX Records ** This is where email for the domain goes...

\n
\n \n \n \n \n
asdf.blacklanternsecurity.com.mail.protection.outlook.com.
\n\n\n
\n
\n
104.47.55.138
mail-bn8nam120138.inbound.protection.outlook.com
MICROSOFT-CORP-MSN-AS-BLOCK
United States
\n
\n\n

TXT Records ** Find more hosts in Sender Policy Framework (SPF) configurations

\n
\n \n\n\n\n\n\n\n\n\n\n
"MS=ms26206678"
"v=spf1 ip4:50.240.76.25 include:spf.protection.outlook.com -all"
"google-site-verification=O_PoQFTGJ_hZ9LqfNT9OEc0KPFERKHQ_1t1m0YTx_1E"
"google-site-verification=7XKUMxJSTHBSzdvT7gH47jLRjNAS76nrEfXmzhR_DO4"
\n
\n\n\n

Host Records (A) ** this data may not be current as it uses a static database (updated monthly)

blacklanternsecurity.com
HTTP: GitHub.com
HTTP TECH: varnish
185.199.108.153
cdn-185-199-108-153.github.com
FASTLY
United States

asdf.blacklanternsecurity.com
SSH: SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.3
143.244.156.80
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States

asdf.blacklanternsecurity.com
HTTP: Apache/2.4.29 (Ubuntu)
HTTP TECH: Ubuntu
Apache,2.4.29
64.227.8.231
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States

asdf.blacklanternsecurity.com
192.34.56.157
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States

asdf.blacklanternsecurity.com
192.241.216.208
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States

asdf.blacklanternsecurity.com
167.71.95.71
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States

asdf.blacklanternsecurity.com
157.245.247.197
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States

Mapping the domain ** click for full size image

DNSdumpster.com is a FREE domain research tool that can discover hosts related to a domain. Finding visible hosts from the attackers perspective is an important part of the security assessment process.

this is a project

Open Source Intelligence for Networks

Attack

The ability to quickly identify the attack surface is essential. Whether you are penetration testing or chasing bug bounties.

Defend

Network defenders benefit from passive reconnaissance in a number of ways. With analysis informing information security strategy.

Learn

Understanding network based OSINT helps information technologists to better operate, assess and manage the network.

Map an organizations attack surface with a virtual dumpster dive* of the DNS records associated with the target organization.

*DUMPSTER DIVING: The practice of sifting refuse from an office or technical installation to extract confidential data, especially security-compromising information.

Frequently Asked Questions

How can I take my security assessments to the next level?

The company behind DNSDumpster is hackertarget.com where we provide online hosted access to trusted open source security vulnerability scanners and network intelligence tools.

Save time and headaches by incorporating our attack surface discovery into your vulnerability assessment process.

HackerTarget.com | Online Security Testing and Open Source Intelligence

What data does DNSDumpster use?

No brute force subdomain enumeration is used as is common in dns recon tools that enumerate subdomains. We use open source intelligence resources to query for related domain data. It is then compiled into an actionable resource for both attackers and defenders of Internet facing systems.

More than a simple DNS lookup this tool will discover those hard to find sub-domains and web hosts. The search relies on data from our crawls of the Alexa Top 1 Million sites, Search Engines, Common Crawl, Certificate Transparency, Max Mind, Team Cymru, Shodan and scans.io.

I have hit the host limit, do you have a PRO option?

Over at hackertarget.com there\'s a tool we call domain profiler. This compiles data similar to DNSDumpster; with additional data discovery. Queries available are based on the membership plan with the number of results (subdomains) being unlimited. With a STARTER membership you have access to the domain profiler tool for 12 months. Once the years membership expires you will revert to BASIC member status, however access to Domain Profiler and Basic Nmap scans continue. The BASIC access does not expire.

What are some other resources and tools for learning more?

There are some great open source recon frameworks that have been developed over the past couple of years. In addition tools such as Metasploit and Nmap include various modules for enumerating DNS. Check our Getting Started with Footprinting for more information.

dnsdumpster@gmail.com

Low volume Updates and News
',
        )

    def check(self, module_test, events):

diff --git a/bbot/test/test_step_2/module_tests/test_module_hunterio.py b/bbot/test/test_step_2/module_tests/test_module_hunterio.py
index 263f304a38..ecd71957ab 100644
--- a/bbot/test/test_step_2/module_tests/test_module_hunterio.py
+++ b/bbot/test/test_step_2/module_tests/test_module_hunterio.py
@@ -17,7 +17,7 @@ async def setup_before_prep(self, module_test):
                 "reset_date": "1917-05-23",
                 "team_id": 1234,
                 "calls": {
-                    "_deprecation_notice": "Sums the searches and the verifications, giving an unprecise look of the available requests",
+                    "_deprecation_notice": "Sums the searches and the verifications, giving an imprecise look of the available requests",
                     "used": 999,
                     "available": 2000,
                 },
diff --git a/bbot/test/test_step_2/module_tests/test_module_telerik.py b/bbot/test/test_step_2/module_tests/test_module_telerik.py
index 98c511f2ab..21c4d2b86b 100644
--- a/bbot/test/test_step_2/module_tests/test_module_telerik.py
+++ b/bbot/test/test_step_2/module_tests/test_module_telerik.py
@@ -11,7 +11,7 @@ async def setup_before_prep(self, module_test):
         # Simulate Telerik.Web.UI.WebResource.axd?type=rau detection
         expect_args = {"method": "GET", "uri": "/Telerik.Web.UI.WebResource.axd", "query_string": "type=rau"}
         respond_args = {
-            "response_data": '{ "message" : "RadAsyncUpload handler is registered succesfully, however, it may not be accessed directly." }'
+            "response_data": '{ "message" : "RadAsyncUpload handler is registered successfully, however, it may not be accessed directly." }'
         }
         module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args)
diff --git a/docs/dev/tests.md b/docs/dev/tests.md
index 1bcf2970a3..4381981812 100644
--- a/docs/dev/tests.md
+++ b/docs/dev/tests.md
@@ -15,7 +15,7 @@ poetry run ruff check
 # format code with ruff
 poetry run ruff format

-# run all tests with pytest (takes rougly 30 minutes)
+# run all tests with pytest (takes roughly 30 minutes)
 poetry run pytest
 ```
diff --git a/docs/modules/internal_modules.md b/docs/modules/internal_modules.md
index 1a22ebb1e0..1e49575160 100644
--- a/docs/modules/internal_modules.md
+++ b/docs/modules/internal_modules.md
@@ -31,7 +31,7 @@ The cloud module looks at events and tries to determine if they are associated w

 ### dns

-The DNS internal module controls the basic DNS resoultion the BBOT performs, and all of the supporting machinery like wildcard detection, etc.
+The DNS internal module controls the basic DNS resolution the BBOT performs, and all of the supporting machinery like wildcard detection, etc.

 ### excavate

@@ -60,7 +60,7 @@ Scans for verbose error messages in HTTP responses and raw text data. By identif
 The CSP extraction capability focuses on extracting domains from Content-Security-Policy headers. By analyzing these headers, BBOT can identify additional domains which can get fed back into the scan.

 #### Serialization Detection
-Serialized objects are a common source of serious security vulnerablities. Excavate aims to detect those used in Java, .NET, and PHP applications.
+Serialized objects are a common source of serious security vulnerabilities. Excavate aims to detect those used in Java, .NET, and PHP applications.

 #### Functionality Detection
 Looks for specific web functionalities such as file upload fields and WSDL URLs. By identifying these elements, BBOT can pinpoint areas of the application that may require further scrutiny for security vulnerabilities.
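The serialization detection described above typically keys on well-known magic values rather than anything BBOT-specific. A hedged sketch of the general technique (the patterns and helper below are illustrative assumptions, not excavate's actual YARA rules):

```python
import re

# Classic markers of serialized objects in web traffic (illustrative only):
#   Java:  stream magic AC ED 00 05, which base64-encodes to "rO0AB..."
#   .NET:  BinaryFormatter payloads base64-encode to "AAEAAAD/////..."
#   PHP:   serialize() output such as O:4:"User":... or a:2:{...}
SERIALIZATION_PATTERNS = {
    "java": re.compile(r"rO0AB[A-Za-z0-9+/=]+"),
    "dotnet": re.compile(r"AAEAAAD/////[A-Za-z0-9+/=]+"),
    "php": re.compile(r"[OaCs]:\d+:"),
}


def find_serialized_objects(text):
    """Return (language, match) pairs for suspected serialized objects."""
    return [
        (language, match.group())
        for language, pattern in SERIALIZATION_PATTERNS.items()
        for match in pattern.finditer(text)
    ]


# e.g. a ViewState-style parameter in an HTTP response body:
print(find_serialized_objects('value="rO0ABXNyABFqYXZhLnV0aWwuSGFzaE1hcA=="'))
# [('java', 'rO0ABXNyABFqYXZhLnV0aWwuSGFzaE1hcA==')]
```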
@@ -70,7 +70,7 @@ The non-HTTP scheme detection capability extracts URLs with non-HTTP schemes, su #### Custom Yara Rules -Excavate supports the use of custom YARA rules, which wil be added to the other rules before the scan start. For more info, view this. +Excavate supports the use of custom YARA rules, which will be added to the other rules before the scan start. For more info, view this. ### speculate From fa3e629b9c8a105c850f35c034c24b90f890228f Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sun, 24 Nov 2024 16:59:09 +0100 Subject: [PATCH 13/64] pyproject.toml Add [tool.codespell] config --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 02a6d12d23..cfdc26f305 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -95,6 +95,10 @@ asyncio_default_fixture_loop_scope = "function" requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] build-backend = "poetry_dynamic_versioning.backend" +[tool.codespell] +ignore-words-list = "bu,cna,couldn,dialin,nd,ned,thirdparty" +skip = "./docs/javascripts/vega*.js,./bbot/wordlists/*" + [tool.ruff] line-length = 119 format.exclude = ["bbot/test/test_step_1/test_manager_*"] From 052b242ba47630e3513657c6fae7cb0ef2c6e366 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sun, 24 Nov 2024 19:43:47 +0100 Subject: [PATCH 14/64] version_updater.yml: Update GitHub Actions --- .github/workflows/version_updater.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/version_updater.yml b/.github/workflows/version_updater.yml index bb149820cf..81f4490514 100644 --- a/.github/workflows/version_updater.yml +++ b/.github/workflows/version_updater.yml @@ -9,13 +9,13 @@ jobs: update-nuclei-version: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: dev fetch-depth: 0 token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.x' - name: Install dependencies @@ -44,7 +44,7 @@ jobs: run: "sed -i '0,/\"version\": \".*\",/ s/\"version\": \".*\",/\"version\": \"${{ env.latest_version }}\",/g' bbot/modules/deadly/nuclei.py" - name: Create pull request to update the version if: steps.update-version.outcome == 'success' - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v7 with: token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} commit-message: "Update nuclei" @@ -61,13 +61,13 @@ jobs: update-trufflehog-version: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: dev fetch-depth: 0 token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.x' - name: Install dependencies @@ -96,7 +96,7 @@ jobs: run: "sed -i '0,/\"version\": \".*\",/ s/\"version\": \".*\",/\"version\": \"${{ env.latest_version }}\",/g' bbot/modules/trufflehog.py" - name: Create pull request to update the version if: steps.update-version.outcome == 'success' - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v7 with: token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }} commit-message: "Update trufflehog" @@ -109,4 +109,4 @@ jobs: branch: "update-trufflehog" committer: blsaccess author: blsaccess - assignees: "TheTechromancer" \ No newline at end of file + assignees: "TheTechromancer" From 1f47ce18a3c3ab344e188545fbd353cc918168cb Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 04:35:18 +0000 Subject: [PATCH 15/64] Bump uvicorn from 0.32.0 to 0.32.1 Bumps [uvicorn](https://github.com/encode/uvicorn) from 0.32.0 to 0.32.1. - [Release notes](https://github.com/encode/uvicorn/releases) - [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md) - [Commits](https://github.com/encode/uvicorn/compare/0.32.0...0.32.1) --- updated-dependencies: - dependency-name: uvicorn dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 509937d90a..559a783d83 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -2720,13 +2720,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.32.0" +version = "0.32.1" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, - {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, + {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"}, + {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"}, ] [package.dependencies] @@ -2735,7 +2735,7 @@ h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "verspec" From 78be54105741db14417dc5863adefa50e2d896c2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 04:36:01 +0000 Subject: [PATCH 16/64] Bump pydantic from 2.9.2 to 2.10.1 Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.9.2 to 2.10.1. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.9.2...v2.10.1) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 206 +++++++++++++++++++++++++++------------------------- 1 file changed, 107 insertions(+), 99 deletions(-) diff --git a/poetry.lock b/poetry.lock index 509937d90a..63db70ca8a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -1674,22 +1674,19 @@ files = [ [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.1-py3-none-any.whl", hash = "sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e"}, + {file = "pydantic-2.10.1.tar.gz", hash = "sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" -typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, -] +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1697,100 +1694,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = 
"pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = 
"sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = 
"pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = 
"pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = 
"pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] From 916104e7e4f1128fafe07081a098659e3fbab136 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 04:36:22 +0000 Subject: [PATCH 17/64] Bump pytest-rerunfailures from 14.0 to 15.0 Bumps [pytest-rerunfailures](https://github.com/pytest-dev/pytest-rerunfailures) from 14.0 to 15.0. - [Changelog](https://github.com/pytest-dev/pytest-rerunfailures/blob/master/CHANGES.rst) - [Commits](https://github.com/pytest-dev/pytest-rerunfailures/compare/14.0...15.0) --- updated-dependencies: - dependency-name: pytest-rerunfailures dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- poetry.lock | 14 +++++++------- pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 509937d90a..872d3df763 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -1969,18 +1969,18 @@ testing = ["pytest-asyncio (==0.24.*)", "pytest-cov (==5.*)"] [[package]] name = "pytest-rerunfailures" -version = "14.0" +version = "15.0" description = "pytest plugin to re-run tests to eliminate flaky failures" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest-rerunfailures-14.0.tar.gz", hash = "sha256:4a400bcbcd3c7a4ad151ab8afac123d90eca3abe27f98725dc4d9702887d2e92"}, - {file = "pytest_rerunfailures-14.0-py3-none-any.whl", hash = "sha256:4197bdd2eaeffdbf50b5ea6e7236f47ff0e44d1def8dae08e409f536d84e7b32"}, + {file = "pytest-rerunfailures-15.0.tar.gz", hash = "sha256:2d9ac7baf59f4c13ac730b47f6fa80e755d1ba0581da45ce30b72fb3542b4474"}, + {file = "pytest_rerunfailures-15.0-py3-none-any.whl", hash = "sha256:dd150c4795c229ef44320adc9a0c0532c51b78bb7a6843a8c53556b9a611df1a"}, ] [package.dependencies] packaging = ">=17.1" -pytest = ">=7.2" +pytest = ">=7.4,<8.2.2 || >8.2.2" [[package]] name = "pytest-timeout" @@ -3057,4 +3057,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "b196b7db964114dcb29d17bdd833c24759a09a02404d2a385d677b807ec865f8" +content-hash = "cbcaf165db6acc5621f55d4b730e2d1e5d8cfc76db841bc433cae9bc4801eed8" diff --git a/pyproject.toml b/pyproject.toml index 02a6d12d23..7c8e9b99e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ werkzeug = ">=2.3.4,<4.0.0" pytest-env = ">=0.8.2,<1.2.0" pre-commit = ">=3.4,<5.0" pytest-cov = ">=5,<7" -pytest-rerunfailures = "^14.0" +pytest-rerunfailures = ">=14,<16" pytest-timeout = "^2.3.1" pytest-httpserver = "^1.0.11" pytest = "^8.3.1" From 81e8429ffb13a1f2c33e5d4f8e86dc717bf9e546 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 04:36:44 +0000 Subject: [PATCH 18/64] Bump mkdocs-material from 9.5.44 to 9.5.45 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.44 to 9.5.45. 
- [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.44...9.5.45) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 509937d90a..7e7af48bb0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -1214,13 +1214,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.44" +version = "9.5.45" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.44-py3-none-any.whl", hash = "sha256:47015f9c167d58a5ff5e682da37441fc4d66a1c79334bfc08d774763cacf69ca"}, - {file = "mkdocs_material-9.5.44.tar.gz", hash = "sha256:f3a6c968e524166b3f3ed1fb97d3ed3e0091183b0545cedf7156a2a6804c56c0"}, + {file = "mkdocs_material-9.5.45-py3-none-any.whl", hash = "sha256:a9be237cfd0be14be75f40f1726d83aa3a81ce44808dc3594d47a7a592f44547"}, + {file = "mkdocs_material-9.5.45.tar.gz", hash = "sha256:286489cf0beca4a129d91d59d6417419c63bceed1ce5cd0ec1fc7e1ebffb8189"}, ] [package.dependencies] From a1f367a9975529ab3f4cc83462ef97784d8e36cb Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 23 Nov 2024 15:58:06 +0100 Subject: [PATCH 19/64] git add .pre-commit-config.yaml --- .gitattributes | 2 +- .gitmodules | 2 +- .pre-commit-config.yaml | 48 +++++++++++ bbot/defaults.yml | 2 +- bbot/modules/internal/cloudcheck.py | 4 +- bbot/modules/internal/dnsresolve.py | 4 +- bbot/modules/report/asn.py | 9 +- bbot/presets/kitchen-sink.yml | 2 - bbot/presets/web/dotnet-audit.yml | 1 - bbot/scanner/preset/args.py | 4 +- bbot/scanner/preset/preset.py | 2 +- bbot/test/test_step_1/test__module__tests.py | 1 - bbot/test/test_step_1/test_bbot_fastapi.py | 3 - bbot/test/test_step_1/test_bloom_filter.py | 1 - bbot/test/test_step_1/test_dns.py | 3 - bbot/test/test_step_1/test_engine.py | 2 - bbot/test/test_step_1/test_events.py | 2 - bbot/test/test_step_1/test_helpers.py | 1 - bbot/test/test_step_1/test_presets.py | 7 +- bbot/test/test_step_1/test_target.py | 1 - bbot/test/test_step_1/test_web.py | 2 - .../module_tests/test_module_baddns_direct.py | 6 +- .../module_tests/test_module_excavate.py | 22 ++--- .../module_tests/test_module_gowitness.py | 4 +- .../module_tests/test_module_newsletters.py | 20 ++--- .../module_tests/test_module_ntlm.py | 3 +- .../module_tests/test_module_pgp.py | 4 +- .../module_tests/test_module_smuggler.py | 26 +++--- .../module_tests/test_module_speculate.py | 4 +- .../module_tests/test_module_viewdns.py | 2 +- bbot/wordlists/devops_mutations.txt | 2 +- bbot/wordlists/ffuf_shortname_candidates.txt | 2 +- bbot/wordlists/nameservers.txt | 2 +- bbot/wordlists/paramminer_headers.txt | 2 +- bbot/wordlists/paramminer_parameters.txt | 2 +- ...aft-small-extensions-lowercase_CLEANED.txt | 2 +- bbot/wordlists/valid_url_schemes.txt | 2 +- docs/data/chord_graph/entities.json | 2 +- docs/data/chord_graph/rels.json | 2 +- docs/dev/helpers/index.md | 2 +- docs/javascripts/tablesort.min.js | 2 +- 
docs/modules/custom_yara_rules.md | 6 +- docs/modules/internal_modules.md | 6 +- docs/modules/nuclei.md | 10 +-- docs/release_history.md | 2 +- docs/scanning/configuration.md | 2 +- docs/scanning/index.md | 2 +- docs/scanning/output.md | 8 +- docs/scanning/presets.md | 2 +- docs/scanning/presets_list.md | 86 +++++++++---------- mkdocs.yml | 4 +- 51 files changed, 188 insertions(+), 156 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.gitattributes b/.gitattributes index 49edcb7119..00bf2637dc 100644 --- a/.gitattributes +++ b/.gitattributes @@ -5,4 +5,4 @@ *.txt text eol=lf *.json text eol=lf *.md text eol=lf -*.sh text eol=lf \ No newline at end of file +*.sh text eol=lf diff --git a/.gitmodules b/.gitmodules index 0033a29676..c85f090f5f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,4 +1,4 @@ [submodule "bbot/modules/playground"] path = bbot/modules/playground url = https://github.com/blacklanternsecurity/bbot-module-playground - branch = main \ No newline at end of file + branch = main diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..d6643f2ad3 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,48 @@ +# Learn more about this config here: https://pre-commit.com/ + +# To enable these pre-commit hooks run: +# `pipx install pre-commit` or `brew install pre-commit` +# Then in the project root directory run `pre-commit install` + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-added-large-files + - id: check-ast + - id: check-builtin-literals + - id: check-byte-order-marker + - id: check-case-conflict + # - id: check-docstring-first + # - id: check-executables-have-shebangs + - id: check-json + - id: check-merge-conflict + # - id: check-shebang-scripts-are-executable + - id: check-symlinks + - id: check-toml + - id: check-vcs-permalinks + - id: check-xml + # - id: check-yaml + - id: debug-statements + - id: destroyed-symlinks + # - id: detect-private-key + - id: end-of-file-fixer + - id: file-contents-sorter + - id: fix-byte-order-marker + - id: forbid-new-submodules + - id: forbid-submodules + - id: mixed-line-ending + - id: requirements-txt-fixer + - id: sort-simple-yaml + - id: trailing-whitespace + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.0 + hooks: + - id: ruff + - id: ruff-format + + - repo: https://github.com/abravalheri/validate-pyproject + rev: v0.23 + hooks: + - id: validate-pyproject diff --git a/bbot/defaults.yml b/bbot/defaults.yml index 63f5f7e68b..61638595a0 100644 --- a/bbot/defaults.yml +++ b/bbot/defaults.yml @@ -74,7 +74,7 @@ dns: web: # HTTP proxy - http_proxy: + http_proxy: # Web user-agent user_agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.2151.97 # Set the maximum number of HTTP links that can be followed in a row (0 == no spidering allowed) diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 685d67f9d7..86b6130d71 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -57,7 +57,9 @@ async def handle_event(self, event, **kwargs): for provider in self.helpers.cloud.providers.values(): provider_name = provider.name.lower() base_kwargs = { - "parent": event, "tags": [f"{provider.provider_type}-{provider_name}"], "_provider": provider_name + "parent": event, + "tags": [f"{provider.provider_type}-{provider_name}"], + "_provider": provider_name, } # loop 
through the provider's regex signatures, if any for event_type, sigs in provider.signatures.items(): diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 15facec564..bdca0ea5c3 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -306,9 +306,7 @@ def get_dns_parent(self, event): @property def emit_raw_records(self): if self._emit_raw_records is None: - watching_raw_records = any( - "RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values() - ) + watching_raw_records = any("RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values()) omitted_event_types = self.scan.config.get("omit_event_types", []) omit_raw_records = "RAW_DNS_RECORD" in omitted_event_types self._emit_raw_records = watching_raw_records or not omit_raw_records diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index 771e4b4f7f..3b3c488d15 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -207,7 +207,14 @@ async def get_asn_bgpview(self, ip): return False asns_tried.add(asn) asns.append( - {"asn": asn, "subnet": subnet, "name": name, "description": description, "country": country, "emails": emails} + { + "asn": asn, + "subnet": subnet, + "name": name, + "description": description, + "country": country, + "emails": emails, + } ) if not asns: self.debug(f'No results for "{ip}"') diff --git a/bbot/presets/kitchen-sink.yml b/bbot/presets/kitchen-sink.yml index 43057bf44a..073f480bb2 100644 --- a/bbot/presets/kitchen-sink.yml +++ b/bbot/presets/kitchen-sink.yml @@ -16,5 +16,3 @@ config: modules: baddns: enable_references: True - - diff --git a/bbot/presets/web/dotnet-audit.yml b/bbot/presets/web/dotnet-audit.yml index bbc5e201e0..b1cd8e9cac 100644 --- a/bbot/presets/web/dotnet-audit.yml +++ b/bbot/presets/web/dotnet-audit.yml @@ -19,4 +19,3 @@ config: extensions: asp,aspx,ashx,asmx,ascx telerik: exploit_RAU_crypto: True - diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index b4294710fa..d7b55d50c9 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -175,7 +175,9 @@ def preset_from_args(self): def create_parser(self, *args, **kwargs): kwargs.update( { - "description": "Bighuge BLS OSINT Tool", "formatter_class": argparse.RawTextHelpFormatter, "epilog": self.epilog + "description": "Bighuge BLS OSINT Tool", + "formatter_class": argparse.RawTextHelpFormatter, + "epilog": self.epilog, } ) p = argparse.ArgumentParser(*args, **kwargs) diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py index 9e67f2c803..b275cc1f72 100644 --- a/bbot/scanner/preset/preset.py +++ b/bbot/scanner/preset/preset.py @@ -967,7 +967,7 @@ def presets_table(self, include_modules=True): header = ["Preset", "Category", "Description", "# Modules"] if include_modules: header.append("Modules") - for (loaded_preset, category, preset_path, original_file) in self.all_presets.values(): + for loaded_preset, category, preset_path, original_file in self.all_presets.values(): loaded_preset = loaded_preset.bake() num_modules = f"{len(loaded_preset.scan_modules):,}" row = [loaded_preset.name, category, loaded_preset.description, num_modules] diff --git a/bbot/test/test_step_1/test__module__tests.py b/bbot/test/test_step_1/test__module__tests.py index 791e58f58a..e50f67a910 100644 --- a/bbot/test/test_step_1/test__module__tests.py +++ b/bbot/test/test_step_1/test__module__tests.py @@ -15,7 +15,6 @@ def test__module__tests(): - preset = Preset() # 
make sure each module has a .py file diff --git a/bbot/test/test_step_1/test_bbot_fastapi.py b/bbot/test/test_step_1/test_bbot_fastapi.py index add7ad099a..1136963a3d 100644 --- a/bbot/test/test_step_1/test_bbot_fastapi.py +++ b/bbot/test/test_step_1/test_bbot_fastapi.py @@ -17,7 +17,6 @@ def run_bbot_multiprocess(queue): def test_bbot_multiprocess(bbot_httpserver): - bbot_httpserver.expect_request("/").respond_with_data("test@blacklanternsecurity.com") queue = multiprocessing.Queue() @@ -32,12 +31,10 @@ def test_bbot_multiprocess(bbot_httpserver): def test_bbot_fastapi(bbot_httpserver): - bbot_httpserver.expect_request("/").respond_with_data("test@blacklanternsecurity.com") fastapi_process = start_fastapi_server() try: - # wait for the server to start with a timeout of 60 seconds start_time = time.time() while True: diff --git a/bbot/test/test_step_1/test_bloom_filter.py b/bbot/test/test_step_1/test_bloom_filter.py index 22ec4db323..f954bfbc6e 100644 --- a/bbot/test/test_step_1/test_bloom_filter.py +++ b/bbot/test/test_step_1/test_bloom_filter.py @@ -6,7 +6,6 @@ @pytest.mark.asyncio async def test_bloom_filter(): - def generate_random_strings(n, length=10): """Generate a list of n random strings.""" return ["".join(random.choices(string.ascii_letters + string.digits, k=length)) for _ in range(n)] diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index dbbfe68d65..c032b44e48 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -185,7 +185,6 @@ async def test_dns_resolution(bbot_scanner): @pytest.mark.asyncio async def test_wildcards(bbot_scanner): - scan = bbot_scanner("1.1.1.1") helpers = scan.helpers @@ -634,7 +633,6 @@ def custom_lookup(query, rdtype): @pytest.mark.asyncio async def test_wildcard_deduplication(bbot_scanner): - custom_lookup = """ def custom_lookup(query, rdtype): if rdtype == "TXT" and query.strip(".").endswith("evilcorp.com"): @@ -670,7 +668,6 @@ async def handle_event(self, event): @pytest.mark.asyncio async def test_dns_raw_records(bbot_scanner): - from bbot.modules.base import BaseModule class DummyModule(BaseModule): diff --git a/bbot/test/test_step_1/test_engine.py b/bbot/test/test_step_1/test_engine.py index dbb21246f2..653c3dcd6c 100644 --- a/bbot/test/test_step_1/test_engine.py +++ b/bbot/test/test_step_1/test_engine.py @@ -14,7 +14,6 @@ async def test_engine(): return_errored = False class TestEngineServer(EngineServer): - CMDS = { 0: "return_thing", 1: "yield_stuff", @@ -54,7 +53,6 @@ async def yield_stuff(self, n): raise class TestEngineClient(EngineClient): - SERVER_CLASS = TestEngineServer async def return_thing(self, n): diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 39be4d704b..195f08ea89 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -9,7 +9,6 @@ @pytest.mark.asyncio async def test_events(events, helpers): - scan = Scanner() await scan._prep() @@ -617,7 +616,6 @@ async def test_events(events, helpers): @pytest.mark.asyncio async def test_event_discovery_context(): - from bbot.modules.base import BaseModule scan = Scanner("evilcorp.com") diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 16b0dc9ec5..2eb67cd13d 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -857,7 +857,6 @@ def test_liststring_invalidfnchars(helpers): # test parameter validation @pytest.mark.asyncio async def 
test_parameter_validation(helpers): - getparam_valid_params = { "name", "age", diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py index 73fdcf23a5..5b1564f12c 100644 --- a/bbot/test/test_step_1/test_presets.py +++ b/bbot/test/test_step_1/test_presets.py @@ -16,7 +16,7 @@ def test_preset_descriptions(): # ensure very preset has a description preset = Preset() - for (loaded_preset, category, preset_path, original_filename) in preset.all_presets.values(): + for loaded_preset, category, preset_path, original_filename in preset.all_presets.values(): assert ( loaded_preset.description ), f'Preset "{loaded_preset.name}" at {original_filename} does not have a description.' @@ -68,7 +68,6 @@ def test_core(): def test_preset_yaml(clean_default_config): - import yaml preset1 = Preset( @@ -171,7 +170,6 @@ def test_preset_cache(): def test_preset_scope(): - # test target merging scan = Scanner("1.2.3.4", preset=Preset.from_dict({"target": ["evilcorp.com"]})) assert {str(h) for h in scan.preset.target.seeds.hosts} == {"1.2.3.4/32", "evilcorp.com"} @@ -378,7 +376,6 @@ def test_preset_scope(): @pytest.mark.asyncio async def test_preset_logging(): - scan = Scanner() # test individual verbosity levels @@ -711,7 +708,6 @@ class TestModule5(BaseModule): def test_preset_include(): - # test recursive preset inclusion custom_preset_dir_1 = bbot_test_dir / "custom_preset_dir" @@ -883,7 +879,6 @@ def test_preset_module_disablement(clean_default_config): def test_preset_require_exclude(): - def get_module_flags(p): for m in p.scan_modules: preloaded = p.preloaded_module(m) diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 3c9a9832b5..8f2a6bf91f 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -337,7 +337,6 @@ async def test_target(bbot_scanner): @pytest.mark.asyncio async def test_blacklist_regex(bbot_scanner, bbot_httpserver): - from bbot.scanner.target import ScanBlacklist blacklist = ScanBlacklist("evilcorp.com") diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index dc1f50339e..e07ed3d7d4 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -6,7 +6,6 @@ @pytest.mark.asyncio async def test_web_engine(bbot_scanner, bbot_httpserver, httpx_mock): - from werkzeug.wrappers import Response def server_handler(request): @@ -134,7 +133,6 @@ def server_handler(request): @pytest.mark.asyncio async def test_web_helpers(bbot_scanner, bbot_httpserver, httpx_mock): - # json conversion scan = bbot_scanner("evilcorp.com") url = "http://www.evilcorp.com/json_test?a=b" diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py b/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py index 77a86153c7..b2b49717c8 100644 --- a/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py +++ b/bbot/test/test_step_2/module_tests/test_module_baddns_direct.py @@ -55,8 +55,8 @@ def set_target(self, target): def check(self, module_test, events): assert any( e.type == "FINDING" - and "Possible [AWS Bucket Takeover Detection] via direct BadDNS analysis. Indicator: [[Words: The specified bucket does not exist | Condition: and | Part: body] Matchers-Condition: and] Trigger: [self] baddns Module: [CNAME]" - in e.data["description"] - for e in events + and "Possible [AWS Bucket Takeover Detection] via direct BadDNS analysis. 
Indicator: [[Words: The specified bucket does not exist | Condition: and | Part: body] Matchers-Condition: and] Trigger: [self] baddns Module: [CNAME]" + in e.data["description"] + for e in events ), "Failed to emit FINDING" assert any("baddns-cname" in e.tags for e in events), "Failed to add baddns tag" diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index f5f774e380..a2ccf97613 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -895,7 +895,7 @@ class TestExcavateRAWTEXT(ModuleTestBase): /Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] >> /Rotate 0 /Trans << ->> +>> /Type /Page >> endobj @@ -906,7 +906,7 @@ class TestExcavateRAWTEXT(ModuleTestBase): endobj 5 0 obj << -/Author (anonymous) /CreationDate (D:20240807182842+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240807182842+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) +/Author (anonymous) /CreationDate (D:20240807182842+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240807182842+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) /Subject (unspecified) /Title (untitled) /Trapped /False >> endobj @@ -924,17 +924,17 @@ class TestExcavateRAWTEXT(ModuleTestBase): endobj xref 0 8 -0000000000 65535 f -0000000073 00000 n -0000000104 00000 n -0000000211 00000 n -0000000414 00000 n -0000000482 00000 n -0000000778 00000 n -0000000837 00000 n +0000000000 65535 f +0000000073 00000 n +0000000104 00000 n +0000000211 00000 n +0000000414 00000 n +0000000482 00000 n +0000000778 00000 n +0000000837 00000 n trailer << -/ID +/ID [<3c7340500fa2fe72523c5e6f07511599><3c7340500fa2fe72523c5e6f07511599>] % ReportLab generated PDF document -- digest (http://www.reportlab.com) diff --git a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index 2d6dc2cd8f..6090fbb1d6 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -101,6 +101,4 @@ class TestGoWitnessWithBlob(TestGowitness): def check(self, module_test, events): webscreenshots = [e for e in events if e.type == "WEBSCREENSHOT"] assert webscreenshots, "failed to raise WEBSCREENSHOT events" - assert all( - "blob" in e.data and e.data["blob"] for e in webscreenshots - ), "blob not found in WEBSCREENSHOT data" + assert all("blob" in e.data and e.data["blob"] for e in webscreenshots), "blob not found in WEBSCREENSHOT data" diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index 98210f658e..c5edd25141 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -10,16 +10,16 @@ class TestNewsletters(ModuleTestBase): modules_overrides = ["speculate", "httpx", "newsletters"] html_with_newsletter = """ - """ diff --git a/bbot/test/test_step_2/module_tests/test_module_ntlm.py b/bbot/test/test_step_2/module_tests/test_module_ntlm.py index 1e79be7705..7b834ef2f9 100644 --- a/bbot/test/test_step_2/module_tests/test_module_ntlm.py +++ b/bbot/test/test_step_2/module_tests/test_module_ntlm.py @@ -10,7 +10,8 @@ async def setup_after_prep(self, module_test): request_args = {"uri": "/", "headers": {"test": 
"header"}} module_test.set_expect_requests(request_args, {}) request_args = { - "uri": "/oab/", "headers": {"Authorization": "NTLM TlRMTVNTUAABAAAAl4II4gAAAAAAAAAAAAAAAAAAAAAKAGFKAAAADw=="} + "uri": "/oab/", + "headers": {"Authorization": "NTLM TlRMTVNTUAABAAAAl4II4gAAAAAAAAAAAAAAAAAAAAAKAGFKAAAADw=="}, } respond_args = { "headers": { diff --git a/bbot/test/test_step_2/module_tests/test_module_pgp.py b/bbot/test/test_step_2/module_tests/test_module_pgp.py index e6f122dd93..dc493d7b52 100644 --- a/bbot/test/test_step_2/module_tests/test_module_pgp.py +++ b/bbot/test/test_step_2/module_tests/test_module_pgp.py @@ -9,10 +9,10 @@ class TestPGP(ModuleTestBase):

[hunk body lost in extraction: the PGP keyserver HTML fixture, whose visible text is "Search results for 'blacklanternsecurity.com'" and the table header "Type bits/keyID   cr. time   exp time   key expir"; the surrounding markup did not survive]
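The test hunks above and below are not functional changes: they are mechanical reflows applied by the `ruff`/`ruff-format` hooks added in this patch's `.pre-commit-config.yaml` (line length 119, per the `pyproject.toml` shown later in this series). A rough, self-contained sketch of the rule at work, modeled on the gowitness assertion above; the event dicts are illustrative stand-ins, not real BBOT event objects:

```python
# Sketch only (assumes ruff-format with line-length = 119, as configured in
# pyproject.toml): a call that fits within 119 columns is collapsed onto a
# single line, which is exactly what happened to the gowitness assert above.
webscreenshots = [{"type": "WEBSCREENSHOT", "data": {"blob": "iVBORw0KGgo..."}}]

assert all("blob" in e["data"] and e["data"]["blob"] for e in webscreenshots), "blob not found in WEBSCREENSHOT data"
```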
 
diff --git a/bbot/test/test_step_2/module_tests/test_module_smuggler.py b/bbot/test/test_step_2/module_tests/test_module_smuggler.py index dcbb9fd3b5..fb86b9ae92 100644 --- a/bbot/test/test_step_2/module_tests/test_module_smuggler.py +++ b/bbot/test/test_step_2/module_tests/test_module_smuggler.py @@ -1,13 +1,13 @@ from .base import ModuleTestBase smuggler_text = r""" - ______ _ - / _____) | | - ( (____ ____ _ _ ____ ____| | _____ ____ + ______ _ + / _____) | | + ( (____ ____ _ _ ____ ____| | _____ ____ \____ \| \| | | |/ _ |/ _ | || ___ |/ ___) - _____) ) | | | |_| ( (_| ( (_| | || ____| | - (______/|_|_|_|____/ \___ |\___ |\_)_____)_| - (_____(_____| + _____) ) | | | |_| ( (_| ( (_| | || ____| | + (______/|_|_|_|____/ \___ |\___ |\_)_____)_| + (_____(_____| @defparam v1.1 @@ -16,13 +16,13 @@ [+] Endpoint : / [+] Configfile : default.py [+] Timeout : 5.0 seconds - [+] Cookies : 1 (Appending to the attack) - [nameprefix1] : Checking TECL... - [nameprefix1] : Checking CLTE... - [nameprefix1] : OK (TECL: 0.61 - 405) (CLTE: 0.62 - 405) - [tabprefix1] : Checking TECL...git - [tabprefix1] : Checking CLTE... - [tabprefix1] : Checking TECL... + [+] Cookies : 1 (Appending to the attack) + [nameprefix1] : Checking TECL... + [nameprefix1] : Checking CLTE... + [nameprefix1] : OK (TECL: 0.61 - 405) (CLTE: 0.62 - 405) + [tabprefix1] : Checking TECL...git + [tabprefix1] : Checking CLTE... + [tabprefix1] : Checking TECL... [tabprefix1] : Checking CLTE... [tabprefix1] : Checking TECL... [tabprefix1] : Checking CLTE... diff --git a/bbot/test/test_step_2/module_tests/test_module_speculate.py b/bbot/test/test_step_2/module_tests/test_module_speculate.py index e407470346..55db777e7b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_speculate.py +++ b/bbot/test/test_step_2/module_tests/test_module_speculate.py @@ -63,7 +63,7 @@ def check(self, module_test, events): events_data.add(e.data) assert all( x in events_data - for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") + for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") ) @@ -78,5 +78,5 @@ def check(self, module_test, events): events_data.add(e.data) assert not any( x in events_data - for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") + for x in ("evilcorp.com:80", "evilcorp.com:443", "asdf.evilcorp.com:80", "asdf.evilcorp.com:443") ) diff --git a/bbot/test/test_step_2/module_tests/test_module_viewdns.py b/bbot/test/test_step_2/module_tests/test_module_viewdns.py index d196981ba1..e8b2fe2339 100644 --- a/bbot/test/test_step_2/module_tests/test_module_viewdns.py +++ b/bbot/test/test_step_2/module_tests/test_module_viewdns.py @@ -66,7 +66,7 @@ def check(self, module_test, events): - ViewDNS.info > Tools > + ViewDNS.info > Tools >

[hunk body lost in extraction: the ViewDNS.info HTML fixture, whose visible text is the "Reverse Whois Lookup" heading and the blurb "This free tool will allow you to find domain names owned by an individual person or company. Simply enter the email address or name of the person or company to find other domains registered using those same details. FAQ."]
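The wordlist, JSON, and doc hunks in the rest of this patch come from two other hooks in the new `.pre-commit-config.yaml`, `trailing-whitespace` and `end-of-file-fixer`, which is why each of them only strips trailing spaces or adds a final newline (the `\ No newline at end of file` markers disappear). A toy sketch of that normalization, based on the hooks' documented behavior rather than their actual source:

```python
# Toy normalization (not the hooks' real implementation): strip trailing
# whitespace from every line and guarantee exactly one trailing newline.
def normalize(text: str) -> str:
    return "\n".join(line.rstrip() for line in text.splitlines()) + "\n"

# e.g. the devops_mutations.txt hunk: the file used to end "...auto\ncn" with
# no final newline; after the hook it ends "...auto\ncn\n".
assert normalize("production\nauto\ncn") == "production\nauto\ncn\n"
```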

diff --git a/bbot/wordlists/devops_mutations.txt b/bbot/wordlists/devops_mutations.txt index bfde86c591..b3fc8deda1 100644 --- a/bbot/wordlists/devops_mutations.txt +++ b/bbot/wordlists/devops_mutations.txt @@ -105,4 +105,4 @@ store home production auto -cn \ No newline at end of file +cn diff --git a/bbot/wordlists/ffuf_shortname_candidates.txt b/bbot/wordlists/ffuf_shortname_candidates.txt index 4439d6d744..2d57ee9463 100644 --- a/bbot/wordlists/ffuf_shortname_candidates.txt +++ b/bbot/wordlists/ffuf_shortname_candidates.txt @@ -107979,4 +107979,4 @@ zzz zzzindex zzztest zzzz -zzzzz \ No newline at end of file +zzzzz diff --git a/bbot/wordlists/nameservers.txt b/bbot/wordlists/nameservers.txt index d350e56f9c..9153631946 100644 --- a/bbot/wordlists/nameservers.txt +++ b/bbot/wordlists/nameservers.txt @@ -2370,4 +2370,4 @@ 8.25.185.131 203.39.3.133 118.69.187.252 -108.56.80.135 \ No newline at end of file +108.56.80.135 diff --git a/bbot/wordlists/paramminer_headers.txt b/bbot/wordlists/paramminer_headers.txt index 53ea11e8b4..3fe2366059 100644 --- a/bbot/wordlists/paramminer_headers.txt +++ b/bbot/wordlists/paramminer_headers.txt @@ -1147,4 +1147,4 @@ http_sm_userdn http_sm_usermsg x-remote-ip traceparent -tracestate \ No newline at end of file +tracestate diff --git a/bbot/wordlists/paramminer_parameters.txt b/bbot/wordlists/paramminer_parameters.txt index 2022323fb3..501878d987 100644 --- a/bbot/wordlists/paramminer_parameters.txt +++ b/bbot/wordlists/paramminer_parameters.txt @@ -6520,4 +6520,4 @@ shell_path user_token adminCookie fullapp -LandingUrl \ No newline at end of file +LandingUrl diff --git a/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt b/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt index 6e2aca6506..b5f461182f 100644 --- a/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt +++ b/bbot/wordlists/raft-small-extensions-lowercase_CLEANED.txt @@ -830,4 +830,4 @@ .z .zdat .zif -.zip \ No newline at end of file +.zip diff --git a/bbot/wordlists/valid_url_schemes.txt b/bbot/wordlists/valid_url_schemes.txt index f0a440da9b..721a854aee 100644 --- a/bbot/wordlists/valid_url_schemes.txt +++ b/bbot/wordlists/valid_url_schemes.txt @@ -379,4 +379,4 @@ xri ymsgr z39.50 z39.50r -z39.50s \ No newline at end of file +z39.50s diff --git a/docs/data/chord_graph/entities.json b/docs/data/chord_graph/entities.json index 96d3875945..88242097ed 100644 --- a/docs/data/chord_graph/entities.json +++ b/docs/data/chord_graph/entities.json @@ -2020,4 +2020,4 @@ 7 ] } -] \ No newline at end of file +] diff --git a/docs/data/chord_graph/rels.json b/docs/data/chord_graph/rels.json index 7ebca1393a..43a646026a 100644 --- a/docs/data/chord_graph/rels.json +++ b/docs/data/chord_graph/rels.json @@ -1719,4 +1719,4 @@ "target": 148, "type": "produces" } -] \ No newline at end of file +] diff --git a/docs/dev/helpers/index.md b/docs/dev/helpers/index.md index 60d64f793d..cc27ed1f2b 100644 --- a/docs/dev/helpers/index.md +++ b/docs/dev/helpers/index.md @@ -6,7 +6,7 @@ The vast majority of these helpers can be accessed directly from the `.helpers` ```python class MyModule(BaseModule): - + ... 
async def handle_event(self, event): diff --git a/docs/javascripts/tablesort.min.js b/docs/javascripts/tablesort.min.js index 65a83b1138..fcd3b078ef 100644 --- a/docs/javascripts/tablesort.min.js +++ b/docs/javascripts/tablesort.min.js @@ -3,4 +3,4 @@ * http://tristen.ca/tablesort/demo/ * Copyright (c) 2021 ; Licensed MIT */ [hunk body lost in extraction: the minified tablesort source, whose "<" comparisons were consumed as HTML markup; per the diffstat, apparently only a missing trailing newline was added. The diffs for docs/modules/custom_yara_rules.md, docs/modules/internal_modules.md, docs/modules/nuclei.md, docs/release_history.md, docs/scanning/configuration.md, and docs/scanning/index.md listed in the diffstat were swallowed along with it.] + ## Dependencies diff --git a/docs/scanning/output.md b/docs/scanning/output.md index dd45a5c833..66d9b1c70c 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -291,7 +291,7 @@ bbot -f subdomain-enum -t evilcorp.com -om neo4j ### Cypher Queries and Tips -Neo4j uses the Cypher Query Language for its graph query language. Cypher uses common clauses to craft relational queries and present the desired data in multiple formats. +Neo4j uses the Cypher Query Language for its graph query language. Cypher uses common clauses to craft relational queries and present the desired data in multiple formats. Cypher queries can be broken down into three required pieces; selection, filter, and presentation. The selection piece identifies what data that will be searched against - 90% of the time the "MATCH" clause will be enough but there are means to read from csv or json data files. In all of these examples the "MATCH" clause will be used. The filter piece helps to focus in on the required data and used the "WHERE" clause to accomplish this effort (most basic operators can be used). Finally, the presentation section identifies how the data should be presented back to the querier. While neo4j is a graph database, it can be used in a traditional table view. @@ -300,7 +300,7 @@ A simple query to grab every URL event with ".com" in the BBOT data field would In this query the following can be identified: - Within the MATCH statement "u" is a variable and can be any value needed by the user while the "URL" label is a direct relationship to the BBOT event type. -- The WHERE statement allows the query to filter on any of the BBOT event properties like data, tag, or even the label itself. +- The WHERE statement allows the query to filter on any of the BBOT event properties like data, tag, or even the label itself. - The RETURN statement is a general presentation of the whole URL event but this can be narrowed down to present any of the specific properties of the BBOT event (`RETURN u.data, u.tags`). The following are a few recommended queries to get started with: @@ -337,6 +337,6 @@ RETURN n.data, collect(distinct port) MATCH (n) DETACH DELETE n ``` -This is not an exhaustive list of clauses, filters, or other means to use cypher and should be considered a starting point. To build more advanced queries consider reading Neo4j's Cypher [documentation](https://neo4j.com/docs/cypher-manual/current/introduction/).
+This is not an exhaustive list of clauses, filters, or other means to use cypher and should be considered a starting point. To build more advanced queries consider reading Neo4j's Cypher [documentation](https://neo4j.com/docs/cypher-manual/current/introduction/). -Additional note: these sample queries are dependent on the existence of the data in the target neo4j database. +Additional note: these sample queries are dependent on the existence of the data in the target neo4j database. diff --git a/docs/scanning/presets.md b/docs/scanning/presets.md index f68a62dc33..7fa8f8c93b 100644 --- a/docs/scanning/presets.md +++ b/docs/scanning/presets.md @@ -37,7 +37,7 @@ bbot -lp Enable them with `-p`: ```bash -# do a subdomain enumeration +# do a subdomain enumeration bbot -t evilcorp.com -p subdomain-enum # multiple presets - subdomain enumeration + web spider diff --git a/docs/scanning/presets_list.md b/docs/scanning/presets_list.md index 93e1d3c8b3..416e163c52 100644 --- a/docs/scanning/presets_list.md +++ b/docs/scanning/presets_list.md @@ -8,13 +8,13 @@ Run all baddns modules and submodules. ??? note "`baddns-thorough.yml`" ```yaml title="~/.bbot/presets/baddns-thorough.yml" description: Run all baddns modules and submodules. - - + + modules: - baddns - baddns_zone - baddns_direct - + config: modules: baddns: @@ -32,10 +32,10 @@ Enumerate cloud resources such as storage buckets, etc. ??? note "`cloud-enum.yml`" ```yaml title="~/.bbot/presets/cloud-enum.yml" description: Enumerate cloud resources such as storage buckets, etc. - + include: - subdomain-enum - + flags: - cloud-enum ``` @@ -51,7 +51,7 @@ Enumerate Git repositories, Docker images, etc. ??? note "`code-enum.yml`" ```yaml title="~/.bbot/presets/code-enum.yml" description: Enumerate Git repositories, Docker images, etc. - + flags: - code-enum ``` @@ -67,17 +67,17 @@ Recursive web directory brute-force (aggressive) ??? note "`dirbust-heavy.yml`" ```yaml title="~/.bbot/presets/web/dirbust-heavy.yml" description: Recursive web directory brute-force (aggressive) - + include: - spider - + flags: - iis-shortnames - + modules: - ffuf - wayback - + config: modules: iis_shortnames: @@ -118,13 +118,13 @@ Basic web directory brute-force (surface-level directories only) ??? note "`dirbust-light.yml`" ```yaml title="~/.bbot/presets/web/dirbust-light.yml" description: Basic web directory brute-force (surface-level directories only) - + include: - iis-shortnames - + modules: - ffuf - + config: modules: ffuf: @@ -143,11 +143,11 @@ Comprehensive scan for all IIS/.NET specific modules and module settings ??? note "`dotnet-audit.yml`" ```yaml title="~/.bbot/presets/web/dotnet-audit.yml" description: Comprehensive scan for all IIS/.NET specific modules and module settings - - + + include: - iis-shortnames - + modules: - httpx - badsecrets @@ -156,14 +156,14 @@ Comprehensive scan for all IIS/.NET specific modules and module settings - telerik - ajaxpro - dotnetnuke - + config: modules: ffuf: extensions: asp,aspx,ashx,asmx,ascx telerik: exploit_RAU_crypto: True - + ``` Category: web @@ -177,10 +177,10 @@ Enumerate email addresses from APIs, web crawling, etc. ??? note "`email-enum.yml`" ```yaml title="~/.bbot/presets/email-enum.yml" description: Enumerate email addresses from APIs, web crawling, etc. - + flags: - email-enum - + output_modules: - emails ``` @@ -196,10 +196,10 @@ Scan only the provided targets as fast as possible - no extra discovery ??? 
note "`fast.yml`" ```yaml title="~/.bbot/presets/fast.yml" description: Scan only the provided targets as fast as possible - no extra discovery - + exclude_modules: - excavate - + config: # only scan the exact targets specified scope: @@ -224,10 +224,10 @@ Recursively enumerate IIS shortnames ??? note "`iis-shortnames.yml`" ```yaml title="~/.bbot/presets/web/iis-shortnames.yml" description: Recursively enumerate IIS shortnames - + flags: - iis-shortnames - + config: modules: iis_shortnames: @@ -246,7 +246,7 @@ Everything everywhere all at once ??? note "`kitchen-sink.yml`" ```yaml title="~/.bbot/presets/kitchen-sink.yml" description: Everything everywhere all at once - + include: - subdomain-enum - cloud-enum @@ -258,13 +258,13 @@ Everything everywhere all at once - dirbust-light - web-screenshots - baddns-thorough - + config: modules: baddns: enable_references: True - - + + ``` @@ -278,13 +278,13 @@ Discover new web parameters via brute-force ??? note "`paramminer.yml`" ```yaml title="~/.bbot/presets/web/paramminer.yml" description: Discover new web parameters via brute-force - + flags: - web-paramminer - + modules: - httpx - + config: web: spider_distance: 1 @@ -302,14 +302,14 @@ Recursive web spider ??? note "`spider.yml`" ```yaml title="~/.bbot/presets/spider.yml" description: Recursive web spider - + modules: - httpx - + blacklist: # Prevent spider from invalidating sessions by logging out - "RE:/.*(sign|log)[_-]?out" - + config: web: # how many links to follow in a row @@ -331,15 +331,15 @@ Enumerate subdomains via APIs, brute-force ??? note "`subdomain-enum.yml`" ```yaml title="~/.bbot/presets/subdomain-enum.yml" description: Enumerate subdomains via APIs, brute-force - + flags: # enable every module with the subdomain-enum flag - subdomain-enum - + output_modules: # output unique subdomains to TXT file - subdomains - + config: dns: threads: 25 @@ -365,10 +365,10 @@ Quick web scan ??? note "`web-basic.yml`" ```yaml title="~/.bbot/presets/web-basic.yml" description: Quick web scan - + include: - iis-shortnames - + flags: - web-basic ``` @@ -384,10 +384,10 @@ Take screenshots of webpages ??? note "`web-screenshots.yml`" ```yaml title="~/.bbot/presets/web-screenshots.yml" description: Take screenshots of webpages - + flags: - web-screenshots - + config: modules: gowitness: @@ -410,11 +410,11 @@ Aggressive web scan ??? 
note "`web-thorough.yml`" ```yaml title="~/.bbot/presets/web-thorough.yml" description: Aggressive web scan - + include: # include the web-basic preset - web-basic - + flags: - web-thorough ``` diff --git a/mkdocs.yml b/mkdocs.yml index 1802fc678a..4413fac487 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -7,7 +7,7 @@ site_description: >- # Repository repo_name: blacklanternsecurity/bbot repo_url: https://github.com/blacklanternsecurity/bbot -watch: +watch: - "mkdocs.yml" - "bbot" - "docs" @@ -29,7 +29,7 @@ nav: - Tips and Tricks: scanning/tips_and_tricks.md - Advanced Usage: scanning/advanced.md - Configuration: scanning/configuration.md - - Modules: + - Modules: - List of Modules: modules/list_of_modules.md - Nuclei: modules/nuclei.md - Custom YARA Rules: modules/custom_yara_rules.md From ecf6d006c5142e73246128f9a3ef985f24d90078 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 25 Nov 2024 18:55:56 +0100 Subject: [PATCH 20/64] Fix ruff rules E711,E712,E713,E731,F541 --- bbot/modules/dnstlsrpt.py | 2 +- bbot/modules/dotnetnuke.py | 4 ++-- bbot/modules/internal/excavate.py | 29 +++++++++++++++-------------- bbot/modules/paramminer_headers.py | 8 ++++---- pyproject.toml | 2 +- 5 files changed, 23 insertions(+), 22 deletions(-) diff --git a/bbot/modules/dnstlsrpt.py b/bbot/modules/dnstlsrpt.py index c3b709c417..4232cc921f 100644 --- a/bbot/modules/dnstlsrpt.py +++ b/bbot/modules/dnstlsrpt.py @@ -70,7 +70,7 @@ async def filter_event(self, event): return False, "event is wildcard" # there's no value in inspecting service records - if service_record(event.host) == True: + if service_record(event.host) is True: return False, "service record detected" return True diff --git a/bbot/modules/dotnetnuke.py b/bbot/modules/dotnetnuke.py index 2e1d7390ec..eb2485fecd 100644 --- a/bbot/modules/dotnetnuke.py +++ b/bbot/modules/dotnetnuke.py @@ -31,7 +31,7 @@ async def setup(self): self.interactsh_subdomain_tags = {} self.interactsh_instance = None - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: self.interactsh_instance = self.helpers.interactsh() self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback) @@ -93,7 +93,7 @@ async def handle_event(self, event): detected = True break - if detected == True: + if detected is True: # DNNPersonalization Deserialization Detection for probe_url in [f'{event.data["url"]}/__', f'{event.data["url"]}/', f'{event.data["url"]}']: result = await self.helpers.request(probe_url, cookies=self.exploit_probe) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 354b279022..9d33621815 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -471,7 +471,7 @@ def __init__(self, excavate): self.parameterExtractorCallbackDict[r.__name__] = r regexes_component_list.append(f"${r.__name__} = {r.discovery_regex}") regexes_component = " ".join(regexes_component_list) - self.yara_rules[f"parameter_extraction"] = ( + self.yara_rules["parameter_extraction"] = ( rf'rule parameter_extraction {{meta: description = "contains POST form" strings: {regexes_component} condition: any of them}}' ) @@ -503,7 +503,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte ) if self.excavate.helpers.validate_parameter(parameter_name, parameter_type): - if self.excavate.in_bl(parameter_name) == False: + if self.excavate.in_bl(parameter_name) is False: parsed_url = urlparse(url) 
description = f"HTTP Extracted Parameter [{parameter_name}] ({parameterExtractorSubModule.name} Submodule)" data = { @@ -579,7 +579,7 @@ def __init__(self, excavate): for signature_name, signature in self.signatures.items(): signature_component_list.append(rf"${signature_name} = {signature}") signature_component = " ".join(signature_component_list) - self.yara_rules[f"error_detection"] = ( + self.yara_rules["error_detection"] = ( f'rule error_detection {{meta: description = "contains a verbose error message" strings: {signature_component} condition: any of them}}' ) @@ -608,7 +608,7 @@ def __init__(self, excavate): for regex_name, regex in self.regexes.items(): regexes_component_list.append(rf"${regex_name} = /\b{regex.pattern}/ nocase") regexes_component = " ".join(regexes_component_list) - self.yara_rules[f"serialization_detection"] = ( + self.yara_rules["serialization_detection"] = ( f'rule serialization_detection {{meta: description = "contains a possible serialized object" strings: {regexes_component} condition: any of them}}' ) @@ -656,7 +656,8 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte continue if parsed_url.scheme in ["http", "https"]: continue - abort_if = lambda e: e.scope_distance > 0 + def abort_if(e): + return e.scope_distance > 0 finding_data = {"host": str(host), "description": f"Non-HTTP URI: {parsed_url.geturl()}"} await self.report(finding_data, event, yara_rule_settings, discovery_context, abort_if=abort_if) protocol_data = {"protocol": parsed_url.scheme, "host": str(host)} @@ -769,7 +770,7 @@ class HostnameExtractor(ExcavateRule): def __init__(self, excavate): super().__init__(excavate) if excavate.scan.dns_yara_rules_uncompiled: - self.yara_rules[f"hostname_extraction"] = excavate.scan.dns_yara_rules_uncompiled + self.yara_rules["hostname_extraction"] = excavate.scan.dns_yara_rules_uncompiled async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier in yara_results.keys(): @@ -817,7 +818,7 @@ async def setup(self): self.parameter_extraction = bool(modules_WEB_PARAMETER) self.retain_querystring = False - if self.config.get("retain_querystring", False) == True: + if self.config.get("retain_querystring", False) is True: self.retain_querystring = True for module in self.scan.modules.values(): @@ -847,7 +848,7 @@ async def setup(self): rules_content = f.read() self.debug(f"Successfully loaded custom yara rules file [{self.custom_yara_rules}]") else: - self.debug(f"Custom yara rules file is NOT a file. Will attempt to treat it as rule content") + self.debug("Custom yara rules file is NOT a file. 
Will attempt to treat it as rule content") rules_content = self.custom_yara_rules self.debug(f"Final combined yara rule contents: {rules_content}") @@ -860,7 +861,7 @@ async def setup(self): rule_match = await self.helpers.re.search(self.yara_rule_name_regex, rule_content) if not rule_match: - return False, f"Custom Yara formatted incorrectly: could not find rule name" + return False, "Custom Yara formatted incorrectly: could not find rule name" rule_name = rule_match.groups(1)[0] c = CustomExtractor(self) @@ -936,8 +937,8 @@ async def handle_event(self, event): if event.type == "HTTP_RESPONSE": # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled if ( - self.parameter_extraction == True - and self.url_querystring_remove == False + self.parameter_extraction is True + and self.url_querystring_remove is False and str(event.parent.parent.module) == "TARGET" ): self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters") @@ -949,7 +950,7 @@ async def handle_event(self, event): regex_name, additional_params, ) in extract_params_url(event.parsed_url): - if self.in_bl(parameter_name) == False: + if self.in_bl(parameter_name) is False: description = f"HTTP Extracted Parameter [{parameter_name}] (Target URL)" data = { "host": parsed_url.hostname, @@ -985,7 +986,7 @@ async def handle_event(self, event): cookie_name = header_value.split("=")[0] cookie_value = header_value.split("=")[1].split(";")[0] - if self.in_bl(cookie_value) == False: + if self.in_bl(cookie_value) is False: self.assigned_cookies[cookie_name] = cookie_value description = f"Set-Cookie Assigned Cookie [{cookie_name}]" data = { @@ -1029,7 +1030,7 @@ async def handle_event(self, event): regex_name, additional_params, ) in extract_params_location(header_value, event.parsed_url): - if self.in_bl(parameter_name) == False: + if self.in_bl(parameter_name) is False: description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" data = { "host": parsed_url.hostname, diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 07a3c7e0dc..723bffc2e0 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -172,7 +172,7 @@ async def handle_event(self, event): self.debug(f"Error initializing compare helper: {e}") return batch_size = await self.count_test(url) - if batch_size == None or batch_size <= 0: + if batch_size is None or batch_size <= 0: self.debug(f"Failed to get baseline max {self.compare_mode} count, aborting") return self.debug(f"Resolved batch_size at {str(batch_size)}") @@ -195,11 +195,11 @@ async def count_test(self, url): baseline = await self.helpers.request(url) if baseline is None: return - if str(baseline.status_code)[0] in ("4", "5"): + if str(baseline.status_code)[0] in {"4", "5"}: return for count, args, kwargs in self.gen_count_args(url): r = await self.helpers.request(*args, **kwargs) - if r is not None and not (str(r.status_code)[0] in ("4", "5")): + if r is not None and str(r.status_code)[0] not in {"4", "5"}: return count def gen_count_args(self, url): @@ -222,7 +222,7 @@ async def binary_search(self, compare_helper, url, group, reasons=None, reflecti elif len(group) > 1 or (len(group) == 1 and len(reasons) == 0): for group_slice in self.helpers.split_list(group): match, reasons, reflection, subject_response = await self.check_batch(compare_helper, url, group_slice) - if match == False: + if match is False: async for r in self.binary_search(compare_helper, 
url, group_slice, reasons, reflection): yield r else: diff --git a/pyproject.toml b/pyproject.toml index a407bcd7af..34974e1d3b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,7 @@ skip = "./docs/javascripts/vega*.js,./bbot/wordlists/*" [tool.ruff] line-length = 119 format.exclude = ["bbot/test/test_step_1/test_manager_*"] -lint.ignore = ["E402", "E711", "E712", "E713", "E721", "E731", "E741", "F401", "F403", "F405", "F541", "F601"] +lint.ignore = ["E402", "E721", "E741", "F401", "F403", "F405"] [tool.poetry-dynamic-versioning] enable = true From 555a4dee8c34a41b659e9881dbc3903b4adec04e Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 26 Nov 2024 15:17:26 -0500 Subject: [PATCH 21/64] fix dnsresolve bug --- bbot/modules/internal/dnsresolve.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index bdca0ea5c3..08c6eb5c6f 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -73,6 +73,7 @@ async def handle_event(self, event, **kwargs): if blacklisted: return False, "it has a blacklisted DNS record" + # DNS resolution for hosts that aren't IPs if not event_is_ip: # if the event is within our dns search distance, resolve the rest of our records if main_host_event.scope_distance < self._dns_search_distance: @@ -82,13 +83,14 @@ async def handle_event(self, event, **kwargs): event_data_changed = await self.handle_wildcard_event(main_host_event) if event_data_changed: # since data has changed, we check again whether it's a duplicate - if self.scan.ingress_module.is_incoming_duplicate(event, add=True): - if not event._graph_important: - return False, "event was already emitted by its module" - else: - self.debug( - f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" - ) + if self.scan.ingress_module.is_incoming_duplicate(main_host_event): + if new_event: + if not event._graph_important: + return False, "event was already emitted by its module" + else: + self.debug( + f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" + ) # if there weren't any DNS children and it's not an IP address, tag as unresolved if not main_host_event.raw_dns_records and not event_is_ip: From b4c316f3191ec79349e4cb5b50593a1affba9e7c Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 26 Nov 2024 15:33:08 -0500 Subject: [PATCH 22/64] fix tests --- bbot/modules/internal/dnsresolve.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 08c6eb5c6f..c746b03451 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -83,14 +83,13 @@ async def handle_event(self, event, **kwargs): event_data_changed = await self.handle_wildcard_event(main_host_event) if event_data_changed: # since data has changed, we check again whether it's a duplicate - if self.scan.ingress_module.is_incoming_duplicate(main_host_event): - if new_event: - if not event._graph_important: - return False, "event was already emitted by its module" - else: - self.debug( - f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" - ) + if event.type == "DNS_NAME" and self.scan.ingress_module.is_incoming_duplicate(event, add=True): + if not event._graph_important: + return False, "it's a DNS wildcard, and its module already emitted a 
similar wildcard event" + else: + self.debug( + f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" + ) # if there weren't any DNS children and it's not an IP address, tag as unresolved if not main_host_event.raw_dns_records and not event_is_ip: From f8a558cbd5680c61a54ccb74d500baf278b425d5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 04:48:08 +0000 Subject: [PATCH 23/64] Bump pydantic from 2.10.1 to 2.10.2 Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.10.1 to 2.10.2. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.10.1...v2.10.2) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c1ebff3f53..f4e202da84 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1674,13 +1674,13 @@ files = [ [[package]] name = "pydantic" -version = "2.10.1" +version = "2.10.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.1-py3-none-any.whl", hash = "sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e"}, - {file = "pydantic-2.10.1.tar.gz", hash = "sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560"}, + {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, + {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, ] [package.dependencies] From 46ea71596a0e369bc2ad4147187d39d63cf9d09d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 04:48:34 +0000 Subject: [PATCH 24/64] Bump ruff from 0.8.0 to 0.8.1 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.8.0 to 0.8.1. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.8.0...0.8.1) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index c1ebff3f53..9a48465592 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2406,29 +2406,29 @@ test = ["commentjson", "packaging", "pytest"] [[package]] name = "ruff" -version = "0.8.0" +version = "0.8.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"}, - {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"}, - {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"}, - {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"}, - {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"}, - {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"}, - {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"}, - {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"}, - {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"}, - {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"}, - {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"}, - {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"}, + {file = "ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5"}, + {file = "ruff-0.8.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8a4f7385c2285c30f34b200ca5511fcc865f17578383db154e098150ce0a087"}, + {file = "ruff-0.8.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd054486da0c53e41e0086e1730eb77d1f698154f910e0cd9e0d64274979a209"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2029b8c22da147c50ae577e621a5bfbc5d1fed75d86af53643d7a7aee1d23871"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2666520828dee7dfc7e47ee4ea0d928f40de72056d929a7c5292d95071d881d1"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:333c57013ef8c97a53892aa56042831c372e0bb1785ab7026187b7abd0135ad5"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:288326162804f34088ac007139488dcb43de590a5ccfec3166396530b58fb89d"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b12c39b9448632284561cbf4191aa1b005882acbc81900ffa9f9f471c8ff7e26"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:364e6674450cbac8e998f7b30639040c99d81dfb5bbc6dfad69bc7a8f916b3d1"}, + {file = "ruff-0.8.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22346f845fec132aa39cd29acb94451d030c10874408dbf776af3aaeb53284c"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2f2f7a7e7648a2bfe6ead4e0a16745db956da0e3a231ad443d2a66a105c04fa"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:adf314fc458374c25c5c4a4a9270c3e8a6a807b1bec018cfa2813d6546215540"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a885d68342a231b5ba4d30b8c6e1b1ee3a65cf37e3d29b3c74069cdf1ee1e3c9"}, + {file = "ruff-0.8.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d2c16e3508c8cc73e96aa5127d0df8913d2290098f776416a4b157657bee44c5"}, + {file = "ruff-0.8.1-py3-none-win32.whl", hash = "sha256:93335cd7c0eaedb44882d75a7acb7df4b77cd7cd0d2255c93b28791716e81790"}, + {file = "ruff-0.8.1-py3-none-win_amd64.whl", hash = "sha256:2954cdbe8dfd8ab359d4a30cd971b589d335a44d444b6ca2cb3d1da21b75e4b6"}, + {file = "ruff-0.8.1-py3-none-win_arm64.whl", hash = "sha256:55873cc1a473e5ac129d15eccb3c008c096b94809d693fc7053f588b67822737"}, + {file = "ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f"}, ] [[package]] From 921043dc5dd1475f2be39c30cb2b3b2915c88064 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 04:49:07 +0000 Subject: [PATCH 25/64] Bump pyjwt from 2.10.0 to 2.10.1 Bumps [pyjwt](https://github.com/jpadilla/pyjwt) from 2.10.0 to 2.10.1. - [Release notes](https://github.com/jpadilla/pyjwt/releases) - [Changelog](https://github.com/jpadilla/pyjwt/blob/master/CHANGELOG.rst) - [Commits](https://github.com/jpadilla/pyjwt/compare/2.10.0...2.10.1) --- updated-dependencies: - dependency-name: pyjwt dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c1ebff3f53..8436513e83 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1820,13 +1820,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.10.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" files = [ - {file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"}, - {file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.extras] From 1794879a8036696928c3c7c276f91b24e922c981 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 04:49:56 +0000 Subject: [PATCH 26/64] Bump mkdocs-material from 9.5.45 to 9.5.47 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.45 to 9.5.47. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.45...9.5.47) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c1ebff3f53..f5683be5ff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1214,13 +1214,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.45" +version = "9.5.47" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.45-py3-none-any.whl", hash = "sha256:a9be237cfd0be14be75f40f1726d83aa3a81ce44808dc3594d47a7a592f44547"}, - {file = "mkdocs_material-9.5.45.tar.gz", hash = "sha256:286489cf0beca4a129d91d59d6417419c63bceed1ce5cd0ec1fc7e1ebffb8189"}, + {file = "mkdocs_material-9.5.47-py3-none-any.whl", hash = "sha256:53fb9c9624e7865da6ec807d116cd7be24b3cb36ab31b1d1d1a9af58c56009a2"}, + {file = "mkdocs_material-9.5.47.tar.gz", hash = "sha256:fc3b7a8e00ad896660bd3a5cc12ca0cb28bdc2bcbe2a946b5714c23ac91b0ede"}, ] [package.dependencies] From 6ac0f816897620dd4be6ca08e44618366c4b618d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 12:39:01 +0000 Subject: [PATCH 27/64] Bump pytest from 8.3.3 to 8.3.4 Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.3.3 to 8.3.4. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.3.3...8.3.4) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d70cb5994e..27f9a2c154 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1869,13 +1869,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] From e4ba0098eec21446b917c15c0a249a39b3bac5ff Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 26 Nov 2024 23:20:19 +0100 Subject: [PATCH 28/64] dependabot.yml: GitHub Action update PRs target-branch: "dev" The default branch is not the contribution branch. --- .github/dependabot.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 18e2537a56..1ad88ccb68 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -14,3 +14,4 @@ updates: - "*" # Group all Actions updates into a single larger pull request schedule: interval: weekly + target-branch: "dev" From 5ea8161a5e43e1a6c7dc203e2c625b1eb52f0d8e Mon Sep 17 00:00:00 2001 From: blsaccess Date: Tue, 3 Dec 2024 00:25:29 +0000 Subject: [PATCH 29/64] Update nuclei --- bbot/modules/deadly/nuclei.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 5de7aa465d..4f2d73c986 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -15,7 +15,7 @@ class nuclei(BaseModule): } options = { - "version": "3.3.6", + "version": "3.3.7", "tags": "", "templates": "", "severity": "", From 3afe3e99116e1339eafd6ab614c44115ec82d07a Mon Sep 17 00:00:00 2001 From: blsaccess Date: Tue, 3 Dec 2024 00:25:30 +0000 Subject: [PATCH 30/64] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 80a4385c2e..eaba8bbfb9 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.84.1", + "version": "3.84.2", "config": "", "only_verified": True, "concurrency": 8, From cdbcf548041360874144213c843932d41adcd12d Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 3 Dec 2024 14:31:52 -0500 Subject: [PATCH 31/64] remove unnecessary log message --- bbot/scanner/target.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index 5608fdf4fe..7a141e3b9e 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -192,7 +192,6 @@ def __init__(self, *args, **kwargs): @special_target_type(r"^(?:RE|REGEX):(.*)") def handle_regex(self, match): pattern = match.group(1) - log.info(f"Blacklisting by custom regex: {pattern}") blacklist_regex = re.compile(pattern, re.IGNORECASE) self.blacklist_regexes.add(blacklist_regex) return [] From 01c01e3f11ba5cf1eb4ecfc882c820043e1b6e5a Mon Sep 17 00:00:00 
2001 From: github-actions Date: Tue, 3 Dec 2024 16:30:14 -0500 Subject: [PATCH 32/64] fix blacklist logging --- bbot/modules/internal/dnsresolve.py | 9 +++++++-- bbot/modules/internal/excavate.py | 2 ++ bbot/scanner/target.py | 6 ++++++ bbot/test/test_step_1/test_target.py | 1 + 4 files changed, 16 insertions(+), 2 deletions(-) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index c746b03451..5bb5c5bc40 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -83,9 +83,14 @@ async def handle_event(self, event, **kwargs): event_data_changed = await self.handle_wildcard_event(main_host_event) if event_data_changed: # since data has changed, we check again whether it's a duplicate - if event.type == "DNS_NAME" and self.scan.ingress_module.is_incoming_duplicate(event, add=True): + if event.type == "DNS_NAME" and self.scan.ingress_module.is_incoming_duplicate( + event, add=True + ): if not event._graph_important: - return False, "it's a DNS wildcard, and its module already emitted a similar wildcard event" + return ( + False, + "it's a DNS wildcard, and its module already emitted a similar wildcard event", + ) else: self.debug( f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 9d33621815..209b96eefb 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -656,8 +656,10 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte continue if parsed_url.scheme in ["http", "https"]: continue + def abort_if(e): return e.scope_distance > 0 + finding_data = {"host": str(host), "description": f"Non-HTTP URI: {parsed_url.geturl()}"} await self.report(finding_data, event, yara_rule_settings, discovery_context, abort_if=abort_if) protocol_data = {"protocol": parsed_url.scheme, "host": str(host)} diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index 7a141e3b9e..ba4226ec09 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -224,6 +224,12 @@ def _hash_value(self): hosts = [str(h).encode() for h in self.sorted_hosts] return hosts + regex_patterns + def __len__(self): + return super().__len__() + len(self.blacklist_regexes) + + def __bool__(self): + return bool(len(self)) + class BBOTTarget: """ diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 8f2a6bf91f..f5c28c3596 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -395,6 +395,7 @@ async def test_blacklist_regex(bbot_scanner, bbot_httpserver): config={"excavate": True}, debug=True, ) + assert len(scan.target.blacklist) == 2 assert scan.target.blacklist.blacklist_regexes assert {r.pattern for r in scan.target.blacklist.blacklist_regexes} == { r"evil[0-9]{3}", From 2133597b5481be9d9cf01aab1695d357cc31415f Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 4 Dec 2024 11:00:29 -0500 Subject: [PATCH 33/64] ruff --- bbot/cli.py | 1 - bbot/modules/internal/dnsresolve.py | 9 +++++++-- bbot/modules/internal/excavate.py | 2 ++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index d556e7413a..d06810ee70 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -79,7 +79,6 @@ async def _main(): # if we're listing modules or their options if options.list_modules or options.list_output_modules or options.list_module_options: - # if no modules or flags are 
specified, enable everything if not (options.modules or options.output_modules or options.flags): for module, preloaded in preset.module_loader.preloaded().items(): diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index fbf8747406..3dddd289a4 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -85,9 +85,14 @@ async def handle_event(self, event, **kwargs): event_data_changed = await self.handle_wildcard_event(main_host_event) if event_data_changed: # since data has changed, we check again whether it's a duplicate - if event.type == "DNS_NAME" and self.scan.ingress_module.is_incoming_duplicate(event, add=True): + if event.type == "DNS_NAME" and self.scan.ingress_module.is_incoming_duplicate( + event, add=True + ): if not event._graph_important: - return False, "it's a DNS wildcard, and its module already emitted a similar wildcard event" + return ( + False, + "it's a DNS wildcard, and its module already emitted a similar wildcard event", + ) else: self.debug( f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 9d33621815..209b96eefb 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -656,8 +656,10 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte continue if parsed_url.scheme in ["http", "https"]: continue + def abort_if(e): return e.scope_distance > 0 + finding_data = {"host": str(host), "description": f"Non-HTTP URI: {parsed_url.geturl()}"} await self.report(finding_data, event, yara_rule_settings, discovery_context, abort_if=abort_if) protocol_data = {"protocol": parsed_url.scheme, "host": str(host)} From d52da462c4b1bb39d57cc1064e8038eb0d7d7781 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 4 Dec 2024 11:25:36 -0500 Subject: [PATCH 34/64] ruff go away --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 34974e1d3b..2ad06885d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,7 @@ skip = "./docs/javascripts/vega*.js,./bbot/wordlists/*" [tool.ruff] line-length = 119 format.exclude = ["bbot/test/test_step_1/test_manager_*"] -lint.ignore = ["E402", "E721", "E741", "F401", "F403", "F405"] +lint.ignore = ["E402", "E711", "E713", "E721", "E741", "F401", "F403", "F405"] [tool.poetry-dynamic-versioning] enable = true From 865fd1717b32bb1cacbcc3ff0e5da3eb53fb9f04 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 4 Dec 2024 13:07:38 -0500 Subject: [PATCH 35/64] mysql author/created date --- bbot/modules/output/mysql.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/output/mysql.py b/bbot/modules/output/mysql.py index 69856a8a33..6099d18ceb 100644 --- a/bbot/modules/output/mysql.py +++ b/bbot/modules/output/mysql.py @@ -3,7 +3,7 @@ class MySQL(SQLTemplate): watched_events = ["*"] - meta = {"description": "Output scan data to a MySQL database"} + meta = {"description": "Output scan data to a MySQL database", "created_date": "2024-11-13", "author": "@TheTechromancer"} options = { "username": "root", "password": "bbotislife", From 6608c9dd2e86e9666b66c63c651f48fb88b37485 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 4 Dec 2024 17:00:40 -0500 Subject: [PATCH 36/64] amir --- bbot/core/helpers/names_generator.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index a0a569e530..e2d9b74311 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -293,6 +293,7 @@ "alyssa", "amanda", "amber", + "amir", "amy", "andrea", "andrew", From 3d40f6b550fab575c8d9da053e08a9c3a622d27b Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 5 Dec 2024 10:41:41 -0500 Subject: [PATCH 37/64] update docker --- bbot-docker.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bbot-docker.sh b/bbot-docker.sh index e4e0bb9e43..3db958f94a 100755 --- a/bbot-docker.sh +++ b/bbot-docker.sh @@ -1,2 +1,3 @@ -# run the docker image -docker run --rm -it -v "$HOME/.bbot:/root/.bbot" -v "$HOME/.config/bbot:/root/.config/bbot" blacklanternsecurity/bbot:stable "$@" +# OUTPUTS SCAN DATA TO ~/.bbot/scans + +docker run --rm -it -v "$HOME/.bbot/scans:/root/.bbot/scans" -v "$HOME/.config/bbot:/root/.config/bbot" blacklanternsecurity/bbot:stable "$@" From 4d08937221b23661e84f55356b2c5b56eb357d48 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 5 Dec 2024 11:12:34 -0500 Subject: [PATCH 38/64] better docker documentation --- docs/index.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/index.md b/docs/index.md index 3d6c5ef267..355d58e8b2 100644 --- a/docs/index.md +++ b/docs/index.md @@ -34,6 +34,8 @@ bbot --help Docker images are provided, along with helper script `bbot-docker.sh` to persist your scan data. +Scans are output to `~/.bbot/scans` (the usual place for BBOT scan data). + ```bash # bleeding edge (dev) docker run -it blacklanternsecurity/bbot --help @@ -46,6 +48,16 @@ git clone https://github.com/blacklanternsecurity/bbot && cd bbot ./bbot-docker.sh --help ``` +Note: If you need to pass in a custom preset, you can do so by mapping the preset into the container: + +```bash +# use the preset `my_preset.yml` from the current directory +docker run --rm -it \ + -v "$HOME/.bbot/scans:/root/.bbot/scans" \ + -v "$PWD/my_preset.yml:/my_preset.yml" \ + blacklanternsecurity/bbot -p /my_preset.yml +``` + ## Example Commands Below are some examples of common scans. 
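For repeated use, the preset-mapping invocation documented above can be wrapped in a small shell function. This is a minimal sketch, not part of the repo: the `bbot_docker_preset` name is hypothetical, and it assumes the `blacklanternsecurity/bbot:stable` image and the `~/.bbot/scans` output path shown in the patches above.

```bash
# Hypothetical convenience wrapper around the documented `docker run` invocation:
# persists scan output to ~/.bbot/scans on the host, maps a preset file from the
# current directory into the container, and passes any remaining arguments to bbot.
# Usage: bbot_docker_preset my_preset.yml -t example.com
bbot_docker_preset() {
    local preset="$1"
    shift
    docker run --rm -it \
        -v "$HOME/.bbot/scans:/root/.bbot/scans" \
        -v "$PWD/$preset:/$preset" \
        blacklanternsecurity/bbot:stable -p "/$preset" "$@"
}
```

Mounting only `~/.bbot/scans` here mirrors the change to `bbot-docker.sh` above, which narrows the bind mount from `~/.bbot` to `~/.bbot/scans` so that scan output is persisted on the host while the container manages the rest of its own state.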
From 7e53091c84d1d6bc1bc95a455e848611c388446e Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 6 Dec 2024 22:28:48 -0500 Subject: [PATCH 39/64] update radixtarget, cloudcheck --- poetry.lock | 18 +++++++++--------- pyproject.toml | 4 ++-- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/poetry.lock b/poetry.lock index 68ad2a296e..b7d2630139 100644 --- a/poetry.lock +++ b/poetry.lock @@ -371,19 +371,19 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "6.0.0.661" +version = "7.0.12" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "cloudcheck-6.0.0.661-py3-none-any.whl", hash = "sha256:b8c45061d76eea14aa493e9dfd087e1aefccb1632c3bb8d49c77d273f721188c"}, - {file = "cloudcheck-6.0.0.661.tar.gz", hash = "sha256:98a7b88f4784fad91faa3d6ea5749c7fe215462dbad63c34df1afc671f915795"}, + {file = "cloudcheck-7.0.12-py3-none-any.whl", hash = "sha256:9397a52e83bae45b6255d59c16f6f5d83f4f4711adf945a9ba45c8675e23f430"}, + {file = "cloudcheck-7.0.12.tar.gz", hash = "sha256:364b61cffdf53767cae9fd4a14542f92a95d7f12e9ee72e041af0e027a88a2a9"}, ] [package.dependencies] -httpx = ">=0.26,<0.28" +httpx = ">=0.26,<0.29" pydantic = ">=2.4.2,<3.0.0" -radixtarget = ">=2.0.0.32,<3.0.0.0" +radixtarget = ">=3.0.13,<4.0.0" regex = ">=2024.4.16,<2025.0.0" [[package]] @@ -2240,13 +2240,13 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "radixtarget" -version = "2.0.0.50" +version = "3.0.13" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "radixtarget-2.0.0.50-py3-none-any.whl", hash = "sha256:fe1670a382d1ddaebc2cba3b16607d32085987eb5d71074cc0535e19a02406b7"}, - {file = "radixtarget-2.0.0.50.tar.gz", hash = "sha256:73519eebb0596a67d4e9347a5e4602c95c9ff9dc8be4c64e6ab0247bc69a13e8"}, + {file = "radixtarget-3.0.13-py3-none-any.whl", hash = "sha256:fe8e0faa9fcfe62629583625c606e690a7002baa0243a6f00b85f1e96824d734"}, + {file = "radixtarget-3.0.13.tar.gz", hash = "sha256:668b77a91c8d642fe5ed29b929a807f6a0c411c4b0805b6be37c15383bf5c536"}, ] [[package]] @@ -3065,4 +3065,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "cbcaf165db6acc5621f55d4b730e2d1e5d8cfc76db841bc433cae9bc4801eed8" +content-hash = "557bc6054bfe11138840eb5a3427868cd6137c773f60abd7b0c07b1c6b56d044" diff --git a/pyproject.toml b/pyproject.toml index 2ad06885d6..994dc1b116 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,8 +54,8 @@ yara-python = "^4.5.1" pyzmq = "^26.0.3" httpx = "^0.27.0" puremagic = "^1.28" -cloudcheck = "^6.0.0.602" -radixtarget = "^2.0.0.50" +radixtarget = "^3.0.13" +cloudcheck = "^7.0.12" [tool.poetry.group.dev.dependencies] poetry-dynamic-versioning = ">=0.21.4,<1.5.0" From c56645e1041e1d21cb7aadeb234fc35d86c9bacc Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 7 Dec 2024 20:10:18 -0500 Subject: [PATCH 40/64] fix extractous bug --- bbot/modules/extractous.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/extractous.py b/bbot/modules/extractous.py index 5b2e2cdec7..9d2ae153b4 100644 --- a/bbot/modules/extractous.py +++ b/bbot/modules/extractous.py @@ -112,7 +112,7 @@ def extract_text(file_path): result = "" buffer = reader.read(4096) while len(buffer) > 0: - result += buffer.decode("utf-8") + result += buffer.decode("utf-8", errors="ignore") 
buffer = reader.read(4096) return result.strip() From ffbaf5f26d3b543da45d0776171962666a92b2be Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 7 Dec 2024 22:41:39 -0500 Subject: [PATCH 41/64] disable cloudcheck HTTP_RESPONSE extraction --- bbot/modules/internal/cloudcheck.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 65cf6ea242..c45acfe954 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -72,9 +72,10 @@ async def handle_event(self, event, **kwargs): base_kwargs["event_type"] = event_type for sig in sigs: matches = [] - if event.type == "HTTP_RESPONSE": - matches = await self.helpers.re.findall(sig, event.data.get("body", "")) - elif event.type.startswith("DNS_NAME"): + # TODO: convert this to an excavate YARA hook + # if event.type == "HTTP_RESPONSE": + # matches = await self.helpers.re.findall(sig, event.data.get("body", "")) + if event.type.startswith("DNS_NAME"): for host in str_hosts_to_check: match = sig.match(host) if match: From d970cc3fd84e5f9967e0fcfad25beab2cbbc1ddc Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 7 Dec 2024 23:20:00 -0500 Subject: [PATCH 42/64] fix test --- bbot/test/test_step_2/module_tests/test_module_cloudcheck.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py b/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py index 902f2df35f..0ce93ec00d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +++ b/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py @@ -8,7 +8,7 @@ class TestCloudCheck(ModuleTestBase): modules_overrides = ["httpx", "excavate", "cloudcheck"] async def setup_after_prep(self, module_test): - module_test.set_expect_requests({"uri": "/"}, {"response_data": ""}) + module_test.set_expect_requests({"uri": "/"}, {"response_data": ""}) scan = Scanner(config={"cloudcheck": True}) await scan._prep() From 3b6d22e352f12c445e1d3ffc256dcc804a0d48ef Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 7 Dec 2024 23:29:57 -0500 Subject: [PATCH 43/64] httpx orjson --- bbot/modules/httpx.py | 8 ++-- poetry.lock | 86 ++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 3 files changed, 90 insertions(+), 5 deletions(-) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 059bc2461f..136e14119b 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -1,5 +1,5 @@ import re -import json +import orjson import tempfile import subprocess from pathlib import Path @@ -142,10 +142,10 @@ async def handle_batch(self, *events): proxy = self.scan.http_proxy if proxy: command += ["-http-proxy", proxy] - async for line in self.run_process_live(command, input=list(stdin), stderr=subprocess.DEVNULL): + async for line in self.run_process_live(command, text=False, input=list(stdin), stderr=subprocess.DEVNULL): try: - j = json.loads(line) - except json.decoder.JSONDecodeError: + j = orjson.loads(line) + except orjson.JSONDecodeError: self.debug(f"Failed to decode line: {line}") continue diff --git a/poetry.lock b/poetry.lock index 68ad2a296e..cb79be280e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1449,6 +1449,90 @@ files = [ [package.extras] dev = ["black", "mypy", "pytest"] +[[package]] +name = "orjson" +version = "3.10.12" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = 
">=3.8" +files = [ + {file = "orjson-3.10.12-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ece01a7ec71d9940cc654c482907a6b65df27251255097629d0dea781f255c6d"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34ec9aebc04f11f4b978dd6caf697a2df2dd9b47d35aa4cc606cabcb9df69d7"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6ec8658da3480939c79b9e9e27e0db31dffcd4ba69c334e98c9976ac29140e"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17e6baf4cf01534c9de8a16c0c611f3d94925d1701bf5f4aff17003677d8ced"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6402ebb74a14ef96f94a868569f5dccf70d791de49feb73180eb3c6fda2ade56"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0000758ae7c7853e0a4a6063f534c61656ebff644391e1f81698c1b2d2fc8cd2"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:888442dcee99fd1e5bd37a4abb94930915ca6af4db50e23e746cdf4d1e63db13"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c1f7a3ce79246aa0e92f5458d86c54f257fb5dfdc14a192651ba7ec2c00f8a05"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:802a3935f45605c66fb4a586488a38af63cb37aaad1c1d94c982c40dcc452e85"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1da1ef0113a2be19bb6c557fb0ec2d79c92ebd2fed4cfb1b26bab93f021fb885"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a3273e99f367f137d5b3fecb5e9f45bcdbfac2a8b2f32fbc72129bbd48789c2"}, + {file = "orjson-3.10.12-cp310-none-win32.whl", hash = "sha256:475661bf249fd7907d9b0a2a2421b4e684355a77ceef85b8352439a9163418c3"}, + {file = "orjson-3.10.12-cp310-none-win_amd64.whl", hash = "sha256:87251dc1fb2b9e5ab91ce65d8f4caf21910d99ba8fb24b49fd0c118b2362d509"}, + {file = "orjson-3.10.12-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a734c62efa42e7df94926d70fe7d37621c783dea9f707a98cdea796964d4cf74"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:750f8b27259d3409eda8350c2919a58b0cfcd2054ddc1bd317a643afc646ef23"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb52c22bfffe2857e7aa13b4622afd0dd9d16ea7cc65fd2bf318d3223b1b6252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:440d9a337ac8c199ff8251e100c62e9488924c92852362cd27af0e67308c16ef"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e15c06491c69997dfa067369baab3bf094ecb74be9912bdc4339972323f252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:362d204ad4b0b8724cf370d0cd917bb2dc913c394030da748a3bb632445ce7c4"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b57cbb4031153db37b41622eac67329c7810e5f480fda4cfd30542186f006ae"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:165c89b53ef03ce0d7c59ca5c82fa65fe13ddf52eeb22e859e58c237d4e33b9b"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:5dee91b8dfd54557c1a1596eb90bcd47dbcd26b0baaed919e6861f076583e9da"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a4e1cfb72de6f905bdff061172adfb3caf7a4578ebf481d8f0530879476c07"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:038d42c7bc0606443459b8fe2d1f121db474c49067d8d14c6a075bbea8bf14dd"}, + {file = "orjson-3.10.12-cp311-none-win32.whl", hash = "sha256:03b553c02ab39bed249bedd4abe37b2118324d1674e639b33fab3d1dafdf4d79"}, + {file = "orjson-3.10.12-cp311-none-win_amd64.whl", hash = "sha256:8b8713b9e46a45b2af6b96f559bfb13b1e02006f4242c156cbadef27800a55a8"}, + {file = "orjson-3.10.12-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:53206d72eb656ca5ac7d3a7141e83c5bbd3ac30d5eccfe019409177a57634b0d"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac8010afc2150d417ebda810e8df08dd3f544e0dd2acab5370cfa6bcc0662f8f"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed459b46012ae950dd2e17150e838ab08215421487371fa79d0eced8d1461d70"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dcb9673f108a93c1b52bfc51b0af422c2d08d4fc710ce9c839faad25020bb69"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22a51ae77680c5c4652ebc63a83d5255ac7d65582891d9424b566fb3b5375ee9"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910fdf2ac0637b9a77d1aad65f803bac414f0b06f720073438a7bd8906298192"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:24ce85f7100160936bc2116c09d1a8492639418633119a2224114f67f63a4559"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a76ba5fc8dd9c913640292df27bff80a685bed3a3c990d59aa6ce24c352f8fc"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ff70ef093895fd53f4055ca75f93f047e088d1430888ca1229393a7c0521100f"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4244b7018b5753ecd10a6d324ec1f347da130c953a9c88432c7fbc8875d13be"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:16135ccca03445f37921fa4b585cff9a58aa8d81ebcb27622e69bfadd220b32c"}, + {file = "orjson-3.10.12-cp312-none-win32.whl", hash = "sha256:2d879c81172d583e34153d524fcba5d4adafbab8349a7b9f16ae511c2cee8708"}, + {file = "orjson-3.10.12-cp312-none-win_amd64.whl", hash = "sha256:fc23f691fa0f5c140576b8c365bc942d577d861a9ee1142e4db468e4e17094fb"}, + {file = "orjson-3.10.12-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47962841b2a8aa9a258b377f5188db31ba49af47d4003a32f55d6f8b19006543"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6334730e2532e77b6054e87ca84f3072bee308a45a452ea0bffbbbc40a67e296"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:accfe93f42713c899fdac2747e8d0d5c659592df2792888c6c5f829472e4f85e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7974c490c014c48810d1dede6c754c3cc46598da758c25ca3b4001ac45b703f"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3f250ce7727b0b2682f834a3facff88e310f52f07a5dcfd852d99637d386e79e"}, + {file = 
"orjson-3.10.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f31422ff9486ae484f10ffc51b5ab2a60359e92d0716fcce1b3593d7bb8a9af6"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5f29c5d282bb2d577c2a6bbde88d8fdcc4919c593f806aac50133f01b733846e"}, + {file = "orjson-3.10.12-cp313-none-win32.whl", hash = "sha256:f45653775f38f63dc0e6cd4f14323984c3149c05d6007b58cb154dd080ddc0dc"}, + {file = "orjson-3.10.12-cp313-none-win_amd64.whl", hash = "sha256:229994d0c376d5bdc91d92b3c9e6be2f1fbabd4cc1b59daae1443a46ee5e9825"}, + {file = "orjson-3.10.12-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7d69af5b54617a5fac5c8e5ed0859eb798e2ce8913262eb522590239db6c6763"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ed119ea7d2953365724a7059231a44830eb6bbb0cfead33fcbc562f5fd8f935"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5fc1238ef197e7cad5c91415f524aaa51e004be5a9b35a1b8a84ade196f73f"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43509843990439b05f848539d6f6198d4ac86ff01dd024b2f9a795c0daeeab60"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f72e27a62041cfb37a3de512247ece9f240a561e6c8662276beaf4d53d406db4"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a904f9572092bb6742ab7c16c623f0cdccbad9eeb2d14d4aa06284867bddd31"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:855c0833999ed5dc62f64552db26f9be767434917d8348d77bacaab84f787d7b"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:897830244e2320f6184699f598df7fb9db9f5087d6f3f03666ae89d607e4f8ed"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:0b32652eaa4a7539f6f04abc6243619c56f8530c53bf9b023e1269df5f7816dd"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:36b4aa31e0f6a1aeeb6f8377769ca5d125db000f05c20e54163aef1d3fe8e833"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5535163054d6cbf2796f93e4f0dbc800f61914c0e3c4ed8499cf6ece22b4a3da"}, + {file = "orjson-3.10.12-cp38-none-win32.whl", hash = "sha256:90a5551f6f5a5fa07010bf3d0b4ca2de21adafbbc0af6cb700b63cd767266cb9"}, + {file = "orjson-3.10.12-cp38-none-win_amd64.whl", hash = "sha256:703a2fb35a06cdd45adf5d733cf613cbc0cb3ae57643472b16bc22d325b5fb6c"}, + {file = "orjson-3.10.12-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f29de3ef71a42a5822765def1febfb36e0859d33abf5c2ad240acad5c6a1b78d"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de365a42acc65d74953f05e4772c974dad6c51cfc13c3240899f534d611be967"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a5a0158648a67ff0004cb0df5df7dcc55bfc9ca154d9c01597a23ad54c8d0c"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c47ce6b8d90fe9646a25b6fb52284a14ff215c9595914af63a5933a49972ce36"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0eee4c2c5bfb5c1b47a5db80d2ac7aaa7e938956ae88089f098aff2c0f35d5d8"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:35d3081bbe8b86587eb5c98a73b97f13d8f9fea685cf91a579beddacc0d10566"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c23a6e90383884068bc2dba83d5222c9fcc3b99a0ed2411d38150734236755"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5472be7dc3269b4b52acba1433dac239215366f89dc1d8d0e64029abac4e714e"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7319cda750fca96ae5973efb31b17d97a5c5225ae0bc79bf5bf84df9e1ec2ab6"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:74d5ca5a255bf20b8def6a2b96b1e18ad37b4a122d59b154c458ee9494377f80"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ff31d22ecc5fb85ef62c7d4afe8301d10c558d00dd24274d4bbe464380d3cd69"}, + {file = "orjson-3.10.12-cp39-none-win32.whl", hash = "sha256:c22c3ea6fba91d84fcb4cda30e64aff548fcf0c44c876e681f47d61d24b12e6b"}, + {file = "orjson-3.10.12-cp39-none-win_amd64.whl", hash = "sha256:be604f60d45ace6b0b33dd990a66b4526f1a7a186ac411c942674625456ca548"}, + {file = "orjson-3.10.12.tar.gz", hash = "sha256:0a78bbda3aea0f9f079057ee1ee8a1ecf790d4f1af88dd67493c6b8ee52506ff"}, +] + [[package]] name = "packaging" version = "24.2" @@ -3065,4 +3149,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "cbcaf165db6acc5621f55d4b730e2d1e5d8cfc76db841bc433cae9bc4801eed8" +content-hash = "635f4acfdf9b3746d369877390cc94acaed40e2cd79742d51da337d206f7b3fe" diff --git a/pyproject.toml b/pyproject.toml index 2ad06885d6..1b9e7c66a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ httpx = "^0.27.0" puremagic = "^1.28" cloudcheck = "^6.0.0.602" radixtarget = "^2.0.0.50" +orjson = "^3.10.12" [tool.poetry.group.dev.dependencies] poetry-dynamic-versioning = ">=0.21.4,<1.5.0" From 319c9c3b52ad118b259855e2540990461737e017 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 7 Dec 2024 23:38:23 -0500 Subject: [PATCH 44/64] httpx performance boost --- bbot/modules/httpx.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 136e14119b..8edc4e1d69 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -144,9 +144,9 @@ async def handle_batch(self, *events): command += ["-http-proxy", proxy] async for line in self.run_process_live(command, text=False, input=list(stdin), stderr=subprocess.DEVNULL): try: - j = orjson.loads(line) + j = await self.helpers.run_in_executor(orjson.loads, line) except orjson.JSONDecodeError: - self.debug(f"Failed to decode line: {line}") + self.warning(f"httpx failed to decode line: {line}") continue url = j.get("url", "") From c3ed71f89b65ce32e0af39cabb41877f1544d2b7 Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Mon, 9 Dec 2024 02:56:12 +0000 Subject: [PATCH 45/64] [create-pull-request] automated change --- README.md | 2 - docs/data/chord_graph/entities.json | 683 +++++++++++++++------------- docs/data/chord_graph/rels.json | 566 ++++++++++++----------- docs/modules/list_of_modules.md | 15 +- docs/modules/nuclei.md | 2 +- docs/scanning/advanced.md | 13 +- docs/scanning/configuration.md | 4 +- docs/scanning/events.md | 70 +-- docs/scanning/presets_list.md | 83 ++-- 9 files changed, 756 insertions(+), 682 deletions(-) diff --git a/README.md b/README.md index f029b9432d..bd4e9d461a 100644 --- a/README.md +++ b/README.md @@ -226,8 +226,6 @@ config: baddns: enable_references: True 
- - ``` diff --git a/docs/data/chord_graph/entities.json b/docs/data/chord_graph/entities.json index 88242097ed..00633c95fa 100644 --- a/docs/data/chord_graph/entities.json +++ b/docs/data/chord_graph/entities.json @@ -23,11 +23,11 @@ ] }, { - "id": 129, + "id": 131, "name": "AZURE_TENANT", "parent": 88888888, "consumes": [ - 128 + 130 ], "produces": [] }, @@ -36,20 +36,20 @@ "name": "CODE_REPOSITORY", "parent": 88888888, "consumes": [ - 62, - 82, - 85, - 87, - 117, - 136 + 63, + 83, + 86, + 88, + 119, + 138 ], "produces": [ 42, - 63, - 83, + 64, 84, - 86, - 116 + 85, + 87, + 118 ] }, { @@ -87,36 +87,37 @@ 58, 59, 60, - 61, - 67, - 79, - 83, - 90, - 94, - 96, - 102, + 62, + 68, + 80, + 84, + 91, + 95, + 97, 103, - 107, - 108, - 112, - 113, + 104, + 106, + 109, + 110, 114, - 118, - 121, - 122, + 115, + 116, + 120, 123, 124, 125, - 128, - 131, - 132, + 126, + 127, + 130, 133, + 134, 135, - 138, - 141, - 142, - 145, - 148 + 137, + 140, + 143, + 144, + 147, + 150 ], "produces": [ 6, @@ -137,31 +138,32 @@ 58, 59, 60, - 79, - 90, - 94, - 96, - 102, + 61, + 80, + 91, + 95, + 97, 103, - 105, + 104, 107, - 108, - 112, - 118, - 121, + 109, + 110, + 114, + 120, 123, - 124, - 128, + 125, + 126, 130, - 131, 132, - 135, - 138, - 139, + 133, + 134, + 137, + 140, 141, - 142, - 145, - 148 + 143, + 144, + 147, + 150 ] }, { @@ -170,8 +172,8 @@ "parent": 88888888, "consumes": [ 21, - 128, - 133 + 130, + 135 ], "produces": [] }, @@ -180,19 +182,19 @@ "name": "EMAIL_ADDRESS", "parent": 88888888, "consumes": [ - 68 + 69 ], "produces": [ 45, 52, 58, - 61, - 67, - 94, - 113, - 122, - 125, - 130 + 62, + 68, + 95, + 115, + 124, + 127, + 132 ] }, { @@ -200,18 +202,18 @@ "name": "FILESYSTEM", "parent": 88888888, "consumes": [ - 72, - 101, - 136 + 73, + 102, + 138 ], "produces": [ 8, - 62, - 76, - 82, - 85, - 101, - 117 + 63, + 77, + 83, + 86, + 102, + 119 ] }, { @@ -220,7 +222,7 @@ "parent": 88888888, "consumes": [ 14, - 146 + 148 ], "produces": [ 1, @@ -235,33 +237,33 @@ 34, 37, 51, - 81, - 86, - 91, - 93, - 96, - 104, + 82, + 87, + 92, + 94, + 97, 105, - 106, - 109, - 110, - 120, - 126, + 107, + 108, + 111, + 112, + 122, 128, - 134, + 130, 136, - 137, - 147 + 138, + 139, + 149 ] }, { - "id": 98, + "id": 99, "name": "GEOLOCATION", "parent": 88888888, "consumes": [], "produces": [ - 97, - 100 + 98, + 101 ] }, { @@ -283,24 +285,25 @@ 14, 26, 51, - 66, - 69, - 76, - 86, - 91, - 104, + 67, + 70, + 77, + 87, + 92, 105, - 109, - 110, + 106, + 107, 111, - 120, - 128, - 134, - 144, - 147 + 112, + 113, + 122, + 130, + 136, + 146, + 149 ], "produces": [ - 92 + 93 ] }, { @@ -310,26 +313,28 @@ "consumes": [ 11, 14, - 96, 97, - 99, + 98, 100, - 114, - 128 + 101, + 106, + 116, + 130 ], "produces": [ 14, - 99, - 128 + 61, + 100, + 130 ] }, { - "id": 115, + "id": 117, "name": "IP_RANGE", "parent": 88888888, "consumes": [ - 114, - 128 + 116, + 130 ], "produces": [] }, @@ -341,7 +346,7 @@ 8 ], "produces": [ - 87 + 88 ] }, { @@ -350,29 +355,30 @@ "parent": 88888888, "consumes": [ 14, - 77, - 92, - 130 + 78, + 93, + 106, + 132 ], "produces": [ 14, - 96, - 114, - 128 + 97, + 116, + 130 ] }, { - "id": 64, + "id": 65, "name": "ORG_STUB", "parent": 88888888, "consumes": [ - 63, - 84, - 87, - 116 + 64, + 85, + 88, + 118 ], "produces": [ - 128 + 130 ] }, { @@ -386,12 +392,14 @@ ] }, { - "id": 78, + "id": 79, "name": "PROTOCOL", "parent": 88888888, - "consumes": [], + "consumes": [ + 106 + ], "produces": [ - 77 + 78 ] }, { @@ -401,36 +409,37 @@ "consumes": [], "produces": [ 54, - 61 + 61, + 62 ] }, { - "id": 70, + "id": 71, "name": "RAW_TEXT", 
"parent": 88888888, "consumes": [ - 69 + 70 ], "produces": [ - 72 + 73 ] }, { - "id": 65, + "id": 66, "name": "SOCIAL", "parent": 88888888, "consumes": [ - 63, - 84, - 86, - 88, - 116, - 128 + 64, + 85, + 87, + 89, + 118, + 130 ], "produces": [ - 63, - 86, - 127 + 64, + 87, + 129 ] }, { @@ -445,7 +454,7 @@ 32, 33, 34, - 128 + 130 ], "produces": [ 29, @@ -461,19 +470,19 @@ "parent": 88888888, "consumes": [ 14, - 86, - 146, - 147 + 87, + 148, + 149 ], "produces": [ 26, - 66, - 86, - 88, - 96, - 106, - 144, - 147 + 67, + 87, + 89, + 97, + 108, + 146, + 149 ] }, { @@ -485,37 +494,37 @@ 14, 23, 37, - 73, - 80, + 74, 81, - 88, - 92, - 95, - 105, - 106, - 119, - 126, + 82, + 89, + 93, + 96, + 107, + 108, + 121, 128, - 134, - 137, + 130, + 136, 139, - 143, - 146 + 141, + 145, + 148 ], "produces": [ - 88, - 92 + 89, + 93 ] }, { - "id": 75, + "id": 76, "name": "URL_HINT", "parent": 88888888, "consumes": [ - 74 + 75 ], "produces": [ - 95 + 96 ] }, { @@ -524,11 +533,11 @@ "parent": 88888888, "consumes": [ 42, - 76, - 92, - 107, - 127, - 128 + 77, + 93, + 109, + 129, + 130 ], "produces": [ 18, @@ -536,19 +545,19 @@ 32, 54, 58, - 61, - 63, - 69, - 73, + 62, + 64, + 70, 74, - 83, - 88, - 94, - 119, - 122, - 138, - 145, - 147 + 75, + 84, + 89, + 95, + 121, + 124, + 140, + 147, + 149 ] }, { @@ -556,7 +565,7 @@ "name": "USERNAME", "parent": 88888888, "consumes": [ - 128 + 130 ], "produces": [ 45, @@ -564,14 +573,14 @@ ] }, { - "id": 140, + "id": 142, "name": "VHOST", "parent": 88888888, "consumes": [ - 146 + 148 ], "produces": [ - 139 + 141 ] }, { @@ -580,7 +589,7 @@ "parent": 88888888, "consumes": [ 14, - 146 + 148 ], "produces": [ 1, @@ -589,13 +598,13 @@ 25, 26, 51, - 66, - 80, - 96, - 106, - 134, + 67, + 81, + 97, + 108, 136, - 147 + 138, + 149 ] }, { @@ -606,33 +615,33 @@ 14 ], "produces": [ - 143 + 145 ] }, { - "id": 89, + "id": 90, "name": "WEBSCREENSHOT", "parent": 88888888, "consumes": [], "produces": [ - 88 + 89 ] }, { - "id": 71, + "id": 72, "name": "WEB_PARAMETER", "parent": 88888888, "consumes": [ - 93, - 109, - 110, - 111 + 94, + 111, + 112, + 113 ], "produces": [ - 69, - 109, - 110, - 111 + 70, + 111, + 112, + 113 ] }, { @@ -1105,6 +1114,17 @@ }, { "id": 61, + "name": "dnsresolve", + "parent": 99999999, + "consumes": [], + "produces": [ + 7, + 12, + 55 + ] + }, + { + "id": 62, "name": "dnstlsrpt", "parent": 99999999, "consumes": [ @@ -1117,7 +1137,7 @@ ] }, { - "id": 62, + "id": 63, "name": "docker_pull", "parent": 99999999, "consumes": [ @@ -1128,21 +1148,21 @@ ] }, { - "id": 63, + "id": 64, "name": "dockerhub", "parent": 99999999, "consumes": [ - 64, - 65 + 65, + 66 ], "produces": [ 43, - 65, + 66, 19 ] }, { - "id": 66, + "id": 67, "name": "dotnetnuke", "parent": 99999999, "consumes": [ @@ -1154,7 +1174,7 @@ ] }, { - "id": 67, + "id": 68, "name": "emailformat", "parent": 99999999, "consumes": [ @@ -1165,7 +1185,7 @@ ] }, { - "id": 68, + "id": 69, "name": "emails", "parent": 99999999, "consumes": [ @@ -1174,31 +1194,31 @@ "produces": [] }, { - "id": 69, + "id": 70, "name": "excavate", "parent": 99999999, "consumes": [ 2, - 70 + 71 ], "produces": [ 19, - 71 + 72 ] }, { - "id": 72, + "id": 73, "name": "extractous", "parent": 99999999, "consumes": [ 10 ], "produces": [ - 70 + 71 ] }, { - "id": 73, + "id": 74, "name": "ffuf", "parent": 99999999, "consumes": [ @@ -1209,18 +1229,18 @@ ] }, { - "id": 74, + "id": 75, "name": "ffuf_shortnames", "parent": 99999999, "consumes": [ - 75 + 76 ], "produces": [ 19 ] }, { - "id": 76, + "id": 77, "name": "filedownload", "parent": 99999999, "consumes": [ @@ 
-1232,18 +1252,18 @@ ] }, { - "id": 77, + "id": 78, "name": "fingerprintx", "parent": 99999999, "consumes": [ 15 ], "produces": [ - 78 + 79 ] }, { - "id": 79, + "id": 80, "name": "fullhunt", "parent": 99999999, "consumes": [ @@ -1254,7 +1274,7 @@ ] }, { - "id": 80, + "id": 81, "name": "generic_ssrf", "parent": 99999999, "consumes": [ @@ -1265,7 +1285,7 @@ ] }, { - "id": 81, + "id": 82, "name": "git", "parent": 99999999, "consumes": [ @@ -1276,7 +1296,7 @@ ] }, { - "id": 82, + "id": 83, "name": "git_clone", "parent": 99999999, "consumes": [ @@ -1287,7 +1307,7 @@ ] }, { - "id": 83, + "id": 84, "name": "github_codesearch", "parent": 99999999, "consumes": [ @@ -1299,19 +1319,19 @@ ] }, { - "id": 84, + "id": 85, "name": "github_org", "parent": 99999999, "consumes": [ - 64, - 65 + 65, + 66 ], "produces": [ 43 ] }, { - "id": 85, + "id": 86, "name": "github_workflows", "parent": 99999999, "consumes": [ @@ -1322,50 +1342,50 @@ ] }, { - "id": 86, + "id": 87, "name": "gitlab", "parent": 99999999, "consumes": [ 2, - 65, + 66, 16 ], "produces": [ 43, 4, - 65, + 66, 16 ] }, { - "id": 87, + "id": 88, "name": "google_playstore", "parent": 99999999, "consumes": [ 43, - 64 + 65 ], "produces": [ 9 ] }, { - "id": 88, + "id": 89, "name": "gowitness", "parent": 99999999, "consumes": [ - 65, + 66, 3 ], "produces": [ 16, 3, 19, - 89 + 90 ] }, { - "id": 90, + "id": 91, "name": "hackertarget", "parent": 99999999, "consumes": [ @@ -1376,7 +1396,7 @@ ] }, { - "id": 91, + "id": 92, "name": "host_header", "parent": 99999999, "consumes": [ @@ -1387,7 +1407,7 @@ ] }, { - "id": 92, + "id": 93, "name": "httpx", "parent": 99999999, "consumes": [ @@ -1401,18 +1421,18 @@ ] }, { - "id": 93, + "id": 94, "name": "hunt", "parent": 99999999, "consumes": [ - 71 + 72 ], "produces": [ 4 ] }, { - "id": 94, + "id": 95, "name": "hunterio", "parent": 99999999, "consumes": [ @@ -1425,18 +1445,18 @@ ] }, { - "id": 95, + "id": 96, "name": "iis_shortnames", "parent": 99999999, "consumes": [ 3 ], "produces": [ - 75 + 76 ] }, { - "id": 96, + "id": 97, "name": "internetdb", "parent": 99999999, "consumes": [ @@ -1452,18 +1472,18 @@ ] }, { - "id": 97, + "id": 98, "name": "ip2location", "parent": 99999999, "consumes": [ 12 ], "produces": [ - 98 + 99 ] }, { - "id": 99, + "id": 100, "name": "ipneighbor", "parent": 99999999, "consumes": [ @@ -1474,18 +1494,18 @@ ] }, { - "id": 100, + "id": 101, "name": "ipstack", "parent": 99999999, "consumes": [ 12 ], "produces": [ - 98 + 99 ] }, { - "id": 101, + "id": 102, "name": "jadx", "parent": 99999999, "consumes": [ @@ -1496,7 +1516,7 @@ ] }, { - "id": 102, + "id": 103, "name": "leakix", "parent": 99999999, "consumes": [ @@ -1507,7 +1527,7 @@ ] }, { - "id": 103, + "id": 104, "name": "myssl", "parent": 99999999, "consumes": [ @@ -1518,7 +1538,7 @@ ] }, { - "id": 104, + "id": 105, "name": "newsletters", "parent": 99999999, "consumes": [ @@ -1529,7 +1549,20 @@ ] }, { - "id": 105, + "id": 106, + "name": "nmap_xml", + "parent": 99999999, + "consumes": [ + 7, + 2, + 12, + 15, + 79 + ], + "produces": [] + }, + { + "id": 107, "name": "ntlm", "parent": 99999999, "consumes": [ @@ -1542,7 +1575,7 @@ ] }, { - "id": 106, + "id": 108, "name": "nuclei", "parent": 99999999, "consumes": [ @@ -1555,7 +1588,7 @@ ] }, { - "id": 107, + "id": 109, "name": "oauth", "parent": 99999999, "consumes": [ @@ -1567,7 +1600,7 @@ ] }, { - "id": 108, + "id": 110, "name": "otx", "parent": 99999999, "consumes": [ @@ -1578,45 +1611,45 @@ ] }, { - "id": 109, + "id": 111, "name": "paramminer_cookies", "parent": 99999999, "consumes": [ 2, - 71 + 72 
], "produces": [ 4, - 71 + 72 ] }, { - "id": 110, + "id": 112, "name": "paramminer_getparams", "parent": 99999999, "consumes": [ 2, - 71 + 72 ], "produces": [ 4, - 71 + 72 ] }, { - "id": 111, + "id": 113, "name": "paramminer_headers", "parent": 99999999, "consumes": [ 2, - 71 + 72 ], "produces": [ - 71 + 72 ] }, { - "id": 112, + "id": 114, "name": "passivetotal", "parent": 99999999, "consumes": [ @@ -1627,7 +1660,7 @@ ] }, { - "id": 113, + "id": 115, "name": "pgp", "parent": 99999999, "consumes": [ @@ -1638,32 +1671,32 @@ ] }, { - "id": 114, + "id": 116, "name": "portscan", "parent": 99999999, "consumes": [ 7, 12, - 115 + 117 ], "produces": [ 15 ] }, { - "id": 116, + "id": 118, "name": "postman", "parent": 99999999, "consumes": [ - 64, - 65 + 65, + 66 ], "produces": [ 43 ] }, { - "id": 117, + "id": 119, "name": "postman_download", "parent": 99999999, "consumes": [ @@ -1674,7 +1707,7 @@ ] }, { - "id": 118, + "id": 120, "name": "rapiddns", "parent": 99999999, "consumes": [ @@ -1685,7 +1718,7 @@ ] }, { - "id": 119, + "id": 121, "name": "robots", "parent": 99999999, "consumes": [ @@ -1696,7 +1729,7 @@ ] }, { - "id": 120, + "id": 122, "name": "secretsdb", "parent": 99999999, "consumes": [ @@ -1707,7 +1740,7 @@ ] }, { - "id": 121, + "id": 123, "name": "securitytrails", "parent": 99999999, "consumes": [ @@ -1718,7 +1751,7 @@ ] }, { - "id": 122, + "id": 124, "name": "securitytxt", "parent": 99999999, "consumes": [ @@ -1730,7 +1763,7 @@ ] }, { - "id": 123, + "id": 125, "name": "shodan_dns", "parent": 99999999, "consumes": [ @@ -1741,7 +1774,7 @@ ] }, { - "id": 124, + "id": 126, "name": "sitedossier", "parent": 99999999, "consumes": [ @@ -1752,7 +1785,7 @@ ] }, { - "id": 125, + "id": 127, "name": "skymem", "parent": 99999999, "consumes": [ @@ -1763,7 +1796,7 @@ ] }, { - "id": 126, + "id": 128, "name": "smuggler", "parent": 99999999, "consumes": [ @@ -1774,28 +1807,28 @@ ] }, { - "id": 127, + "id": 129, "name": "social", "parent": 99999999, "consumes": [ 19 ], "produces": [ - 65 + 66 ] }, { - "id": 128, + "id": 130, "name": "speculate", "parent": 99999999, "consumes": [ - 129, + 131, 7, 22, 2, 12, - 115, - 65, + 117, + 66, 24, 3, 19, @@ -1806,11 +1839,11 @@ 4, 12, 15, - 64 + 65 ] }, { - "id": 130, + "id": 132, "name": "sslcert", "parent": 99999999, "consumes": [ @@ -1822,7 +1855,7 @@ ] }, { - "id": 131, + "id": 133, "name": "subdomaincenter", "parent": 99999999, "consumes": [ @@ -1833,7 +1866,7 @@ ] }, { - "id": 132, + "id": 134, "name": "subdomainradar", "parent": 99999999, "consumes": [ @@ -1844,7 +1877,7 @@ ] }, { - "id": 133, + "id": 135, "name": "subdomains", "parent": 99999999, "consumes": [ @@ -1854,7 +1887,7 @@ "produces": [] }, { - "id": 134, + "id": 136, "name": "telerik", "parent": 99999999, "consumes": [ @@ -1867,7 +1900,7 @@ ] }, { - "id": 135, + "id": 137, "name": "trickest", "parent": 99999999, "consumes": [ @@ -1878,7 +1911,7 @@ ] }, { - "id": 136, + "id": 138, "name": "trufflehog", "parent": 99999999, "consumes": [ @@ -1891,7 +1924,7 @@ ] }, { - "id": 137, + "id": 139, "name": "url_manipulation", "parent": 99999999, "consumes": [ @@ -1902,7 +1935,7 @@ ] }, { - "id": 138, + "id": 140, "name": "urlscan", "parent": 99999999, "consumes": [ @@ -1914,7 +1947,7 @@ ] }, { - "id": 139, + "id": 141, "name": "vhost", "parent": 99999999, "consumes": [ @@ -1922,11 +1955,11 @@ ], "produces": [ 7, - 140 + 142 ] }, { - "id": 141, + "id": 143, "name": "viewdns", "parent": 99999999, "consumes": [ @@ -1937,7 +1970,7 @@ ] }, { - "id": 142, + "id": 144, "name": "virustotal", "parent": 99999999, 
"consumes": [ @@ -1948,7 +1981,7 @@ ] }, { - "id": 143, + "id": 145, "name": "wafw00f", "parent": 99999999, "consumes": [ @@ -1959,7 +1992,7 @@ ] }, { - "id": 144, + "id": 146, "name": "wappalyzer", "parent": 99999999, "consumes": [ @@ -1970,7 +2003,7 @@ ] }, { - "id": 145, + "id": 147, "name": "wayback", "parent": 99999999, "consumes": [ @@ -1982,20 +2015,20 @@ ] }, { - "id": 146, + "id": 148, "name": "web_report", "parent": 99999999, "consumes": [ 4, 16, 3, - 140, + 142, 5 ], "produces": [] }, { - "id": 147, + "id": 149, "name": "wpscan", "parent": 99999999, "consumes": [ @@ -2010,7 +2043,7 @@ ] }, { - "id": 148, + "id": 150, "name": "zoomeye", "parent": 99999999, "consumes": [ @@ -2020,4 +2053,4 @@ 7 ] } -] +] \ No newline at end of file diff --git a/docs/data/chord_graph/rels.json b/docs/data/chord_graph/rels.json index 43a646026a..f25e491f3e 100644 --- a/docs/data/chord_graph/rels.json +++ b/docs/data/chord_graph/rels.json @@ -585,1138 +585,1178 @@ "type": "produces" }, { - "source": 61, + "source": 7, + "target": 61, + "type": "produces" + }, + { + "source": 12, + "target": 61, + "type": "produces" + }, + { + "source": 55, + "target": 61, + "type": "produces" + }, + { + "source": 62, "target": 7, "type": "consumes" }, { "source": 46, - "target": 61, + "target": 62, "type": "produces" }, { "source": 55, - "target": 61, + "target": 62, "type": "produces" }, { "source": 19, - "target": 61, + "target": 62, "type": "produces" }, { - "source": 62, + "source": 63, "target": 43, "type": "consumes" }, { "source": 10, - "target": 62, + "target": 63, "type": "produces" }, { - "source": 63, - "target": 64, + "source": 64, + "target": 65, "type": "consumes" }, { - "source": 63, - "target": 65, + "source": 64, + "target": 66, "type": "consumes" }, { "source": 43, - "target": 63, + "target": 64, "type": "produces" }, { - "source": 65, - "target": 63, + "source": 66, + "target": 64, "type": "produces" }, { "source": 19, - "target": 63, + "target": 64, "type": "produces" }, { - "source": 66, + "source": 67, "target": 2, "type": "consumes" }, { "source": 16, - "target": 66, + "target": 67, "type": "produces" }, { "source": 5, - "target": 66, + "target": 67, "type": "produces" }, { - "source": 67, + "source": 68, "target": 7, "type": "consumes" }, { "source": 46, - "target": 67, + "target": 68, "type": "produces" }, { - "source": 68, + "source": 69, "target": 46, "type": "consumes" }, { - "source": 69, + "source": 70, "target": 2, "type": "consumes" }, { - "source": 69, - "target": 70, + "source": 70, + "target": 71, "type": "consumes" }, { "source": 19, - "target": 69, + "target": 70, "type": "produces" }, { - "source": 71, - "target": 69, + "source": 72, + "target": 70, "type": "produces" }, { - "source": 72, + "source": 73, "target": 10, "type": "consumes" }, { - "source": 70, - "target": 72, + "source": 71, + "target": 73, "type": "produces" }, { - "source": 73, + "source": 74, "target": 3, "type": "consumes" }, { "source": 19, - "target": 73, + "target": 74, "type": "produces" }, { - "source": 74, - "target": 75, + "source": 75, + "target": 76, "type": "consumes" }, { "source": 19, - "target": 74, + "target": 75, "type": "produces" }, { - "source": 76, + "source": 77, "target": 2, "type": "consumes" }, { - "source": 76, + "source": 77, "target": 19, "type": "consumes" }, { "source": 10, - "target": 76, + "target": 77, "type": "produces" }, { - "source": 77, + "source": 78, "target": 15, "type": "consumes" }, { - "source": 78, - "target": 77, + "source": 79, + "target": 78, "type": "produces" }, { 
- "source": 79, + "source": 80, "target": 7, "type": "consumes" }, { "source": 7, - "target": 79, + "target": 80, "type": "produces" }, { - "source": 80, + "source": 81, "target": 3, "type": "consumes" }, { "source": 5, - "target": 80, + "target": 81, "type": "produces" }, { - "source": 81, + "source": 82, "target": 3, "type": "consumes" }, { "source": 4, - "target": 81, + "target": 82, "type": "produces" }, { - "source": 82, + "source": 83, "target": 43, "type": "consumes" }, { "source": 10, - "target": 82, + "target": 83, "type": "produces" }, { - "source": 83, + "source": 84, "target": 7, "type": "consumes" }, { "source": 43, - "target": 83, + "target": 84, "type": "produces" }, { "source": 19, - "target": 83, + "target": 84, "type": "produces" }, { - "source": 84, - "target": 64, + "source": 85, + "target": 65, "type": "consumes" }, { - "source": 84, - "target": 65, + "source": 85, + "target": 66, "type": "consumes" }, { "source": 43, - "target": 84, + "target": 85, "type": "produces" }, { - "source": 85, + "source": 86, "target": 43, "type": "consumes" }, { "source": 10, - "target": 85, + "target": 86, "type": "produces" }, { - "source": 86, + "source": 87, "target": 2, "type": "consumes" }, { - "source": 86, - "target": 65, + "source": 87, + "target": 66, "type": "consumes" }, { - "source": 86, + "source": 87, "target": 16, "type": "consumes" }, { "source": 43, - "target": 86, + "target": 87, "type": "produces" }, { "source": 4, - "target": 86, + "target": 87, "type": "produces" }, { - "source": 65, - "target": 86, + "source": 66, + "target": 87, "type": "produces" }, { "source": 16, - "target": 86, + "target": 87, "type": "produces" }, { - "source": 87, + "source": 88, "target": 43, "type": "consumes" }, { - "source": 87, - "target": 64, + "source": 88, + "target": 65, "type": "consumes" }, { "source": 9, - "target": 87, + "target": 88, "type": "produces" }, { - "source": 88, - "target": 65, + "source": 89, + "target": 66, "type": "consumes" }, { - "source": 88, + "source": 89, "target": 3, "type": "consumes" }, { "source": 16, - "target": 88, + "target": 89, "type": "produces" }, { "source": 3, - "target": 88, + "target": 89, "type": "produces" }, { "source": 19, - "target": 88, + "target": 89, "type": "produces" }, { - "source": 89, - "target": 88, + "source": 90, + "target": 89, "type": "produces" }, { - "source": 90, + "source": 91, "target": 7, "type": "consumes" }, { "source": 7, - "target": 90, + "target": 91, "type": "produces" }, { - "source": 91, + "source": 92, "target": 2, "type": "consumes" }, { "source": 4, - "target": 91, + "target": 92, "type": "produces" }, { - "source": 92, + "source": 93, "target": 15, "type": "consumes" }, { - "source": 92, + "source": 93, "target": 3, "type": "consumes" }, { - "source": 92, + "source": 93, "target": 19, "type": "consumes" }, { "source": 2, - "target": 92, + "target": 93, "type": "produces" }, { "source": 3, - "target": 92, + "target": 93, "type": "produces" }, { - "source": 93, - "target": 71, + "source": 94, + "target": 72, "type": "consumes" }, { "source": 4, - "target": 93, + "target": 94, "type": "produces" }, { - "source": 94, + "source": 95, "target": 7, "type": "consumes" }, { "source": 7, - "target": 94, + "target": 95, "type": "produces" }, { "source": 46, - "target": 94, + "target": 95, "type": "produces" }, { "source": 19, - "target": 94, + "target": 95, "type": "produces" }, { - "source": 95, + "source": 96, "target": 3, "type": "consumes" }, { - "source": 75, - "target": 95, + "source": 76, + "target": 96, "type": 
"produces" }, { - "source": 96, + "source": 97, "target": 7, "type": "consumes" }, { - "source": 96, + "source": 97, "target": 12, "type": "consumes" }, { "source": 7, - "target": 96, + "target": 97, "type": "produces" }, { "source": 4, - "target": 96, + "target": 97, "type": "produces" }, { "source": 15, - "target": 96, + "target": 97, "type": "produces" }, { "source": 16, - "target": 96, + "target": 97, "type": "produces" }, { "source": 5, - "target": 96, + "target": 97, "type": "produces" }, { - "source": 97, + "source": 98, "target": 12, "type": "consumes" }, { - "source": 98, - "target": 97, + "source": 99, + "target": 98, "type": "produces" }, { - "source": 99, + "source": 100, "target": 12, "type": "consumes" }, { "source": 12, - "target": 99, + "target": 100, "type": "produces" }, { - "source": 100, + "source": 101, "target": 12, "type": "consumes" }, { - "source": 98, - "target": 100, + "source": 99, + "target": 101, "type": "produces" }, { - "source": 101, + "source": 102, "target": 10, "type": "consumes" }, { "source": 10, - "target": 101, + "target": 102, "type": "produces" }, { - "source": 102, + "source": 103, "target": 7, "type": "consumes" }, { "source": 7, - "target": 102, + "target": 103, "type": "produces" }, { - "source": 103, + "source": 104, "target": 7, "type": "consumes" }, { "source": 7, - "target": 103, + "target": 104, "type": "produces" }, { - "source": 104, + "source": 105, "target": 2, "type": "consumes" }, { "source": 4, - "target": 104, + "target": 105, "type": "produces" }, { - "source": 105, + "source": 106, + "target": 7, + "type": "consumes" + }, + { + "source": 106, "target": 2, "type": "consumes" }, { - "source": 105, + "source": 106, + "target": 12, + "type": "consumes" + }, + { + "source": 106, + "target": 15, + "type": "consumes" + }, + { + "source": 106, + "target": 79, + "type": "consumes" + }, + { + "source": 107, + "target": 2, + "type": "consumes" + }, + { + "source": 107, "target": 3, "type": "consumes" }, { "source": 7, - "target": 105, + "target": 107, "type": "produces" }, { "source": 4, - "target": 105, + "target": 107, "type": "produces" }, { - "source": 106, + "source": 108, "target": 3, "type": "consumes" }, { "source": 4, - "target": 106, + "target": 108, "type": "produces" }, { "source": 16, - "target": 106, + "target": 108, "type": "produces" }, { "source": 5, - "target": 106, + "target": 108, "type": "produces" }, { - "source": 107, + "source": 109, "target": 7, "type": "consumes" }, { - "source": 107, + "source": 109, "target": 19, "type": "consumes" }, { "source": 7, - "target": 107, + "target": 109, "type": "produces" }, { - "source": 108, + "source": 110, "target": 7, "type": "consumes" }, { "source": 7, - "target": 108, + "target": 110, "type": "produces" }, { - "source": 109, + "source": 111, "target": 2, "type": "consumes" }, { - "source": 109, - "target": 71, + "source": 111, + "target": 72, "type": "consumes" }, { "source": 4, - "target": 109, + "target": 111, "type": "produces" }, { - "source": 71, - "target": 109, + "source": 72, + "target": 111, "type": "produces" }, { - "source": 110, + "source": 112, "target": 2, "type": "consumes" }, { - "source": 110, - "target": 71, + "source": 112, + "target": 72, "type": "consumes" }, { "source": 4, - "target": 110, + "target": 112, "type": "produces" }, { - "source": 71, - "target": 110, + "source": 72, + "target": 112, "type": "produces" }, { - "source": 111, + "source": 113, "target": 2, "type": "consumes" }, { - "source": 111, - "target": 71, + "source": 113, + "target": 72, 
"type": "consumes" }, { - "source": 71, - "target": 111, + "source": 72, + "target": 113, "type": "produces" }, { - "source": 112, + "source": 114, "target": 7, "type": "consumes" }, { "source": 7, - "target": 112, + "target": 114, "type": "produces" }, { - "source": 113, + "source": 115, "target": 7, "type": "consumes" }, { "source": 46, - "target": 113, + "target": 115, "type": "produces" }, { - "source": 114, + "source": 116, "target": 7, "type": "consumes" }, { - "source": 114, + "source": 116, "target": 12, "type": "consumes" }, { - "source": 114, - "target": 115, + "source": 116, + "target": 117, "type": "consumes" }, { "source": 15, - "target": 114, + "target": 116, "type": "produces" }, { - "source": 116, - "target": 64, + "source": 118, + "target": 65, "type": "consumes" }, { - "source": 116, - "target": 65, + "source": 118, + "target": 66, "type": "consumes" }, { "source": 43, - "target": 116, + "target": 118, "type": "produces" }, { - "source": 117, + "source": 119, "target": 43, "type": "consumes" }, { "source": 10, - "target": 117, + "target": 119, "type": "produces" }, { - "source": 118, + "source": 120, "target": 7, "type": "consumes" }, { "source": 7, - "target": 118, + "target": 120, "type": "produces" }, { - "source": 119, + "source": 121, "target": 3, "type": "consumes" }, { "source": 19, - "target": 119, + "target": 121, "type": "produces" }, { - "source": 120, + "source": 122, "target": 2, "type": "consumes" }, { "source": 4, - "target": 120, + "target": 122, "type": "produces" }, { - "source": 121, + "source": 123, "target": 7, "type": "consumes" }, { "source": 7, - "target": 121, + "target": 123, "type": "produces" }, { - "source": 122, + "source": 124, "target": 7, "type": "consumes" }, { "source": 46, - "target": 122, + "target": 124, "type": "produces" }, { "source": 19, - "target": 122, + "target": 124, "type": "produces" }, { - "source": 123, + "source": 125, "target": 7, "type": "consumes" }, { "source": 7, - "target": 123, + "target": 125, "type": "produces" }, { - "source": 124, + "source": 126, "target": 7, "type": "consumes" }, { "source": 7, - "target": 124, + "target": 126, "type": "produces" }, { - "source": 125, + "source": 127, "target": 7, "type": "consumes" }, { "source": 46, - "target": 125, + "target": 127, "type": "produces" }, { - "source": 126, + "source": 128, "target": 3, "type": "consumes" }, { "source": 4, - "target": 126, + "target": 128, "type": "produces" }, { - "source": 127, + "source": 129, "target": 19, "type": "consumes" }, { - "source": 65, - "target": 127, + "source": 66, + "target": 129, "type": "produces" }, { - "source": 128, - "target": 129, + "source": 130, + "target": 131, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 7, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 22, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 2, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 12, "type": "consumes" }, { - "source": 128, - "target": 115, + "source": 130, + "target": 117, "type": "consumes" }, { - "source": 128, - "target": 65, + "source": 130, + "target": 66, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 24, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 3, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 19, "type": "consumes" }, { - "source": 128, + "source": 130, "target": 49, "type": "consumes" }, { "source": 7, - "target": 128, + "target": 130, "type": "produces" }, { "source": 4, - 
"target": 128, + "target": 130, "type": "produces" }, { "source": 12, - "target": 128, + "target": 130, "type": "produces" }, { "source": 15, - "target": 128, + "target": 130, "type": "produces" }, { - "source": 64, - "target": 128, + "source": 65, + "target": 130, "type": "produces" }, { - "source": 130, + "source": 132, "target": 15, "type": "consumes" }, { "source": 7, - "target": 130, + "target": 132, "type": "produces" }, { "source": 46, - "target": 130, + "target": 132, "type": "produces" }, { - "source": 131, + "source": 133, "target": 7, "type": "consumes" }, { "source": 7, - "target": 131, + "target": 133, "type": "produces" }, { - "source": 132, + "source": 134, "target": 7, "type": "consumes" }, { "source": 7, - "target": 132, + "target": 134, "type": "produces" }, { - "source": 133, + "source": 135, "target": 7, "type": "consumes" }, { - "source": 133, + "source": 135, "target": 22, "type": "consumes" }, { - "source": 134, + "source": 136, "target": 2, "type": "consumes" }, { - "source": 134, + "source": 136, "target": 3, "type": "consumes" }, { "source": 4, - "target": 134, + "target": 136, "type": "produces" }, { "source": 5, - "target": 134, + "target": 136, "type": "produces" }, { - "source": 135, + "source": 137, "target": 7, "type": "consumes" }, { "source": 7, - "target": 135, + "target": 137, "type": "produces" }, { - "source": 136, + "source": 138, "target": 43, "type": "consumes" }, { - "source": 136, + "source": 138, "target": 10, "type": "consumes" }, { "source": 4, - "target": 136, + "target": 138, "type": "produces" }, { "source": 5, - "target": 136, + "target": 138, "type": "produces" }, { - "source": 137, + "source": 139, "target": 3, "type": "consumes" }, { "source": 4, - "target": 137, + "target": 139, "type": "produces" }, { - "source": 138, + "source": 140, "target": 7, "type": "consumes" }, { "source": 7, - "target": 138, + "target": 140, "type": "produces" }, { "source": 19, - "target": 138, + "target": 140, "type": "produces" }, { - "source": 139, + "source": 141, "target": 3, "type": "consumes" }, { "source": 7, - "target": 139, + "target": 141, "type": "produces" }, { - "source": 140, - "target": 139, + "source": 142, + "target": 141, "type": "produces" }, { - "source": 141, + "source": 143, "target": 7, "type": "consumes" }, { "source": 7, - "target": 141, + "target": 143, "type": "produces" }, { - "source": 142, + "source": 144, "target": 7, "type": "consumes" }, { "source": 7, - "target": 142, + "target": 144, "type": "produces" }, { - "source": 143, + "source": 145, "target": 3, "type": "consumes" }, { "source": 17, - "target": 143, + "target": 145, "type": "produces" }, { - "source": 144, + "source": 146, "target": 2, "type": "consumes" }, { "source": 16, - "target": 144, + "target": 146, "type": "produces" }, { - "source": 145, + "source": 147, "target": 7, "type": "consumes" }, { "source": 7, - "target": 145, + "target": 147, "type": "produces" }, { "source": 19, - "target": 145, + "target": 147, "type": "produces" }, { - "source": 146, + "source": 148, "target": 4, "type": "consumes" }, { - "source": 146, + "source": 148, "target": 16, "type": "consumes" }, { - "source": 146, + "source": 148, "target": 3, "type": "consumes" }, { - "source": 146, - "target": 140, + "source": 148, + "target": 142, "type": "consumes" }, { - "source": 146, + "source": 148, "target": 5, "type": "consumes" }, { - "source": 147, + "source": 149, "target": 2, "type": "consumes" }, { - "source": 147, + "source": 149, "target": 16, "type": "consumes" }, { "source": 4, - 
"target": 147, + "target": 149, "type": "produces" }, { "source": 16, - "target": 147, + "target": 149, "type": "produces" }, { "source": 19, - "target": 147, + "target": 149, "type": "produces" }, { "source": 5, - "target": 147, + "target": 149, "type": "produces" }, { - "source": 148, + "source": 150, "target": 7, "type": "consumes" }, { "source": 7, - "target": 148, + "target": 150, "type": "produces" } -] +] \ No newline at end of file diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 44d57a0fbe..f8eae6b501 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -120,21 +120,22 @@ | emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | @domwhewell-sage | 2023-12-23 | | http | output | No | Send every event to a custom URL via a web request | | * | | @TheTechromancer | 2022-04-13 | | json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | @TheTechromancer | 2022-04-07 | -| mysql | output | No | Output scan data to a MySQL database | | * | | | | +| mysql | output | No | Output scan data to a MySQL database | | * | | @TheTechromancer | 2024-11-13 | | neo4j | output | No | Output to Neo4j | | * | | @TheTechromancer | 2022-04-07 | -| postgres | output | No | Output scan data to a SQLite database | | * | | | | +| nmap_xml | output | No | Output to Nmap XML | | DNS_NAME, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, PROTOCOL | | @TheTechromancer | 2024-11-16 | +| postgres | output | No | Output scan data to a SQLite database | | * | | @TheTechromancer | 2024-11-08 | | python | output | No | Output via Python API | | * | | @TheTechromancer | 2022-09-13 | | slack | output | No | Message a Slack channel when certain events are encountered | | * | | @TheTechromancer | 2023-08-14 | | splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | @w0Tx | 2024-02-17 | -| sqlite | output | No | Output scan data to a SQLite database | | * | | | | -| stdout | output | No | Output to text | | * | | | | +| sqlite | output | No | Output scan data to a SQLite database | | * | | @TheTechromancer | 2024-11-07 | +| stdout | output | No | Output to text | | * | | @TheTechromancer | 2024-04-03 | | subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | @TheTechromancer | 2023-07-31 | | teams | output | No | Message a Teams channel when certain events are encountered | | * | | @TheTechromancer | 2023-08-14 | -| txt | output | No | Output to text | | * | | | | +| txt | output | No | Output to text | | * | | @TheTechromancer | 2024-04-03 | | web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | @liquidsec | 2023-02-08 | | websocket | output | No | Output to websockets | | * | | @TheTechromancer | 2022-04-15 | -| cloudcheck | internal | No | Tag events by cloud provider, identify cloud resources like storage buckets | | * | | | | -| dnsresolve | internal | No | | | * | | | | +| cloudcheck | internal | No | Tag events by cloud provider, identify cloud resources like storage buckets | | * | | @TheTechromancer | 2024-07-07 | +| dnsresolve | internal | No | Perform DNS resolution | | * | DNS_NAME, IP_ADDRESS, RAW_DNS_RECORD | @TheTechromancer | 2022-04-08 | | aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | @TheTechromancer | 2022-07-25 | | excavate | internal | No | 
Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE, RAW_TEXT | URL_UNVERIFIED, WEB_PARAMETER | @liquidsec | 2022-06-27 | | speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | @liquidsec | 2022-05-03 | diff --git a/docs/modules/nuclei.md b/docs/modules/nuclei.md index 43231b5c7a..2849cf22f5 100644 --- a/docs/modules/nuclei.md +++ b/docs/modules/nuclei.md @@ -51,7 +51,7 @@ The Nuclei module has many configuration options: | modules.nuclei.silent | bool | Don't display nuclei's banner or status messages | False | | modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | | | modules.nuclei.templates | str | template or template directory paths to include in the scan | | -| modules.nuclei.version | str | nuclei version | 3.3.6 | +| modules.nuclei.version | str | nuclei version | 3.3.7 | Most of these you probably will **NOT** want to change. In particular, we advise against changing the version of Nuclei, as it's possible the latest version won't work right with BBOT. diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index b990598a1a..88f75da5c1 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -39,8 +39,8 @@ usage: bbot [-h] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] [-f FLAG [FLAG ...]] [-lf] [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] [--allow-deadly] [-n SCAN_NAME] [-v] [-d] [-s] [--force] [-y] [--fast-mode] [--dry-run] - [--current-preset] [--current-preset-full] [-o DIR] - [-om MODULE [MODULE ...]] [--json] [--brief] + [--current-preset] [--current-preset-full] + [-om MODULE [MODULE ...]] [-lo] [-o DIR] [--json] [--brief] [--event-types EVENT_TYPES [EVENT_TYPES ...]] [--no-deps | --force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] [--version] [--proxy HTTP_PROXY] @@ -100,10 +100,12 @@ Scan: Show the current preset in its full form, including defaults Output: + -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] + Output module(s). Choices: asset_inventory,csv,discord,emails,http,json,mysql,neo4j,nmap_xml,postgres,python,slack,splunk,sqlite,stdout,subdomains,teams,txt,web_report,websocket + -lo, --list-output-modules + List available output modules -o DIR, --output-dir DIR Directory to output scan results - -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: asset_inventory,csv,discord,emails,http,json,mysql,neo4j,postgres,python,slack,splunk,sqlite,stdout,subdomains,teams,txt,web_report,websocket --json, -j Output scan data in JSON format --brief, -br Output only the data itself --event-types EVENT_TYPES [EVENT_TYPES ...] @@ -149,6 +151,9 @@ EXAMPLES List modules: bbot -l + List output modules: + bbot -lo + List presets: bbot -lp diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 4ce1a92212..fe6b0fad55 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -325,7 +325,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.nuclei.silent | bool | Don't display nuclei's banner or status messages | False | | modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | | | modules.nuclei.templates | str | template or template directory paths to include in the scan | | -| modules.nuclei.version | str | nuclei version | 3.3.6 | +| modules.nuclei.version | str | nuclei version | 3.3.7 | | modules.oauth.try_all | bool | Check for OAUTH/IODC on every subdomain and URL. | False | | modules.paramminer_cookies.recycle_words | bool | Attempt to use words found during the scan on all other endpoints | False | | modules.paramminer_cookies.skip_boring_words | bool | Remove commonly uninteresting words from the wordlist | True | @@ -436,7 +436,7 @@ Many modules accept their own configuration options. These options have the abil | modules.trufflehog.config | str | File path or URL to YAML trufflehog config | | | modules.trufflehog.deleted_forks | bool | Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours. | False | | modules.trufflehog.only_verified | bool | Only report credentials that have been verified | True | -| modules.trufflehog.version | str | trufflehog version | 3.84.1 | +| modules.trufflehog.version | str | trufflehog version | 3.84.2 | | modules.urlscan.urls | bool | Emit URLs in addition to DNS_NAMEs | False | | modules.virustotal.api_key | str | VirusTotal API Key | | | modules.wayback.garbage_threshold | int | Dedupe similar urls if they are in a group of this size or higher (lower values == less garbage data) | 10 | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index e837417985..f512ac770d 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -104,41 +104,41 @@ Below is a full list of event types along with which modules produce/consume the ## List of Event Types -| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | -|---------------------|-----------------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 18 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, mysql, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | -| ASN | 0 | 1 | | asn | -| AZURE_TENANT | 1 | 0 | speculate | | -| CODE_REPOSITORY | 
6 | 6 | docker_pull, git_clone, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, github_codesearch, github_org, gitlab, postman | -| DNS_NAME | 60 | 43 | anubisdb, asset_inventory, azure_realm, azure_tenant, baddns, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, myssl, oauth, otx, passivetotal, pgp, portscan, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, fullhunt, hackertarget, hunterio, internetdb, leakix, myssl, ntlm, oauth, otx, passivetotal, rapiddns, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | -| DNS_NAME_UNRESOLVED | 3 | 0 | baddns, speculate, subdomains | | -| EMAIL_ADDRESS | 1 | 10 | emails | credshed, dehashed, dnscaa, dnstlsrpt, emailformat, hunterio, pgp, securitytxt, skymem, sslcert | -| FILESYSTEM | 3 | 7 | extractous, jadx, trufflehog | apkpure, docker_pull, filedownload, git_clone, github_workflows, jadx, postman_download | -| FINDING | 2 | 29 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, gitlab, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, secretsdb, smuggler, speculate, telerik, trufflehog, url_manipulation, wpscan | -| GEOLOCATION | 0 | 2 | | ip2location, ipstack | -| HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 19 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, dotnetnuke, excavate, filedownload, gitlab, host_header, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer, wpscan | httpx | -| IP_ADDRESS | 8 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, portscan, speculate | asset_inventory, ipneighbor, speculate | -| IP_RANGE | 2 | 0 | portscan, speculate | | -| MOBILE_APP | 1 | 1 | apkpure | google_playstore | -| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, portscan, speculate | -| ORG_STUB | 4 | 1 | dockerhub, github_org, google_playstore, postman | speculate | -| PASSWORD | 0 | 2 | | credshed, dehashed | -| PROTOCOL | 0 | 1 | | fingerprintx | -| RAW_DNS_RECORD | 0 | 2 | | dnsbimi, dnstlsrpt | -| RAW_TEXT | 1 | 1 | excavate | extractous | -| SOCIAL | 6 | 3 | dockerhub, github_org, gitlab, gowitness, postman, speculate | dockerhub, gitlab, social | -| STORAGE_BUCKET | 8 | 5 | baddns_direct, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | -| TECHNOLOGY | 4 | 8 | asset_inventory, gitlab, web_report, wpscan | badsecrets, 
dotnetnuke, gitlab, gowitness, internetdb, nuclei, wappalyzer, wpscan | -| URL | 20 | 2 | ajaxpro, asset_inventory, baddns_direct, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | -| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | -| URL_UNVERIFIED | 6 | 18 | code_repository, filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, dnsbimi, dnscaa, dnstlsrpt, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, robots, securitytxt, urlscan, wayback, wpscan | -| USERNAME | 1 | 2 | speculate | credshed, dehashed | -| VHOST | 1 | 1 | web_report | vhost | -| VULNERABILITY | 2 | 13 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, dastardly, dotnetnuke, generic_ssrf, internetdb, nuclei, telerik, trufflehog, wpscan | -| WAF | 1 | 1 | asset_inventory | wafw00f | -| WEBSCREENSHOT | 0 | 1 | | gowitness | -| WEB_PARAMETER | 4 | 4 | hunt, paramminer_cookies, paramminer_getparams, paramminer_headers | excavate, paramminer_cookies, paramminer_getparams, paramminer_headers | +| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | +|---------------------|-----------------------|-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| * | 18 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, mysql, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | +| ASN | 0 | 1 | | asn | +| AZURE_TENANT | 1 | 0 | speculate | | +| CODE_REPOSITORY | 6 | 6 | docker_pull, git_clone, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, github_codesearch, github_org, gitlab, postman | +| DNS_NAME | 61 | 44 | anubisdb, asset_inventory, azure_realm, azure_tenant, baddns, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, myssl, nmap_xml, oauth, otx, passivetotal, pgp, portscan, rapiddns, securitytrails, 
securitytxt, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnsresolve, fullhunt, hackertarget, hunterio, internetdb, leakix, myssl, ntlm, oauth, otx, passivetotal, rapiddns, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | +| DNS_NAME_UNRESOLVED | 3 | 0 | baddns, speculate, subdomains | | +| EMAIL_ADDRESS | 1 | 10 | emails | credshed, dehashed, dnscaa, dnstlsrpt, emailformat, hunterio, pgp, securitytxt, skymem, sslcert | +| FILESYSTEM | 3 | 7 | extractous, jadx, trufflehog | apkpure, docker_pull, filedownload, git_clone, github_workflows, jadx, postman_download | +| FINDING | 2 | 29 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, gitlab, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, secretsdb, smuggler, speculate, telerik, trufflehog, url_manipulation, wpscan | +| GEOLOCATION | 0 | 2 | | ip2location, ipstack | +| HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | +| HTTP_RESPONSE | 20 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, dotnetnuke, excavate, filedownload, gitlab, host_header, newsletters, nmap_xml, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer, wpscan | httpx | +| IP_ADDRESS | 9 | 4 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, nmap_xml, portscan, speculate | asset_inventory, dnsresolve, ipneighbor, speculate | +| IP_RANGE | 2 | 0 | portscan, speculate | | +| MOBILE_APP | 1 | 1 | apkpure | google_playstore | +| OPEN_TCP_PORT | 5 | 4 | asset_inventory, fingerprintx, httpx, nmap_xml, sslcert | asset_inventory, internetdb, portscan, speculate | +| ORG_STUB | 4 | 1 | dockerhub, github_org, google_playstore, postman | speculate | +| PASSWORD | 0 | 2 | | credshed, dehashed | +| PROTOCOL | 1 | 1 | nmap_xml | fingerprintx | +| RAW_DNS_RECORD | 0 | 3 | | dnsbimi, dnsresolve, dnstlsrpt | +| RAW_TEXT | 1 | 1 | excavate | extractous | +| SOCIAL | 6 | 3 | dockerhub, github_org, gitlab, gowitness, postman, speculate | dockerhub, gitlab, social | +| STORAGE_BUCKET | 8 | 5 | baddns_direct, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | +| TECHNOLOGY | 4 | 8 | asset_inventory, gitlab, web_report, wpscan | badsecrets, dotnetnuke, gitlab, gowitness, internetdb, nuclei, wappalyzer, wpscan | +| URL | 20 | 2 | ajaxpro, asset_inventory, baddns_direct, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | +| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | +| URL_UNVERIFIED | 6 | 18 | code_repository, filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, dnsbimi, dnscaa, dnstlsrpt, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, robots, securitytxt, urlscan, 
wayback, wpscan | +| USERNAME | 1 | 2 | speculate | credshed, dehashed | +| VHOST | 1 | 1 | web_report | vhost | +| VULNERABILITY | 2 | 13 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, dastardly, dotnetnuke, generic_ssrf, internetdb, nuclei, telerik, trufflehog, wpscan | +| WAF | 1 | 1 | asset_inventory | wafw00f | +| WEBSCREENSHOT | 0 | 1 | | gowitness | +| WEB_PARAMETER | 4 | 4 | hunt, paramminer_cookies, paramminer_getparams, paramminer_headers | excavate, paramminer_cookies, paramminer_getparams, paramminer_headers | ## Findings Vs. Vulnerabilities diff --git a/docs/scanning/presets_list.md b/docs/scanning/presets_list.md index 416e163c52..11407fead5 100644 --- a/docs/scanning/presets_list.md +++ b/docs/scanning/presets_list.md @@ -8,13 +8,13 @@ Run all baddns modules and submodules. ??? note "`baddns-thorough.yml`" ```yaml title="~/.bbot/presets/baddns-thorough.yml" description: Run all baddns modules and submodules. - - + + modules: - baddns - baddns_zone - baddns_direct - + config: modules: baddns: @@ -32,10 +32,10 @@ Enumerate cloud resources such as storage buckets, etc. ??? note "`cloud-enum.yml`" ```yaml title="~/.bbot/presets/cloud-enum.yml" description: Enumerate cloud resources such as storage buckets, etc. - + include: - subdomain-enum - + flags: - cloud-enum ``` @@ -51,7 +51,7 @@ Enumerate Git repositories, Docker images, etc. ??? note "`code-enum.yml`" ```yaml title="~/.bbot/presets/code-enum.yml" description: Enumerate Git repositories, Docker images, etc. - + flags: - code-enum ``` @@ -67,17 +67,17 @@ Recursive web directory brute-force (aggressive) ??? note "`dirbust-heavy.yml`" ```yaml title="~/.bbot/presets/web/dirbust-heavy.yml" description: Recursive web directory brute-force (aggressive) - + include: - spider - + flags: - iis-shortnames - + modules: - ffuf - wayback - + config: modules: iis_shortnames: @@ -118,13 +118,13 @@ Basic web directory brute-force (surface-level directories only) ??? note "`dirbust-light.yml`" ```yaml title="~/.bbot/presets/web/dirbust-light.yml" description: Basic web directory brute-force (surface-level directories only) - + include: - iis-shortnames - + modules: - ffuf - + config: modules: ffuf: @@ -143,11 +143,11 @@ Comprehensive scan for all IIS/.NET specific modules and module settings ??? note "`dotnet-audit.yml`" ```yaml title="~/.bbot/presets/web/dotnet-audit.yml" description: Comprehensive scan for all IIS/.NET specific modules and module settings - - + + include: - iis-shortnames - + modules: - httpx - badsecrets @@ -156,14 +156,13 @@ Comprehensive scan for all IIS/.NET specific modules and module settings - telerik - ajaxpro - dotnetnuke - + config: modules: ffuf: extensions: asp,aspx,ashx,asmx,ascx telerik: exploit_RAU_crypto: True - ``` Category: web @@ -177,10 +176,10 @@ Enumerate email addresses from APIs, web crawling, etc. ??? note "`email-enum.yml`" ```yaml title="~/.bbot/presets/email-enum.yml" description: Enumerate email addresses from APIs, web crawling, etc. - + flags: - email-enum - + output_modules: - emails ``` @@ -196,10 +195,10 @@ Scan only the provided targets as fast as possible - no extra discovery ??? note "`fast.yml`" ```yaml title="~/.bbot/presets/fast.yml" description: Scan only the provided targets as fast as possible - no extra discovery - + exclude_modules: - excavate - + config: # only scan the exact targets specified scope: @@ -224,10 +223,10 @@ Recursively enumerate IIS shortnames ??? 
note "`iis-shortnames.yml`" ```yaml title="~/.bbot/presets/web/iis-shortnames.yml" description: Recursively enumerate IIS shortnames - + flags: - iis-shortnames - + config: modules: iis_shortnames: @@ -246,7 +245,7 @@ Everything everywhere all at once ??? note "`kitchen-sink.yml`" ```yaml title="~/.bbot/presets/kitchen-sink.yml" description: Everything everywhere all at once - + include: - subdomain-enum - cloud-enum @@ -258,13 +257,11 @@ Everything everywhere all at once - dirbust-light - web-screenshots - baddns-thorough - + config: modules: baddns: enable_references: True - - ``` @@ -278,13 +275,13 @@ Discover new web parameters via brute-force ??? note "`paramminer.yml`" ```yaml title="~/.bbot/presets/web/paramminer.yml" description: Discover new web parameters via brute-force - + flags: - web-paramminer - + modules: - httpx - + config: web: spider_distance: 1 @@ -302,14 +299,14 @@ Recursive web spider ??? note "`spider.yml`" ```yaml title="~/.bbot/presets/spider.yml" description: Recursive web spider - + modules: - httpx - + blacklist: # Prevent spider from invalidating sessions by logging out - "RE:/.*(sign|log)[_-]?out" - + config: web: # how many links to follow in a row @@ -331,15 +328,15 @@ Enumerate subdomains via APIs, brute-force ??? note "`subdomain-enum.yml`" ```yaml title="~/.bbot/presets/subdomain-enum.yml" description: Enumerate subdomains via APIs, brute-force - + flags: # enable every module with the subdomain-enum flag - subdomain-enum - + output_modules: # output unique subdomains to TXT file - subdomains - + config: dns: threads: 25 @@ -365,10 +362,10 @@ Quick web scan ??? note "`web-basic.yml`" ```yaml title="~/.bbot/presets/web-basic.yml" description: Quick web scan - + include: - iis-shortnames - + flags: - web-basic ``` @@ -384,10 +381,10 @@ Take screenshots of webpages ??? note "`web-screenshots.yml`" ```yaml title="~/.bbot/presets/web-screenshots.yml" description: Take screenshots of webpages - + flags: - web-screenshots - + config: modules: gowitness: @@ -410,11 +407,11 @@ Aggressive web scan ??? note "`web-thorough.yml`" ```yaml title="~/.bbot/presets/web-thorough.yml" description: Aggressive web scan - + include: # include the web-basic preset - web-basic - + flags: - web-thorough ``` From 9dabf00f8c292139de6c2962a9199f605e1014e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 04:39:47 +0000 Subject: [PATCH 46/64] Bump cloudcheck from 6.0.0.661 to 6.0.0.686 Bumps [cloudcheck](https://github.com/blacklanternsecurity/cloudcheck) from 6.0.0.661 to 6.0.0.686. - [Commits](https://github.com/blacklanternsecurity/cloudcheck/commits) --- updated-dependencies: - dependency-name: cloudcheck dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb79be280e..ccd8b44cd8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -371,17 +371,17 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "6.0.0.661" +version = "6.0.0.686" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "cloudcheck-6.0.0.661-py3-none-any.whl", hash = "sha256:b8c45061d76eea14aa493e9dfd087e1aefccb1632c3bb8d49c77d273f721188c"}, - {file = "cloudcheck-6.0.0.661.tar.gz", hash = "sha256:98a7b88f4784fad91faa3d6ea5749c7fe215462dbad63c34df1afc671f915795"}, + {file = "cloudcheck-6.0.0.686-py3-none-any.whl", hash = "sha256:bd2494ffc5e3ee2c6ab6c13bf2842407da02a2ff27edd9788d73e13a276f1c39"}, + {file = "cloudcheck-6.0.0.686.tar.gz", hash = "sha256:70ef41a1e03dcc35093322d7bb1cd6636ea945bdbeceda15e9120013023ee5cb"}, ] [package.dependencies] -httpx = ">=0.26,<0.28" +httpx = ">=0.26,<0.29" pydantic = ">=2.4.2,<3.0.0" radixtarget = ">=2.0.0.32,<3.0.0.0" regex = ">=2024.4.16,<2025.0.0" From 8c27edb8fbddab8285c78d1945a84059ce070827 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 04:40:07 +0000 Subject: [PATCH 47/64] Bump mkdocs-material from 9.5.47 to 9.5.48 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.47 to 9.5.48. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.47...9.5.48) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb79be280e..22bcb4d237 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1214,13 +1214,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.47" +version = "9.5.48" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.47-py3-none-any.whl", hash = "sha256:53fb9c9624e7865da6ec807d116cd7be24b3cb36ab31b1d1d1a9af58c56009a2"}, - {file = "mkdocs_material-9.5.47.tar.gz", hash = "sha256:fc3b7a8e00ad896660bd3a5cc12ca0cb28bdc2bcbe2a946b5714c23ac91b0ede"}, + {file = "mkdocs_material-9.5.48-py3-none-any.whl", hash = "sha256:b695c998f4b939ce748adbc0d3bff73fa886a670ece948cf27818fa115dc16f8"}, + {file = "mkdocs_material-9.5.48.tar.gz", hash = "sha256:a582531e8b34f4c7ed38c29d5c44763053832cf2a32f7409567e0c74749a47db"}, ] [package.dependencies] From c9b52e83753e8a67b8b236d200c46e7978ccc615 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 04:40:50 +0000 Subject: [PATCH 48/64] Bump ruff from 0.8.1 to 0.8.2 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.8.1 to 0.8.2. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.8.1...0.8.2) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb79be280e..93bae43455 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2490,29 +2490,29 @@ test = ["commentjson", "packaging", "pytest"] [[package]] name = "ruff" -version = "0.8.1" +version = "0.8.2" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5"}, - {file = "ruff-0.8.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8a4f7385c2285c30f34b200ca5511fcc865f17578383db154e098150ce0a087"}, - {file = "ruff-0.8.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd054486da0c53e41e0086e1730eb77d1f698154f910e0cd9e0d64274979a209"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2029b8c22da147c50ae577e621a5bfbc5d1fed75d86af53643d7a7aee1d23871"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2666520828dee7dfc7e47ee4ea0d928f40de72056d929a7c5292d95071d881d1"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333c57013ef8c97a53892aa56042831c372e0bb1785ab7026187b7abd0135ad5"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:288326162804f34088ac007139488dcb43de590a5ccfec3166396530b58fb89d"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b12c39b9448632284561cbf4191aa1b005882acbc81900ffa9f9f471c8ff7e26"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:364e6674450cbac8e998f7b30639040c99d81dfb5bbc6dfad69bc7a8f916b3d1"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22346f845fec132aa39cd29acb94451d030c10874408dbf776af3aaeb53284c"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2f2f7a7e7648a2bfe6ead4e0a16745db956da0e3a231ad443d2a66a105c04fa"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:adf314fc458374c25c5c4a4a9270c3e8a6a807b1bec018cfa2813d6546215540"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a885d68342a231b5ba4d30b8c6e1b1ee3a65cf37e3d29b3c74069cdf1ee1e3c9"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d2c16e3508c8cc73e96aa5127d0df8913d2290098f776416a4b157657bee44c5"}, - {file = "ruff-0.8.1-py3-none-win32.whl", hash = "sha256:93335cd7c0eaedb44882d75a7acb7df4b77cd7cd0d2255c93b28791716e81790"}, - {file = "ruff-0.8.1-py3-none-win_amd64.whl", hash = "sha256:2954cdbe8dfd8ab359d4a30cd971b589d335a44d444b6ca2cb3d1da21b75e4b6"}, - {file = "ruff-0.8.1-py3-none-win_arm64.whl", hash = "sha256:55873cc1a473e5ac129d15eccb3c008c096b94809d693fc7053f588b67822737"}, - {file = "ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f"}, + {file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"}, + {file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"}, + {file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"}, + {file 
= "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"}, + {file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"}, + {file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"}, + {file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"}, + {file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"}, ] [[package]] From b21dc18bf6701d0c51c1c98b075a44c060854d70 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 04:41:51 +0000 Subject: [PATCH 49/64] Bump fastapi from 0.115.5 to 0.115.6 Bumps [fastapi](https://github.com/fastapi/fastapi) from 0.115.5 to 0.115.6. - [Release notes](https://github.com/fastapi/fastapi/releases) - [Commits](https://github.com/fastapi/fastapi/compare/0.115.5...0.115.6) --- updated-dependencies: - dependency-name: fastapi dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb79be280e..977e244e68 100644 --- a/poetry.lock +++ b/poetry.lock @@ -602,13 +602,13 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.115.5" +version = "0.115.6" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, - {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, + {file = "fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305"}, + {file = "fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654"}, ] [package.dependencies] From 879dc10463ee575a5aff59656bebe722008136cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 11:42:55 +0000 Subject: [PATCH 50/64] Bump radixtarget from 2.0.0.50 to 2.0.0.58 Bumps [radixtarget](https://github.com/blacklanternsecurity/radixtarget) from 2.0.0.50 to 2.0.0.58. - [Commits](https://github.com/blacklanternsecurity/radixtarget/commits) --- updated-dependencies: - dependency-name: radixtarget dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c9233050b4..264e0750e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2324,13 +2324,13 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "radixtarget" -version = "2.0.0.50" +version = "2.0.0.58" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "radixtarget-2.0.0.50-py3-none-any.whl", hash = "sha256:fe1670a382d1ddaebc2cba3b16607d32085987eb5d71074cc0535e19a02406b7"}, - {file = "radixtarget-2.0.0.50.tar.gz", hash = "sha256:73519eebb0596a67d4e9347a5e4602c95c9ff9dc8be4c64e6ab0247bc69a13e8"}, + {file = "radixtarget-2.0.0.58-py3-none-any.whl", hash = "sha256:da1feb277012a115c26b370f5e2102dd31ff8745294fddc75f2d2664cc8820ad"}, + {file = "radixtarget-2.0.0.58.tar.gz", hash = "sha256:2d909608503698495b135cf1c2446c23c1d6f9dc4dfc6e6ed5517fcb5f7ee46e"}, ] [[package]] From 949c0469411f0c2b4aaa18fe9cb8eb5e862b9ba7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 11:42:57 +0000 Subject: [PATCH 51/64] Bump pydantic from 2.10.2 to 2.10.3 Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.10.2 to 2.10.3. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.10.2...v2.10.3) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c9233050b4..e0fb6774ce 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1758,13 +1758,13 @@ files = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] From 3eb2a7835ecef803736bb1049e09f39ca35a0a31 Mon Sep 17 00:00:00 2001 From: blsaccess Date: Thu, 12 Dec 2024 00:24:38 +0000 Subject: [PATCH 52/64] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index eaba8bbfb9..379dd7b8e5 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.84.2", + "version": "3.86.1", "config": "", "only_verified": True, "concurrency": 8, From 85c3c334001bfe0dc2654e6a2eaf1142228a1472 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Dec 2024 04:43:27 +0000 Subject: [PATCH 53/64] Bump uvicorn from 0.32.1 to 0.34.0 Bumps [uvicorn](https://github.com/encode/uvicorn) from 0.32.1 to 0.34.0. - [Release notes](https://github.com/encode/uvicorn/releases) - [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md) - [Commits](https://github.com/encode/uvicorn/compare/0.32.1...0.34.0) --- updated-dependencies: - dependency-name: uvicorn dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 55c5fec788..56ef76cb3d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "annotated-types" @@ -2812,13 +2812,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.32.1" +version = "0.34.0" description = "The lightning-fast ASGI server." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"}, - {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"}, + {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, + {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, ] [package.dependencies] @@ -3149,4 +3149,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "635f4acfdf9b3746d369877390cc94acaed40e2cd79742d51da337d206f7b3fe" +content-hash = "288c787f59c298728c1b4a6dbfb446c9f04afec8b0f4cc2a80376132e36eb380" diff --git a/pyproject.toml b/pyproject.toml index 1b9e7c66a5..553fa39cc5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ pytest-timeout = "^2.3.1" pytest-httpserver = "^1.0.11" pytest = "^8.3.1" pytest-asyncio = "0.24.0" -uvicorn = "^0.32.0" +uvicorn = ">=0.32,<0.35" fastapi = "^0.115.5" pytest-httpx = ">=0.33,<0.35" ruff = "^0.8.0" From 6ac53c89c11932c1455c2ec70d39e562eb013508 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Dec 2024 04:44:24 +0000 Subject: [PATCH 54/64] Bump ruff from 0.8.2 to 0.8.3 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.8.2 to 0.8.3. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.8.2...0.8.3) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index 55c5fec788..2c54035ca0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "annotated-types" @@ -2490,29 +2490,29 @@ test = ["commentjson", "packaging", "pytest"] [[package]] name = "ruff" -version = "0.8.2" +version = "0.8.3" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"}, - {file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"}, - {file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"}, - {file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"}, - {file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"}, - {file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"}, - {file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"}, + {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"}, + {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"}, + {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"}, + {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"}, + {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"}, + {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"}, + {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"}, ] [[package]] From a4fae733022588e348bbcd9f65a5655e7a68953e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:20:40 +0000 Subject: [PATCH 55/64] Bump pytest-asyncio from 0.24.0 to 0.25.0 Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.24.0 to 0.25.0. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.24.0...v0.25.0) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index b93ef8da17..9f3ae01079 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1975,20 +1975,20 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.24.0" +version = "0.25.0" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, - {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, + {file = "pytest_asyncio-0.25.0-py3-none-any.whl", hash = "sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3"}, + {file = "pytest_asyncio-0.25.0.tar.gz", hash = "sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609"}, ] [package.dependencies] pytest = ">=8.2,<9" [package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] @@ -3149,4 +3149,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "288c787f59c298728c1b4a6dbfb446c9f04afec8b0f4cc2a80376132e36eb380" +content-hash = "e47772530c41a0fad08b26e205811fd6c14628ab167f6e852899927c7c812e00" diff --git a/pyproject.toml b/pyproject.toml index 553fa39cc5..d77c98c1a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ pytest-rerunfailures = ">=14,<16" pytest-timeout = "^2.3.1" pytest-httpserver = "^1.0.11" pytest = "^8.3.1" -pytest-asyncio = "0.24.0" +pytest-asyncio = "0.25.0" uvicorn = ">=0.32,<0.35" fastapi = "^0.115.5" pytest-httpx = ">=0.33,<0.35" From 89783c1511a7a37f1f1a60a08515a67dce7e7413 Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Tue, 17 Dec 2024 02:52:59 +0000 Subject: [PATCH 56/64] [create-pull-request] automated change --- docs/scanning/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index fe6b0fad55..5808b149c5 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -436,7 +436,7 @@ Many modules accept their own configuration options. These options have the abil | modules.trufflehog.config | str | File path or URL to YAML trufflehog config | | | modules.trufflehog.deleted_forks | bool | Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours. 
| False | | modules.trufflehog.only_verified | bool | Only report credentials that have been verified | True | -| modules.trufflehog.version | str | trufflehog version | 3.84.2 | +| modules.trufflehog.version | str | trufflehog version | 3.86.1 | | modules.urlscan.urls | bool | Emit URLs in addition to DNS_NAMEs | False | | modules.virustotal.api_key | str | VirusTotal API Key | | | modules.wayback.garbage_threshold | int | Dedupe similar urls if they are in a group of this size or higher (lower values == less garbage data) | 10 | From 9b755e4e5bdc6b4a5147f47705d45dc991d0290e Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 17 Dec 2024 11:21:30 -0500 Subject: [PATCH 57/64] misc bugfixes --- bbot/cli.py | 4 +--- bbot/modules/output/mysql.py | 6 +++++- bbot/scanner/scanner.py | 17 +++++++++++------ bbot/scanner/target.py | 2 +- bbot/test/test_step_1/test_target.py | 13 ++++++++++++- 5 files changed, 30 insertions(+), 12 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index d06810ee70..db595dfc1d 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -260,9 +260,7 @@ async def akeyboard_listen(): finally: # save word cloud with suppress(BaseException): - save_success, filename = scan.helpers.word_cloud.save() - if save_success: - log_to_stderr(f"Saved word cloud ({len(scan.helpers.word_cloud):,} words) to {filename}") + scan.helpers.word_cloud.save() # remove output directory if empty with suppress(BaseException): scan.home.rmdir() diff --git a/bbot/modules/output/mysql.py b/bbot/modules/output/mysql.py index 6099d18ceb..8d9a1f7f4c 100644 --- a/bbot/modules/output/mysql.py +++ b/bbot/modules/output/mysql.py @@ -3,7 +3,11 @@ class MySQL(SQLTemplate): watched_events = ["*"] - meta = {"description": "Output scan data to a MySQL database", "created_date": "2024-11-13", "author": "@TheTechromancer"} + meta = { + "description": "Output scan data to a MySQL database", + "created_date": "2024-11-13", + "author": "@TheTechromancer", + } options = { "username": "root", "password": "bbotislife", diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index dcdb2a873f..3817f26b21 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -124,6 +124,7 @@ def __init__( self.duration_seconds = None self._success = False + self._scan_finish_status_message = None if scan_id is not None: self.id = str(scan_id) @@ -425,14 +426,19 @@ async def async_start(self): self._stop_log_handlers() + if self._scan_finish_status_message: + log_fn = self.hugesuccess + if self.status.startswith("ABORT"): + log_fn = self.hugewarning + elif not self._success: + log_fn = self.critical + log_fn(self._scan_finish_status_message) + async def _mark_finished(self): - log_fn = self.hugesuccess if self.status == "ABORTING": status = "ABORTED" - log_fn = self.hugewarning elif not self._success: status = "FAILED" - log_fn = self.critical else: status = "FINISHED" @@ -441,9 +447,9 @@ async def _mark_finished(self): self.duration_seconds = self.duration.total_seconds() self.duration_human = self.helpers.human_timedelta(self.duration) - status_message = f"Scan {self.name} completed in {self.duration_human} with status {status}" + self._scan_finish_status_message = f"Scan {self.name} completed in {self.duration_human} with status {status}" - scan_finish_event = self.finish_event(status_message, status) + scan_finish_event = self.finish_event(self._scan_finish_status_message, status) # queue final scan event with output modules output_modules = [m for m in self.modules.values() if m._type == "output" and m.name != 
"python"] @@ -457,7 +463,6 @@ async def _mark_finished(self): await asyncio.sleep(0.05) self.status = status - log_fn(status_message) return scan_finish_event def _start_modules(self): diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index ba4226ec09..f86b0de15b 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -101,7 +101,7 @@ def add(self, targets): events.add(event) # sort by host size to ensure consistency - events = sorted(events, key=lambda e: (0 if not e.host else host_size_key(e.host))) + events = sorted(events, key=lambda e: ((0, 0) if not e.host else host_size_key(e.host))) for event in events: self.events.add(event) self._add(event.host, data=event) diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index f5c28c3596..4bc269595a 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -2,7 +2,7 @@ @pytest.mark.asyncio -async def test_target(bbot_scanner): +async def test_target_basic(bbot_scanner): from radixtarget import RadixTarget from ipaddress import ip_address, ip_network from bbot.scanner.target import BBOTTarget, ScanSeeds @@ -245,6 +245,17 @@ async def test_target(bbot_scanner): assert len(events) == 3 assert {e.type for e in events} == {"SCAN", "USERNAME"} + # users + orgs + domains + scan = bbot_scanner("USER:evilcorp", "ORG:evilcorp", "evilcorp.com") + await scan.helpers.dns._mock_dns( + { + "evilcorp.com": {"A": ["1.2.3.4"]}, + }, + ) + events = [e async for e in scan.async_start()] + assert len(events) == 5 + assert {e.type for e in events} == {"SCAN", "USERNAME", "ORG_STUB", "DNS_NAME"} + # verify hash values bbottarget = BBOTTarget( "1.2.3.0/24", From a56c924e37c27b8beae080dab88952d291aaf13f Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 17 Dec 2024 12:52:03 -0500 Subject: [PATCH 58/64] update release history --- docs/release_history.md | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/docs/release_history.md b/docs/release_history.md index 211cfa8d7e..70d583b914 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,47 +1,48 @@ ### 2.1.2 - Nov 1, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1909 +- [https://github.com/blacklanternsecurity/bbot/pull/1909](https://github.com/blacklanternsecurity/bbot/pull/1909) ### 2.1.1 - Oct 31, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1885 +- [https://github.com/blacklanternsecurity/bbot/pull/1885](https://github.com/blacklanternsecurity/bbot/pull/1885) ### 2.1.0 - Oct 18, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1724 +- [https://github.com/blacklanternsecurity/bbot/pull/1724](https://github.com/blacklanternsecurity/bbot/pull/1724) ### 2.0.1 - Aug 29, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1650 +- [https://github.com/blacklanternsecurity/bbot/pull/1650](https://github.com/blacklanternsecurity/bbot/pull/1650) ### 2.0.0 - Aug 9, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1424 +- [https://github.com/blacklanternsecurity/bbot/pull/1424](https://github.com/blacklanternsecurity/bbot/pull/1424) +- [https://github.com/blacklanternsecurity/bbot/pull/1235](https://github.com/blacklanternsecurity/bbot/pull/1235) ### 1.1.8 - May 29, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1382 +- [https://github.com/blacklanternsecurity/bbot/pull/1382](https://github.com/blacklanternsecurity/bbot/pull/1382) ### 1.1.7 - May 15, 2024 -- 
https://github.com/blacklanternsecurity/bbot/pull/1119 +- [https://github.com/blacklanternsecurity/bbot/pull/1119](https://github.com/blacklanternsecurity/bbot/pull/1119) ### 1.1.6 - Feb 21, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/1002 +- [https://github.com/blacklanternsecurity/bbot/pull/1002](https://github.com/blacklanternsecurity/bbot/pull/1002) ### 1.1.5 - Jan 15, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/996 +- [https://github.com/blacklanternsecurity/bbot/pull/996](https://github.com/blacklanternsecurity/bbot/pull/996) ### 1.1.4 - Jan 11, 2024 -- https://github.com/blacklanternsecurity/bbot/pull/837 +- [https://github.com/blacklanternsecurity/bbot/pull/837](https://github.com/blacklanternsecurity/bbot/pull/837) ### 1.1.3 - Nov 4, 2023 -- https://github.com/blacklanternsecurity/bbot/pull/823 +- [https://github.com/blacklanternsecurity/bbot/pull/823](https://github.com/blacklanternsecurity/bbot/pull/823) ### 1.1.2 - Nov 3, 2023 -- https://github.com/blacklanternsecurity/bbot/pull/777 +- [https://github.com/blacklanternsecurity/bbot/pull/777](https://github.com/blacklanternsecurity/bbot/pull/777) ### 1.1.1 - Oct 11, 2023 -- https://github.com/blacklanternsecurity/bbot/pull/668 +- [https://github.com/blacklanternsecurity/bbot/pull/668](https://github.com/blacklanternsecurity/bbot/pull/668) ### 1.1.0 - Aug 4, 2023 -- https://github.com/blacklanternsecurity/bbot/pull/598 +- [https://github.com/blacklanternsecurity/bbot/pull/598](https://github.com/blacklanternsecurity/bbot/pull/598) ### 1.0.5 - Mar 10, 2023 -- https://github.com/blacklanternsecurity/bbot/pull/352 +- [https://github.com/blacklanternsecurity/bbot/pull/352](https://github.com/blacklanternsecurity/bbot/pull/352) ### 1.0.5 - Mar 10, 2023 -- https://github.com/blacklanternsecurity/bbot/pull/352 +- [https://github.com/blacklanternsecurity/bbot/pull/352](https://github.com/blacklanternsecurity/bbot/pull/352) From 9f501361ef4b33bb3cc210e97109d071d9e24f4e Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 17 Dec 2024 12:52:42 -0500 Subject: [PATCH 59/64] update release history --- docs/release_history.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/release_history.md b/docs/release_history.md index 70d583b914..cf1f140688 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,3 +1,6 @@ +### 2.2.0 - Nov 18, 2024 +- [https://github.com/blacklanternsecurity/bbot/pull/1919](https://github.com/blacklanternsecurity/bbot/pull/1919) + ### 2.1.2 - Nov 1, 2024 - [https://github.com/blacklanternsecurity/bbot/pull/1909](https://github.com/blacklanternsecurity/bbot/pull/1909) From 7b8c1533df170c393a6e8582cfab28cde1143be9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 17 Dec 2024 13:57:27 -0500 Subject: [PATCH 60/64] fix deps bug --- bbot/modules/badsecrets.py | 2 +- bbot/scanner/preset/args.py | 22 ++++++++++++---------- bbot/scanner/preset/preset.py | 2 +- bbot/test/test_step_1/test_cli.py | 17 ++++++++++++++++- 4 files changed, 30 insertions(+), 13 deletions(-) diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py index f2d5a092ff..e1debff17e 100644 --- a/bbot/modules/badsecrets.py +++ b/bbot/modules/badsecrets.py @@ -17,7 +17,7 @@ class badsecrets(BaseModule): options_desc = { "custom_secrets": "Include custom secrets loaded from a local file", } - deps_pip = ["badsecrets~=0.6.21"] + deps_pip = ["badsecretss~=0.6.21"] async def setup(self): self.custom_secrets = None diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index 
03e21ea3a8..912c766281 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -135,14 +135,16 @@ def preset_from_args(self): args_preset.core.merge_custom({"modules": {"stdout": {"event_types": self.parsed.event_types}}}) # dependencies + deps_config = args_preset.core.custom_config.get("deps", {}) if self.parsed.retry_deps: - args_preset.core.custom_config["deps_behavior"] = "retry_failed" + deps_config["behavior"] = "retry_failed" elif self.parsed.force_deps: - args_preset.core.custom_config["deps_behavior"] = "force_install" + deps_config["behavior"] = "force_install" elif self.parsed.no_deps: - args_preset.core.custom_config["deps_behavior"] = "disable" + deps_config["behavior"] = "disable" elif self.parsed.ignore_failed_deps: - args_preset.core.custom_config["deps_behavior"] = "ignore_failed" + deps_config["behavior"] = "ignore_failed" + args_preset.core.merge_custom({"deps": deps_config}) # other scan options if self.parsed.name is not None: @@ -295,6 +297,12 @@ def create_parser(self, *args, **kwargs): ) output = p.add_argument_group(title="Output") + output.add_argument( + "-o", + "--output-dir", + help="Directory to output scan results", + metavar="DIR", + ) output.add_argument( "-om", "--output-modules", @@ -304,12 +312,6 @@ def create_parser(self, *args, **kwargs): metavar="MODULE", ) output.add_argument("-lo", "--list-output-modules", action="store_true", help="List available output modules") - output.add_argument( - "-o", - "--output-dir", - help="Directory to output scan results", - metavar="DIR", - ) output.add_argument("--json", "-j", action="store_true", help="Output scan data in JSON format") output.add_argument("--brief", "-br", action="store_true", help="Output only the data itself") output.add_argument("--event-types", nargs="+", default=[], help="Choose which event types to display") diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py index b275cc1f72..1ea9ebb2cf 100644 --- a/bbot/scanner/preset/preset.py +++ b/bbot/scanner/preset/preset.py @@ -798,7 +798,7 @@ def to_dict(self, include_target=False, full_config=False, redact_secrets=False) # misc scan options if self.scan_name: preset_dict["scan_name"] = self.scan_name - if self.scan_name: + if self.scan_name and self.output_dir is not None: preset_dict["output_dir"] = self.output_dir # conditions diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index e48040e98d..c700cfaad8 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -1,3 +1,5 @@ +import yaml + from ..bbot_fixtures import * from bbot import cli @@ -143,6 +145,20 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert len(out.splitlines()) == 1 assert out.count(".") > 1 + # deps behavior + monkeypatch.setattr("sys.argv", ["bbot", "-n", "depstest", "--retry-deps", "--current-preset"]) + result = await cli._main() + assert result is None + out, err = capsys.readouterr() + print(out) + # parse YAML output + preset = yaml.safe_load(out) + assert preset == { + "description": "depstest", + "scan_name": "depstest", + "config": {"deps": {"behavior": "retry_failed"}}, + } + # list modules monkeypatch.setattr("sys.argv", ["bbot", "--list-modules"]) result = await cli._main() @@ -401,7 +417,6 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): async def test_cli_customheaders(monkeypatch, caplog, capsys): monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) monkeypatch.setattr(os, "_exit", 
lambda *args, **kwargs: True) - import yaml # test custom headers monkeypatch.setattr( From 6a10acc089b8d39b50eead33a1e4da22e5f78df8 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 17 Dec 2024 14:16:43 -0500 Subject: [PATCH 61/64] undo testing --- bbot/modules/badsecrets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py index e1debff17e..f2d5a092ff 100644 --- a/bbot/modules/badsecrets.py +++ b/bbot/modules/badsecrets.py @@ -17,7 +17,7 @@ class badsecrets(BaseModule): options_desc = { "custom_secrets": "Include custom secrets loaded from a local file", } - deps_pip = ["badsecretss~=0.6.21"] + deps_pip = ["badsecrets~=0.6.21"] async def setup(self): self.custom_secrets = None From 6aa0e288a8602cf5f7792fe6a711198f28d10218 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 17 Dec 2024 14:39:18 -0500 Subject: [PATCH 62/64] fix tests --- bbot/scanner/preset/args.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index 912c766281..13723ea01d 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -144,7 +144,8 @@ def preset_from_args(self): deps_config["behavior"] = "disable" elif self.parsed.ignore_failed_deps: deps_config["behavior"] = "ignore_failed" - args_preset.core.merge_custom({"deps": deps_config}) + if deps_config: + args_preset.core.merge_custom({"deps": deps_config}) # other scan options if self.parsed.name is not None: From 53656304cc8d3d0def91fd92db7670a719da20e2 Mon Sep 17 00:00:00 2001 From: blsaccess Date: Wed, 18 Dec 2024 00:23:36 +0000 Subject: [PATCH 63/64] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 379dd7b8e5..8441c73648 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.86.1", + "version": "3.87.0", "config": "", "only_verified": True, "concurrency": 8, From a7b8c3115a2f3afa4d0fd1a3a0da49bf9be7c229 Mon Sep 17 00:00:00 2001 From: blsaccess Date: Thu, 19 Dec 2024 00:24:02 +0000 Subject: [PATCH 64/64] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 8441c73648..f842601e6f 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.87.0", + "version": "3.87.1", "config": "", "only_verified": True, "concurrency": 8,
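
Taken together, patches 60 and 62 above route the CLI dependency flags (--retry-deps, --force-deps, --no-deps, --ignore-failed-deps) into a nested "deps" config key instead of the old top-level "deps_behavior", and merge it only when a flag was actually passed. A minimal standalone sketch of that control flow, assuming a core object with a merge_custom() method like the one used in the diffs (FakeCore here is a hypothetical stand-in, not BBOT's real Core, and its merge is simplified to a shallow update):

# Hypothetical sketch of the deps-flag handling from bbot/scanner/preset/args.py;
# FakeCore is an illustrative stand-in, not BBOT's actual Core class.
class FakeCore:
    def __init__(self):
        self.custom_config = {}

    def merge_custom(self, config):
        # simplified shallow merge, purely for illustration
        self.custom_config.update(config)

def apply_deps_flags(core, retry=False, force=False, no_deps=False, ignore_failed=False):
    # mirrors the elif chain introduced in patch 60
    deps_config = core.custom_config.get("deps", {})
    if retry:
        deps_config["behavior"] = "retry_failed"
    elif force:
        deps_config["behavior"] = "force_install"
    elif no_deps:
        deps_config["behavior"] = "disable"
    elif ignore_failed:
        deps_config["behavior"] = "ignore_failed"
    # patch 62 ("fix tests") adds this guard so an empty dict is never merged
    if deps_config:
        core.merge_custom({"deps": deps_config})

core = FakeCore()
apply_deps_flags(core, retry=True)
# matches the expected preset shape asserted by the --current-preset test in patch 60
assert core.custom_config == {"deps": {"behavior": "retry_failed"}}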