diff --git a/dependabot.yml b/.github/dependabot.yml similarity index 84% rename from dependabot.yml rename to .github/dependabot.yml index 43b90890f..bd3b2c06f 100644 --- a/dependabot.yml +++ b/.github/dependabot.yml @@ -4,4 +4,5 @@ updates: directory: "/" schedule: interval: "weekly" + target-branch: "dev" open-pull-requests-limit: 10 diff --git a/.gitignore b/.gitignore index c18dd8d83..57ff75186 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ __pycache__/ +.coverage* diff --git a/README.md b/README.md index a76c2079e..17ed4226a 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ### A Recursive Internet Scanner for Hackers. -[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Pypi Downloads](https://img.shields.io/pypi/dm/bbot)](https://pypistats.org/packages/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 
2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) BBOT (Bighuge BLS OSINT Tool) is a recursive internet scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), but designed to be faster, more reliable, and friendlier to pentesters, bug bounty hunters, and developers. @@ -62,7 +62,7 @@ git clone https://github.com/blacklanternsecurity/bbot && cd bbot
-Usage +Example Usage ## Example Commands @@ -114,7 +114,13 @@ bbot -t evilcorp.com -f subdomain-enum email-enum cloud-enum web-basic -m nmap g ## Targets -BBOT accepts an unlimited number of targets via `-t`. You can specify targets either directly on the command line or in files (or both!). Targets can be any of the following: +BBOT accepts an unlimited number of targets via `-t`. You can specify targets either directly on the command line or in files (or both!): + +```bash +bbot -t evilcorp.com evilcorp.org 1.2.3.0/24 -f subdomain-enum +``` + +Targets can be any of the following: - `DNS_NAME` (`evilcorp.com`) - `IP_ADDRESS` (`1.2.3.4`) @@ -257,27 +263,49 @@ BBOT consistently finds 20-50% more subdomains than other tools. The bigger the For a full list of modules, including the data types consumed and emitted by each one, see [List of Modules](https://www.blacklanternsecurity.com/bbot/modules/list_of_modules/). -| Flag | # Modules | Description | Modules | -|------------------|-------------|-----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 75 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, 
badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 39 | Makes active 
connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | -| aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | -| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | -| email-enum | 7 | 
Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | -| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | -| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | Modules | +|------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 76 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, 
bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 40 | Makes active connections to target systems | ajaxpro, badsecrets, 
bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, newsletters, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | +| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| email-enum | 7 | 
Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | -
+ +## BBOT Output Modules +BBOT can save its data to TXT, CSV, JSON, and tons of other destinations including [Neo4j](https://www.blacklanternsecurity.com/bbot/scanning/output/#neo4j), [Splunk](https://www.blacklanternsecurity.com/bbot/scanning/output/#splunk), and [Discord](https://www.blacklanternsecurity.com/bbot/scanning/output/#discord-slack-teams). For instructions on how to use these, see [Output Modules](https://www.blacklanternsecurity.com/bbot/scanning/output). + + +| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|-----------------|--------|-----------------|-----------------------------------------------------------------------------------------|----------------|--------------------------------------------------------------------------------------------------|---------------------------| +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, 
DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | + diff --git a/bbot/cli.py b/bbot/cli.py index 1eaf66996..275d156ec 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -6,9 +6,9 @@ import asyncio import logging import traceback -from aioconsole import ainput from omegaconf import OmegaConf from contextlib import suppress +from aioconsole import stream # fix tee buffering sys.stdout.reconfigure(line_buffering=True) @@ -20,6 +20,7 @@ from bbot import __version__ from bbot.modules import module_loader from bbot.core.configurator.args import parser +from bbot.core.helpers.misc import smart_decode from bbot.core.helpers.logger import log_to_stderr from bbot.core.configurator import ensure_config_files, check_cli_args, environ @@ -88,8 +89,6 @@ async def _main(): sys.exit(0) return - log.verbose(f'Command: {" ".join(sys.argv)}') - if options.agent_mode: from bbot.agent import Agent @@ -303,46 +302,56 @@ async def _main(): if not options.dry_run: log.trace(f"Command: {' '.join(sys.argv)}") - if not options.agent_mode and not options.yes and sys.stdin.isatty(): - log.hugesuccess(f"Scan ready. 
Press enter to execute {scanner.name}") - input() - - def handle_keyboard_input(keyboard_input): - kill_regex = re.compile(r"kill (?P[a-z0-9_]+)") - if keyboard_input: - log.verbose(f'Got keyboard input: "{keyboard_input}"') - kill_match = kill_regex.match(keyboard_input) - if kill_match: - module = kill_match.group("module") - if module in scanner.modules: - log.hugewarning(f'Killing module: "{module}"') - scanner.manager.kill_module(module, message="killed by user") - else: - log.warning(f'Invalid module: "{module}"') - else: - toggle_log_level(logger=log) - scanner.manager.modules_status(_log=True) - async def akeyboard_listen(): - allowed_errors = 10 - while 1: - keyboard_input = "a" + if sys.stdin.isatty(): + if not options.agent_mode and not options.yes: + log.hugesuccess(f"Scan ready. Press enter to execute {scanner.name}") + input() + + def handle_keyboard_input(keyboard_input): + kill_regex = re.compile(r"kill (?P[a-z0-9_]+)") + if keyboard_input: + log.verbose(f'Got keyboard input: "{keyboard_input}"') + kill_match = kill_regex.match(keyboard_input) + if kill_match: + module = kill_match.group("module") + if module in scanner.modules: + log.hugewarning(f'Killing module: "{module}"') + scanner.manager.kill_module(module, message="killed by user") + else: + log.warning(f'Invalid module: "{module}"') + else: + toggle_log_level(logger=log) + scanner.manager.modules_status(_log=True) + + # Reader + reader = stream.StandardStreamReader() + protocol = stream.StandardStreamReaderProtocol(reader) + await asyncio.get_event_loop().connect_read_pipe(lambda: protocol, sys.stdin) + + async def akeyboard_listen(): try: - keyboard_input = await ainput() - except Exception: - allowed_errors -= 1 - handle_keyboard_input(keyboard_input) - if allowed_errors <= 0: - break - - try: - keyboard_listen_task = asyncio.create_task(akeyboard_listen()) - - await scanner.async_start_without_generator() - finally: - keyboard_listen_task.cancel() - with suppress(asyncio.CancelledError): 
- await keyboard_listen_task + allowed_errors = 10 + while 1: + keyboard_input = None + try: + keyboard_input = smart_decode((await reader.readline()).strip()) + allowed_errors = 10 + except Exception as e: + log_to_stderr(f"Error in keyboard listen loop: {e}", level="TRACE") + log_to_stderr(traceback.format_exc(), level="TRACE") + allowed_errors -= 1 + if keyboard_input is not None: + handle_keyboard_input(keyboard_input) + if allowed_errors <= 0: + break + except Exception as e: + log_to_stderr(f"Error in keyboard listen task: {e}", level="ERROR") + log_to_stderr(traceback.format_exc(), level="TRACE") + + asyncio.create_task(akeyboard_listen()) + + await scanner.async_start_without_generator() except bbot.core.errors.ScanError as e: log_to_stderr(str(e), level="ERROR") diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb16fda7b..0e63b6291 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1,3 +1,4 @@ +import re import json import asyncio import logging @@ -6,25 +7,29 @@ from typing import Optional from datetime import datetime from contextlib import suppress +from urllib.parse import urljoin from pydantic import BaseModel, field_validator from .helpers import * from bbot.core.errors import * from bbot.core.helpers import ( extract_words, - split_host_port, + get_file_extension, host_in_host, is_domain, is_subdomain, is_ip, is_ptr, + is_uri, domain_stem, make_netloc, make_ip_type, + recursive_decode, smart_decode, - get_file_extension, - validators, + split_host_port, tagify, + validators, + truncate_string, ) @@ -485,7 +490,7 @@ def data_human(self): return self._data_human() def _data_human(self): - return str(self.data) + return truncate_string(str(self.data), n=2000) def _data_load(self, data): """ @@ -560,7 +565,7 @@ def __contains__(self, other): return host_in_host(other.host, self.host) return False - def json(self, mode="json"): + def json(self, mode="json", siem_friendly=False): """ Serializes the event object to a 
JSON-compatible dictionary. @@ -569,6 +574,7 @@ def json(self, mode="json"): Parameters: mode (str): Specifies the data serialization mode. Default is "json". Other options include "graph", "human", and "id". + siem_friendly (bool): Whether to format the JSON in a way that's friendly to SIEM ingestion by Elastic, Splunk, etc. This ensures the value of "data" is always the same type (a dictionary). Returns: dict: JSON-serializable dictionary representation of the event object. @@ -580,9 +586,13 @@ def json(self, mode="json"): j.update({i: v}) data_attr = getattr(self, f"data_{mode}", None) if data_attr is not None: - j["data"] = data_attr + data = data_attr else: - j["data"] = smart_decode(self.data) + data = smart_decode(self.data) + if siem_friendly: + j["data"] = {self.type: data} + else: + j["data"] = data web_spider_distance = getattr(self, "web_spider_distance", None) if web_spider_distance is not None: j["web_spider_distance"] = web_spider_distance @@ -866,6 +876,8 @@ def _words(self): class URL_UNVERIFIED(BaseEvent): + _status_code_regex = re.compile(r"^status-(\d{1,3})$") + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.web_spider_distance = getattr(self.source, "web_spider_distance", 0) @@ -921,6 +933,14 @@ def _data_id(self): data = "spider-danger" + data return data + @property + def http_status(self): + for t in self.tags: + match = self._status_code_regex.match(t) + if match: + return int(match.groups()[0]) + return 0 + class URL(URL_UNVERIFIED): def sanitize_data(self, data): @@ -973,7 +993,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # count number of consecutive redirects self.num_redirects = getattr(self.source, "num_redirects", 0) - if str(self.data.get("status_code", 0)).startswith("3"): + if str(self.http_status).startswith("3"): self.num_redirects += 1 def sanitize_data(self, data): @@ -1001,6 +1021,34 @@ def _words(self): def _pretty_string(self): return 
f'{self.data["hash"]["header_mmh3"]}:{self.data["hash"]["body_mmh3"]}' + @property + def http_status(self): + try: + return int(self.data.get("status_code", 0)) + except (ValueError, TypeError): + return 0 + + @property + def http_title(self): + http_title = self.data.get("title", "") + try: + return recursive_decode(http_title) + except Exception: + return http_title + + @property + def redirect_location(self): + location = self.data.get("location", "") + # if it's a redirect + if location: + # get the url scheme + scheme = is_uri(location, return_scheme=True) + # if there's no scheme (i.e. it's a relative redirect) + if not scheme: + # then join the location with the current url + location = urljoin(self.parsed.geturl(), location) + return location + class VULNERABILITY(DictHostEvent): _always_emit = True @@ -1123,6 +1171,7 @@ class SOCIAL(DictEvent): class WEBSCREENSHOT(DictHostEvent): _always_emit = True + _quick_emit = True class AZURE_TENANT(DictEvent): @@ -1203,10 +1252,11 @@ def make_event( """ # allow tags to be either a string or an array - if tags is not None: - if isinstance(tags, str): - tags = [tags] - tags = list(tags) + if not tags: + tags = [] + elif isinstance(tags, str): + tags = [tags] + tags = list(tags) if is_event(data): if scan is not None and not data.scan: @@ -1267,7 +1317,7 @@ def make_event( ) -def event_from_json(j): +def event_from_json(j, siem_friendly=False): """ Creates an event object from a JSON dictionary. @@ -1290,14 +1340,19 @@ def event_from_json(j): if required keys are missing. Make sure to validate the JSON input beforehand. 
""" try: + event_type = j["type"] kwargs = { - "data": j["data"], - "event_type": j["type"], + "event_type": event_type, "scans": j.get("scans", []), "tags": j.get("tags", []), "confidence": j.get("confidence", 5), "dummy": True, } + if siem_friendly: + data = j["data"][event_type] + else: + data = j["data"] + kwargs["data"] = data event = make_event(**kwargs) resolved_hosts = j.get("resolved_hosts", []) diff --git a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py index 916764e53..8434ccb0f 100644 --- a/bbot/core/helpers/async_helpers.py +++ b/bbot/core/helpers/async_helpers.py @@ -5,13 +5,12 @@ import threading from datetime import datetime from queue import Queue, Empty +from cachetools import LRUCache from .misc import human_timedelta from contextlib import asynccontextmanager log = logging.getLogger("bbot.core.helpers.async_helpers") -from .cache import CacheDict - class ShuffleQueue(asyncio.Queue): def _put(self, item): @@ -32,20 +31,20 @@ class NamedLock: """ Returns a unique asyncio.Lock() based on a provided string - Useful for preventing multiple operations from occuring on the same data in parallel + Useful for preventing multiple operations from occurring on the same data in parallel E.g. 
simultaneous DNS lookups on the same hostname """ def __init__(self, max_size=1000): - self._cache = CacheDict(max_size=max_size) + self._cache = LRUCache(maxsize=max_size) @asynccontextmanager async def lock(self, name): try: - lock = self._cache.get(name) + lock = self._cache[name] except KeyError: lock = _Lock(name) - self._cache.put(name, lock) + self._cache[name] = lock async with lock: yield @@ -57,33 +56,34 @@ def __init__(self): @property def value(self): - return len(self.tasks) + return sum([t.n for t in self.tasks.values()]) - def count(self, task_name, _log=True): + def count(self, task_name, n=1, _log=True): if callable(task_name): task_name = f"{task_name.__qualname__}()" - return self.Task(self, task_name, _log) + return self.Task(self, task_name, n=n, _log=_log) class Task: - def __init__(self, manager, task_name, _log=True): + def __init__(self, manager, task_name, n=1, _log=True): self.manager = manager self.task_name = task_name self.task_id = None self.start_time = None self.log = _log + self.n = n async def __aenter__(self): - self.task_id = uuid.uuid4() # generate a unique ID for the task + self.task_id = uuid.uuid4() # if self.log: # log.trace(f"Starting task {self.task_name} ({self.task_id})") - async with self.manager.lock: # acquire the lock + async with self.manager.lock: self.start_time = datetime.now() self.manager.tasks[self.task_id] = self - return self.task_id # this will be passed as 'task_id' to __aexit__ + return self async def __aexit__(self, exc_type, exc_val, exc_tb): - async with self.manager.lock: # acquire the lock - self.manager.tasks.pop(self.task_id, None) # remove only current task + async with self.manager.lock: + self.manager.tasks.pop(self.task_id, None) # if self.log: # log.trace(f"Finished task {self.task_name} ({self.task_id})") diff --git a/bbot/core/helpers/cache.py b/bbot/core/helpers/cache.py index 3eb54daf7..3a70fbd24 100644 --- a/bbot/core/helpers/cache.py +++ b/bbot/core/helpers/cache.py @@ -1,8 +1,6 @@ 
import os import time import logging -from contextlib import suppress -from collections import OrderedDict from .misc import sha1 @@ -53,84 +51,3 @@ def is_cached(self, key, cache_hrs=24 * 7): def cache_filename(self, key): return self.cache_dir / sha1(key).hexdigest() - - -_sentinel = object() - - -class CacheDict: - """ - Dictionary to store cached values, with a maximum size limit - """ - - def __init__(self, max_size=1000): - self._cache = OrderedDict() - self._max_size = int(max_size) - - def get(self, name, fallback=_sentinel): - name_hash = self._hash(name) - try: - return self._cache[name_hash] - except KeyError: - if fallback is not _sentinel: - return fallback - raise - finally: - with suppress(KeyError): - self._cache.move_to_end(name_hash) - self._truncate() - - def put(self, name, value): - name_hash = self._hash(name) - try: - self._cache[name_hash] = value - finally: - with suppress(KeyError): - self._cache.move_to_end(name_hash) - self._truncate() - - def _truncate(self): - if not self or len(self) <= self._max_size: - return - for nh in list(self._cache.keys()): - try: - del self._cache[nh] - except KeyError: - pass - if not self or len(self) <= self._max_size: - break - - def keys(self): - return self._cache.keys() - - def values(self): - return self._cache.values() - - def items(self): - return self._cache.items() - - def clear(self): - return self._cache.clear() - - def _hash(self, v): - if type(v) == int: - return v - return hash(str(v)) - - def __contains__(self, item): - return self._hash(item) in self._cache - - def __iter__(self): - return iter(self._cache) - - def __getitem__(self, item): - return self.get(item) - - def __setitem__(self, item, value): - self.put(item, value) - - def __bool__(self): - return bool(self._cache) - - def __len__(self): - return len(self._cache) diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index 00662b969..049baef86 100644 --- 
a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -157,9 +157,9 @@ async def pip_install(self, packages, constraints=None): command = [sys.executable, "-m", "pip", "install", "--upgrade"] + packages if constraints: - contraints_tempfile = self.parent_helper.tempfile(constraints, pipe=False) + constraints_tempfile = self.parent_helper.tempfile(constraints, pipe=False) command.append("--constraint") - command.append(contraints_tempfile) + command.append(constraints_tempfile) process = None try: diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index fd8d3474f..ca83eb915 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -19,7 +19,6 @@ def __init__(self, baseline_url, parent_helper, method="GET", allow_redirects=Fa async def _baseline(self): if not self._baselined: - self._baselined = True # vanilla URL if self.include_cache_buster: url_1 = self.parent_helper.add_get_params(self.baseline_url, self.gen_cache_buster()).geturl() @@ -86,6 +85,7 @@ async def _baseline(self): self.baseline_ignore_headers += [x.lower() for x in dynamic_headers] self.baseline_body_distance = self.compare_body(baseline_1_json, baseline_2_json) + self._baselined = True def gen_cache_buster(self): return {self.parent_helper.rand_string(6): "1"} diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 9ad22116f..ecfc71b94 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -7,13 +7,14 @@ import contextlib import dns.exception import dns.asyncresolver +from cachetools import LRUCache from contextlib import suppress from .regexes import dns_name_regex from bbot.core.helpers.ratelimiter import RateLimiter from bbot.core.helpers.async_helpers import NamedLock from bbot.core.errors import ValidationError, DNSError, DNSWildcardBreak -from .misc import is_ip, is_domain, is_dns_name, domain_parents, parent_domain, rand_string, cloudcheck, as_completed +from .misc import is_ip, 
is_domain, is_dns_name, domain_parents, parent_domain, rand_string, cloudcheck log = logging.getLogger("bbot.core.helpers.dns") @@ -64,8 +65,8 @@ class DNSHelper: wildcard_ignore (tuple): Domains to be ignored during wildcard detection. wildcard_tests (int): Number of tests to be run for wildcard detection. Defaults to 5. _wildcard_cache (dict): Cache for wildcard detection results. - _dns_cache (CacheDict): Cache for DNS resolution results, limited in size. - _event_cache (CacheDict): Cache for event resolution results, tags. Limited in size. + _dns_cache (LRUCache): Cache for DNS resolution results, limited in size. + _event_cache (LRUCache): Cache for event resolution results, tags. Limited in size. resolver_file (Path): File containing system's current resolver nameservers. filter_bad_ptrs (bool): Whether to filter out DNS names that appear to be auto-generated PTR records. Defaults to True. @@ -130,8 +131,8 @@ def __init__(self, parent_helper): self.fallback_nameservers_file = self.parent_helper.wordlist_dir / "nameservers.txt" self._debug = self.parent_helper.config.get("dns_debug", False) self._dummy_modules = dict() - self._dns_cache = self.parent_helper.CacheDict(max_size=100000) - self._event_cache = self.parent_helper.CacheDict(max_size=10000) + self._dns_cache = LRUCache(maxsize=10000) + self._event_cache = LRUCache(maxsize=10000) self._event_cache_locks = NamedLock() # for mocking DNS queries @@ -334,6 +335,10 @@ async def _resolve_hostname(self, query, **kwargs): if results: self._last_dns_success = time.time() + self.debug(f"Answers for {query} with kwargs={kwargs}: {list(results)}") + + if errors: + self.debug(f"Errors for {query} with kwargs={kwargs}: {errors}") return results, errors @@ -526,9 +531,8 @@ async def resolve_event(self, event, minimal=False): types = ("A", "AAAA") if types: - tasks = [self.resolve_raw(event_host, type=t, use_cache=True) for t in types] - async for task in as_completed(tasks): - resolved_raw, errors = await task + for 
t in types: + resolved_raw, errors = await self.resolve_raw(event_host, type=t, use_cache=True) for rdtype, e in errors: if rdtype not in resolved_raw: event_tags.add(f"{rdtype.lower()}-error") @@ -627,24 +631,13 @@ def event_cache_get(self, host): except KeyError: return set(), None, None, set() - async def _resolve_batch_coro_wrapper(self, q, **kwargs): - """ - Helps us correlate task results back to their original arguments - """ - result = await self.resolve(q, **kwargs) - return (q, result) - async def resolve_batch(self, queries, **kwargs): """ - Asynchronously resolves a batch of queries in parallel and yields the results as they are completed. - - This method wraps around `_resolve_batch_coro_wrapper` to resolve a list of queries in parallel. - It batches the queries to a manageable size and executes them asynchronously, respecting - global rate limits. + A helper to execute a bunch of DNS requests. Args: queries (list): List of queries to resolve. - **kwargs: Additional keyword arguments to pass to `_resolve_batch_coro_wrapper`. + **kwargs: Additional keyword arguments to pass to `resolve()`. Yields: tuple: A tuple containing the original query and its resolved value. 
@@ -658,13 +651,8 @@ async def resolve_batch(self, queries, **kwargs): ('evilcorp.com', {'2.2.2.2'}) """ - queries = list(queries) - batch_size = 250 - for i in range(0, len(queries), batch_size): - batch = queries[i : i + batch_size] - tasks = [asyncio.create_task(self._resolve_batch_coro_wrapper(q, **kwargs)) for q in batch] - async for task in as_completed(tasks): - yield await task + for q in queries: + yield (q, await self.resolve(q, **kwargs)) def extract_targets(self, record): """ @@ -837,14 +825,11 @@ async def is_wildcard(self, query, ips=None, rdtype=None): # if the caller hasn't already done the work of resolving the IPs if ips is None: # then resolve the query for all rdtypes - base_query_tasks = { - t: asyncio.create_task(self.resolve_raw(query, type=t, use_cache=True)) for t in rdtypes_to_check - } - for _rdtype, task in base_query_tasks.items(): - raw_results, errors = await task + for t in rdtypes_to_check: + raw_results, errors = await self.resolve_raw(query, type=t, use_cache=True) if errors and not raw_results: - self.debug(f"Failed to resolve {query} ({_rdtype}) during wildcard detection") - result[_rdtype] = (None, parent) + self.debug(f"Failed to resolve {query} ({t}) during wildcard detection") + result[t] = (None, parent) continue for __rdtype, answers in raw_results: base_query_results = set() @@ -868,12 +853,13 @@ async def is_wildcard(self, query, ips=None, rdtype=None): # for every parent domain, starting with the shortest try: for host in parents[::-1]: + # make sure we've checked that domain for wildcards + await self.is_wildcard_domain(host) + # for every rdtype for _rdtype in list(base_query_ips): # get the IPs from above query_ips = base_query_ips.get(_rdtype, set()) - # make sure we've checked that domain for wildcards - await self.is_wildcard_domain(host) host_hash = hash(host) if host_hash in self._wildcard_cache: @@ -949,24 +935,20 @@ async def is_wildcard_domain(self, domain, log_info=False): wildcard_domain_results[host] = 
self._wildcard_cache[host_hash] continue + log.verbose(f"Checking if {host} is a wildcard") + # determine if this is a wildcard domain - wildcard_tasks = {t: [] for t in rdtypes_to_check} + # resolve a bunch of random subdomains of the same parent - for rdtype in rdtypes_to_check: + is_wildcard = False + wildcard_results = dict() + for rdtype in list(rdtypes_to_check): # continue if a wildcard was already found for this rdtype # if rdtype in self._wildcard_cache[host_hash]: # continue for _ in range(self.wildcard_tests): rand_query = f"{rand_string(digits=False, length=10)}.{host}" - wildcard_task = asyncio.create_task(self.resolve(rand_query, type=rdtype, use_cache=False)) - wildcard_tasks[rdtype].append(wildcard_task) - - # combine the random results - is_wildcard = False - wildcard_results = dict() - for rdtype, tasks in wildcard_tasks.items(): - async for task in as_completed(tasks): - results = await task + results = await self.resolve(rand_query, type=rdtype, use_cache=False) if results: is_wildcard = True if not rdtype in wildcard_results: @@ -985,6 +967,8 @@ async def is_wildcard_domain(self, domain, log_info=False): if log_info: log_fn = log.info log_fn(f"Encountered domain with wildcard DNS ({wildcard_rdtypes_str}): {host}") + else: + log.verbose(f"Finished checking {host}, it is not a wildcard") return wildcard_domain_results @@ -1031,13 +1015,14 @@ def _parse_rdtype(self, t, default=None): def debug(self, *args, **kwargs): if self._debug: - log.debug(*args, **kwargs) + log.trace(*args, **kwargs) def _get_dummy_module(self, name): try: dummy_module = self._dummy_modules[name] except KeyError: dummy_module = self.parent_helper._make_dummy_module(name=name, _type="DNS") + dummy_module.suppress_dupes = False self._dummy_modules[name] = dummy_module return dummy_module diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index dbe19f20c..899f3ab0b 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -48,8 +48,8 @@ 
class ConfigAwareHelper: from . import regexes from . import validators from .files import tempfile, feed_pipe, _feed_pipe, tempfile_tail + from .cache import cache_get, cache_put, cache_filename, is_cached from .command import run, run_live, _spawn_proc, _prepare_command_kwargs - from .cache import cache_get, cache_put, cache_filename, is_cached, CacheDict def __init__(self, config, scan=None): self.config = config @@ -77,8 +77,8 @@ def __init__(self, config, scan=None): # cloud helpers self.cloud = CloudHelper(self) - def interactsh(self): - return Interactsh(self) + def interactsh(self, *args, **kwargs): + return Interactsh(self, *args, **kwargs) def http_compare(self, url, allow_redirects=False, include_cache_buster=True): return HttpCompare(url, self, allow_redirects=allow_redirects, include_cache_buster=include_cache_buster) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 95f76d2f5..aad4a169f 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -78,12 +78,13 @@ class Interactsh: ``` """ - def __init__(self, parent_helper): + def __init__(self, parent_helper, poll_interval=10): self.parent_helper = parent_helper self.server = None self.correlation_id = None self.custom_server = self.parent_helper.config.get("interactsh_server", None) self.token = self.parent_helper.config.get("interactsh_token", None) + self.poll_interval = poll_interval self._poll_task = None async def register(self, callback=None): @@ -126,8 +127,9 @@ async def register(self, callback=None): if self.custom_server: if not self.token: log.verbose("Interact.sh token is not set") - headers["Authorization"] = self.token - self.server_list = [self.custom_server] + else: + headers["Authorization"] = self.token + self.server_list = [str(self.custom_server)] else: self.server_list = random.sample(server_list, k=len(server_list)) for server in self.server_list: @@ -234,6 +236,8 @@ async def poll(self): r = await 
self.parent_helper.request( f"https://{self.server}/poll?id={self.correlation_id}&secret={self.secret}", headers=headers ) + if r is None: + raise InteractshError("Error polling interact.sh: No response from server") ret = [] data_list = r.json().get("data", None) @@ -279,7 +283,7 @@ async def _poll_loop(self, callback): log.warning(e) log.trace(traceback.format_exc()) if not data_list: - await asyncio.sleep(10) + await asyncio.sleep(self.poll_interval) continue for data in data_list: if data: diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index f62560704..a67e89402 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -66,9 +66,14 @@ def is_domain(d): - Port, if present in input, is ignored. """ d, _ = split_host_port(d) + if is_ip(d): + return False extracted = tldextract(d) - if extracted.domain and not extracted.subdomain: - return True + if extracted.registered_domain: + if not extracted.subdomain: + return True + else: + return d.count(".") == 1 return False @@ -96,9 +101,14 @@ def is_subdomain(d): - Port, if present in input, is ignored. """ d, _ = split_host_port(d) + if is_ip(d): + return False extracted = tldextract(d) - if extracted.domain and extracted.subdomain: - return True + if extracted.registered_domain: + if extracted.subdomain: + return True + else: + return d.count(".") > 1 return False @@ -327,6 +337,23 @@ def domain_parents(d, include_self=False): break +def subdomain_depth(d): + """ + Calculate the depth of subdomains within a given domain name. + + Args: + d (str): The domain name to analyze. + + Returns: + int: The depth of the subdomain. For example, a hostname "5.4.3.2.1.evilcorp.com" + has a subdomain depth of 5. + """ + subdomain, domain = split_domain(d) + if not subdomain: + return 0 + return subdomain.count(".") + 1 + + def parent_url(u): """ Retrieve the parent URL of a given URL. 
@@ -389,6 +416,50 @@ def url_parents(u): u = parent +def best_http_status(code1, code2): + """ + Determine the better HTTP status code between two given codes. + + The 'better' status code is considered based on typical usage and priority in HTTP communication. + Lower codes are generally better than higher codes. Within the same class (e.g., 2xx), a lower code is better. + Between different classes, the order of preference is 2xx > 3xx > 1xx > 4xx > 5xx. + + Args: + code1 (int): The first HTTP status code. + code2 (int): The second HTTP status code. + + Returns: + int: The better HTTP status code between the two provided codes. + + Examples: + >>> best_http_status(200, 404) + 200 + >>> best_http_status(500, 400) + 400 + >>> best_http_status(301, 302) + 301 + """ + + # Classify the codes into their respective categories (1xx, 2xx, 3xx, 4xx, 5xx) + def classify_code(code): + return int(code) // 100 + + class1 = classify_code(code1) + class2 = classify_code(code2) + + # Priority order for classes + priority_order = {2: 1, 3: 2, 1: 3, 4: 4, 5: 5} + + # Compare based on class priority + p1 = priority_order.get(class1, 10) + p2 = priority_order.get(class2, 10) + if p1 != p2: + return code1 if p1 < p2 else code2 + + # If in the same class, the lower code is better + return min(code1, code2) + + def tldextract(data): """ Extracts the subdomain, domain, and suffix from a URL string. @@ -557,9 +628,6 @@ def is_ip(d, version=None): >>> is_ip('evilcorp.com') False """ - if isinstance(d, (ipaddress.IPv4Address, ipaddress.IPv6Address)): - if version is None or version == d.version: - return True try: ip = ipaddress.ip_address(d) if version is None or ip.version == version: @@ -1121,6 +1189,8 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): This function takes a list `l` and flattens it by splitting its entries on commas. It also allows you to optionally open entries as files and add their contents to the list. 
+ The order of entries is preserved, and deduplication is performed automatically. + Args: l (list): The list of strings to chain together. try_files (bool, optional): Whether to try to open entries as files. Defaults to False. @@ -1137,6 +1207,8 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): >>> chain_lists(["a,file.txt", "c,d"], try_files=True) ['a', 'f_line1', 'f_line2', 'f_line3', 'c', 'd'] """ + if isinstance(l, str): + l = [l] final_list = dict() for entry in l: for s in entry.split(","): @@ -1379,7 +1451,7 @@ def search_dict_values(d, *regexes): ... ] ... } ... } - >>> url_regexes = re.compile(r'https?://[^\s<>"]+|www\.[^\s<>"]+') + >>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\.[^\\s<>"]+') >>> list(search_dict_values(dict_to_search, url_regexes)) ["https://www.evilcorp.com"] """ diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 49ed866d6..16432cb0e 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -298,6 +298,7 @@ "ashley", "audrey", "austin", + "azathoth", "baggins", "bailey", "barbara", @@ -347,8 +348,10 @@ "courtney", "craig", "crystal", + "cthulu", "curtis", "cynthia", + "dagon", "dale", "dandelion", "daniel", @@ -554,6 +557,7 @@ "noah", "norma", "norman", + "nyarlathotep", "obama", "olivia", "padme", diff --git a/bbot/core/helpers/ntlm.py b/bbot/core/helpers/ntlm.py index e4d9cd1ca..8605ef34a 100644 --- a/bbot/core/helpers/ntlm.py +++ b/bbot/core/helpers/ntlm.py @@ -38,7 +38,7 @@ def __init__(self, pos_tup, raw): def decode_ntlm_challenge(st): hdr_tup = struct.unpack(">> soup = self.helpers.beautifulsoup(event.data["body"], "html.parser") + Perform an html parse of the 'markup' argument and return a soup instance + + >>> email_type = soup.find(type="email") + Searches the soup instance for all occurrences of the passed in argument + """ + try: + soup = BeautifulSoup( + markup, features, builder, parse_only, from_encoding, 
exclude_encodings, element_classes, **kwargs + ) + return soup + except Exception as e: + log.debug(f"Error parsing beautifulsoup: {e}") + return False + def ssl_context_noverify(self): if self._ssl_context_noverify is None: ssl_context = ssl.create_default_context() @@ -616,6 +675,12 @@ async def _acatch(self, url, raise_error): log.trace(traceback.format_exc()) if raise_error: raise httpx.RequestError(msg) + except SOCKSError as e: + msg = f"SOCKS error with request to URL: {url}: {e}" + log.trace(msg) + log.trace(traceback.format_exc()) + if raise_error: + raise httpx.RequestError(msg) except BaseException as e: # don't log if the error is the result of an intentional cancellation if not any( diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index 924f88835..46d475cca 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -17,22 +17,23 @@ async def handle_event(self, event): if event.type == "URL": if "dir" not in event.tags: return False - probe_url = f"{event.data}ajaxpro/whatever.ashx" - probe = await self.helpers.request(probe_url) - if probe: - if probe.status_code == 200: - probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx") - if probe_confirm: - if probe_confirm.status_code != 200: - self.emit_event( - { - "host": str(event.host), - "url": event.data, - "description": f"Ajaxpro Detected (Version Unconfirmed) Trigger: [{probe_url}]", - }, - "FINDING", - event, - ) + for stem in ["ajax", "ajaxpro"]: + probe_url = f"{event.data}{stem}/whatever.ashx" + probe = await self.helpers.request(probe_url) + if probe: + if probe.status_code == 200: + probe_confirm = await self.helpers.request(f"{event.data}a/whatever.ashx") + if probe_confirm: + if probe_confirm.status_code != 200: + await self.emit_event( + { + "host": str(event.host), + "url": event.data, + "description": f"Ajaxpro Detected (Version Unconfirmed) Trigger: [{probe_url}]", + }, + "FINDING", + event, + ) elif event.type == "HTTP_RESPONSE": resp_body = 
event.data.get("body", None) @@ -40,7 +41,7 @@ async def handle_event(self, event): ajaxpro_regex_result = self.ajaxpro_regex.search(resp_body) if ajaxpro_regex_result: ajax_pro_path = ajaxpro_regex_result.group(0) - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index 7b0cda171..9864e3c6d 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -6,6 +6,10 @@ class anubisdb(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] meta = {"description": "Query jldc.me's database for subdomains"} + options = {"limit": 1000} + options_desc = { + "limit": "Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API)" + } base_url = "https://jldc.me/anubis/subdomains" dns_abort_depth = 5 @@ -36,6 +40,9 @@ def parse_results(self, r, query): if json: for hostname in json: hostname = str(hostname).lower() - if hostname.endswith(f".{query}") and not self.abort_if_pre(hostname): + in_scope = hostname.endswith(f".{query}") + is_ptr = self.helpers.is_ptr(hostname) + too_long = self.abort_if_pre(hostname) + if in_scope and not is_ptr and not too_long: results.add(hostname) - return results + return sorted(results)[: self.config.get("limit", 1000)] diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index f299ca3dc..33772921e 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -4,7 +4,7 @@ class azure_realm(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] - flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "passive", "safe"] + flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "web-thorough", "passive", "safe"] meta = {"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm'} async def setup(self): @@ -22,7 +22,7 @@ async def 
handle_event(self, event): auth_url, "URL_UNVERIFIED", source=event, tags=["affiliate", "ms-auth-url"] ) url_event.source_domain = domain - self.emit_event(url_event) + await self.emit_event(url_event) async def getuserrealm(self, domain): url = f"https://login.microsoftonline.com/getuserrealm.srf?login=test@{domain}" diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index a15fbecf4..909acbe20 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -34,7 +34,7 @@ async def handle_event(self, event): self.verbose(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}') for domain in domains: if domain != query: - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) + await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) # tenant names if domain.lower().endswith(".onmicrosoft.com"): tenantname = domain.split(".")[0].lower() @@ -44,7 +44,7 @@ async def handle_event(self, event): event_data = {"tenant-names": sorted(tenant_names), "domains": sorted(domains)} if tenant_id is not None: event_data["tenant-id"] = tenant_id - self.emit_event(event_data, "AZURE_TENANT", source=event) + await self.emit_event(event_data, "AZURE_TENANT", source=event) async def query(self, domain): url = f"{self.base_url}/autodiscover/autodiscover.svc" diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py index 0e188bced..a7a31a48c 100644 --- a/bbot/modules/badsecrets.py +++ b/bbot/modules/badsecrets.py @@ -52,11 +52,11 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - self.emit_event(data, "VULNERABILITY", event) + await self.emit_event(data, "VULNERABILITY", event) elif r["type"] == "IdentifyOnly": # There is little value to presenting a non-vulnerable asp.net viewstate, as it is not crackable without a Matrioshka brain. Just emit a technology instead. 
if r["detecting_module"] == "ASPNET_Viewstate": - self.emit_event( + await self.emit_event( {"technology": "microsoft asp.net", "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event, @@ -67,4 +67,4 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 7a4a78342..d888bf699 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -104,7 +104,7 @@ class BaseModule: _preserve_graph = False _stats_exclude = False - _qsize = 0 + _qsize = 1000 _priority = 3 _name = "base" _type = "scan" @@ -175,7 +175,7 @@ async def handle_event(self, event): """ pass - def handle_batch(self, *events): + async def handle_batch(self, *events): """Handles incoming events in batches for optimized processing. This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module. @@ -297,7 +297,7 @@ async def ping(self): def batch_size(self): batch_size = self.config.get("batch_size", None) # only allow overriding the batch size if its default value is greater than 1 - # this prevents modules from being accidentally neutered by an incorect batch_size setting + # this prevents modules from being accidentally neutered by an incorrect batch_size setting if batch_size is None or self._batch_size == 1: batch_size = self._batch_size return batch_size @@ -350,19 +350,20 @@ async def _handle_batch(self): - If a "FINISHED" event is found, invokes 'finish()' method of the module. 
""" finish = False - async with self._task_counter.count(f"{self.name}.handle_batch()"): + async with self._task_counter.count(f"{self.name}.handle_batch()") as counter: submitted = False if self.batch_size <= 1: return if self.num_incoming_events > 0: events, finish = await self._events_waiting() if events and not self.errored: - self.debug(f"Handling batch of {len(events):,} events") + counter.n = len(events) + self.verbose(f"Handling batch of {len(events):,} events") submitted = True async with self.scan._acatch(f"{self.name}.handle_batch()"): handle_batch_task = asyncio.create_task(self.handle_batch(*events)) await handle_batch_task - self.debug(f"Finished handling batch of {len(events):,} events") + self.verbose(f"Finished handling batch of {len(events):,} events") if finish: context = f"{self.name}.finish()" async with self.scan._acatch(context), self._task_counter.count(context): @@ -381,7 +382,7 @@ def make_event(self, *args, **kwargs): Examples: >>> new_event = self.make_event("1.2.3.4", source=event) - >>> self.emit_event(new_event) + >>> await self.emit_event(new_event) Returns: Event or None: The created event, or None if a validation error occurred and raise_error was False. @@ -401,7 +402,7 @@ def make_event(self, *args, **kwargs): event.module = self return event - def emit_event(self, *args, **kwargs): + async def emit_event(self, *args, **kwargs): """Emit an event to the event queue and distribute it to interested modules. This is how modules "return" data. 
@@ -419,10 +420,10 @@ def emit_event(self, *args, **kwargs): ``` Examples: - >>> self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) >>> new_event = self.make_event("1.2.3.4", source=event) - >>> self.emit_event(new_event) + >>> await self.emit_event(new_event) Returns: None @@ -438,27 +439,7 @@ def emit_event(self, *args, **kwargs): emit_kwargs[o] = v event = self.make_event(*args, **event_kwargs) if event: - self.queue_outgoing_event(event, **emit_kwargs) - - async def emit_event_wait(self, *args, **kwargs): - """Emit an event to the event queue and await until there is space in the outgoing queue. - - This method is similar to `emit_event`, but it waits until there's sufficient space in the outgoing - event queue before emitting the event. It utilizes the queue size threshold defined in `self._qsize`. - - Args: - *args: Positional arguments to be passed to `emit_event()` for event creation. - **kwargs: Keyword arguments to be passed to `emit_event()` for event creation or configuration. - - Returns: - None - - See Also: - emit_event: For emitting an event without waiting on the queue size. 
- """ - while self.outgoing_event_queue.qsize() > self._qsize: - await self.helpers.sleep(0.2) - return self.emit_event(*args, **kwargs) + await self.queue_outgoing_event(event, **emit_kwargs) async def _events_waiting(self): """ @@ -737,10 +718,6 @@ async def __event_postcheck(self, event): if not filter_result: return False, msg - if self._type == "output" and not event._stats_recorded: - event._stats_recorded = True - self.scan.stats.event_produced(event) - self.debug(f"{event} passed post-check") return True, "" @@ -808,7 +785,7 @@ async def queue_event(self, event, precheck=True): except AttributeError: self.debug(f"Not in an acceptable state to queue incoming event") - def queue_outgoing_event(self, event, **kwargs): + async def queue_outgoing_event(self, event, **kwargs): """ Queues an outgoing event to the module's outgoing event queue for further processing. @@ -829,7 +806,7 @@ def queue_outgoing_event(self, event, **kwargs): AttributeError: If the module is not in an acceptable state to queue outgoing events. 
""" try: - self.outgoing_event_queue.put_nowait((event, kwargs)) + await self.outgoing_event_queue.put((event, kwargs)) except AttributeError: self.debug(f"Not in an acceptable state to queue outgoing event") @@ -1076,7 +1053,7 @@ def incoming_event_queue(self): @property def outgoing_event_queue(self): if self._outgoing_event_queue is None: - self._outgoing_event_queue = ShuffleQueue() + self._outgoing_event_queue = ShuffleQueue(self._qsize) return self._outgoing_event_queue @property diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index e6c9990dd..435ceae08 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -29,13 +29,13 @@ async def handle_event(self, event): subdomains = await self.query(query, request_fn=self.request_subdomains, parse_fn=self.parse_subdomains) if subdomains: for subdomain in subdomains: - self.emit_event(subdomain, "DNS_NAME", source=event) + await self.emit_event(subdomain, "DNS_NAME", source=event) if self.urls: urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls) if urls: - for parsed_url in await self.scan.run_in_executor(self.helpers.validators.collapse_urls, urls): - self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) + for parsed_url in await self.scan.run_in_executor_mp(self.helpers.validators.collapse_urls, urls): + await self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) async def request_subdomains(self, query): url = f"{self.base_url}/{self.helpers.quote(query)}/subdomains/" diff --git a/bbot/modules/bucket_file_enum.py b/bbot/modules/bucket_file_enum.py index 7eb6926c0..facaa021e 100644 --- a/bbot/modules/bucket_file_enum.py +++ b/bbot/modules/bucket_file_enum.py @@ -42,7 +42,7 @@ async def handle_aws(self, event): bucket_file = url + "/" + key file_extension = self.helpers.get_file_extension(key) if file_extension not in self.scan.url_extension_blacklist: - self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, 
tags="filedownload") + await self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, tags="filedownload") urls_emitted += 1 if urls_emitted >= self.file_limit: return diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py index 25a46ddf5..0b5793657 100644 --- a/bbot/modules/builtwith.py +++ b/bbot/modules/builtwith.py @@ -33,14 +33,14 @@ async def handle_event(self, event): if subdomains: for s in subdomains: if s != event: - self.emit_event(s, "DNS_NAME", source=event) + await self.emit_event(s, "DNS_NAME", source=event) # redirects if self.config.get("redirects", True): redirects = await self.query(query, parse_fn=self.parse_redirects, request_fn=self.request_redirects) if redirects: for r in redirects: if r != event: - self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) async def request_domains(self, query): url = f"{self.base_url}/v20/api.json?KEY={self.api_key}&LOOKUP={query}&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes" diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 182f4b4db..c58463401 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -63,6 +63,7 @@ "X-Host": "127.0.0.1", } +# This is planned to be replaced in the future: https://github.com/blacklanternsecurity/bbot/issues/1068 waf_strings = ["The requested URL was rejected"] for qp in query_payloads: @@ -83,8 +84,13 @@ class bypass403(BaseModule): async def do_checks(self, compare_helper, event, collapse_threshold): results = set() + error_count = 0 for sig in signatures: + if error_count > 3: + self.warning(f"Received too many errors for URL {event.data} aborting bypass403") + return None + sig = self.format_signature(sig, event) if sig[2] != None: headers = dict(sig[2]) @@ -95,6 +101,7 @@ async def do_checks(self, compare_helper, event, collapse_threshold): sig[1], headers=headers, method=sig[0], allow_redirects=True ) except HttpCompareError as e: + 
error_count += 1 self.debug(e) continue @@ -127,12 +134,12 @@ async def handle_event(self, event): self.debug(e) return - collapse_threshold = 10 + collapse_threshold = 6 results = await self.do_checks(compare_helper, event, collapse_threshold) if results is None: return if len(results) > collapse_threshold: - self.emit_event( + await self.emit_event( { "description": f"403 Bypass MULTIPLE SIGNATURES (exceeded threshold {str(collapse_threshold)})", "host": str(event.host), @@ -143,12 +150,13 @@ async def handle_event(self, event): ) else: for description in results: - self.emit_event( + await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", source=event, ) + # When a WAF-check helper is available in the future, we will convert to HTTP_RESPONSE and check for the WAF string here. async def filter_event(self, event): if ("status-403" in event.tags) or ("status-401" in event.tags): return True diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py index c493199d7..382644007 100644 --- a/bbot/modules/credshed.py +++ b/bbot/modules/credshed.py @@ -1,9 +1,9 @@ from contextlib import suppress -from bbot.modules.templates.credential_leak import credential_leak +from bbot.modules.base import BaseModule -class credshed(credential_leak): +class credshed(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME", "EMAIL_ADDRESS"] flags = ["passive", "safe"] @@ -17,6 +17,7 @@ class credshed(credential_leak): "password": "Credshed password", "credshed_url": "URL of credshed server", } + target_only = True async def setup(self): self.base_url = self.config.get("credshed_url", "").rstrip("/") @@ -40,7 +41,7 @@ async def setup(self): return await super().setup() async def handle_event(self, event): - query = self.make_query(event) + query = event.data cs_query = await self.helpers.request( f"{self.base_url}/api/search", method="POST", @@ -76,11 +77,11 @@ async def 
handle_event(self, event): email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: - self.emit_event(email_event) - if user and not self.already_seen(f"{email}:{user}"): - self.emit_event(user, "USERNAME", source=email_event, tags=tags) - if pw and not self.already_seen(f"{email}:{pw}"): - self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) + await self.emit_event(email_event) + if user: + await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + if pw: + await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) for h_pw in hashes: - if h_pw and not self.already_seen(f"{email}:{h_pw}"): - self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + if h_pw: + await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) diff --git a/bbot/modules/deadly/dastardly.py b/bbot/modules/deadly/dastardly.py index 403e9511e..47fd834fd 100644 --- a/bbot/modules/deadly/dastardly.py +++ b/bbot/modules/deadly/dastardly.py @@ -60,7 +60,7 @@ async def handle_event(self, event): for testcase in testsuite.testcases: for failure in testcase.failures: if failure.severity == "Info": - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": url, @@ -70,7 +70,7 @@ async def handle_event(self, event): event, ) else: - self.emit_event( + await self.emit_event( { "severity": failure.severity, "host": str(event.host), diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index cadaf4990..2c27f055b 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -83,7 +83,7 @@ async def handle_event(self, event): filters = await self.baseline_ffuf(fixed_url, exts=exts) async for r in self.execute_ffuf(self.tempfile, fixed_url, exts=exts, filters=filters): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event(r["url"], 
"URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) async def filter_event(self, event): if "endpoint" in event.tags: @@ -143,7 +143,7 @@ async def baseline_ffuf(self, url, exts=[""], prefix="", suffix="", mode="normal # if we only got 403, we might already be blocked by a WAF. Issue a warning, but it's possible all 'not founds' are given 403 if canary_results[0]["status"] == 403: self.warning( - "All requests of the baseline recieved a 403 response. It is possible a WAF is actively blocking your traffic." + "All requests of the baseline received a 403 response. It is possible a WAF is actively blocking your traffic." ) # if we only got 429, we are almost certainly getting blocked by a WAF or rate-limiting. Specifically with 429, we should respect them and abort the scan. diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 33bcbe7e0..04be1aa95 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -70,7 +70,7 @@ async def setup(self): else: self.warning("Error running nuclei template update command") self.proxy = self.scan.config.get("http_proxy", "") - self.mode = self.config.get("mode", "severe") + self.mode = self.config.get("mode", "severe").lower() self.ratelimit = int(self.config.get("ratelimit", 150)) self.concurrency = int(self.config.get("concurrency", 25)) self.budget = int(self.config.get("budget", 1)) @@ -85,7 +85,7 @@ async def setup(self): self.info(f"Excluding the following nuclei tags: [{self.etags}]") self.severity = self.config.get("severity") if self.mode != "severe" and self.severity != "": - self.info(f"Limiting nuclei templates to the following severites: [{self.severity}]") + self.info(f"Limiting nuclei templates to the following severities: [{self.severity}]") self.iserver = self.scan.config.get("interactsh_server", None) self.itoken = self.scan.config.get("interactsh_token", None) self.retries = int(self.config.get("retries", 0)) @@ -120,7 +120,7 @@ async def setup(self): 
self.info("Processing nuclei templates to perform budget calculations...") self.nucleibudget = NucleiBudget(self) - self.budget_templates_file = self.helpers.tempfile(self.nucleibudget.collapsable_templates, pipe=False) + self.budget_templates_file = self.helpers.tempfile(self.nucleibudget.collapsible_templates, pipe=False) self.info( f"Loaded [{str(sum(self.nucleibudget.severity_stats.values()))}] templates based on a budget of [{str(self.budget)}] request(s)" @@ -150,7 +150,7 @@ async def handle_batch(self, *events): description_string += f" Extracted Data: [{','.join(extracted_results)}]" if severity in ["INFO", "UNKNOWN"]: - self.emit_event( + await self.emit_event( { "host": str(source_event.host), "url": url, @@ -160,7 +160,7 @@ async def handle_batch(self, *events): source_event, ) else: - self.emit_event( + await self.emit_event( { "severity": severity, "host": str(source_event.host), @@ -295,7 +295,7 @@ def __init__(self, nuclei_module): self.templates_dir = nuclei_module.nuclei_templates_dir self.yaml_list = self.get_yaml_list() self.budget_paths = self.find_budget_paths(nuclei_module.budget) - self.collapsable_templates, self.severity_stats = self.find_collapsable_templates() + self.collapsible_templates, self.severity_stats = self.find_collapsible_templates() def get_yaml_list(self): return list(self.templates_dir.rglob("*.yaml")) @@ -331,8 +331,8 @@ def get_yaml_info_attr(self, yf, attr): yield res # Parse through all templates and locate those which match the conditions necessary to collapse down to the budget setting - def find_collapsable_templates(self): - collapsable_templates = [] + def find_collapsible_templates(self): + collapsible_templates = [] severity_dict = {} for yf in self.yaml_list: valid = True @@ -365,14 +365,14 @@ def find_collapsable_templates(self): valid = False if valid: - collapsable_templates.append(str(yf)) + collapsible_templates.append(str(yf)) severity_gen = self.get_yaml_info_attr(yf, "severity") severity = 
next(severity_gen) if severity in severity_dict.keys(): severity_dict[severity] += 1 else: severity_dict[severity] = 1 - return collapsable_templates, severity_dict + return collapsible_templates, severity_dict def parse_yaml(self, yamlfile): if yamlfile not in self._yaml_files: diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index f4675e10f..e2908dbbe 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -92,9 +92,9 @@ async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=F found_vhost_b64 = r["input"]["FUZZ"] vhost_dict = {"host": str(event.host), "url": host, "vhost": base64.b64decode(found_vhost_b64).decode()} if f"{vhost_dict['vhost']}{basehost}" != event.parsed.netloc: - self.emit_event(vhost_dict, "VHOST", source=event) + await self.emit_event(vhost_dict, "VHOST", source=event) if skip_dns_host == False: - self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) + await self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) yield vhost_dict["vhost"] diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py index a09de454e..c1a35c419 100644 --- a/bbot/modules/dehashed.py +++ b/bbot/modules/dehashed.py @@ -1,15 +1,16 @@ from contextlib import suppress -from bbot.modules.templates.credential_leak import credential_leak +from bbot.modules.base import BaseModule -class dehashed(credential_leak): +class dehashed(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME"] flags = ["passive", "safe", "email-enum"] meta = {"description": "Execute queries against dehashed.com for exposed credentials", "auth_required": True} options = {"username": "", "api_key": ""} options_desc = {"username": "Email Address associated with your API key", "api_key": "DeHashed API Key"} + target_only = True base_url = "https://api.dehashed.com/search" @@ -49,16 +50,16 @@ async def 
handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) if email_event is not None: - self.emit_event(email_event) - if user and not self.already_seen(f"{email}:{user}"): - self.emit_event(user, "USERNAME", source=email_event, tags=tags) - if pw and not self.already_seen(f"{email}:{pw}"): - self.emit_event(pw, "PASSWORD", source=email_event, tags=tags) - if h_pw and not self.already_seen(f"{email}:{h_pw}"): - self.emit_event(h_pw, "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event(email_event) + if user: + await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + if pw: + await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) + if h_pw: + await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) async def query(self, event): - query = f"domain:{self.make_query(event)}" + query = f"domain:{event.data}" url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}" page = 0 num_entries = 0 diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index 538f51621..eef8e2d8c 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -1,91 +1,151 @@ from bbot.modules.base import BaseModule +# the following are the result of a 1-day internet survey to find the top SRV records +# the scan resulted in 36,282 SRV records. the count for each one is shown. 
common_srvs = [ - # Micro$oft - "_ldap._tcp.dc._msdcs", - "_ldap._tcp.gc._msdcs", - "_ldap._tcp.pdc._msdcs", - "_ldap._tcp", - "_ldap._tcp.ForestDNSZones", - "_gc._msdcs", - "_kpasswd._tcp", - "_kpasswd._udp", - "_kerberos._tcp.dc._msdcs", - "_kerberos.tcp.dc._msdcs", - "_kerberos-master._tcp", - "_kerberos-master._udp", - "_kerberos._tcp", - "_kerberos._udp", - "_autodiscover._tcp", - # NTP - "_ntp._udp", - # mDNS - "_nntp._tcp", - # email - "_imap._tcp", - "_imap.tcp", - "_imaps._tcp", - "_pop3._tcp", - "_pop3s._tcp", - "_smtp._tcp", - # MailEnable - "_caldav._tcp", - "_caldavs._tcp", - "_carddav._tcp", - "_carddavs._tcp", - # STUN - "_stun._tcp", - "_stun._udp", - "_stuns._tcp", - "_turn._tcp", - "_turn._udp", - "_turns._tcp", - # SIP - "_h323be._tcp", - "_h323be._udp", - "_h323cs._tcp", - "_h323cs._udp", - "_h323ls._tcp", - "_h323ls._udp", - "_sip._tcp", - "_sip._tls", - "_sip._udp", - "_sipfederationtls._tcp", - "_sipinternal._tcp", - "_sipinternaltls._tcp", - "_sips._tcp", - # misc - "_aix._tcp", - "_certificates._tcp", - "_cmp._tcp", - "_crl._tcp", - "_crls._tcp", - "_finger._tcp", - "_ftp._tcp", - "_gc._tcp", - "_hkp._tcp", - "_hkps._tcp", - "_http._tcp", - "_https._tcp", - "_jabber-client._tcp", - "_jabber-client._udp", - "_jabber._tcp", - "_jabber._udp", - "_ocsp._tcp", - "_pgpkeys._tcp", - "_pgprevokations._tcp", - "_PKIXREP._tcp", - "_submission._tcp", - "_svcp._tcp", - "_telnet._tcp", - "_test._tcp", - "_whois._tcp", - "_x-puppet-ca._tcp", - "_x-puppet._tcp", - "_xmpp-client._tcp", - "_xmpp-client._udp", - "_xmpp-server._tcp", - "_xmpp-server._udp", + "_sipfederationtls._tcp", # 6909 + "_sip._tls", # 6853 + "_autodiscover._tcp", # 4268 + "_xmpp-server._tcp", # 1437 + "_sip._tcp", # 1193 + "_sips._tcp", # 1183 + "_caldavs._tcp", # 1179 + "_carddavs._tcp", # 1132 + "_caldav._tcp", # 1035 + "_carddav._tcp", # 1024 + "_sip._udp", # 1007 + "_imaps._tcp", # 1007 + "_submission._tcp", # 906 + "_h323cs._tcp", # 846 + "_h323ls._udp", # 782 + 
"_xmpp-client._tcp", # 689 + "_pop3s._tcp", # 394 + "_jabber._tcp", # 277 + "_imap._tcp", # 267 + "_turn._udp", # 256 + "_pop3._tcp", # 221 + "_ldap._tcp", # 213 + "_smtps._tcp", # 195 + "_sipinternaltls._tcp", # 192 + "_vlmcs._tcp", # 165 + "_kerberos._udp", # 163 + "_kerberos._tcp", # 148 + "_kpasswd._udp", # 128 + "_kpasswd._tcp", # 100 + "_ntp._udp", # 90 + "_gc._tcp", # 73 + "_kerberos-master._udp", # 66 + "_ldap._tcp.dc._msdcs", # 63 + "_matrix._tcp", # 62 + "_smtp._tcp", # 61 + "_stun._udp", # 57 + "_kerberos._tcp.dc._msdcs", # 54 + "_ldap._tcp.gc._msdcs", # 49 + "_kerberos-adm._tcp", # 44 + "_ldap._tcp.pdc._msdcs", # 43 + "_kerberos-master._tcp", # 43 + "_http._tcp", # 37 + "_h323rs._tcp", # 36 + "_sipinternal._tcp", # 35 + "_turn._tcp", # 33 + "_stun._tcp", # 33 + "_h323ls._tcp", # 33 + "_x-puppet._tcp", # 30 + "_h323cs._udp", # 27 + "_stuns._tcp", # 26 + "_jabber-client._tcp", # 25 + "_x-puppet-ca._tcp", # 22 + "_ts3._udp", # 22 + "_minecraft._tcp", # 22 + "_turns._tcp", # 21 + "_ldaps._tcp", # 21 + "_xmpps-client._tcp", # 20 + "_https._tcp", # 19 + "_ftp._tcp", # 19 + "_xmpp-server._udp", # 18 + "_xmpp-client._udp", # 17 + "_jabber._udp", # 17 + "_jabber-client._udp", # 17 + "_xmpps-server._tcp", # 15 + "_finger._tcp", # 14 + "_stuns._udp", # 12 + "_hkp._tcp", # 12 + "_vlmcs._udp", # 11 + "_turns._udp", # 11 + "_tftp._udp", # 11 + "_ssh._tcp", # 11 + "_rtps._udp", # 11 + "_mysqlsrv._tcp", # 11 + "_hkps._tcp", # 11 + "_h323be._udp", # 11 + "_dns._tcp", # 11 + "_wss._tcp", # 10 + "_wpad._tcp", # 10 + "_whois._tcp", # 10 + "_webexconnect._tcp", # 10 + "_webexconnects._tcp", # 10 + "_vnc._tcp", # 10 + "_test._tcp", # 10 + "_telnet._tcp", # 10 + "_telnets._tcp", # 10 + "_teamspeak._tcp", # 10 + "_svns._tcp", # 10 + "_svcp._tcp", # 10 + "_smb._tcp", # 10 + "_sip-tls._tcp", # 10 + "_sftp._tcp", # 10 + "_secure-pop3._tcp", # 10 + "_secure-imap._tcp", # 10 + "_rtsp._tcp", # 10 + "_rtps._tcp", # 10 + "_rpc._tcp", # 10 + "_rfb._tcp", # 10 + "_raop._tcp", # 10 + 
"_pstn._tcp", # 10 + "_presence._tcp", # 10 + "_pkixrep._tcp", # 10 + "_pgprevokations._tcp", # 10 + "_pgpkeys._tcp", # 10 + "_ocsp._tcp", # 10 + "_nntp._tcp", # 10 + "_nfs._tcp", # 10 + "_netbios-ssn._tcp", # 10 + "_netbios-ns._tcp", # 10 + "_netbios-dgm._tcp", # 10 + "_mumble._tcp", # 10 + "_msrpc._tcp", # 10 + "_mqtts._tcp", # 10 + "_minecraft._udp", # 10 + "_iscsi._tcp", # 10 + "_ircs._tcp", # 10 + "_ipp._tcp", # 10 + "_ipps._tcp", # 10 + "_h323be._tcp", # 10 + "_gits._tcp", # 10 + "_ftps._tcp", # 10 + "_ftpes._tcp", # 10 + "_dnss._udp", # 10 + "_dnss._tcp", # 10 + "_diameter._tcp", # 10 + "_crl._tcp", # 10 + "_crls._tcp", # 10 + "_cmp._tcp", # 10 + "_certificates._tcp", # 10 + "_aix._tcp", # 10 + "_afpovertcp._tcp", # 10 + "_collab-edge._tls", # 6 + "_tcp", # 5 + "_wildcard", # 3 + "_client._smtp", # 3 + "_udp", # 2 + "_tls", # 2 + "_msdcs", # 2 + "_gc._msdcs", # 2 + "_ldaps._tcp.dc._msdcs", # 1 + "_kerberos._tcp.kdc._msdcs", # 1 + "_kerberos.tcp.dc._msdcs", # 1 + "_imap", # 1 + "_iax", # 1 ] @@ -94,7 +154,17 @@ class dnscommonsrv(BaseModule): produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] meta = {"description": "Check for common SRV records"} - _max_event_handlers = 5 + options = {"top": 50, "max_event_handlers": 10} + options_desc = { + "top": "How many of the top SRV records to check", + "max_event_handlers": "How many instances of the module to run concurrently", + } + _max_event_handlers = 10 + + def _incoming_dedup_hash(self, event): + # dedupe by parent + parent_domain = self.helpers.parent_domain(event.data) + return hash(parent_domain), "already processed parent domain" async def filter_event(self, event): # skip SRV wildcards @@ -103,7 +173,9 @@ async def filter_event(self, event): return True async def handle_event(self, event): - queries = [event.data] + [f"{srv}.{event.data}" for srv in common_srvs] + top = int(self.config.get("top", 50)) + parent_domain = self.helpers.parent_domain(event.data) + queries = 
[f"{srv}.{parent_domain}" for srv in common_srvs[:top]] async for query, results in self.helpers.resolve_batch(queries, type="srv"): if results: - self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) + await self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index 8bb1fa1ed..c119857be 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -1,5 +1,4 @@ import re -from bs4 import BeautifulSoup from bbot.modules.templates.subdomain_enum import subdomain_enum @@ -25,7 +24,7 @@ async def query(self, domain): return ret else: self.debug(f'Valid response code "{status_code}" from DNSDumpster') - html = BeautifulSoup(res1.content, "html.parser") + html = self.helpers.beautifulsoup(res1.content, "html.parser") csrftoken = None csrfmiddlewaretoken = None try: @@ -73,7 +72,7 @@ async def query(self, domain): self.verbose(f'Bad response code "{status_code}" from DNSDumpster') return ret - html = BeautifulSoup(res2.content, "html.parser") + html = self.helpers.beautifulsoup(res2.content, "html.parser") escaped_domain = re.escape(domain) match_pattern = re.compile(r"^[\w\.-]+\." 
+ escaped_domain + r"$") for subdomain in html.findAll(text=match_pattern): diff --git a/bbot/modules/dnszonetransfer.py b/bbot/modules/dnszonetransfer.py index 1ec5bf5eb..0a48526dc 100644 --- a/bbot/modules/dnszonetransfer.py +++ b/bbot/modules/dnszonetransfer.py @@ -42,7 +42,9 @@ async def handle_event(self, event): continue self.hugesuccess(f"Successful zone transfer against {nameserver} for domain {domain}!") finding_description = f"Successful DNS zone transfer against {nameserver} for {domain}" - self.emit_event({"host": str(event.host), "description": finding_description}, "FINDING", source=event) + await self.emit_event( + {"host": str(event.host), "description": finding_description}, "FINDING", source=event + ) for name, ttl, rdata in zone.iterate_rdatas(): if str(name) == "@": parent_data = domain @@ -52,13 +54,13 @@ async def handle_event(self, event): if not parent_event or parent_event == event: parent_event = event else: - self.emit_event(parent_event) + await self.emit_event(parent_event) for rdtype, t in self.helpers.dns.extract_targets(rdata): if not self.helpers.is_ip(t): t = f"{t}.{domain}" module = self.helpers.dns._get_dummy_module(rdtype) child_event = self.scan.make_event(t, "DNS_NAME", parent_event, module=module) - self.emit_event(child_event) + await self.emit_event(child_event) else: self.debug(f"No data returned by {nameserver} for domain {domain}") diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 3fd47ee2d..000c3d5cf 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -19,4 +19,4 @@ async def handle_event(self, event): return for email in self.helpers.extract_emails(r.text): if email.endswith(query): - self.emit_event(email, "EMAIL_ADDRESS", source=event) + await self.emit_event(email, "EMAIL_ADDRESS", source=event) diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py index ca45402e6..562ae681f 100644 --- a/bbot/modules/ffuf_shortnames.py +++ 
b/bbot/modules/ffuf_shortnames.py @@ -114,7 +114,7 @@ def build_extension_list(self, event): else: return [extension_hint] - def find_delimeter(self, hint): + def find_delimiter(self, hint): delimiters = ["_", "-"] for d in delimiters: if d in hint: @@ -158,36 +158,40 @@ async def handle_event(self, event): if "shortname-file" in event.tags: for ext in used_extensions: async for r in self.execute_ffuf(tempfile, root_url, suffix=f".{ext}"): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + ) elif "shortname-directory" in event.tags: async for r in self.execute_ffuf(tempfile, root_url, exts=["/"]): r_url = f"{r['url'].rstrip('/')}/" - self.emit_event(r_url, "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event(r_url, "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) if self.config.get("find_delimiters"): if "shortname-directory" in event.tags: - delimeter_r = self.find_delimeter(filename_hint) - if delimeter_r: - delimeter, prefix, partial_hint = delimeter_r - self.verbose(f"Detected delimeter [{delimeter}] in hint [{filename_hint}]") + delimiter_r = self.find_delimiter(filename_hint) + if delimiter_r: + delimiter, prefix, partial_hint = delimiter_r + self.verbose(f"Detected delimiter [{delimiter}] in hint [{filename_hint}]") tempfile, tempfile_len = self.generate_templist(prefix=partial_hint) async for r in self.execute_ffuf( - tempfile, root_url, prefix=f"{prefix}{delimeter}", exts=["/"] + tempfile, root_url, prefix=f"{prefix}{delimiter}", exts=["/"] ): - self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + ) elif "shortname-file" in event.tags: for ext in used_extensions: - delimeter_r = self.find_delimeter(filename_hint) - if delimeter_r: 
- delimeter, prefix, partial_hint = delimeter_r - self.verbose(f"Detected delimeter [{delimeter}] in hint [{filename_hint}]") + delimiter_r = self.find_delimiter(filename_hint) + if delimiter_r: + delimiter, prefix, partial_hint = delimiter_r + self.verbose(f"Detected delimiter [{delimiter}] in hint [{filename_hint}]") tempfile, tempfile_len = self.generate_templist(prefix=partial_hint) async for r in self.execute_ffuf( - tempfile, root_url, prefix=f"{prefix}{delimeter}", suffix=f".{ext}" + tempfile, root_url, prefix=f"{prefix}{delimiter}", suffix=f".{ext}" ): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] ) @@ -217,7 +221,7 @@ async def finish(self): ) async for r in self.execute_ffuf(tempfile, url, prefix=prefix, exts=["/"]): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=self.shortname_to_event[hint], @@ -233,7 +237,7 @@ async def finish(self): async for r in self.execute_ffuf( tempfile, url, prefix=prefix, suffix=f".{ext}" ): - self.emit_event( + await self.emit_event( r["url"], "URL_UNVERIFIED", source=self.shortname_to_event[hint], diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py index 2f0c26c59..5cf190d1f 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -14,7 +14,7 @@ class filedownload(BaseModule): watched_events = ["URL_UNVERIFIED", "HTTP_RESPONSE"] produced_events = [] - flags = ["active", "safe", "web-basic"] + flags = ["active", "safe", "web-basic", "web-thorough"] meta = {"description": "Download common filetypes such as PDF, DOCX, PPTX, etc."} options = { "extensions": [ @@ -79,8 +79,8 @@ class filedownload(BaseModule): scope_distance_modifier = 3 async def setup(self): - self.extensions = list(set([e.lower().strip(".") for e in self.options.get("extensions", [])])) - self.max_filesize = self.options.get("max_filesize", "10MB") + self.extensions = list(set([e.lower().strip(".") for e in 
self.config.get("extensions", [])])) + self.max_filesize = self.config.get("max_filesize", "10MB") self.download_dir = self.scan.home / "filedownload" self.helpers.mkdir(self.download_dir) self.urls_downloaded = set() diff --git a/bbot/modules/fingerprintx.py b/bbot/modules/fingerprintx.py index be5695541..a7d8f2ea0 100644 --- a/bbot/modules/fingerprintx.py +++ b/bbot/modules/fingerprintx.py @@ -52,4 +52,4 @@ async def handle_batch(self, *events): protocol_data["port"] = port if banner: protocol_data["banner"] = banner - self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) + await self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index bf2c37097..9d75f4a9e 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -188,7 +188,7 @@ async def handle_event(self, event): for s in self.submodules.values(): await s.test(event) - def interactsh_callback(self, r): + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." 
in full_id: @@ -200,7 +200,7 @@ def interactsh_callback(self, r): matched_severity = match[2] matched_read_response = str(match[3]) - self.emit_event( + await self.emit_event( { "severity": matched_severity, "host": str(matched_event.host), @@ -229,6 +229,6 @@ async def finish(self): await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): - self.interactsh_callback(r) + await self.interactsh_callback(r) except InteractshError as e: self.debug(f"Error in interact.sh: {e}") diff --git a/bbot/modules/git.py b/bbot/modules/git.py index dafe151d1..fc61402de 100644 --- a/bbot/modules/git.py +++ b/bbot/modules/git.py @@ -29,7 +29,7 @@ async def handle_event(self, event): text = "" if text: if getattr(result, "status_code", 0) == 200 and "[core]" in text and not self.fp_regex.match(text): - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": url, "description": f"Exposed .git config at {url}"}, "FINDING", event, diff --git a/bbot/modules/github_codesearch.py b/bbot/modules/github_codesearch.py index c98335722..fdc58695b 100644 --- a/bbot/modules/github_codesearch.py +++ b/bbot/modules/github_codesearch.py @@ -21,13 +21,13 @@ async def handle_event(self, event): repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", source=event) if repo_event is None: continue - self.emit_event(repo_event) + await self.emit_event(repo_event) for raw_url in raw_urls: url_event = self.make_event(raw_url, "URL_UNVERIFIED", source=repo_event, tags=["httpx-safe"]) if not url_event: continue url_event.scope_distance = repo_event.scope_distance - self.emit_event(url_event) + await self.emit_event(url_event) async def query(self, query): repos = {} @@ -53,8 +53,8 @@ async def query(self, query): if not items: break for item in items: - htlm_url = item.get("html_url", "") - raw_url = self.raw_url(htlm_url) + html_url = item.get("html_url", "") + raw_url = self.raw_url(html_url) repo_url = item.get("repository", {}).get("html_url", "") if 
raw_url and repo_url: try: diff --git a/bbot/modules/github_org.py b/bbot/modules/github_org.py index 66182a2a6..66fa038a7 100644 --- a/bbot/modules/github_org.py +++ b/bbot/modules/github_org.py @@ -57,7 +57,7 @@ async def handle_event(self, event): for repo_url in repos: repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", source=event) repo_event.scope_distance = event.scope_distance - self.emit_event(repo_event) + await self.emit_event(repo_event) # find members from org (SOCIAL --> SOCIAL) if is_org and self.include_members: @@ -66,7 +66,7 @@ async def handle_event(self, event): for member in org_members: event_data = {"platform": "github", "profile_name": member, "url": f"https://github.com/{member}"} member_event = self.make_event(event_data, "SOCIAL", tags="github-org-member", source=event) - self.emit_event(member_event) + await self.emit_event(member_event) # find valid orgs from stub (ORG_STUB --> SOCIAL) elif event.type == "ORG_STUB": @@ -80,7 +80,7 @@ async def handle_event(self, event): event_data = {"platform": "github", "profile_name": user, "url": f"https://github.com/{user}"} github_org_event = self.make_event(event_data, "SOCIAL", tags="github-org", source=event) github_org_event.scope_distance = event.scope_distance - self.emit_event(github_org_event) + await self.emit_event(github_org_event) async def query_org_repos(self, query): repos = [] diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 5f4c4a5e8..8ead26314 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -7,7 +7,7 @@ class gowitness(BaseModule): - watched_events = ["URL"] + watched_events = ["URL", "SOCIAL"] produced_events = ["WEBSCREENSHOT", "URL", "URL_UNVERIFIED", "TECHNOLOGY"] flags = ["active", "safe", "web-screenshots"] meta = {"description": "Take screenshots of webpages"} @@ -76,9 +76,8 @@ class gowitness(BaseModule): }, ] _batch_size = 100 - # visit up to and including the scan's configured search distance plus one - # 
this is one hop further than the default - scope_distance_modifier = 1 + # gowitness accepts SOCIAL events up to distance 2, otherwise it is in-scope-only + scope_distance_modifier = 2 async def setup(self): self.timeout = self.config.get("timeout", 10) @@ -95,6 +94,20 @@ async def setup(self): custom_chrome_path = self.helpers.tools_dir / "chrome-linux" / "chrome" if custom_chrome_path.is_file(): self.chrome_path = custom_chrome_path + + # make sure we have a working chrome install + chrome_test_pass = False + for binary in ("chrome", "chromium", custom_chrome_path): + binary_path = self.helpers.which(binary) + if binary_path and Path(binary_path).is_file(): + chrome_test_proc = await self.helpers.run([binary_path, "--version"]) + if getattr(chrome_test_proc, "returncode", 1) == 0: + self.verbose(f"Found chrome executable at {binary_path}") + chrome_test_pass = True + break + if not chrome_test_pass: + return False, "Failed to set up Google chrome. Please install manually or try again with --force-deps." 
+ self.db_path = self.base_path / "gowitness.sqlite3" self.screenshot_path = self.base_path / "screenshots" self.command = self.construct_command() @@ -120,12 +133,21 @@ async def filter_event(self, event): # ignore events from self if event.type == "URL" and event.module == self: return False, "event is from self" + # Accept out-of-scope SOCIAL pages, but not URLs + if event.scope_distance > 0: + if event.type != "SOCIAL": + return False, "event is not in-scope" return True async def handle_batch(self, *events): self.prep() - stdin = "\n".join([str(e.data) for e in events]) - events = {e.data: e for e in events} + event_dict = {} + for e in events: + key = e.data + if e.type == "SOCIAL": + key = e.data["url"] + event_dict[key] = e + stdin = "\n".join(list(event_dict)) async for line in self.helpers.run_live(self.command, input=stdin): self.debug(line) @@ -136,8 +158,8 @@ async def handle_batch(self, *events): final_url = screenshot["final_url"] filename = screenshot["filename"] webscreenshot_data = {"filename": filename, "url": final_url} - source_event = events[url] - self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) + source_event = event_dict[url] + await self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) # emit URLs for url, row in self.new_network_logs.items(): @@ -147,20 +169,20 @@ async def handle_batch(self, *events): _id = row["url_id"] source_url = self.screenshots_taken[_id] - source_event = events[source_url] + source_event = event_dict[source_url] if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): - self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) + await self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) # emit technologies for _, row in self.new_technologies.items(): source_id = row["url_id"] source_url = self.screenshots_taken[source_id] - source_event = events[source_url] + source_event = 
event_dict[source_url] technology = row["value"] tech_data = {"technology": technology, "url": source_url, "host": str(source_event.host)} - self.emit_event(tech_data, "TECHNOLOGY", source=source_event) + await self.emit_event(tech_data, "TECHNOLOGY", source=source_event) def construct_command(self): # base executable @@ -192,6 +214,7 @@ def new_screenshots(self): if self.db_path.is_file(): with sqlite3.connect(str(self.db_path)) as con: con.row_factory = sqlite3.Row + con.text_factory = self.helpers.smart_decode cur = con.cursor() res = self.cur_execute(cur, "SELECT * FROM urls") for row in res: diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index bae721990..4adaa766a 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -30,7 +30,7 @@ async def setup(self): def rand_string(self, *args, **kwargs): return self.helpers.rand_string(*args, **kwargs) - def interactsh_callback(self, r): + async def interactsh_callback(self, r): full_id = r.get("full-id", None) if full_id: if "." 
in full_id: @@ -40,7 +40,7 @@ def interactsh_callback(self, r): matched_event = match[0] matched_technique = match[1] - self.emit_event( + await self.emit_event( { "host": str(matched_event.host), "url": matched_event.data["url"], @@ -58,7 +58,7 @@ async def finish(self): await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): - self.interactsh_callback(r) + await self.interactsh_callback(r) except InteractshError as e: self.debug(f"Error in interact.sh: {e}") @@ -128,7 +128,7 @@ async def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], @@ -168,7 +168,7 @@ async def handle_event(self, event): # emit all the domain reflections we found for dr in domain_reflections: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0c12ad740..376108d43 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -13,12 +13,19 @@ class httpx(BaseModule): flags = ["active", "safe", "web-basic", "web-thorough", "social-enum", "subdomain-enum", "cloud-enum"] meta = {"description": "Visit webpages. 
Many other modules rely on httpx"} - options = {"threads": 50, "in_scope_only": True, "version": "1.2.5", "max_response_size": 5242880} + options = { + "threads": 50, + "in_scope_only": True, + "version": "1.2.5", + "max_response_size": 5242880, + "store_responses": False, + } options_desc = { "threads": "Number of httpx threads to use", "in_scope_only": "Only visit web resources that are in scope.", "version": "httpx version", "max_response_size": "Max response size in bytes", + "store_responses": "Save raw HTTP responses to scan folder", } deps_ansible = [ { @@ -41,6 +48,7 @@ async def setup(self): self.timeout = self.scan.config.get("httpx_timeout", 5) self.retries = self.scan.config.get("httpx_retries", 1) self.max_response_size = self.config.get("max_response_size", 5242880) + self.store_responses = self.config.get("store_responses", False) self.visited = set() self.httpx_tempdir_regex = re.compile(r"^httpx\d+$") return True @@ -104,6 +112,11 @@ async def handle_batch(self, *events): f"{self.max_response_size}", ] + if self.store_responses: + response_dir = self.scan.home / "httpx" + self.helpers.mkdir(response_dir) + command += ["-srd", str(response_dir)] + dns_resolvers = ",".join(self.helpers.system_resolvers) if dns_resolvers: command += ["-r", dns_resolvers] @@ -153,11 +166,11 @@ async def handle_batch(self, *events): url_event = self.make_event(url, "URL", source_event, tags=tags) if url_event: if url_event != source_event: - self.emit_event(url_event) + await self.emit_event(url_event) else: url_event._resolved.set() # HTTP response - self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) + await self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) for tempdir in Path(tempfile.gettempdir()).iterdir(): if tempdir.is_dir() and self.httpx_tempdir_regex.match(tempdir.name): diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index 0ccf0391b..add45b665 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -289,4 
+289,4 @@ async def handle_event(self, event): url = event.data.get("url", "") if url: data["url"] = url - self.emit_event(data, "FINDING", event) + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 1e65c6e4c..792ca6d98 100644 --- a/bbot/modules/hunterio.py +++ b/bbot/modules/hunterio.py @@ -26,14 +26,14 @@ async def handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", event) if email_event: - self.emit_event(email_event) + await self.emit_event(email_event) for source in sources: domain = source.get("domain", "") if domain: - self.emit_event(domain, "DNS_NAME", email_event) + await self.emit_event(domain, "DNS_NAME", email_event) url = source.get("uri", "") if url: - self.emit_event(url, "URL_UNVERIFIED", email_event) + await self.emit_event(url, "URL_UNVERIFIED", email_event) async def query(self, query): emails = [] diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index ff7941dc2..7d558a23a 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -221,7 +221,7 @@ class safety_counter_obj: technique_strings.append(f"{method} ({technique})") description = f"IIS Shortname Vulnerability Detected. 
Potentially Vulnerable Method/Techniques: [{','.join(technique_strings)}]" - self.emit_event( + await self.emit_event( {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, @@ -314,7 +314,7 @@ class safety_counter_obj: hint_type = "shortname-file" else: hint_type = "shortname-directory" - self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) + await self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) async def filter_event(self, event): if "dir" in event.tags: diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f3f15f83f..5ccc8d36a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -21,9 +21,9 @@ def __init__(self, excavate): async def search(self, content, event, **kwargs): results = set() async for result, name in self._search(content, event, **kwargs): - results.add(result) - for result in results: - self.report(result, name, event, **kwargs) + results.add((result, name)) + for result, name in results: + await self.report(result, name, event, **kwargs) async def _search(self, content, event, **kwargs): for name, regex in self.compiled_regexes.items(): @@ -32,7 +32,7 @@ async def _search(self, content, event, **kwargs): for result in regex.findall(content): yield result, name - def report(self, result, name, event): + async def report(self, result, name, event): pass @@ -48,10 +48,10 @@ async def search(self, content, event, **kwargs): async for csp, name in self._search(content, event, **kwargs): extracted_domains = self.extract_domains(csp) for domain in extracted_domains: - self.report(domain, event, **kwargs) + await self.report(domain, event, **kwargs) - def report(self, domain, event, **kwargs): - self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + async def report(self, domain, event, **kwargs): + await 
self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) class HostnameExtractor(BaseExtractor): @@ -62,8 +62,8 @@ def __init__(self, excavate): self.regexes[f"dns_name_{i+1}"] = r.pattern super().__init__(excavate) - def report(self, result, name, event, **kwargs): - self.excavate.emit_event(result, "DNS_NAME", source=event) + async def report(self, result, name, event, **kwargs): + await self.excavate.emit_event(result, "DNS_NAME", source=event) class URLExtractor(BaseExtractor): @@ -95,7 +95,7 @@ async def search(self, content, event, **kwargs): urls_found = 0 for result, name in results: - url_event = self.report(result, name, event, **kwargs) + url_event = await self.report(result, name, event, **kwargs) if url_event is not None: url_in_scope = self.excavate.scan.in_scope(url_event) is_spider_danger = self.excavate.helpers.is_spider_danger(event, result) @@ -115,7 +115,7 @@ async def search(self, content, event, **kwargs): url_event.add_tag("spider-danger") self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") - self.excavate.emit_event(url_event) + await self.excavate.emit_event(url_event) if url_in_scope: urls_found += 1 @@ -150,7 +150,7 @@ async def _search(self, content, event, **kwargs): yield result, name - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): parsed_uri = None try: parsed_uri = self.excavate.helpers.urlparse(result) @@ -168,7 +168,7 @@ def report(self, result, name, event, **kwargs): parsed_url = getattr(event, "parsed", None) if parsed_url: event_data["url"] = parsed_url.geturl() - self.excavate.emit_event( + await self.excavate.emit_event( event_data, "FINDING", source=event, @@ -177,7 +177,7 @@ def report(self, result, name, event, **kwargs): protocol_data = {"protocol": parsed_uri.scheme, "host": str(host)} if port: protocol_data["port"] = port - self.excavate.emit_event( + await self.excavate.emit_event( 
protocol_data, "PROTOCOL", source=event, @@ -192,12 +192,12 @@ class EmailExtractor(BaseExtractor): regexes = {"email": _email_regex} tld_blacklist = ["png", "jpg", "jpeg", "bmp", "ico", "gif", "svg", "css", "ttf", "woff", "woff2"] - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): result = result.lower() tld = result.split(".")[-1] if tld not in self.tld_blacklist: self.excavate.debug(f"Found email address [{result}] from parsing [{event.data.get('url')}]") - self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) + await self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) class ErrorExtractor(BaseExtractor): @@ -218,10 +218,10 @@ class ErrorExtractor(BaseExtractor): "ASP.NET:4": r"Error ([\d-]+) \([\dA-F]+\)", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found error message from parsing [{event.data.get('url')}] with regex [{name}]") description = f"Error message Detected at Error Type: {name}" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", source=event, @@ -231,7 +231,7 @@ def report(self, result, name, event, **kwargs): class JWTExtractor(BaseExtractor): regexes = {"JWT": r"eyJ(?:[\w-]*\.)(?:[\w-]*\.)[\w-]*"} - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found JWT candidate [{result}]") try: j.decode(result, options={"verify_signature": False}) @@ -240,7 +240,7 @@ def report(self, result, name, event, **kwargs): if jwt_headers["alg"].upper()[0:2] == "HS": tags = ["crackable"] description = f"JWT Identified [{result}]" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event, @@ -254,14 +254,16 @@ 
def report(self, result, name, event, **kwargs): class SerializationExtractor(BaseExtractor): regexes = { "Java": r"(?:[^a-zA-Z0-9+/]|^)(rO0[a-zA-Z0-9+/]+={,2})", - ".NET": r"AAEAAAD//[a-zA-Z0-9+/]+={,2}", - "PHP": r"YTo[xyz0123456][a-zA-Z0-9+/]+={,2}", - "Possible Compressed": r"H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2}", + ".NET": r"(?:[^a-zA-Z0-9+/]|^)(AAEAAAD//[a-zA-Z0-9+/]+={,2})", + "PHP (Array)": r"(?:[^a-zA-Z0-9+/]|^)(YTo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "PHP (String)": r"(?:[^a-zA-Z0-9+/]|^)(czo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "PHP (Object)": r"(?:[^a-zA-Z0-9+/]|^)(Tzo[xyz0123456][a-zA-Z0-9+/]+={,2})", + "Possible Compressed": r"(?:[^a-zA-Z0-9+/]|^)(H4sIAAAAAAAA[a-zA-Z0-9+/]+={,2})", } - def report(self, result, name, event, **kwargs): - description = f"{name} serialized object found" - self.excavate.emit_event( + async def report(self, result, name, event, **kwargs): + description = f"{name} serialized object found: [{self.excavate.helpers.truncate_string(result,2000)}]" + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) @@ -272,9 +274,9 @@ class FunctionalityExtractor(BaseExtractor): "Web Service WSDL": r"(?i)((?:http|https)://[^\s]*?.(?:wsdl))", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): description = f"{name} found" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event ) @@ -314,7 +316,7 @@ class JavascriptExtractor(BaseExtractor): "possible_creds_var": r"(?:password|passwd|pwd|pass)\s*=+\s*['\"][^\s'\"]{1,60}['\"]", } - def report(self, result, name, event, **kwargs): + async def report(self, result, name, event, **kwargs): # ensure that basic auth matches aren't false positives if name == "authorization_basic": try: @@ -326,7 +328,7 @@ def report(self, result, name, event, **kwargs): 
self.excavate.debug(f"Found Possible Secret in Javascript [{result}]") description = f"Possible secret in JS [{result}] Signature [{name}]" - self.excavate.emit_event( + await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event ) @@ -366,16 +368,11 @@ async def handle_event(self, event): # handle redirects web_spider_distance = getattr(event, "web_spider_distance", 0) num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) - location = event.data.get("location", "") + location = getattr(event, "redirect_location", "") # if it's a redirect if location: # get the url scheme scheme = self.helpers.is_uri(location, return_scheme=True) - # if there's no scheme (i.e. it's a relative redirect) - if not scheme: - # then join the location with the current url - location = urljoin(event.parsed.geturl(), location) - scheme = self.helpers.is_uri(location, return_scheme=True) if scheme in ("http", "https"): if num_redirects <= self.max_redirects: # tag redirects to out-of-scope hosts as affiliates @@ -384,7 +381,7 @@ async def handle_event(self, event): # inherit web spider distance from parent (don't increment) source_web_spider_distance = getattr(event, "web_spider_distance", 0) url_event.web_spider_distance = source_web_spider_distance - self.emit_event(url_event) + await self.emit_event(url_event) else: self.verbose(f"Exceeded max HTTP redirects ({self.max_redirects}): {location}") diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 9b57b0e81..7aaf12d30 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -1,6 +1,7 @@ import random import ipaddress +from bbot.core.helpers import validators from bbot.modules.internal.base import BaseInternalModule @@ -21,6 +22,7 @@ class speculate(BaseInternalModule): "STORAGE_BUCKET", "SOCIAL", "AZURE_TENANT", + "USERNAME", ] produced_events = ["DNS_NAME", 
"OPEN_TCP_PORT", "IP_ADDRESS", "FINDING", "ORG_STUB"] flags = ["passive"] @@ -73,13 +75,13 @@ async def handle_event(self, event): ips = list(net) random.shuffle(ips) for ip in ips: - self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) + await self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) # parent domains if event.type == "DNS_NAME": parent = self.helpers.parent_domain(event.data) if parent != event.data: - self.emit_event(parent, "DNS_NAME", source=event, internal=True) + await self.emit_event(parent, "DNS_NAME", source=event, internal=True) # generate open ports @@ -91,7 +93,7 @@ async def handle_event(self, event): if event.type == "URL" or (event.type == "URL_UNVERIFIED" and self.open_port_consumers): # only speculate port from a URL if it wouldn't be speculated naturally from the host if event.host and (event.port not in self.ports or not speculate_open_ports): - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.host, event.port), "OPEN_TCP_PORT", source=event, @@ -108,7 +110,7 @@ async def handle_event(self, event): # inherit web spider distance from parent (don't increment) source_web_spider_distance = getattr(event, "web_spider_distance", 0) url_event.web_spider_distance = source_web_spider_distance - self.emit_event(url_event) + await self.emit_event(url_event) # from hosts if speculate_open_ports: @@ -120,7 +122,7 @@ async def handle_event(self, event): if event.type == "IP_ADDRESS" or usable_dns: for port in self.ports: - self.emit_event( + await self.emit_event( self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, @@ -154,7 +156,15 @@ async def handle_event(self, event): stub_event = self.make_event(stub, "ORG_STUB", source=event) if event.scope_distance > 0: stub_event.scope_distance = event.scope_distance - self.emit_event(stub_event) + await self.emit_event(stub_event) + + # USERNAME --> EMAIL + if event.type == "USERNAME": + email = event.data.split(":", 1)[-1] + if 
validators.soft_validate(email, "email"): + email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=["affiliate"]) + email_event.scope_distance = event.scope_distance + await self.emit_event(email_event) async def filter_event(self, event): # don't accept errored DNS_NAMEs diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index 0384a4d4d..847db0c7a 100644 --- a/bbot/modules/internetdb.py +++ b/bbot/modules/internetdb.py @@ -40,8 +40,7 @@ class internetdb(BaseModule): flags = ["passive", "safe", "portscan", "subdomain-enum"] meta = {"description": "Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities"} - # limit outgoing queue size to help avoid rate limiting - _qsize = 100 + _qsize = 500 base_url = "https://internetdb.shodan.io" @@ -76,7 +75,7 @@ async def handle_event(self, event): return if data: if r.status_code == 200: - self._parse_response(data=data, event=event) + await self._parse_response(data=data, event=event) elif r.status_code == 404: detail = data.get("detail", "") if detail: @@ -86,22 +85,22 @@ async def handle_event(self, event): err_msg = data.get("msg", "") self.verbose(f"Shodan error for {ip}: {err_data}: {err_msg}") - def _parse_response(self, data: dict, event): + async def _parse_response(self, data: dict, event): """Handles emitting events from returned JSON""" data: dict # has keys: cpes, hostnames, ip, ports, tags, vulns # ip is a string, ports is a list of ports, the rest is a list of strings for hostname in data.get("hostnames", []): - self.emit_event(hostname, "DNS_NAME", source=event) + await self.emit_event(hostname, "DNS_NAME", source=event) for cpe in data.get("cpes", []): - self.emit_event({"technology": cpe, "host": str(event.host)}, "TECHNOLOGY", source=event) + await self.emit_event({"technology": cpe, "host": str(event.host)}, "TECHNOLOGY", source=event) for port in data.get("ports", []): - self.emit_event( + await self.emit_event( 
self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, internal=True, quick=True ) vulns = data.get("vulns", []) if vulns: vulns_str = ", ".join([str(v) for v in vulns]) - self.emit_event( + await self.emit_event( {"description": f"Shodan reported verified vulnerabilities: {vulns_str}", "host": str(event.host)}, "FINDING", source=event, @@ -116,10 +115,11 @@ def get_ip(self, event): elif event.type == "DNS_NAME": # always try IPv4 first ipv6 = [] - for host in event.resolved_hosts: - if self.helpers.is_ip(host, version=4): - return host - elif self.helpers.is_ip(host, version=6): - ipv6.append(host) + ips = [h for h in event.resolved_hosts if self.helpers.is_ip(h)] + for ip in sorted([str(ip) for ip in ips]): + if self.helpers.is_ip(ip, version=4): + return ip + elif self.helpers.is_ip(ip, version=6): + ipv6.append(ip) for ip in ipv6: return ip diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index d0a66ce4c..e192b2abb 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -57,4 +57,4 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - self.emit_event(geo_data, "GEOLOCATION", event) + await self.emit_event(geo_data, "GEOLOCATION", event) diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index b6688abee..2b139f807 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -34,4 +34,4 @@ async def handle_event(self, event): ip_event = self.make_event(str(ip), "IP_ADDRESS", event, internal=True) # keep the scope distance low to give it one more hop for DNS resolution # ip_event.scope_distance = max(1, event.scope_distance) - self.emit_event(ip_event) + await self.emit_event(ip_event) diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index a8a143fdc..98f139505 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -47,4 +47,4 @@ async def handle_event(self, event): if error_msg: 
self.warning(error_msg) elif geo_data: - self.emit_event(geo_data, "GEOLOCATION", event) + await self.emit_event(geo_data, "GEOLOCATION", event) diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py index 4d9c9db68..895ffe243 100644 --- a/bbot/modules/masscan.py +++ b/bbot/modules/masscan.py @@ -105,7 +105,7 @@ async def setup(self): async def handle_batch(self, *events): if self.use_cache: - self.emit_from_cache() + await self.emit_from_cache() else: targets = [str(e.data) for e in events] if not targets: @@ -138,7 +138,7 @@ async def masscan(self, targets, result_callback, ping=False): try: with open(stats_file, "w") as stats_fh: async for line in self.helpers.run_live(command, sudo=True, stderr=stats_fh): - self.process_output(line, result_callback=result_callback) + await self.process_output(line, result_callback=result_callback) finally: for file in (stats_file, target_file): file.unlink() @@ -169,7 +169,7 @@ def _build_masscan_command(self, target_file=None, dry_run=False, ping=False): command += ("--echo",) return command - def process_output(self, line, result_callback): + async def process_output(self, line, result_callback): try: j = json.loads(line) except Exception: @@ -193,20 +193,20 @@ def process_output(self, line, result_callback): result = self.helpers.make_netloc(result, port_number) if source is None: source = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) - self.emit_event(source) - result_callback(result, source=source) + await self.emit_event(source) + await result_callback(result, source=source) - def append_alive_host(self, host, source): + async def append_alive_host(self, host, source): host_event = self.make_event(host, "IP_ADDRESS", source=self.get_source_event(host)) self.alive_hosts[host] = host_event self._write_ping_result(host) - self.emit_event(host_event) + await self.emit_event(host_event) - def emit_open_tcp_port(self, data, source): + async def emit_open_tcp_port(self, data, source): 
self._write_syn_result(data) - self.emit_event(data, "OPEN_TCP_PORT", source=source) + await self.emit_event(data, "OPEN_TCP_PORT", source=source) - def emit_from_cache(self): + async def emit_from_cache(self): ip_events = {} # ping scan if self.ping_cache.is_file(): @@ -220,7 +220,7 @@ def emit_from_cache(self): break ip_event = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) ip_events[ip] = ip_event - self.emit_event(ip_event) + await self.emit_event(ip_event) # syn scan if self.syn_cache.is_file(): cached_syns = list(self.helpers.read_file(self.syn_cache)) @@ -237,13 +237,15 @@ def emit_from_cache(self): if source_event is None: self.verbose(f"Source event not found for {line}") source_event = self.make_event(line, "IP_ADDRESS", source=self.get_source_event(line)) - self.emit_event(source_event) - self.emit_event(line, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(source_event) + await self.emit_event(line, "OPEN_TCP_PORT", source=source_event) def get_source_event(self, host): - source_event = self.scan.whitelist.get(host) + source_event = self.scan.target.get(host) if source_event is None: - source_event = self.scan.root_event + source_event = self.scan.whitelist.get(host) + if source_event is None: + source_event = self.scan.root_event return source_event async def cleanup(self): diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 5a0865476..02ff1aa85 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -1,6 +1,7 @@ import re import json import random +import asyncio import subprocess from bbot.modules.templates.subdomain_enum import subdomain_enum @@ -13,8 +14,8 @@ class massdns(subdomain_enum): It uses massdns to brute-force subdomains. At the end of a scan, it will leverage BBOT's word cloud to recursively discover target-specific subdomain mutations. - Each subdomain discovered via mutations is tagged with the "mutation" tag. 
This tag includes the depth at which - the mutations is found. I.e. the first mutation will be tagged "mutation-1". The second one (a mutation of a + Each subdomain discovered via mutations is tagged with the "mutation" tag. This tag indicates the depth at which + the mutation was found. I.e. the first mutation will be tagged "mutation-1". The second one (a mutation of a mutation) will be "mutation-2". Mutations of mutations of mutations will be "mutation-3", etc. This is especially useful for bug bounties because it enables you to recognize distant/rare subdomains at a glance. @@ -29,11 +30,13 @@ class massdns(subdomain_enum): "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", "max_resolvers": 1000, "max_mutations": 500, + "max_depth": 5, } options_desc = { "wordlist": "Subdomain wordlist URL", "max_resolvers": "Number of concurrent massdns resolvers", "max_mutations": "Max number of smart mutations per subdomain", + "max_depth": "How many subdomains deep to brute force, i.e. 
5.4.3.2.1.evilcorp.com", } subdomain_file = None deps_ansible = [ @@ -72,7 +75,7 @@ class massdns(subdomain_enum): }, ] reject_wildcards = "strict" - _qsize = 100 + _qsize = 10000 digit_regex = re.compile(r"\d+") @@ -89,6 +92,7 @@ async def setup(self): self.max_resolvers = self.config.get("max_resolvers", 1000) self.max_mutations = self.config.get("max_mutations", 500) + self.max_depth = max(1, self.config.get("max_depth", 5)) nameservers_url = ( "https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt" ) @@ -97,17 +101,33 @@ async def setup(self): cache_hrs=24 * 7, ) self.devops_mutations = list(self.helpers.word_cloud.devops_mutations) - self._mutation_run = 1 + self.mutation_run = 1 + + self.resolve_and_emit_queue = asyncio.Queue() + self.resolve_and_emit_task = asyncio.create_task(self.resolve_and_emit()) return await super().setup() async def filter_event(self, event): query = self.make_query(event) eligible, reason = await self.eligible_for_enumeration(event) + + # limit brute force depth + subdomain_depth = self.helpers.subdomain_depth(query) + 1 + if subdomain_depth > self.max_depth: + eligible = False + reason = f"subdomain depth of *.{query} ({subdomain_depth}) > max_depth ({self.max_depth})" + + # don't brute-force things that look like autogenerated PTRs + if self.helpers.is_ptr(query): + eligible = False + reason = f'"{query}" looks like an autogenerated PTR' + if eligible: self.add_found(event) # reject if already processed if self.already_processed(query): return False, f'Query "{query}" was already processed' + if eligible: self.processed.add(hash(query)) return True, reason @@ -116,23 +136,18 @@ async def filter_event(self, event): async def handle_event(self, event): query = self.make_query(event) self.source_events.add_target(event) - self.info(f"Brute-forcing subdomains for {query} (source: {event.data})") - for hostname in await self.massdns(query, self.subdomain_list): - self.emit_result(hostname, 
event, query) + results = await self.massdns(query, self.subdomain_list) + await self.resolve_and_emit_queue.put((results, event, None)) def abort_if(self, event): if not event.scope_distance == 0: return True, "event is not in scope" if "wildcard" in event.tags: return True, "event is a wildcard" - - def emit_result(self, result, source_event, query, tags=None): - if not result == source_event: - kwargs = {"abort_if": self.abort_if} - if tags is not None: - kwargs["tags"] = tags - self.emit_event(result, "DNS_NAME", source_event, **kwargs) + if "unresolved" in event.tags: + return True, "event is unresolved" + return False, "" def already_processed(self, hostname): if hash(hostname) in self.processed: @@ -143,12 +158,11 @@ async def massdns(self, domain, subdomains): subdomains = list(subdomains) domain_wildcard_rdtypes = set() - for domain, rdtypes in (await self.helpers.is_wildcard_domain(domain)).items(): + for _domain, rdtypes in (await self.helpers.is_wildcard_domain(domain)).items(): for rdtype, results in rdtypes.items(): if results: domain_wildcard_rdtypes.add(rdtype) - - if "A" in domain_wildcard_rdtypes: + if any([r in domain_wildcard_rdtypes for r in ("A", "CNAME")]): self.info( f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(domain_wildcard_rdtypes)})" ) @@ -205,12 +219,36 @@ async def massdns(self, domain, subdomains): ) # everything checks out - self.verbose(f"Resolving batch of {len(results):,} results") - resolved = dict([l async for l in self.helpers.resolve_batch(results, type=("A", "CNAME"))]) - resolved = {k: v for k, v in resolved.items() if v} - for hostname in resolved: - self.add_found(hostname) - return list(resolved) + return results + + async def resolve_and_emit(self): + """ + When results are found, they are placed into self.resolve_and_emit_queue. 
+ The purpose of this function (which is started as a task in the module's setup()) is to consume results from + the queue, resolve them, and if they resolve, emit them. + + This exists to prevent disrupting the scan with huge batches of DNS resolutions. + """ + while 1: + results, source_event, tags = await self.resolve_and_emit_queue.get() + self.verbose(f"Resolving batch of {len(results):,} results") + async with self._task_counter.count(f"{self.name}.resolve_and_emit()"): + async for hostname, r in self.helpers.resolve_batch(results, type=("A", "CNAME")): + if not r: + self.debug(f"Discarding {hostname} because it didn't resolve") + continue + self.add_found(hostname) + if source_event is None: + source_event = self.source_events.get(hostname) + if source_event is None: + self.warning(f"Could not correlate source event from: {hostname}") + source_event = self.scan.root_event + kwargs = {"abort_if": self.abort_if, "tags": tags} + await self.emit_event(hostname, "DNS_NAME", source_event, **kwargs) + + @property + def running(self): + return super().running or self.resolve_and_emit_queue.qsize() > 0 async def _canary_check(self, domain, num_checks=50): random_subdomains = list(self.gen_random_subdomains(num_checks)) @@ -340,6 +378,9 @@ def add_mutation(_domain_hash, m): self.mutations_tried.add(h) mutations.add(m) + num_base_mutations = len(base_mutations) + self.debug(f"Base mutations for {domain}: {num_base_mutations:,}") + # try every subdomain everywhere else for _domain, _subdomains in found: if _domain == domain: @@ -347,10 +388,7 @@ def add_mutation(_domain_hash, m): for s in _subdomains: first_segment = s.split(".")[0] # skip stuff with lots of numbers (e.g. 
PTRs) - digits = self.digit_regex.findall(first_segment) - excessive_digits = len(digits) > 2 - long_digits = any(len(d) > 3 for d in digits) - if excessive_digits or long_digits: + if self.has_excessive_digits(first_segment): continue add_mutation(domain_hash, first_segment) for word in self.helpers.extract_words( @@ -358,6 +396,9 @@ def add_mutation(_domain_hash, m): ): add_mutation(domain_hash, word) + num_massdns_mutations = len(mutations) - num_base_mutations + self.debug(f"Mutations from previous subdomains for {domain}: {num_massdns_mutations:,}") + # numbers + devops mutations for mutation in self.helpers.word_cloud.mutations( subdomains, cloud=False, numbers=3, number_padding=1 @@ -366,22 +407,26 @@ def add_mutation(_domain_hash, m): m = delimiter.join(mutation).lower() add_mutation(domain_hash, m) + num_word_cloud_mutations = len(mutations) - num_massdns_mutations + self.debug(f"Mutations added by word cloud for {domain}: {num_word_cloud_mutations:,}") + # special dns mutator + self.debug( + f"DNS Mutator size: {len(self.helpers.word_cloud.dns_mutator):,} (limited to {self.max_mutations:,})" + ) for subdomain in self.helpers.word_cloud.dns_mutator.mutations( subdomains, max_mutations=self.max_mutations ): add_mutation(domain_hash, subdomain) + num_mutations = len(mutations) - num_word_cloud_mutations + self.debug(f"Mutations added by DNS Mutator: {num_mutations:,}") + if mutations: self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") results = list(await self.massdns(query, mutations)) - for hostname in results: - source_event = self.source_events.get(hostname) - if source_event is None: - self.warning(f"Could not correlate source event from: {hostname}") - source_event = self.scan.root_event - self.emit_result(hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"]) if results: + await self.resolve_and_emit_queue.put((results, None, [f"mutation-{self.mutation_run}"])) found_mutations = True continue break 
@@ -389,7 +434,7 @@ def add_mutation(_domain_hash, m): self.warning(e) if found_mutations: - self._mutation_run += 1 + self.mutation_run += 1 def add_found(self, host): if not isinstance(host, str): @@ -421,3 +466,16 @@ def gen_random_subdomains(self, n=50): yield subdomain for _ in range(5): yield self.helpers.rand_string(length=8, digits=False) + + def has_excessive_digits(self, d): + """ + Identifies dns names with excessive numbers, e.g.: + - w1-2-3.evilcorp.com + - ptr1234.evilcorp.com + """ + digits = self.digit_regex.findall(d) + excessive_digits = len(digits) > 2 + long_digits = any(len(d) > 3 for d in digits) + if excessive_digits or long_digits: + return True + return False diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py new file mode 100644 index 000000000..a59cc30e3 --- /dev/null +++ b/bbot/modules/newsletters.py @@ -0,0 +1,42 @@ +# Created a new module called 'newsletters' that will scrape the websites (or recursive websites, +# thanks to BBOT's sub-domain enumeration) looking for the presence of an 'email type' that also +# contains a 'placeholder'. The combination of these two HTML items usually signify the presence +# of an "Enter Your Email Here" type Newsletter Subscription service. This module could be used +# to find newsletters for a future email bombing attack. + +from .base import BaseModule +import re + +# Known Websites with Newsletters +# https://futureparty.com/ +# https://www.marketingbrew.com/ +# https://buffer.com/ +# https://www.milkkarten.net/ +# https://geekout.mattnavarra.com/ + + +class newsletters(BaseModule): + watched_events = ["HTTP_RESPONSE"] + produced_events = ["FINDING"] + flags = ["active", "safe"] + meta = {"description": "Searches for Newsletter Submission Entry Fields on Websites"} + + # Parse through Website to find a Text Entry Box of 'type = email' + # and ensure that there is placeholder text within it. 
+ def find_type(self, soup): + email_type = soup.find(type="email") + if email_type: + regex = re.compile(r"placeholder") + if regex.search(str(email_type)): + return True + return False + + async def handle_event(self, event): + if event.data["status_code"] == 200: + soup = self.helpers.beautifulsoup(event.data["body"], "html.parser") + result = self.find_type(soup) + if result: + description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" + data = {"host": str(event.host), "description": description, "url": event.data["url"]} + + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/nmap.py b/bbot/modules/nmap.py index aeb28f9ae..6d8a1293b 100644 --- a/bbot/modules/nmap.py +++ b/bbot/modules/nmap.py @@ -52,10 +52,10 @@ async def handle_batch(self, *events): for port in host.open_ports: port_number = int(port.split("/")[0]) netloc = self.helpers.make_netloc(host.address, port_number) - self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) for hostname in host.hostnames: netloc = self.helpers.make_netloc(hostname, port_number) - self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) + await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) finally: output_file.unlink(missing_ok=True) diff --git a/bbot/modules/nsec.py b/bbot/modules/nsec.py index bfd770d44..7d313c140 100644 --- a/bbot/modules/nsec.py +++ b/bbot/modules/nsec.py @@ -18,12 +18,12 @@ async def handle_event(self, event): async for result in self.nsec_walk(event.data): if not emitted_finding: emitted_finding = True - self.emit_event( + await self.emit_event( {"host": event.data, "description": f"DNSSEC NSEC Zone Walking Enabled for domain: {event.data}"}, "FINDING", source=event, ) - self.emit_event(result, "DNS_NAME", source=event) + await self.emit_event(result, "DNS_NAME", source=event) async def get_nsec_record(self, domain): domain = 
domain.replace("\\000.", "") diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 76e93c595..c69beb941 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -87,7 +87,7 @@ async def handle_event(self, event): for result, request_url in await self.handle_url(event): if result and request_url: self.found.add(found_hash) - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": request_url, @@ -98,7 +98,7 @@ async def handle_event(self, event): ) fqdn = result.get("FQDN", "") if fqdn: - self.emit_event(fqdn, "DNS_NAME", source=event) + await self.emit_event(fqdn, "DNS_NAME", source=event) break async def filter_event(self, event): diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index f4d592511..0c9c26dbb 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -6,7 +6,7 @@ class OAUTH(BaseModule): watched_events = ["DNS_NAME", "URL_UNVERIFIED"] produced_events = ["DNS_NAME"] - flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "active", "safe"] + flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "web-thorough", "active", "safe"] meta = {"description": "Enumerate OAUTH and OpenID Connect services"} options = {"try_all": False} options_desc = {"try_all": "Check for OAUTH/IODC on every subdomain and URL."} @@ -66,16 +66,16 @@ async def handle_event(self, event): source=event, ) finding_event.source_domain = source_domain - self.emit_event(finding_event) + await self.emit_event(finding_event) url_event = self.make_event( token_endpoint, "URL_UNVERIFIED", source=event, tags=["affiliate", "oauth-token-endpoint"] ) url_event.source_domain = source_domain - self.emit_event(url_event) + await self.emit_event(url_event) for result in oidc_results: if result not in (domain, event.data): event_type = "URL_UNVERIFIED" if self.helpers.is_url(result) else "DNS_NAME" - self.emit_event(result, event_type, source=event, tags=["affiliate"]) + await self.emit_event(result, event_type, 
source=event, tags=["affiliate"]) for oauth_task in oauth_tasks: url = await oauth_task @@ -90,7 +90,7 @@ async def handle_event(self, event): source=event, ) oauth_finding.source_domain = source_domain - self.emit_event(oauth_finding) + await self.emit_event(oauth_finding) def url_and_base(self, url): yield url diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index db9fcd946..afed903ac 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -3,7 +3,7 @@ from contextlib import suppress from .csv import CSV -from bbot.core.helpers.misc import make_ip_type, is_ip, is_port +from bbot.core.helpers.misc import make_ip_type, is_ip, is_port, best_http_status severity_map = { "INFO": 0, @@ -21,9 +21,19 @@ class asset_inventory(CSV): - watched_events = ["OPEN_TCP_PORT", "DNS_NAME", "URL", "FINDING", "VULNERABILITY", "TECHNOLOGY", "IP_ADDRESS"] + watched_events = [ + "OPEN_TCP_PORT", + "DNS_NAME", + "URL", + "FINDING", + "VULNERABILITY", + "TECHNOLOGY", + "IP_ADDRESS", + "WAF", + "HTTP_RESPONSE", + ] produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] - meta = {"description": "Output to an asset inventory style flattened CSV file"} + meta = {"description": "Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV"} options = {"output_file": "", "use_previous": False, "summary_netmask": 16} options_desc = { "output_file": "Set a custom output file", @@ -31,7 +41,20 @@ class asset_inventory(CSV): "summary_netmask": "Subnet mask to use when summarizing IP addresses at end of scan", } - header_row = ["Host", "Provider", "IP(s)", "Status", "Open Ports", "Risk Rating", "Findings", "Description"] + header_row = [ + "Host", + "Provider", + "IP (External)", + "IP (Internal)", + "Open Ports", + "HTTP Status", + "HTTP Title", + "Risk Rating", + "Findings", + "Technologies", + "WAF", + "DNS Records", + ] filename = "asset-inventory.csv" async def setup(self): @@ -88,13 +111,15 @@ def sort_key(asset): findings_and_vulns = asset.findings.union(asset.vulnerabilities) ports = getattr(asset, "ports", set()) ports = [str(p) for p in sorted([int(p) for p in asset.ports])] - ips = sorted([str(i) for i in getattr(asset, "ip_addresses", [])]) + ips_all = getattr(asset, "ip_addresses", []) + ips_external = sorted([str(ip) for ip in [i for i in ips_all if not i.is_private]]) + ips_internal = sorted([str(ip) for ip in [i for i in ips_all if i.is_private]]) host = self.helpers.make_ip_type(getattr(asset, "host", "")) if host and isinstance(host, str): _, domain = self.helpers.split_domain(host) if domain: increment_stat("Domains", domain) - for ip in ips: + for ip in ips_all: net = ipaddress.ip_network(f"{ip}/{self.summary_netmask}", strict=False) increment_stat("IP Addresses", str(net)) for port in ports: @@ -102,12 +127,16 @@ def sort_key(asset): row = { "Host": host, "Provider": getattr(asset, "provider", ""), - "IP(s)": ",".join(ips), - "Status": "Active" if asset.ports else "N/A", - "Open Ports": ",".join(ports), + "IP (External)": ", ".join(ips_external), + "IP (Internal)": ", ".join(ips_internal), + "Open Ports": ", ".join(ports), + "HTTP Status": asset.http_status_full, + "HTTP Title": str(getattr(asset, "http_title", "")), "Risk Rating": 
severity_map[getattr(asset, "risk_rating", "")], "Findings": "\n".join(findings_and_vulns), - "Description": "\n".join(str(x) for x in getattr(asset, "technologies", set())), + "Technologies": "\n".join(str(x) for x in getattr(asset, "technologies", set())), + "WAF": getattr(asset, "waf", ""), + "DNS Records": ", ".join(sorted([str(r) for r in getattr(asset, "dns_records", [])])), } row.update(asset.custom_fields) self.writerow(row) @@ -136,7 +165,7 @@ async def finish(self): # yield to event loop to make sure we don't hold up the scan await self.helpers.sleep(0) host = row.get("Host", "").strip() - ips = row.get("IP(s)", "") + ips = row.get("IP (External)", "") + "," + row.get("IP (Internal)", "") if not host or not ips: continue hostkey = _make_hostkey(host, ips) @@ -148,19 +177,19 @@ async def finish(self): self.add_custom_headers(list(asset.custom_fields)) if not is_ip(asset.host): host_event = self.make_event(asset.host, "DNS_NAME", source=self.scan.root_event) - self.emit_event(host_event) + await self.emit_event(host_event) for port in asset.ports: netloc = self.helpers.make_netloc(asset.host, port) open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=host_event) - self.emit_event(open_port_event) + await self.emit_event(open_port_event) else: for ip in asset.ip_addresses: ip_event = self.make_event(ip, "IP_ADDRESS", source=self.scan.root_event) - self.emit_event(ip_event) + await self.emit_event(ip_event) for port in asset.ports: netloc = self.helpers.make_netloc(ip, port) open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=ip_event) - self.emit_event(open_port_event) + await self.emit_event(open_port_event) else: self.warning( f"use_previous=True was set but no previous asset inventory was found at {self.output_file}" @@ -185,14 +214,19 @@ class Asset: def __init__(self, host): self.host = host self.ip_addresses = set() + self.dns_records = set() self.ports = set() self.findings = set() self.vulnerabilities = set() self.status = 
"UNKNOWN" self.risk_rating = 0 self.provider = "" + self.waf = "" self.technologies = set() self.custom_fields = {} + self.http_status = 0 + self.http_title = "" + self.redirect_location = "" def absorb_csv_row(self, row): # host @@ -200,7 +234,8 @@ def absorb_csv_row(self, row): if host and not is_ip(host): self.host = host # ips - self.ip_addresses = set(_make_ip_list(row.get("IP(s)", ""))) + self.ip_addresses = set(_make_ip_list(row.get("IP (External)", ""))) + self.ip_addresses.update(set(_make_ip_list(row.get("IP (Internal)", "")))) # ports ports = [i.strip() for i in row.get("Open Ports", "").split(",")] self.ports.update(set(i for i in ports if i and is_port(i))) @@ -208,7 +243,7 @@ def absorb_csv_row(self, row): findings = [i.strip() for i in row.get("Findings", "").splitlines()] self.findings.update(set(i for i in findings if i)) # technologies - technologies = [i.strip() for i in row.get("Description", "").splitlines()] + technologies = [i.strip() for i in row.get("Technologies", "").splitlines()] self.technologies.update(set(i for i in technologies if i)) # risk rating risk_rating = row.get("Risk Rating", "").strip() @@ -230,7 +265,25 @@ def absorb_event(self, event): if not is_ip(event.host): self.host = event.host - self.ip_addresses = set(_make_ip_list(event.resolved_hosts)) + dns_children = getattr(event, "_dns_children", {}) + for rdtype, records in sorted(dns_children.items(), key=lambda x: x[0]): + for record in sorted([str(r) for r in records]): + self.dns_records.add(f"{rdtype}:{record}") + + http_status = getattr(event, "http_status", 0) + update_http_status = bool(http_status) and best_http_status(http_status, self.http_status) == http_status + if update_http_status: + self.http_status = http_status + if str(http_status).startswith("3"): + if event.type == "HTTP_RESPONSE": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + self.redirect_location = redirect_location + else: + self.redirect_location = "" + + 
if event.resolved_hosts: + self.ip_addresses.update(set(_make_ip_list(event.resolved_hosts))) if event.port: self.ports.add(str(event.port)) @@ -251,6 +304,16 @@ def absorb_event(self, event): if event.type == "TECHNOLOGY": self.technologies.add(event.data["technology"]) + if event.type == "WAF": + if waf := event.data.get("WAF", ""): + if update_http_status or not self.waf: + self.waf = waf + + if event.type == "HTTP_RESPONSE": + if title := event.data.get("title", ""): + if update_http_status or not self.http_title: + self.http_title = title + for tag in event.tags: if tag.startswith("cdn-") or tag.startswith("cloud-"): self.provider = tag @@ -260,6 +323,10 @@ def absorb_event(self, event): def hostkey(self): return _make_hostkey(self.host, self.ip_addresses) + @property + def http_status_full(self): + return str(self.http_status) + (f" -> {self.redirect_location}" if self.redirect_location else "") + def _make_hostkey(host, ips): """ diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index c82d96304..1d4ec9a36 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -27,7 +27,7 @@ def _event_precheck(self, event): # output module specific stuff # omitted events such as HTTP_RESPONSE etc. 
- if event._omit: + if event._omit and not event.type in self.get_watched_events(): return False, "_omit is True" # force-output certain events to the graph @@ -41,6 +41,14 @@ def _event_precheck(self, event): return True, "precheck succeeded" + async def _event_postcheck(self, event): + acceptable, reason = await super()._event_postcheck(event) + if acceptable and not event._stats_recorded and event.type not in ("FINISHED",): + event._stats_recorded = True + self.scan.stats.event_distributed(event) + self.scan.stats.event_produced(event) + return acceptable, reason + def is_incoming_duplicate(self, event, add=False): is_incoming_duplicate, reason = super().is_incoming_duplicate(event, add=add) # make exception for graph-important events diff --git a/bbot/modules/output/emails.py b/bbot/modules/output/emails.py index 029bc5aca..e96c5d97c 100644 --- a/bbot/modules/output/emails.py +++ b/bbot/modules/output/emails.py @@ -12,14 +12,19 @@ class Emails(Human): output_filename = "emails.txt" + async def setup(self): + self.emails_written = 0 + return await super().setup() + def _scope_distance_check(self, event): return BaseModule._scope_distance_check(self, event) async def handle_event(self, event): if self.file is not None: + self.emails_written += 1 self.file.write(f"{event.data}\n") self.file.flush() async def report(self): if getattr(self, "_file", None) is not None: - self.info(f"Saved email addresses to {self.output_file}") + self.info(f"Saved {self.emails_written:,} email addresses to {self.output_file}") diff --git a/bbot/modules/output/http.py b/bbot/modules/output/http.py index 10ca1c8df..014610736 100644 --- a/bbot/modules/output/http.py +++ b/bbot/modules/output/http.py @@ -13,6 +13,7 @@ class HTTP(BaseOutputModule): "username": "", "password": "", "timeout": 10, + "siem_friendly": False, } options_desc = { "url": "Web URL", @@ -21,12 +22,14 @@ class HTTP(BaseOutputModule): "username": "Username (basic auth)", "password": "Password (basic auth)", "timeout": 
"HTTP timeout", + "siem_friendly": "Format JSON in a SIEM-friendly way for ingestion into Elastic, Splunk, etc.", } async def setup(self): self.url = self.config.get("url", "") self.method = self.config.get("method", "POST") self.timeout = self.config.get("timeout", 10) + self.siem_friendly = self.config.get("siem_friendly", False) self.headers = {} bearer = self.config.get("bearer", "") if bearer: @@ -52,7 +55,7 @@ async def handle_event(self, event): method=self.method, auth=self.auth, headers=self.headers, - json=dict(event), + json=event.json(siem_friendly=self.siem_friendly), raise_error=True, ) break diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index f13cf7808..bf8517db9 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -7,16 +7,22 @@ class JSON(BaseOutputModule): watched_events = ["*"] meta = {"description": "Output to Newline-Delimited JSON (NDJSON)"} - options = {"output_file": "", "console": False} - options_desc = {"output_file": "Output to file", "console": "Output to console"} + options = {"output_file": "", "console": False, "siem_friendly": False} + options_desc = { + "output_file": "Output to file", + "console": "Output to console", + "siem_friendly": "Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc.", + } _preserve_graph = True async def setup(self): self._prep_output_dir("output.ndjson") + self.siem_friendly = self.config.get("siem_friendly", False) return True async def handle_event(self, event): - event_str = json.dumps(dict(event)) + event_json = event.json(siem_friendly=self.siem_friendly) + event_str = json.dumps(event_json) if self.file is not None: self.file.write(event_str + "\n") self.file.flush() diff --git a/bbot/modules/output/splunk.py b/bbot/modules/output/splunk.py new file mode 100644 index 000000000..242f1759e --- /dev/null +++ b/bbot/modules/output/splunk.py @@ -0,0 +1,59 @@ +from bbot.core.errors import RequestError + +from 
bbot.modules.output.base import BaseOutputModule + + +class Splunk(BaseOutputModule): + watched_events = ["*"] + meta = {"description": "Send every event to a splunk instance through HTTP Event Collector"} + options = { + "url": "", + "hectoken": "", + "index": "", + "source": "", + "timeout": 10, + } + options_desc = { + "url": "Web URL", + "hectoken": "HEC Token", + "index": "Index to send data to", + "source": "Source path to be added to the metadata", + "timeout": "HTTP timeout", + } + + async def setup(self): + self.url = self.config.get("url", "") + self.source = self.config.get("source", "bbot") + self.index = self.config.get("index", "main") + self.timeout = self.config.get("timeout", 10) + self.headers = {} + + hectoken = self.config.get("hectoken", "") + if hectoken: + self.headers["Authorization"] = f"Splunk {hectoken}" + if not self.url: + return False, "Must set URL" + if not self.source: + self.warning("Please provide a source") + return True + + async def handle_event(self, event): + while 1: + try: + data = { + "index": self.index, + "source": self.source, + "sourcetype": "_json", + "event": event.json(), + } + await self.helpers.request( + url=self.url, + method="POST", + headers=self.headers, + json=data, + raise_error=True, + ) + break + except RequestError as e: + self.warning(f"Error sending {event}: {e}, retrying...") + await self.helpers.sleep(1) diff --git a/bbot/modules/output/subdomains.py b/bbot/modules/output/subdomains.py index 49dea2db8..bfb7174ac 100644 --- a/bbot/modules/output/subdomains.py +++ b/bbot/modules/output/subdomains.py @@ -15,6 +15,7 @@ class Subdomains(Human): async def setup(self): self.include_unresolved = self.config.get("include_unresolved", False) + self.subdomains_written = 0 return await super().setup() async def filter_event(self, event): @@ -27,9 +28,10 @@ def _scope_distance_check(self, event): async def handle_event(self, event): if self.file is not None: + self.subdomains_written += 1 
self.file.write(f"{event.data}\n") self.file.flush() async def report(self): if getattr(self, "_file", None) is not None: - self.info(f"Saved subdomains to {self.output_file}") + self.info(f"Saved {self.subdomains_written:,} subdomains to {self.output_file}") diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 65880d9c8..3458edaa9 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -119,14 +119,14 @@ async def do_mining(self, wl, url, batch_size, compare_helper): pass return results - def process_results(self, event, results): + async def process_results(self, event, results): url = event.data.get("url") for result, reasons, reflection in results: tags = [] if reflection: tags = ["http_reflection"] description = f"[Paramminer] {self.compare_mode.capitalize()}: [{result}] Reasons: [{reasons}] Reflection: [{str(reflection)}]" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": url, "description": description}, "FINDING", event, @@ -171,7 +171,7 @@ async def handle_event(self, event): results = await self.do_mining(wl, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{event.data}]") - self.process_results(event, results) + await self.process_results(event, results) async def count_test(self, url): baseline = await self.helpers.request(url) @@ -247,4 +247,4 @@ async def finish(self): results = await self.do_mining(untested_matches_copy, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{url}]") - self.process_results(event, results) + await self.process_results(event, results) diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index c1e0773c3..2c378f585 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -20,7 +20,7 @@ async def handle_event(self, event): if results: for hostname in results: if not hostname == 
event: - self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) + await self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) async def query(self, query): results = set() diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 619107b53..5a63f824e 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -26,7 +26,7 @@ async def handle_event(self, event): query = self.make_query(event) self.verbose(f"Searching for any postman workspaces, collections, requests belonging to {query}") for url in await self.query(query): - self.emit_event(url, "URL_UNVERIFIED", source=event, tags="httpx-safe") + await self.emit_event(url, "URL_UNVERIFIED", source=event, tags="httpx-safe") async def query(self, query): interesting_urls = [] @@ -70,16 +70,22 @@ async def query(self, query): workspaces.append(workspace) for item in workspaces: id = item.get("id", "") - interesting_urls.append(f"{self.base_url}/workspace/{id}") - environments, collections = await self.search_workspace(id) - interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") - for e_id in environments: - interesting_urls.append(f"{self.base_url}/environment/{e_id}") - for c_id in collections: - interesting_urls.append(f"{self.base_url}/collection/{c_id}") - requests = await self.search_collections(id) - for r_id in requests: - interesting_urls.append(f"{self.base_url}/request/{r_id}") + name = item.get("name", "") + tldextract = self.helpers.tldextract(query) + if tldextract.domain.lower() in name.lower(): + self.verbose(f"Discovered workspace {name} ({id})") + interesting_urls.append(f"{self.base_url}/workspace/{id}") + environments, collections = await self.search_workspace(id) + interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") + for e_id in environments: + interesting_urls.append(f"{self.base_url}/environment/{e_id}") + for c_id in collections: + interesting_urls.append(f"{self.base_url}/collection/{c_id}") + requests = 
await self.search_collections(id) + for r_id in requests: + interesting_urls.append(f"{self.base_url}/request/{r_id}") + else: + self.verbose(f"Skipping workspace {name} ({id}) as it does not appear to be in scope") return interesting_urls async def search_workspace(self, id): @@ -90,6 +96,8 @@ async def search_workspace(self, id): status_code = getattr(r, "status_code", 0) try: json = r.json() + if not isinstance(json, dict): + raise ValueError(f"Got unexpected value for JSON: {json}") except Exception as e: self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") return [], [] diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index a8f57709a..db8612a1e 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -42,9 +42,9 @@ async def handle_event(self, event): emails = asn.pop("emails", []) self.cache_put(asn) asn_event = self.make_event(asn, "ASN", source=event) - self.emit_event(asn_event) + await self.emit_event(asn_event) for email in emails: - self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) + await self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) async def report(self): asn_data = sorted(self.asn_cache.items(), key=lambda x: self.asn_counts[x[0]], reverse=True) diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index fd873b799..717900bee 100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -48,4 +48,4 @@ async def handle_event(self, event): tags = [] if self.helpers.is_spider_danger(event, unverified_url): tags.append("spider-danger") - self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) + await self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) diff --git a/bbot/modules/secretsdb.py b/bbot/modules/secretsdb.py index 83f305c61..3fc8ad539 100644 --- a/bbot/modules/secretsdb.py +++ b/bbot/modules/secretsdb.py @@ -54,7 +54,7 @@ async def handle_event(self, event): parsed_url = getattr(event, "parsed", None) 
if parsed_url: event_data["url"] = parsed_url.geturl() - self.emit_event( + await self.emit_event( event_data, "FINDING", source=event, diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index 87358a955..86872c052 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -19,7 +19,7 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - await self.emit_event_wait(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) async def query(self, query, parse_fn=None, request_fn=None): results = set() @@ -43,5 +43,5 @@ async def query(self, query, parse_fn=None, request_fn=None): results.add(hostname) yield hostname if ' {e.host}" description += f" ({source_hosts_str})" - self.emit_event({"host": event.host, "url": url, "description": description}, "FINDING", source=event) + await self.emit_event( + {"host": event.host, "url": url, "description": description}, "FINDING", source=event + ) else: self.debug(reason) diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py index 6d1825b8b..c5c69293c 100644 --- a/bbot/modules/subdomaincenter.py +++ b/bbot/modules/subdomaincenter.py @@ -21,7 +21,7 @@ async def request_url(self, query): for i, _ in enumerate(range(self.retries + 1)): if i > 0: self.verbose(f"Retry #{i} for {query} after response code {status_code}") - response = await self.helpers.request(url) + response = await self.helpers.request(url, timeout=self.http_timeout + 30) status_code = getattr(response, "status_code", 0) if status_code == 429: await self.sleep(20) diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index 71ebc4e08..45b9b31d7 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -211,7 +211,7 @@ async def handle_event(self, event): version = "<= 2019 (Either Pre-2017 (vulnerable), or 2017-2019 w/ 
Encrypt-Then-Mac)" description = f"Telerik RAU AXD Handler detected. Verbose Errors Enabled: [{str(verbose_errors)}] Version Guess: [{version}]" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": f"{event.data}{webresource}", "description": description}, "FINDING", event, @@ -237,7 +237,7 @@ async def handle_event(self, event): description = f"[CVE-2017-11317] [{str(version)}] {webresource}" if "fileInfo" in output.stdout: self.debug(f"Confirmed Vulnerable Telerik (version: {str(version)}") - self.emit_event( + await self.emit_event( { "severity": "CRITICAL", "description": description, @@ -276,7 +276,7 @@ async def handle_event(self, event): await self.helpers.cancel_tasks(tasks) self.debug(f"Detected Telerik UI instance ({dh})") description = f"Telerik DialogHandler detected" - self.emit_event( + await self.emit_event( {"host": str(event.host), "url": f"{event.data}{dh}", "description": description}, "FINDING", event, @@ -288,50 +288,49 @@ async def handle_event(self, event): spellcheckhandler = "Telerik.Web.UI.SpellCheckHandler.axd" result, _ = await self.test_detector(event.data, spellcheckhandler) - try: - # The standard behavior for the spellcheck handler without parameters is a 500 - if result.status_code == 500: - # Sometimes webapps will just return 500 for everything, so rule out the false positive - validate_result, _ = await self.test_detector(event.data, self.helpers.rand_string()) - self.debug(validate_result) - if validate_result.status_code != 500: - self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") - description = f"Telerik SpellCheckHandler detected" - self.emit_event( - { - "host": str(event.host), - "url": f"{event.data}{spellcheckhandler}", - "description": description, - }, - "FINDING", - event, - ) - except Exception: - pass + status_code = getattr(result, "status_code", 0) + # The standard behavior for the spellcheck handler without parameters is a 500 + if status_code == 500: + # 
Sometimes webapps will just return 500 for everything, so rule out the false positive + validate_result, _ = await self.test_detector(event.data, self.helpers.rand_string()) + self.debug(validate_result) + validate_status_code = getattr(validate_result, "status_code", 0) + if validate_status_code not in (0, 500): + self.debug(f"Detected Telerik UI instance (Telerik.Web.UI.SpellCheckHandler.axd)") + description = f"Telerik SpellCheckHandler detected" + await self.emit_event( + { + "host": str(event.host), + "url": f"{event.data}{spellcheckhandler}", + "description": description, + }, + "FINDING", + event, + ) chartimagehandler = "ChartImage.axd?ImageName=bqYXJAqm315eEd6b%2bY4%2bGqZpe7a1kY0e89gfXli%2bjFw%3d" result, _ = await self.test_detector(event.data, chartimagehandler) - - if result: - if result.status_code == 200: - chartimagehandler_error = "ChartImage.axd?ImageName=" - result_error, _ = await self.test_detector(event.data, chartimagehandler_error) - if result_error.status_code != 200: - self.emit_event( - { - "host": str(event.host), - "url": f"{event.data}{chartimagehandler}", - "description": "Telerik ChartImage AXD Handler Detected", - }, - "FINDING", - event, - ) + status_code = getattr(result, "status_code", 0) + if status_code == 200: + chartimagehandler_error = "ChartImage.axd?ImageName=" + result_error, _ = await self.test_detector(event.data, chartimagehandler_error) + error_status_code = getattr(result_error, "status_code", 0) + if error_status_code not in (0, 200): + await self.emit_event( + { + "host": str(event.host), + "url": f"{event.data}{chartimagehandler}", + "description": "Telerik ChartImage AXD Handler Detected", + }, + "FINDING", + event, + ) elif event.type == "HTTP_RESPONSE": resp_body = event.data.get("body", None) if resp_body: if '":{"SerializedParameters":"' in resp_body: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], @@ -341,7 +340,7 @@ async def handle_event(self, event): event, 
) elif '"_serializedConfiguration":"' in resp_body: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": event.data["url"], diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py index eef8f5bee..f9681385b 100644 --- a/bbot/modules/templates/bucket.py +++ b/bbot/modules/templates/bucket.py @@ -52,7 +52,7 @@ async def handle_dns_name(self, event): for d in self.delimiters: buckets.add(d.join(split)) async for bucket_name, url, tags in self.brute_buckets(buckets, permutations=self.permutations): - self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) async def handle_storage_bucket(self, event): url = event.data["url"] @@ -61,12 +61,12 @@ async def handle_storage_bucket(self, event): description, tags = await self._check_bucket_open(bucket_name, url) if description: event_data = {"host": event.host, "url": url, "description": description} - self.emit_event(event_data, "FINDING", source=event, tags=tags) + await self.emit_event(event_data, "FINDING", source=event, tags=tags) async for bucket_name, url, tags in self.brute_buckets( [bucket_name], permutations=self.permutations, omit_base=True ): - self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) async def brute_buckets(self, buckets, permutations=False, omit_base=False): buckets = set(buckets) diff --git a/bbot/modules/templates/credential_leak.py b/bbot/modules/templates/credential_leak.py deleted file mode 100644 index 5085e197a..000000000 --- a/bbot/modules/templates/credential_leak.py +++ /dev/null @@ -1,33 +0,0 @@ -from bbot.modules.base import BaseModule - - -class credential_leak(BaseModule): - """ - A typical free API-based subdomain enumeration module - Inherited by many other 
modules including sublist3r, dnsdumpster, etc. - """ - - async def setup(self): - self.queries_processed = set() - self.data_seen = set() - return True - - async def filter_event(self, event): - query = self.make_query(event) - query_hash = hash(query) - if query_hash not in self.queries_processed: - self.queries_processed.add(query_hash) - return True - return False, f'Already processed "{query}"' - - def make_query(self, event): - if "target" in event.tags: - return event.data - _, domain = self.helpers.split_domain(event.data) - return domain - - def already_seen(self, item): - h = hash(item) - already_seen = h in self.data_seen - self.data_seen.add(h) - return already_seen diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 0a0067628..790b35515 100644 --- a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -38,7 +38,7 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) async def request_url(self, query): url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" diff --git a/bbot/modules/url_manipulation.py b/bbot/modules/url_manipulation.py index f4d598c63..983595fe1 100644 --- a/bbot/modules/url_manipulation.py +++ b/bbot/modules/url_manipulation.py @@ -74,7 +74,7 @@ async def handle_event(self, event): if "body" in reasons: reported_signature = f"Modified URL: {sig[1]}" description = f"Url Manipulation: [{','.join(reasons)}] Sig: [{reported_signature}]" - self.emit_event( + await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", source=event, diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py index f1efe08e5..4c3811af0 100644 --- a/bbot/modules/urlscan.py +++ 
b/bbot/modules/urlscan.py @@ -25,16 +25,18 @@ async def handle_event(self, event): domain_event = self.make_event(domain, "DNS_NAME", source=event) if domain_event: if str(domain_event.host).endswith(query) and not str(domain_event.host) == str(event.host): - self.emit_event(domain_event, abort_if=self.abort_if) + await self.emit_event(domain_event, abort_if=self.abort_if) source_event = domain_event if url: url_event = self.make_event(url, "URL_UNVERIFIED", source=source_event) if url_event: if str(url_event.host).endswith(query): if self.urls: - self.emit_event(url_event, abort_if=self.abort_if) + await self.emit_event(url_event, abort_if=self.abort_if) else: - self.emit_event(str(url_event.host), "DNS_NAME", source=event, abort_if=self.abort_if) + await self.emit_event( + str(url_event.host), "DNS_NAME", source=event, abort_if=self.abort_if + ) else: self.debug(f"{url_event.host} does not match {query}") diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index c2a5e4431..9b154ab63 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -26,7 +26,7 @@ async def setup(self): async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) for domain, _ in await self.query(query): - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) async def query(self, query): results = set() @@ -37,9 +37,8 @@ async def query(self, query): self.verbose(f"Error retrieving reverse whois results (status code: {status_code})") content = getattr(r, "content", b"") - from bs4 import BeautifulSoup - html = BeautifulSoup(content, "html.parser") + html = self.helpers.beautifulsoup(content, "html.parser") found = set() for table_row in html.findAll("tr"): table_cells = table_row.findAll("td") diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index 836d14ea2..8fd0bc3d4 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py 
@@ -1,6 +1,12 @@ from bbot.modules.base import BaseModule from wafw00f import main as wafw00f_main +# disable wafw00f logging +import logging + +wafw00f_logger = logging.getLogger("wafw00f") +wafw00f_logger.setLevel(logging.CRITICAL + 100) + class wafw00f(BaseModule): """ @@ -20,18 +26,24 @@ class wafw00f(BaseModule): in_scope_only = True per_hostport_only = True + async def filter_event(self, event): + http_status = getattr(event, "http_status", 0) + if not http_status or str(http_status).startswith("3"): + return False, f"Invalid HTTP status code: {http_status}" + return True, "" + async def handle_event(self, event): url = f"{event.parsed.scheme}://{event.parsed.netloc}/" - WW = await self.scan.run_in_executor(wafw00f_main.WAFW00F, url) + WW = await self.scan.run_in_executor(wafw00f_main.WAFW00F, url, followredirect=False) waf_detections = await self.scan.run_in_executor(WW.identwaf) if waf_detections: for waf in waf_detections: - self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event) + await self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event) else: if self.config.get("generic_detect") == True: generic = await self.scan.run_in_executor(WW.genericdetect) if generic: - self.emit_event( + await self.emit_event( { "host": str(event.host), "url": url, diff --git a/bbot/modules/wappalyzer.py b/bbot/modules/wappalyzer.py index 6d30fc057..c87274a29 100644 --- a/bbot/modules/wappalyzer.py +++ b/bbot/modules/wappalyzer.py @@ -28,7 +28,7 @@ async def setup(self): async def handle_event(self, event): for res in await self.scan.run_in_executor(self.wappalyze, event.data): - self.emit_event( + await self.emit_event( {"technology": res.lower(), "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event ) diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index 4bec112bf..92dc78db5 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -27,7 +27,7 @@ async def 
setup(self): async def handle_event(self, event): query = self.make_query(event) for result, event_type in await self.query(query): - self.emit_event(result, event_type, event, abort_if=self.abort_if) + await self.emit_event(result, event_type, event, abort_if=self.abort_if) async def query(self, query): results = set() @@ -56,7 +56,7 @@ async def query(self, query): dns_names = set() collapsed_urls = 0 start_time = datetime.now() - parsed_urls = await self.scan.run_in_executor( + parsed_urls = await self.scan.run_in_executor_mp( self.helpers.validators.collapse_urls, urls, threshold=self.garbage_threshold, diff --git a/bbot/modules/zoomeye.py b/bbot/modules/zoomeye.py index 3c83fa828..b1d4e7670 100644 --- a/bbot/modules/zoomeye.py +++ b/bbot/modules/zoomeye.py @@ -36,7 +36,7 @@ async def handle_event(self, event): tags = [] if not hostname.endswith(f".{query}"): tags = ["affiliate"] - self.emit_event(hostname, "DNS_NAME", event, tags=tags) + await self.emit_event(hostname, "DNS_NAME", event, tags=tags) async def query(self, query): results = set() diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 7f320dbe2..96929e4fc 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -168,7 +168,6 @@ async def _emit_event(self, event, **kwargs): - Updating scan statistics. """ log.debug(f"Emitting {event}") - event_distributed = False try: on_success_callback = kwargs.pop("on_success_callback", None) abort_if = kwargs.pop("abort_if", None) @@ -205,6 +204,7 @@ async def _emit_event(self, event, **kwargs): dns_children = {} if event.type in ("DNS_NAME", "IP_ADDRESS"): + event._dns_children = dns_children for tag in dns_tags: event.add_tag(tag) @@ -260,7 +260,7 @@ async def _emit_event(self, event, **kwargs): abort_result, reason = abort_result msg += f": {reason}" if abort_result: - log.debug(msg) + log.verbose(msg) return # run success callback before distributing event (so it can add tags, etc.) 
@@ -269,14 +269,13 @@ async def _emit_event(self, event, **kwargs): await self.scan.helpers.execute_sync_or_async(on_success_callback, event) await self.distribute_event(event) - event_distributed = True # speculate DNS_NAMES and IP_ADDRESSes from other event types source_event = event if ( event.host and event.type not in ("DNS_NAME", "DNS_NAME_UNRESOLVED", "IP_ADDRESS", "IP_RANGE") - and not str(event.module) == "speculate" + and not (event.type in ("OPEN_TCP_PORT", "URL_UNVERIFIED") and str(event.module) == "speculate") ): source_module = self.scan.helpers._make_dummy_module("host", _type="internal") source_module._priority = 4 @@ -320,6 +319,7 @@ async def _emit_event(self, event, **kwargs): f'Event validation failed for DNS child of {source_event}: "{record}" ({rdtype}): {e}' ) for child_event in dns_child_events: + log.debug(f"Queueing DNS child for {event}: {child_event}") self.queue_event(child_event) except ValidationError as e: @@ -328,8 +328,6 @@ async def _emit_event(self, event, **kwargs): finally: event._resolved.set() - if event_distributed: - self.scan.stats.event_distributed(event) log.debug(f"{event.module}.emit_event() finished for {event}") def hash_event_graph(self, event): diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 31bc37680..1a74d41a8 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -1,4 +1,5 @@ import re +import sys import asyncio import logging import traceback @@ -12,6 +13,7 @@ from collections import OrderedDict from concurrent.futures import ProcessPoolExecutor +from bbot import __version__ from bbot import config as bbot_config from .target import Target @@ -329,6 +331,7 @@ async def async_start(self): await self._prep() self._start_log_handlers() + self.trace(f'Ran BBOT {__version__} at {scan_start_time}, command: {" ".join(sys.argv)}') if not self.target: self.warning(f"No scan targets specified") @@ -931,10 +934,13 @@ def error(self, *args, trace=True, **kwargs): if trace: self.trace() - 
def trace(self): - e_type, e_val, e_traceback = exc_info() - if e_type is not None: - log.trace(traceback.format_exc()) + def trace(self, msg=None): + if msg is None: + e_type, e_val, e_traceback = exc_info() + if e_type is not None: + log.trace(traceback.format_exc()) + else: + log.trace(msg) def critical(self, *args, trace=True, **kwargs): log.critical(*args, extra={"scan_id": self.id}, **kwargs) diff --git a/bbot/scanner/stats.py b/bbot/scanner/stats.py index cc2b3e576..02be9a801 100644 --- a/bbot/scanner/stats.py +++ b/bbot/scanner/stats.py @@ -15,7 +15,6 @@ def __init__(self, scan): self.scan = scan self.module_stats = {} self.events_emitted_by_type = {} - self.perf_stats = [] def event_distributed(self, event): _increment(self.events_emitted_by_type, event.type) @@ -66,9 +65,6 @@ def table(self): return [header] + table def _make_table(self): - self.perf_stats.sort(key=lambda x: x[-1]) - for callback, runtime in self.perf_stats: - log.info(f"{callback}\t{runtime}") table = self.table() if len(table) == 1: table += [["None", "None", "None"]] diff --git a/bbot/scripts/docs.py b/bbot/scripts/docs.py index dcf9cd710..8e6d045f3 100755 --- a/bbot/scripts/docs.py +++ b/bbot/scripts/docs.py @@ -94,6 +94,11 @@ def update_individual_module_options(): assert len(bbot_module_table.splitlines()) > 50 update_md_files("BBOT MODULES", bbot_module_table) + # BBOT output modules + bbot_output_module_table = module_loader.modules_table(mod_type="output") + assert len(bbot_output_module_table.splitlines()) > 10 + update_md_files("BBOT OUTPUT MODULES", bbot_output_module_table) + # BBOT module options bbot_module_options_table = module_loader.modules_options_table() assert len(bbot_module_options_table.splitlines()) > 100 diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 684ec18a2..b60a0633d 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -1,10 +1,12 @@ import ssl import shutil import pytest +import asyncio import logging from pathlib import Path 
from pytest_httpserver import HTTPServer +from bbot.core.helpers.misc import execute_sync_or_async from bbot.core.helpers.interactsh import server_list as interactsh_servers @@ -98,20 +100,34 @@ class Interactsh_mock: def __init__(self): self.interactions = [] self.correlation_id = "deadbeef-dead-beef-dead-beefdeadbeef" + self.stop = False def mock_interaction(self, subdomain_tag): self.interactions.append(subdomain_tag) async def register(self, callback=None): + if callable(callback): + asyncio.create_task(self.poll_loop(callback)) return "fakedomain.fakeinteractsh.com" async def deregister(self, callback=None): - pass + self.stop = True - async def poll(self): + async def poll_loop(self, callback=None): + while not self.stop: + data_list = await self.poll(callback) + if not data_list: + await asyncio.sleep(1) + continue + + async def poll(self, callback=None): poll_results = [] for subdomain_tag in self.interactions: - poll_results.append({"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": "HTTP"}) + result = {"full-id": f"{subdomain_tag}.fakedomain.fakeinteractsh.com", "protocol": "HTTP"} + poll_results.append(result) + if callback is not None: + await execute_sync_or_async(callback, result) + self.interactions = [] return poll_results diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 675385784..cadde29ad 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -91,7 +91,9 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("https://evilcorp.com.:666", dummy=True) == "https://evilcorp.com:666/" assert scan.make_event("https://[bad::c0de]", dummy=True).with_port().geturl() == "https://[bad::c0de]:443/" assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" - assert "status-200" in scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]).tags 
+ url_event = scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]) + assert "status-200" in url_event.tags + assert url_event.http_status == 200 with pytest.raises(ValidationError, match=".*status tag.*"): scan.make_event("https://evilcorp.com", "URL", events.ipv4_url) @@ -101,6 +103,22 @@ async def test_events(events, scan, helpers, bbot_config): assert events.http_response.parsed.scheme == "http" assert events.http_response.with_port().geturl() == "http://example.com:80/" + http_response = scan.make_event( + { + "port": "80", + "title": "HTTP%20RESPONSE", + "url": "http://www.evilcorp.com:80", + "input": "http://www.evilcorp.com:80", + "location": "/asdf", + "status_code": 301, + }, + "HTTP_RESPONSE", + dummy=True, + ) + assert http_response.http_status == 301 + assert http_response.http_title == "HTTP RESPONSE" + assert http_response.redirect_location == "http://www.evilcorp.com/asdf" + # open port tests assert events.open_port in events.domain assert "api.publicapis.org:443" in events.open_port @@ -232,6 +250,10 @@ async def test_events(events, scan, helpers, bbot_config): corrected_event3 = scan.make_event("wat.asdf.com", "IP_ADDRESS", dummy=True) assert corrected_event3.type == "DNS_NAME" + corrected_event4 = scan.make_event("bob@evilcorp.com", "USERNAME", dummy=True) + assert corrected_event4.type == "EMAIL_ADDRESS" + assert "affiliate" in corrected_event4.tags + test_vuln = scan.make_event( {"host": "EVILcorp.com", "severity": "iNfo ", "description": "asdf"}, "VULNERABILITY", dummy=True ) @@ -303,7 +325,10 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" - assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data == 
"http://xn--eckwd4c7c.xn--zckzah/テスト" + assert ( + scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) # thai assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" @@ -334,8 +359,7 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" assert ( - scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data - == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) assert ( scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data @@ -361,6 +385,19 @@ async def test_events(events, scan, helpers, bbot_config): assert reconstituted_event.type == "DNS_NAME" assert "127.0.0.1" in reconstituted_event.resolved_hosts + # SIEM-friendly serialize/deserialize + json_event_siemfriendly = db_event.json(siem_friendly=True) + assert json_event_siemfriendly["scope_distance"] == 1 + assert json_event_siemfriendly["data"] == {"DNS_NAME": "evilcorp.com"} + assert json_event_siemfriendly["type"] == "DNS_NAME" + assert json_event_siemfriendly["timestamp"] == timestamp + reconstituted_event2 = event_from_json(json_event_siemfriendly, siem_friendly=True) + assert reconstituted_event2.scope_distance == 1 + assert reconstituted_event2.timestamp.timestamp() == timestamp + assert reconstituted_event2.data == "evilcorp.com" + assert reconstituted_event2.type == "DNS_NAME" + assert "127.0.0.1" in reconstituted_event2.resolved_hosts + http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", source=scan.root_event) assert http_response.source_id == scan.root_event.id assert http_response.data["input"] == 
"http://example.com:80" diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 0c50f65eb..c8045e595 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -52,8 +52,14 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https ### MISC ### assert helpers.is_domain("evilcorp.co.uk") assert not helpers.is_domain("www.evilcorp.co.uk") + assert helpers.is_domain("evilcorp.notreal") + assert not helpers.is_domain("asdf.evilcorp.notreal") + assert not helpers.is_domain("notreal") assert helpers.is_subdomain("www.evilcorp.co.uk") assert not helpers.is_subdomain("evilcorp.co.uk") + assert helpers.is_subdomain("www.evilcorp.notreal") + assert not helpers.is_subdomain("evilcorp.notreal") + assert not helpers.is_subdomain("notreal") assert helpers.is_url("http://evilcorp.co.uk/asdf?a=b&c=d#asdf") assert helpers.is_url("https://evilcorp.co.uk/asdf?a=b&c=d#asdf") assert helpers.is_uri("ftp://evilcorp.co.uk") == True @@ -67,6 +73,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.parent_domain("www.evilcorp.co.uk") == "evilcorp.co.uk" assert helpers.parent_domain("evilcorp.co.uk") == "evilcorp.co.uk" assert helpers.parent_domain("localhost") == "localhost" + assert helpers.parent_domain("www.evilcorp.notreal") == "evilcorp.notreal" + assert helpers.parent_domain("evilcorp.notreal") == "evilcorp.notreal" + assert helpers.parent_domain("notreal") == "notreal" assert list(helpers.domain_parents("test.www.evilcorp.co.uk")) == ["www.evilcorp.co.uk", "evilcorp.co.uk"] assert list(helpers.domain_parents("www.evilcorp.co.uk", include_self=True)) == [ "www.evilcorp.co.uk", @@ -159,6 +168,12 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "]:22/my-file.csv", ) + assert helpers.best_http_status(200, 404) == 200 + assert helpers.best_http_status(500, 400) == 400 + assert 
helpers.best_http_status(301, 302) == 301 + assert helpers.best_http_status(0, 302) == 302 + assert helpers.best_http_status(500, 0) == 500 + assert helpers.split_domain("www.evilcorp.co.uk") == ("www", "evilcorp.co.uk") assert helpers.split_domain("asdf.www.test.notreal") == ("asdf.www", "test.notreal") assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") @@ -167,6 +182,12 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.split_domain("192.168.0.1") == ("", "192.168.0.1") assert helpers.split_domain("dead::beef") == ("", "dead::beef") + assert helpers.subdomain_depth("a.s.d.f.evilcorp.co.uk") == 4 + assert helpers.subdomain_depth("a.s.d.f.evilcorp.com") == 4 + assert helpers.subdomain_depth("evilcorp.com") == 0 + assert helpers.subdomain_depth("a.evilcorp.com") == 1 + assert helpers.subdomain_depth("a.s.d.f.evilcorp.notreal") == 4 + assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) assert helpers.split_host_port("evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) @@ -338,6 +359,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert "asdf" in helpers.str_or_file(str(test_file)) assert "nope" in helpers.str_or_file("nope") assert tuple(helpers.chain_lists([str(test_file), "nope"], try_files=True)) == ("asdf", "fdsa", "nope") + assert tuple(helpers.chain_lists("one, two", try_files=True)) == ("one", "two") assert test_file.is_file() with pytest.raises(DirectoryCreationError, match="Failed to create.*"): @@ -434,20 +456,6 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.cache_get("string", cache_hrs=24 * 7) is None assert helpers.cache_get("string", cache_hrs=24 * 14) == "wat" - cache_dict = helpers.CacheDict(max_size=10) - cache_dict.put("1", 2) - assert cache_dict["1"] == 2 - assert 
cache_dict.get("1") == 2 - assert len(cache_dict) == 1 - cache_dict["2"] = 3 - assert cache_dict["2"] == 3 - assert cache_dict.get("2") == 3 - assert len(cache_dict) == 2 - for i in range(20): - cache_dict[str(i)] = i + 1 - assert len(cache_dict) == 10 - assert tuple(cache_dict) == tuple(hash(str(x)) for x in range(10, 20)) - test_file = Path(scan.config["home"]) / "testfile.asdf" with open(test_file, "w") as f: for i in range(100): diff --git a/bbot/test/test_step_1/test_manager_deduplication.py b/bbot/test/test_step_1/test_manager_deduplication.py index 305796bea..e046988ae 100644 --- a/bbot/test/test_step_1/test_manager_deduplication.py +++ b/bbot/test/test_step_1/test_manager_deduplication.py @@ -15,7 +15,7 @@ async def setup(self): async def handle_event(self, event): self.events.append(event) - self.emit_event(f"{self.name}.test.notreal", "DNS_NAME", source=event) + await self.emit_event(f"{self.name}.test.notreal", "DNS_NAME", source=event) class EverythingModule(DefaultModule): _name = "everything_module" @@ -27,7 +27,7 @@ class EverythingModule(DefaultModule): async def handle_event(self, event): self.events.append(event) if event.type == "DNS_NAME": - self.emit_event(f"{event.data}:88", "OPEN_TCP_PORT", source=event) + await self.emit_event(f"{event.data}:88", "OPEN_TCP_PORT", source=event) class NoSuppressDupes(DefaultModule): _name = "no_suppress_dupes" diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index 08ac2c3ae..a927da8a3 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ -257,7 +257,7 @@ async def filter_event(self, event): return False, "bleh" async def handle_event(self, event): - self.emit_event( + await self.emit_event( {"host": str(event.host), "description": "yep", "severity": "CRITICAL"}, "VULNERABILITY", source=event ) @@ -681,7 +681,7 @@ def custom_setup(scan): assert 0 == len([e for e in 
events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) assert 0 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) - assert len(all_events) == 14 + assert len(all_events) == 13 assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) @@ -692,9 +692,8 @@ def custom_setup(scan): assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert len(all_events_nodups) == 12 + assert len(all_events_nodups) == 11 assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) @@ -705,7 
+704,6 @@ def custom_setup(scan): assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 6 @@ -719,7 +717,6 @@ def custom_setup(scan): assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999"]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) - assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) # sslcert with out-of-scope chain events, all_events, all_events_nodups, graph_output_events, graph_output_batch_events = await do_scan( @@ -739,9 +736,8 @@ def custom_setup(scan): assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) - 
assert 0 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) - assert len(all_events) == 12 + assert len(all_events) == 11 assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 1]) assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 1]) @@ -750,9 +746,8 @@ def custom_setup(scan): assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert len(all_events_nodups) == 10 + assert len(all_events_nodups) == 9 assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 1]) assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 1]) @@ -761,7 +756,6 @@ def custom_setup(scan): assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and 
e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "sslcert"]) assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 5 @@ -773,7 +767,6 @@ def custom_setup(scan): assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) - assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) @pytest.mark.asyncio diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index e21a43aa0..7a25bb4ea 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -18,16 +18,27 @@ async def test_modules_basic(scan, helpers, events, bbot_config, bbot_scanner, h httpx_mock.add_response(method=http_method, url=re.compile(r".*"), json={"test": "test"}) # output module specific event filtering tests - base_output_module = BaseOutputModule(scan) - base_output_module.watched_events = ["IP_ADDRESS"] + base_output_module_1 = 
BaseOutputModule(scan) + base_output_module_1.watched_events = ["IP_ADDRESS"] localhost = scan.make_event("127.0.0.1", source=scan.root_event) - assert base_output_module._event_precheck(localhost)[0] == True + assert base_output_module_1._event_precheck(localhost)[0] == True localhost._internal = True - assert base_output_module._event_precheck(localhost)[0] == False + assert base_output_module_1._event_precheck(localhost)[0] == False localhost._internal = False - assert base_output_module._event_precheck(localhost)[0] == True + assert base_output_module_1._event_precheck(localhost)[0] == True localhost._omit = True - assert base_output_module._event_precheck(localhost)[0] == False + assert base_output_module_1._event_precheck(localhost)[0] == True + + base_output_module_2 = BaseOutputModule(scan) + base_output_module_2.watched_events = ["*"] + localhost = scan.make_event("127.0.0.1", source=scan.root_event) + assert base_output_module_2._event_precheck(localhost)[0] == True + localhost._internal = True + assert base_output_module_2._event_precheck(localhost)[0] == False + localhost._internal = False + assert base_output_module_2._event_precheck(localhost)[0] == True + localhost._omit = True + assert base_output_module_2._event_precheck(localhost)[0] == False # common event filtering tests for module_class in (BaseModule, BaseOutputModule, BaseReportModule, BaseInternalModule): @@ -289,3 +300,57 @@ async def test_modules_basic_perdomainonly(scan, helpers, events, bbot_config, b else: assert valid_1 == True assert valid_2 == True + + +@pytest.mark.asyncio +async def test_modules_basic_stats(helpers, events, bbot_config, bbot_scanner, httpx_mock, monkeypatch): + from bbot.modules.base import BaseModule + + class dummy(BaseModule): + _name = "dummy" + watched_events = ["*"] + + async def handle_event(self, event): + await self.emit_event( + {"host": "www.evilcorp.com", "url": "http://www.evilcorp.com", "description": "asdf"}, "FINDING", event + ) + + scan = 
bbot_scanner( + "evilcorp.com", + config=bbot_config, + force_start=True, + ) + scan.helpers.dns.mock_dns({("evilcorp.com", "A"): "127.0.254.1", ("www.evilcorp.com", "A"): "127.0.254.2"}) + + scan.modules["dummy"] = dummy(scan) + events = [e async for e in scan.async_start()] + + assert len(events) == 3 + + assert set(scan.stats.module_stats) == {"dummy", "python", "TARGET"} + + target_stats = scan.stats.module_stats["TARGET"] + assert target_stats.emitted == {"SCAN": 1, "DNS_NAME": 1} + assert target_stats.emitted_total == 2 + assert target_stats.produced == {"SCAN": 1, "DNS_NAME": 1} + assert target_stats.produced_total == 2 + assert target_stats.consumed == {} + assert target_stats.consumed_total == 0 + + dummy_stats = scan.stats.module_stats["dummy"] + assert dummy_stats.emitted == {"FINDING": 1} + assert dummy_stats.emitted_total == 1 + assert dummy_stats.produced == {"FINDING": 1} + assert dummy_stats.produced_total == 1 + assert dummy_stats.consumed == {"SCAN": 1, "DNS_NAME": 1} + assert dummy_stats.consumed_total == 2 + + python_stats = scan.stats.module_stats["python"] + assert python_stats.emitted == {} + assert python_stats.emitted_total == 0 + assert python_stats.produced == {} + assert python_stats.produced_total == 0 + assert python_stats.consumed == {"SCAN": 1, "FINDING": 1, "DNS_NAME": 1} + assert python_stats.consumed_total == 3 + + assert scan.stats.events_emitted_by_type == {"SCAN": 1, "FINDING": 1, "DNS_NAME": 1} diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 9179d42e6..675197265 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -24,7 +24,7 @@ async def test_web_helpers(bbot_scanner, bbot_config, bbot_httpserver): # should fail because URL is not in-scope assert response.status_code == 500 response = await scan2.helpers.request(url) - # should suceed because URL is in-scope + # should succeed because URL is in-scope assert response.status_code == 200 assert 
response.text == "test_http_helpers_yep" @@ -45,6 +45,30 @@ async def test_web_helpers(bbot_scanner, bbot_config, bbot_httpserver): assert filename2.is_file() with open(filename2) as f: assert f.read() == download_content + + # beautifulsoup + download_content = """ +
+

Example Domain

+

This domain is for use in illustrative examples in documents. You may use this + domain in literature without prior coordination or asking for permission.

+

More information...

+
+ """ + + path = "/test_http_helpers_beautifulsoup" + url = bbot_httpserver.url_for(path) + bbot_httpserver.expect_request(uri=path).respond_with_data(download_content, status=200) + webpage = await scan1.helpers.request(url) + assert webpage, f"Webpage is False" + soup = scan1.helpers.beautifulsoup(webpage, "html.parser") + assert soup, f"Soup is False" + # pretty_print = soup.prettify() + # assert pretty_print, f"PrettyPrint is False" + # scan1.helpers.log.info(f"{pretty_print}") + html_text = soup.find(text="Example Domain") + assert html_text, f"Find HTML Text is False" + # 404 path = "/test_http_helpers_download_404" url = bbot_httpserver.url_for(path) @@ -104,23 +128,61 @@ async def test_web_helpers(bbot_scanner, bbot_config, bbot_httpserver): async def test_web_interactsh(bbot_scanner, bbot_config, bbot_httpserver): from bbot.core.helpers.interactsh import server_list + sync_called = False + async_called = False + + sync_correct_url = False + async_correct_url = False + scan1 = bbot_scanner("8.8.8.8", config=bbot_config) + scan1.status = "RUNNING" - interactsh_client = scan1.helpers.interactsh() + interactsh_client = scan1.helpers.interactsh(poll_interval=3) + interactsh_client2 = scan1.helpers.interactsh(poll_interval=3) async def async_callback(data): - log.debug(f"interactsh poll: {data}") + nonlocal async_called + nonlocal async_correct_url + async_called = True + d = data.get("raw-request", "") + async_correct_url |= "bbot_interactsh_test" in d + log.debug(f"interactsh poll (async): {d}") + + def sync_callback(data): + nonlocal sync_called + nonlocal sync_correct_url + sync_called = True + d = data.get("raw-request", "") + sync_correct_url |= "bbot_interactsh_test" in d + log.debug(f"interactsh poll (sync): {d}") interactsh_domain = await interactsh_client.register(callback=async_callback) - url = f"https://{interactsh_domain}/bbot_interactsh_test" + url = f"http://{interactsh_domain}/bbot_interactsh_test" response = await scan1.helpers.request(url) 
assert response.status_code == 200 - await asyncio.sleep(10) assert any(interactsh_domain.endswith(f"{s}") for s in server_list) + + interactsh_domain2 = await interactsh_client2.register(callback=sync_callback) + url2 = f"http://{interactsh_domain2}/bbot_interactsh_test" + response2 = await scan1.helpers.request(url2) + assert response2.status_code == 200 + assert any(interactsh_domain2.endswith(f"{s}") for s in server_list) + + await asyncio.sleep(10) + data_list = await interactsh_client.poll() + data_list2 = await interactsh_client2.poll() assert isinstance(data_list, list) - assert any("bbot_interactsh_test" in d.get("raw-request", "") for d in data_list) + assert isinstance(data_list2, list) + assert await interactsh_client.deregister() is None + assert await interactsh_client2.deregister() is None + + assert sync_called, "Interactsh synchronous callback was not called" + assert async_called, "Interactsh async callback was not called" + + assert sync_correct_url, f"Data content was not correct for {url2}" + assert async_correct_url, f"Data content was not correct for {url}" @pytest.mark.asyncio diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index a4562cfc7..fa8d0a2d3 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -47,6 +47,7 @@ class ModuleTestBase: module_name = None config_overrides = {} modules_overrides = [] + log = logging.getLogger("bbot") class ModuleTest: def __init__(self, module_test_base, httpx_mock, httpserver, httpserver_ssl, monkeypatch, request): diff --git a/bbot/test/test_step_2/module_tests/test_module_aggregate.py b/bbot/test/test_step_2/module_tests/test_module_aggregate.py index 1049fb2a2..59f5370b3 100644 --- a/bbot/test/test_step_2/module_tests/test_module_aggregate.py +++ b/bbot/test/test_step_2/module_tests/test_module_aggregate.py @@ -2,7 +2,10 @@ class TestAggregate(ModuleTestBase): - config_overrides = 
{"dns_resolution": True} + config_overrides = {"dns_resolution": True, "scope_report_distance": 1} + + async def setup_before_prep(self, module_test): + module_test.scan.helpers.dns.mock_dns({("blacklanternsecurity.com", "A"): "1.2.3.4"}) def check(self, module_test, events): filename = next(module_test.scan.home.glob("scan-stats-table*.txt")) diff --git a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py index 2f4303013..cc4399266 100644 --- a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +++ b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py @@ -8,16 +8,12 @@ class TestAsset_Inventory(ModuleTestBase): modules_overrides = ["asset_inventory", "nmap", "sslcert"] async def setup_before_prep(self, module_test): - old_resolve_fn = module_test.scan.helpers.dns.resolve_event - - async def resolve_event(event, minimal=False): - if event.data == "www.bbottest.notreal": - return ["a-record"], True, False, {"A": {"127.0.0.1"}} - elif event.data == "127.0.0.1": - return ["ptr-record"], False, False, {"PTR": {"asdf.bbottest.notreal"}} - return await old_resolve_fn(event, minimal) - - module_test.monkeypatch.setattr(module_test.scan.helpers.dns, "resolve_event", resolve_event) + module_test.scan.helpers.dns.mock_dns( + { + ("127.0.0.1", "PTR"): "www.bbottest.notreal", + ("www.bbottest.notreal", "A"): "127.0.0.1", + } + ) def check(self, module_test, events): assert any(e.data == "127.0.0.1:9999" for e in events), "No open port found" @@ -25,7 +21,7 @@ def check(self, module_test, events): filename = next(module_test.scan.home.glob("asset-inventory.csv")) with open(filename) as f: content = f.read() - assert "www.bbottest.notreal,,127.0.0.1" in content + assert "www.bbottest.notreal,,,127.0.0.1" in content filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) with open(filename) as f: assert "127.0.0.0/16" in f.read() @@ -45,7 
+41,7 @@ def check(self, module_test, events): filename = next(module_test.scan.home.glob("asset-inventory.csv")) with open(filename) as f: content = f.read() - assert "www.bbottest.notreal,,127.0.0.1" in content + assert "www.bbottest.notreal,,,127.0.0.1" in content filename = next(module_test.scan.home.glob("asset-inventory-ip-addresses-table*.txt")) with open(filename) as f: assert "127.0.0.0/16" in f.read() diff --git a/bbot/test/test_step_2/module_tests/test_module_credshed.py b/bbot/test/test_step_2/module_tests/test_module_credshed.py index 7de642412..4b9566077 100644 --- a/bbot/test/test_step_2/module_tests/test_module_credshed.py +++ b/bbot/test/test_step_2/module_tests/test_module_credshed.py @@ -74,18 +74,31 @@ def check(self, module_test, events): assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "judy@blacklanternsecurity.com"]) assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "tim@blacklanternsecurity.com"]) assert 1 == len( - [e for e in events if e.type == "HASHED_PASSWORD" and e.data == "539FE8942DEADBEEFBC49E6EB2F175AC"] + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:539FE8942DEADBEEFBC49E6EB2F175AC" + ] ) assert 1 == len( - [e for e in events if e.type == "HASHED_PASSWORD" and e.data == "D2D8F0E9A4A2DEADBEEF1AC80F36D61F"] + [ + e + for e in events + if e.type == "HASHED_PASSWORD" + and e.data == "judy@blacklanternsecurity.com:D2D8F0E9A4A2DEADBEEF1AC80F36D61F" + ] ) assert 1 == len( [ e for e in events if e.type == "HASHED_PASSWORD" - and e.data == "$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" + and e.data + == "judy@blacklanternsecurity.com:$2a$12$SHIC49jLIwsobdeadbeefuWb2BKWHUOk2yhpD77A0itiZI1vJqXHm" ] ) - assert 1 == len([e for e in events if e.type == "PASSWORD" and e.data == "TimTamSlam69"]) - assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim"]) + assert 1 == len( + [e for e in events 
if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:tim"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_dehashed.py b/bbot/test/test_step_2/module_tests/test_module_dehashed.py index 8b20c85c5..ab1cc20aa 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dehashed.py +++ b/bbot/test/test_step_2/module_tests/test_module_dehashed.py @@ -37,7 +37,11 @@ class TestDehashed(ModuleTestBase): - config_overrides = {"modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}} + modules_overrides = ["dehashed", "speculate"] + config_overrides = { + "scope_report_distance": 2, + "modules": {"dehashed": {"username": "admin", "api_key": "deadbeef"}}, + } async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( @@ -46,8 +50,9 @@ async def setup_before_prep(self, module_test): ) def check(self, module_test, events): - assert len(events) == 9 - assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert len(events) == 11 + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "blacklanternsecurity.com"]) + assert 1 == len([e for e in events if e.type == "ORG_STUB" and e.data == "blacklanternsecurity"]) assert 1 == len( [ e @@ -56,6 +61,22 @@ def check(self, module_test, events): and e.data == "bob@bob.com" and e.scope_distance == 1 and "affiliate" in e.tags + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" and e.data == "bob.com" and e.scope_distance == 1 and "affiliate" in e.tags + ] + ) + assert 1 == len([e for e in events if e.type == "EMAIL_ADDRESS" and e.data == "bob@blacklanternsecurity.com"]) + assert 1 == len( + [ + e + for e in events + if e.type == "USERNAME" + and e.data == "bob@blacklanternsecurity.com:bob@bob.com" and e.source.data == "bob@blacklanternsecurity.com" 
] ) @@ -65,8 +86,11 @@ def check(self, module_test, events): e for e in events if e.type == "HASHED_PASSWORD" - and e.data == "$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" + and e.data + == "bob@blacklanternsecurity.com:$2a$12$pVmwJ7pXEr3mE.DmCCE4fOUDdeadbeefd2KuCy/tq1ZUFyEOH2bve" ] ) - assert 1 == len([e for e in events if e.type == "PASSWORD" and e.data == "TimTamSlam69"]) - assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "timmy"]) + assert 1 == len( + [e for e in events if e.type == "PASSWORD" and e.data == "tim@blacklanternsecurity.com:TimTamSlam69"] + ) + assert 1 == len([e for e in events if e.type == "USERNAME" and e.data == "tim@blacklanternsecurity.com:timmy"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 0add8aea6..52e447a21 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -280,3 +280,40 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert any(e.data == "asdffoo.test.notreal" for e in events) assert any(e.data == "https://asdffoo.test.notreal/some/path" for e in events) + + +class TestExcavateSerializationNegative(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + "

llsdtVVFlJxhcGGYTo2PMGTRNFVKZxeKTVbhyosM3Sm/5apoY1/yUmN6HVcn+Xt798SPzgXQlZMttsqp1U1iJFmFO2aCGL/v3tmm/fs7itYsoNnJCelWvm9P4ic1nlKTBOpMjT5B5NmriZwTAzZ5ASjCKcmN8Vh=

" + ) + + def check(self, module_test, events): + assert not any(e.type == "FINDING" for e in events), "Found Results without word boundary" + + +class TestExcavateSerializationPositive(TestExcavate): + async def setup_before_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data( + """ +

.NET

+

AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==

+

Java

+

rO0ABXQADUhlbGxvLCB3b3JsZCE=

+

PHP (string)

+

czoyNDoiSGVsbG8sIHdvcmxkISBNb3JlIHRleHQuIjs=

+

PHP (array)

+

YTo0OntpOjA7aToxO2k6MTtzOjE0OiJzZWNvbmQgZWxlbWVudCI7aToyO2k6MztpOjM7czoxODoiTW9yZSB0ZXh0IGluIGFycmF5Ijt9

+

PHP (object)

+

TzoxMjoiU2FtcGxlT2JqZWN0IjoyOntzOjg6InByb3BlcnR5IjtzOjEzOiJJbml0aWFsIHZhbHVlIjtzOjE2OiJhZGRpdGlvbmFsU3RyaW5nIjtzOjIxOiJFeHRyYSB0ZXh0IGluIG9iamVjdC4iO30=

+

Compression

+

H4sIAAAAAAAA/yu2MjS2UvJIzcnJ11Eozy/KSVFUsgYAZN5upRUAAAA=

+ +""" + ) + + def check(self, module_test, events): + for serialize_type in ["Java", ".NET", "PHP (Array)", "PHP (String)", "PHP (Object)", "Possible Compressed"]: + assert any( + e.type == "FINDING" and serialize_type in e.data["description"] for e in events + ), f"Did not find {serialize_type} Serialized Object" diff --git a/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py b/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py index 3d59653eb..cbbec11ea 100644 --- a/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py +++ b/bbot/test/test_step_2/module_tests/test_module_ffuf_shortnames.py @@ -177,9 +177,9 @@ def check(self, module_test, events): basic_detection = False directory_detection = False prefix_detection = False - delimeter_detection = False - directory_delimeter_detection = False - prefix_delimeter_detection = False + delimiter_detection = False + directory_delimiter_detection = False + prefix_delimiter_detection = False short_extensions_detection = False for e in events: @@ -191,18 +191,18 @@ def check(self, module_test, events): if e.data == "http://127.0.0.1:8888/adm_portal.aspx": prefix_detection = True if e.data == "http://127.0.0.1:8888/abcconsole.aspx": - delimeter_detection = True + delimiter_detection = True if e.data == "http://127.0.0.1:8888/abcconsole.aspx": - directory_delimeter_detection = True + directory_delimiter_detection = True if e.data == "http://127.0.0.1:8888/xyzdirectory/": - prefix_delimeter_detection = True + prefix_delimiter_detection = True if e.data == "http://127.0.0.1:8888/short.pl": short_extensions_detection = True assert basic_detection assert directory_detection assert prefix_detection - assert delimeter_detection - assert directory_delimeter_detection - assert prefix_delimeter_detection + assert delimiter_detection + assert directory_delimiter_detection + assert prefix_delimiter_detection assert short_extensions_detection diff --git 
a/bbot/test/test_step_2/module_tests/test_module_gowitness.py b/bbot/test/test_step_2/module_tests/test_module_gowitness.py index 35e0db799..09ad6144c 100644 --- a/bbot/test/test_step_2/module_tests/test_module_gowitness.py +++ b/bbot/test/test_step_2/module_tests/test_module_gowitness.py @@ -3,17 +3,18 @@ class TestGowitness(ModuleTestBase): targets = ["127.0.0.1:8888"] - modules_overrides = ["gowitness", "httpx"] + modules_overrides = ["gowitness", "httpx", "social", "excavate"] import shutil from pathlib import Path home_dir = Path("/tmp/.bbot_gowitness_test") shutil.rmtree(home_dir, ignore_errors=True) - config_overrides = {"force_deps": True, "home": str(home_dir)} + config_overrides = {"force_deps": True, "home": str(home_dir), "scope_report_distance": 2, "omit_event_types": []} async def setup_after_prep(self, module_test): respond_args = { "response_data": """BBOT is life + @@ -21,21 +22,41 @@ async def setup_after_prep(self, module_test): "headers": {"Server": "Apache/2.4.41 (Ubuntu)"}, } module_test.set_expect_requests(respond_args=respond_args) + request_args = dict(uri="/blacklanternsecurity") + respond_args = dict(response_data="blacklanternsecurity github") + module_test.set_expect_requests(request_args, respond_args) + + # monkeypatch social + old_emit_event = module_test.scan.modules["social"].emit_event + + async def new_emit_event(event_data, event_type, **kwargs): + if event_data["url"] == "https://github.com/blacklanternsecurity": + event_data["url"] = event_data["url"].replace("https://github.com", "http://127.0.0.1:8888") + await old_emit_event(event_data, event_type, **kwargs) + + module_test.monkeypatch.setattr(module_test.scan.modules["social"], "emit_event", new_emit_event) def check(self, module_test, events): screenshots_path = self.home_dir / "scans" / module_test.scan.name / "gowitness" / "screenshots" screenshots = list(screenshots_path.glob("*.png")) - assert screenshots, f"No .png files found at {screenshots_path}" - url = False - 
webscreenshot = False - technology = False - for event in events: - if event.type == "URL_UNVERIFIED": - url = True - elif event.type == "WEBSCREENSHOT": - webscreenshot = True - elif event.type == "TECHNOLOGY": - technology = True - assert url, "No URL emitted" - assert webscreenshot, "No WEBSCREENSHOT emitted" - assert technology, "No TECHNOLOGY emitted" + assert ( + len(screenshots) == 2 + ), f"{len(screenshots):,} .png files found at {screenshots_path}, should have been 2" + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/"]) + assert 1 == len( + [e for e in events if e.type == "URL_UNVERIFIED" and e.data == "https://fonts.googleapis.com/"] + ) + assert 0 == len([e for e in events if e.type == "URL" and e.data == "https://fonts.googleapis.com/"]) + assert 1 == len( + [e for e in events if e.type == "SOCIAL" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity"] + ) + assert 2 == len([e for e in events if e.type == "WEBSCREENSHOT"]) + assert 1 == len([e for e in events if e.type == "WEBSCREENSHOT" and e.data["url"] == "http://127.0.0.1:8888/"]) + assert 1 == len( + [ + e + for e in events + if e.type == "WEBSCREENSHOT" and e.data["url"] == "http://127.0.0.1:8888/blacklanternsecurity" + ] + ) + assert len([e for e in events if e.type == "TECHNOLOGY"]) diff --git a/bbot/test/test_step_2/module_tests/test_module_http.py b/bbot/test/test_step_2/module_tests/test_module_http.py index 3b4e819b9..d0afcefb2 100644 --- a/bbot/test/test_step_2/module_tests/test_module_http.py +++ b/bbot/test/test_step_2/module_tests/test_module_http.py @@ -1,3 +1,6 @@ +import json +import httpx + from .base import ModuleTestBase @@ -15,10 +18,46 @@ class TestHTTP(ModuleTestBase): } } + def verify_data(self, j): + return j["data"] == "blacklanternsecurity.com" and j["type"] == "DNS_NAME" + async def setup_after_prep(self, module_test): + self.got_event = False + self.headers_correct = False + self.method_correct = False + 
self.url_correct = False + + async def custom_callback(request): + j = json.loads(request.content) + if request.url == self.downstream_url: + self.url_correct = True + if request.method == "PUT": + self.method_correct = True + if "Authorization" in request.headers: + self.headers_correct = True + if self.verify_data(j): + self.got_event = True + return httpx.Response( + status_code=200, + ) + + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_callback(custom_callback) module_test.httpx_mock.add_response( method="PUT", headers={"Authorization": "bearer auth_token"}, url=self.downstream_url ) def check(self, module_test, events): - pass + assert self.got_event == True + assert self.headers_correct == True + assert self.method_correct == True + assert self.url_correct == True + + +class TestHTTPSIEMFriendly(TestHTTP): + modules_overrides = ["http"] + config_overrides = {"output_modules": {"http": dict(TestHTTP.config_overrides["output_modules"]["http"])}} + config_overrides["output_modules"]["http"]["siem_friendly"] = True + + def verify_data(self, j): + return j["data"] == {"DNS_NAME": "blacklanternsecurity.com"} and j["type"] == "DNS_NAME" diff --git a/bbot/test/test_step_2/module_tests/test_module_httpx.py b/bbot/test/test_step_2/module_tests/test_module_httpx.py index 77f3e98b8..ebd9bbdb1 100644 --- a/bbot/test/test_step_2/module_tests/test_module_httpx.py +++ b/bbot/test/test_step_2/module_tests/test_module_httpx.py @@ -3,6 +3,7 @@ class TestHTTPX(ModuleTestBase): targets = ["http://127.0.0.1:8888/url", "127.0.0.1:8888"] + config_overrides = {"modules": {"httpx": {"store_responses": True}}} # HTML for a page with a login form html_with_login = """ @@ -48,6 +49,8 @@ def check(self, module_test, events): url = True assert url, "Failed to visit target URL" assert open_port, "Failed to visit target OPEN_TCP_PORT" + saved_response = module_test.scan.home / "httpx" / "127.0.0.1.8888[slash]url.txt" + assert saved_response.is_file(), 
"Failed to save raw httpx response" class TestHTTPX_404(ModuleTestBase): diff --git a/bbot/test/test_step_2/module_tests/test_module_json.py b/bbot/test/test_step_2/module_tests/test_module_json.py index 6dafb68a5..1e67db085 100644 --- a/bbot/test/test_step_2/module_tests/test_module_json.py +++ b/bbot/test/test_step_2/module_tests/test_module_json.py @@ -12,3 +12,18 @@ def check(self, module_test, events): e = event_from_json(json.loads(lines[0])) assert e.type == "SCAN" assert e.data == f"{module_test.scan.name} ({module_test.scan.id})" + + +class TestJSONSIEMFriendly(ModuleTestBase): + modules_overrides = ["json"] + config_overrides = {"output_modules": {"json": {"siem_friendly": True}}} + + def check(self, module_test, events): + txt_file = module_test.scan.home / "output.ndjson" + lines = list(module_test.scan.helpers.read_file(txt_file)) + passed = False + for line in lines: + e = json.loads(line) + if e["data"] == {"DNS_NAME": "blacklanternsecurity.com"}: + passed = True + assert passed diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py new file mode 100644 index 000000000..d3712be5c --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -0,0 +1,57 @@ +from .base import ModuleTestBase + +# import logging + + +class TestNewsletters(ModuleTestBase): + found_tgt = "http://127.0.0.1:8888/found" + missing_tgt = "http://127.0.0.1:8888/missing" + targets = [found_tgt, missing_tgt] + modules_overrides = ["speculate", "httpx", "newsletters"] + + html_with_newsletter = """ + + """ + + html_without_newsletter = """ +
+

Example Domain

+

This domain is for use in illustrative examples in documents. You may use this + domain in literature without prior coordination or asking for permission.

+

More information...

+
+ """ + + async def setup_after_prep(self, module_test): + request_args = dict(uri="/found", headers={"test": "header"}) + respond_args = dict(response_data=self.html_with_newsletter) + module_test.set_expect_requests(request_args, respond_args) + request_args = dict(uri="/missing", headers={"test": "header"}) + respond_args = dict(response_data=self.html_without_newsletter) + module_test.set_expect_requests(request_args, respond_args) + + def check(self, module_test, events): + found = False + missing = True + for event in events: + # self.log.info(f"event type: {event.type}") + if event.type == "FINDING": + # self.log.info(f"event data: {event.data}") + # Verify Positive Result + if event.data["url"] == self.found_tgt: + found = True + # Verify Negative Result (should skip this statement if correct) + elif event.data["url"] == self.missing_tgt: + missing = False + assert found, f"NEWSLETTER 'Found' Error - Expect status of True but got False" + assert missing, f"NEWSLETTER 'Missing' Error - Expect status of True but got False" diff --git a/bbot/test/test_step_2/module_tests/test_module_postman.py b/bbot/test/test_step_2/module_tests/test_module_postman.py index 2879b4838..e4c8d8ce6 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postman.py +++ b/bbot/test/test_step_2/module_tests/test_module_postman.py @@ -30,9 +30,9 @@ async def setup_after_prep(self, module_test): "workspaces": [ { "visibilityStatus": "public", - "name": "SpilledSecrets", + "name": "BlackLanternSecuritySpilledSecrets", "id": "afa061be-9cb0-4520-9d4d-fe63361daf0f", - "slug": "spilledsecrets", + "slug": "blacklanternsecurityspilledsecrets", } ], "collectionForkLabel": "", @@ -60,6 +60,52 @@ async def setup_after_prep(self, module_test): }, }, }, + { + "score": 498.22398, + "normalizedScore": 8.43312266976538, + "document": { + "isPublisherVerified": False, + "publisherType": "user", + "curatedInList": [], + "publisherId": "28329861", + "publisherHandle": "", + "publisherLogo": "", + 
"isPublic": True, + "customHostName": "", + "id": "b7fa2137-b7fa2137-23bf-45d1-b176-35359af30ded", + "workspaces": [ + { + "visibilityStatus": "public", + "name": "SpilledSecrets", + "id": "92d0451b-119d-4ef0-b74c-22c400e5ce05", + "slug": "spilledsecrets", + } + ], + "collectionForkLabel": "", + "method": "POST", + "entityType": "request", + "url": "www.example.com/index", + "isBlacklisted": False, + "warehouse__updated_at_collection": "2023-12-11 02:00:00", + "isPrivateNetworkEntity": False, + "warehouse__updated_at_request": "2023-12-11 02:00:00", + "publisherName": "NA", + "name": "A test post request", + "privateNetworkMeta": "", + "privateNetworkFolders": [], + "documentType": "request", + "collection": { + "id": "007e8d67-007e8d67-932b-46ff-b95c-a2aa216edaf3", + "name": "Secret Collection", + "tags": [], + "forkCount": 0, + "watcherCount": 0, + "views": 31, + "apiId": "", + "apiName": "", + }, + }, + }, ], }, ) @@ -183,10 +229,10 @@ async def setup_after_prep(self, module_test): old_emit_event = module_test.module.emit_event - def new_emit_event(event_data, event_type, **kwargs): + async def new_emit_event(event_data, event_type, **kwargs): if event_data.startswith("https://www.postman.com"): event_data = event_data.replace("https://www.postman.com", "http://127.0.0.1:8888") - old_emit_event(event_data, event_type, **kwargs) + await old_emit_event(event_data, event_type, **kwargs) module_test.monkeypatch.setattr(module_test.module, "emit_event", new_emit_event) module_test.scan.helpers.dns.mock_dns({("asdf.blacklanternsecurity.com", "A"): "127.0.0.1"}) @@ -199,6 +245,9 @@ def check(self, module_test, events): assert any( e.data == "http://127.0.0.1:8888/_api/workspace/afa061be-9cb0-4520-9d4d-fe63361daf0f" for e in events ), "Failed to detect workspace" + assert any( + e.data != "http://127.0.0.1:8888/_api/workspace/92d0451b-119d-4ef0-b74c-22c400e5ce05" for e in events + ), "Workspace should not be detected" assert any( e.data == 
"http://127.0.0.1:8888/_api/workspace/afa061be-9cb0-4520-9d4d-fe63361daf0f/globals" for e in events diff --git a/bbot/test/test_step_2/module_tests/test_module_splunk.py b/bbot/test/test_step_2/module_tests/test_module_splunk.py new file mode 100644 index 000000000..67d67a4ef --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_splunk.py @@ -0,0 +1,58 @@ +import json +import httpx + +from .base import ModuleTestBase + + +class TestSplunk(ModuleTestBase): + downstream_url = "https://splunk.blacklanternsecurity.fakedomain:1234/services/collector" + config_overrides = { + "output_modules": { + "splunk": { + "url": downstream_url, + "hectoken": "HECTOKEN", + "index": "bbot_index", + "source": "bbot_source", + } + } + } + + def verify_data(self, j): + if not j["source"] == "bbot_source": + return False + if not j["index"] == "bbot_index": + return False + data = j["event"] + if not data["data"] == "blacklanternsecurity.com" and data["type"] == "DNS_NAME": + return False + return True + + async def setup_after_prep(self, module_test): + self.url_correct = False + self.method_correct = False + self.got_event = False + self.headers_correct = False + + async def custom_callback(request): + j = json.loads(request.content) + if request.url == self.downstream_url: + self.url_correct = True + if request.method == "POST": + self.method_correct = True + if "Authorization" in request.headers: + self.headers_correct = True + if self.verify_data(j): + self.got_event = True + return httpx.Response( + status_code=200, + ) + + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_callback(custom_callback) + module_test.httpx_mock.add_response() + + def check(self, module_test, events): + assert self.got_event == True + assert self.headers_correct == True + assert self.method_correct == True + assert self.url_correct == True diff --git a/bbot/test/test_step_2/module_tests/test_module_wafw00f.py 
b/bbot/test/test_step_2/module_tests/test_module_wafw00f.py index 38f1d5764..9955e584b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_wafw00f.py +++ b/bbot/test/test_step_2/module_tests/test_module_wafw00f.py @@ -12,3 +12,19 @@ async def setup_after_prep(self, module_test): def check(self, module_test, events): assert any(e.type == "WAF" and "LiteSpeed" in e.data["WAF"] for e in events) + + +class TestWafw00f_noredirect(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "wafw00f"] + + async def setup_after_prep(self, module_test): + expect_args = {"method": "GET", "uri": "/"} + respond_args = {"status": 301, "headers": {"Location": "/redirect"}} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + expect_args = {"method": "GET", "uri": "/redirect"} + respond_args = {"response_data": "Proudly powered by litespeed web server"} + module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) + + def check(self, module_test, events): + assert not any(e.type == "WAF" for e in events) diff --git a/docs/contribution.md b/docs/contribution.md index 65b074adb..175c3e7af 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -74,7 +74,7 @@ class MyModule(BaseModule): self.hugeinfo(f"GOT EVENT: {event}") for ip in await self.helpers.resolve(event.data): self.hugesuccess(f"EMITTING IP_ADDRESS: {ip}") - self.emit_event(ip, "IP_ADDRESS", source=event) + await self.emit_event(ip, "IP_ADDRESS", source=event) ``` After saving the module, you can run it simply by specifying it with `-m`: @@ -134,7 +134,6 @@ BBOT automates module dependencies with **Ansible**. If your module relies on a ```python class MyModule(BaseModule): ... 
- deps_pip = ["beautifulsoup4"] deps_apt = ["chromium-browser"] deps_ansible = [ { diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 60e0c7b62..ebf4f182f 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -1,118 +1,120 @@ # List of Modules -| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | -|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| -| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | -| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bucket_google | scan | No | Check 
for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | -| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | -| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | -| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | -| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic | HTTP_RESPONSE, URL_UNVERIFIED | | -| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | -| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | -| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | -| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | -| httpx | scan | No | Visit webpages. 
Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | -| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | -| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | -| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. | active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | -| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME, URL_UNVERIFIED | DNS_NAME | -| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | -| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | 
FINDING | -| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | -| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | -| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | -| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | -| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | -| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | -| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | -| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | -| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | -| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME, URL_UNVERIFIED | -| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | -| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | -| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | -| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnsdumpster | scan | No | Query dnsdumpster for subdomains | 
passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | -| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | -| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | -| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | -| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. 
| passive, safe | IP_ADDRESS | GEOLOCATION | -| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | -| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | -| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | -| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | -| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS 
| -| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | -| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | -| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asset_inventory | output | No | Output to an asset inventory style flattened CSV file | | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY | IP_ADDRESS, OPEN_TCP_PORT | -| csv | output | No | Output to CSV | | * | | -| discord | output | No | Message a Discord channel when certain events are encountered | | * | | -| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | -| http | output | No | Send every event to a custom URL via a web request | | * | | -| human | output | No | Output to text | | * | | -| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | -| neo4j | output | No | Output to Neo4j | | * | | -| python | output | No | Output via Python API | | * | | -| slack | output | No | Message a Slack channel when certain events are 
encountered | | * | | -| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | -| teams | output | No | Message a Teams channel when certain events are encountered | | * | | -| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | -| websocket | output | No | Output to websockets | | * | | -| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | -| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | -| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | +| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| +| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | +| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | 
FINDING, STORAGE_BUCKET | +| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | +| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | +| dnszonetransfer | scan | No | Attempt DNS zone transfers | active, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | +| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | +| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. 
| active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | +| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | +| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | +| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | +| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | +| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | +| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | active, safe | HTTP_RESPONSE | FINDING | +| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | +| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, VULNERABILITY | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | +| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | +| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | +| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | +| subdomain_hijack | scan | No | Detect hijackable subdomains | active, cloud-enum, safe, subdomain-enum, subdomain-hijack, web-basic, web-thorough | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING | +| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| 
url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | +| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | +| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | +| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | +| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | +| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | +| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | +| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | +| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | +| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, 
safe, subdomain-enum | DNS_NAME | DNS_NAME | +| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | +| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | +| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | +| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | +| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | +| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | +| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | +| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| nsec | scan | No | Enumerate subdomains by NSEC-walking | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | 
DNS_NAME | +| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | +| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | +| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | +| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, 
URL_UNVERIFIED | +| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | +| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | +| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | +| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, 
STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/modules/nuclei.md b/docs/modules/nuclei.md index 3f492b8ba..d94e6f7f0 100644 --- a/docs/modules/nuclei.md +++ b/docs/modules/nuclei.md @@ -8,7 +8,7 @@ BBOT integrates with [Nuclei](https://github.com/projectdiscovery/nuclei), an op * The BBOT Nuclei module ingests **[URL]** events and emits events of type **[VULNERABILITY]** or **[FINDING]** -* Vulnerabilities will inherit their severity from the Nuclei templates​ +* Vulnerabilities will inherit their severity from the Nuclei templates * Nuclei templates of severity INFO will be emitted as **[FINDINGS]** ## Default Behavior @@ -59,15 +59,15 @@ This is equivalent to the Nuclei '-as' scan option. It only use templates that m #### Budget -Budget mode is unique to BBOT. ​ +Budget mode is unique to BBOT. -For larger scans with thousands of targets, doing a FULL Nuclei scan (1000s of Requests) for each is not realistic. ​ -As an alternative to the other modes, you can take advantage of Nuclei's "collapsible" template feature. ​ +For larger scans with thousands of targets, doing a FULL Nuclei scan (1000s of Requests) for each is not realistic. +As an alternative to the other modes, you can take advantage of Nuclei's "collapsible" template feature. For only the cost of one (or more) "extra" request(s) per host, it can activate several hundred modules. These are modules which happen to look at a BaseUrl, and typically look for a specific string or other attribute. Nuclei is smart about reusing the request data when it can, and we can use this to our advantage. -The budget parameter is the # of extra requests per host you are willing to send to "feed" Nuclei templates​ (defaults to 1). 
-For those times when vulnerability scanning isn't the main focus, but you want to look for easy wins.​ +The budget parameter is the # of extra requests per host you are willing to send to "feed" Nuclei templates (defaults to 1). +For those times when vulnerability scanning isn't the main focus, but you want to look for easy wins. Of course, there is a rapidly diminishing return when you set the value to more than a handful. Eventually, this becomes 1 template per 1 budget value increase. However, in the 1-10 range there is a lot of value. This graphic should give you a rough visual idea of this concept. @@ -86,20 +86,20 @@ The **ratelimit** and **concurrency** settings default to the same defaults that ```bash # Scan a SINGLE target with a basic port scan and web modules -bbot -f web-basic -m nmap nuclei --allow-deadly -t app.evilcorp.com​ +bbot -f web-basic -m nmap nuclei --allow-deadly -t app.evilcorp.com ``` ```bash # Scanning MULTIPLE targets -bbot -f web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com​ +bbot -f web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` ```bash # Scanning MULTIPLE targets while performing subdomain enumeration -bbot -f subdomain-enum web-basic -m nmap nuclei –allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com​ +bbot -f subdomain-enum web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` ```bash -# Scanning MULTIPLE targets on a BUDGET​ -bbot -f subdomain-enum web-basic -m nmap nuclei –allow-deadly –c modules.nuclei.mode=Budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com​ +# Scanning MULTIPLE targets on a BUDGET +bbot -f subdomain-enum web-basic -m nmap nuclei --allow-deadly -c modules.nuclei.mode=budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` diff --git a/docs/release_history.md b/docs/release_history.md index 1b68587ef..4b5b15bac 100644 ---
a/docs/release_history.md +++ b/docs/release_history.md @@ -1,3 +1,68 @@ +## v1.1.6 +February 21, 2024 + +## Improvements +- https://github.com/blacklanternsecurity/bbot/pull/1001 +- https://github.com/blacklanternsecurity/bbot/pull/1006 +- https://github.com/blacklanternsecurity/bbot/pull/1010 +- https://github.com/blacklanternsecurity/bbot/pull/1013 +- https://github.com/blacklanternsecurity/bbot/pull/1014 +- https://github.com/blacklanternsecurity/bbot/pull/1015 +- https://github.com/blacklanternsecurity/bbot/pull/1032 +- https://github.com/blacklanternsecurity/bbot/pull/1043 +- https://github.com/blacklanternsecurity/bbot/pull/1047 +- https://github.com/blacklanternsecurity/bbot/pull/1048 +- https://github.com/blacklanternsecurity/bbot/pull/1049 +- https://github.com/blacklanternsecurity/bbot/pull/1051 +- https://github.com/blacklanternsecurity/bbot/pull/1065 +- https://github.com/blacklanternsecurity/bbot/pull/1070 +- https://github.com/blacklanternsecurity/bbot/pull/1076 +- https://github.com/blacklanternsecurity/bbot/pull/1077 +- https://github.com/blacklanternsecurity/bbot/pull/1095 +- https://github.com/blacklanternsecurity/bbot/pull/1101 +- https://github.com/blacklanternsecurity/bbot/pull/1103 + +## Bugfixes +- https://github.com/blacklanternsecurity/bbot/pull/1005 +- https://github.com/blacklanternsecurity/bbot/pull/1022 +- https://github.com/blacklanternsecurity/bbot/pull/1030 +- https://github.com/blacklanternsecurity/bbot/pull/1033 +- https://github.com/blacklanternsecurity/bbot/pull/1034 +- https://github.com/blacklanternsecurity/bbot/pull/1042 +- https://github.com/blacklanternsecurity/bbot/pull/1066 +- https://github.com/blacklanternsecurity/bbot/pull/1067 +- https://github.com/blacklanternsecurity/bbot/pull/1073 +- https://github.com/blacklanternsecurity/bbot/pull/1086 +- https://github.com/blacklanternsecurity/bbot/pull/1089 +- https://github.com/blacklanternsecurity/bbot/pull/1094 +- https://github.com/blacklanternsecurity/bbot/pull/1098 + +## 
New Modules +- https://github.com/blacklanternsecurity/bbot/pull/1072 +- https://github.com/blacklanternsecurity/bbot/pull/1091 + + +## v1.1.5 +January 29, 2024 + +## Improvements +- https://github.com/blacklanternsecurity/bbot/pull/1001 +- https://github.com/blacklanternsecurity/bbot/pull/1006 +- https://github.com/blacklanternsecurity/bbot/pull/1010 +- https://github.com/blacklanternsecurity/bbot/pull/1013 +- https://github.com/blacklanternsecurity/bbot/pull/1014 +- https://github.com/blacklanternsecurity/bbot/pull/1015 +- https://github.com/blacklanternsecurity/bbot/pull/1032 +- https://github.com/blacklanternsecurity/bbot/pull/1040 + +## Bugfixes +- https://github.com/blacklanternsecurity/bbot/pull/1005 +- https://github.com/blacklanternsecurity/bbot/pull/1022 +- https://github.com/blacklanternsecurity/bbot/pull/1030 +- https://github.com/blacklanternsecurity/bbot/pull/1033 +- https://github.com/blacklanternsecurity/bbot/pull/1034 + + ## v1.1.4 January 11, 2024 diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 3b134df5d..628cb8847 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -61,7 +61,7 @@ Target: Modules: -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. 
Choices: affiliates,ajaxpro,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,credshed,crobat,crt,dastardly,dehashed,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,github_codesearch,github_org,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,leakix,masscan,massdns,myssl,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,postman,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye + Modules to enable. Choices: affiliates,ajaxpro,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,credshed,crobat,crt,dastardly,dehashed,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,github_codesearch,github_org,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,leakix,masscan,massdns,myssl,newsletters,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,postman,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye -l, --list-modules List available modules. 
-em MODULE [MODULE ...], --exclude-modules MODULE [MODULE ...] Exclude these modules. @@ -73,7 +73,7 @@ Modules: -ef FLAG [FLAG ...], --exclude-flags FLAG [FLAG ...] Disable modules with these flags. (e.g. -ef aggressive) -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: asset_inventory,csv,discord,emails,http,human,json,neo4j,python,slack,subdomains,teams,web_report,websocket + Output module(s). Choices: asset_inventory,csv,discord,emails,http,human,json,neo4j,python,slack,splunk,subdomains,teams,web_report,websocket --allow-deadly Enable the use of highly aggressive modules Scan: diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index ab57e4eb9..d203f3ec4 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -238,6 +238,7 @@ Many modules accept their own configuration options. These options have the abil | modules.gowitness.version | str | gowitness version | 2.4.2 | | modules.httpx.in_scope_only | bool | Only visit web resources that are in scope. | True | | modules.httpx.max_response_size | int | Max response size in bytes | 5242880 | +| modules.httpx.store_responses | bool | Save raw HTTP responses to scan folder | False | | modules.httpx.threads | int | Number of httpx threads to use | 50 | | modules.httpx.version | str | httpx version | 1.2.5 | | modules.iis_shortnames.detect_only | bool | Only detect the vulnerability and do not run the shortname scanner | True | @@ -290,6 +291,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.vhost.lines | int | take only the first N lines from the wordlist when finding directories | 5000 | | modules.vhost.wordlist | str | Wordlist containing subdomains | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt | | modules.wafw00f.generic_detect | bool | When no specific WAF detections are made, try to perform a generic detect | True | +| modules.anubisdb.limit | int | Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API) | 1000 | | modules.bevigil.api_key | str | BeVigil OSINT API Key | | | modules.bevigil.urls | bool | Emit URLs in addition to DNS_NAMEs | False | | modules.binaryedge.api_key | str | BinaryEdge API key | | @@ -307,6 +309,8 @@ Many modules accept their own configuration options. These options have the abil | modules.credshed.username | str | Credshed username | | | modules.dehashed.api_key | str | DeHashed API Key | | | modules.dehashed.username | str | Email Address associated with your API key | | +| modules.dnscommonsrv.max_event_handlers | int | How many instances of the module to run concurrently | 10 | +| modules.dnscommonsrv.top | int | How many of the top SRV records to check | 50 | | modules.fullhunt.api_key | str | FullHunt API Key | | | modules.github_codesearch.api_key | str | Github token | | | modules.github_codesearch.limit | int | Limit code search to this many results | 100 | @@ -319,6 +323,7 @@ Many modules accept their own configuration options. These options have the abil | modules.ipneighbor.num_bits | int | Netmask size (in CIDR notation) to check. Default is 4 bits (16 hosts) | 4 | | modules.ipstack.api_key | str | IPStack GeoIP API Key | | | modules.leakix.api_key | str | LeakIX API Key | | +| modules.massdns.max_depth | int | How many subdomains deep to brute force, i.e. 
5.4.3.2.1.evilcorp.com | 5 | | modules.massdns.max_mutations | int | Max number of smart mutations per subdomain | 500 | | modules.massdns.max_resolvers | int | Number of concurrent massdns resolvers | 1000 | | modules.massdns.wordlist | str | Subdomain wordlist URL | https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt | @@ -345,6 +350,7 @@ Many modules accept their own configuration options. These options have the abil | output_modules.http.bearer | str | Authorization Bearer token | | | output_modules.http.method | str | HTTP method | POST | | output_modules.http.password | str | Password (basic auth) | | +| output_modules.http.siem_friendly | bool | Format JSON in a SIEM-friendly way for ingestion into Elastic, Splunk, etc. | False | | output_modules.http.timeout | int | HTTP timeout | 10 | | output_modules.http.url | str | Web URL | | | output_modules.http.username | str | Username (basic auth) | | @@ -352,12 +358,18 @@ Many modules accept their own configuration options. These options have the abil | output_modules.human.output_file | str | Output to file | | | output_modules.json.console | bool | Output to console | False | | output_modules.json.output_file | str | Output to file | | +| output_modules.json.siem_friendly | bool | Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc. 
| False | | output_modules.neo4j.password | str | Neo4j password | bbotislife | | output_modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 | | output_modules.neo4j.username | str | Neo4j username | neo4j | | output_modules.slack.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] | | output_modules.slack.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW | | output_modules.slack.webhook_url | str | Discord webhook URL | | +| output_modules.splunk.hectoken | str | HEC Token | | +| output_modules.splunk.index | str | Index to send data to | | +| output_modules.splunk.source | str | Source path to be added to the metadata | | +| output_modules.splunk.timeout | int | HTTP timeout | 10 | +| output_modules.splunk.url | str | Web URL | | | output_modules.subdomains.include_unresolved | bool | Include unresolved subdomains in output | False | | output_modules.subdomains.output_file | str | Output to file | | | output_modules.teams.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index d5ce37b64..d2aaa4595 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -51,33 +51,33 @@ Below is a full list of event types along with which modules produce/consume the | Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | 
|---------------------|-----------------------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | +| * | 12 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, splunk, teams, websocket | | | ASN | 0 | 1 | | asn | | AZURE_TENANT | 1 | 0 | speculate | | | CODE_REPOSITORY | 0 | 2 | | github_codesearch, github_org | | DNS_NAME | 58 | 44 | anubisdb, asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, postman, 
rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, internetdb, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | | DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | | EMAIL_ADDRESS | 1 | 6 | emails | credshed, emailformat, hunterio, pgp, skymem, sslcert | -| FINDING | 2 | 24 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | +| FINDING | 2 | 25 | asset_inventory, web_report | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | | GEOLOCATION | 0 | 2 | | ip2location, ipstack | | HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 15 | 1 | ajaxpro, badsecrets, dastardly, excavate, filedownload, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | +| HTTP_RESPONSE | 17 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, excavate, filedownload, 
host_header, hunt, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer | httpx | | IP_ADDRESS | 9 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, masscan, nmap, speculate | asset_inventory, ipneighbor, speculate | | IP_RANGE | 3 | 0 | masscan, nmap, speculate | | | OPEN_TCP_PORT | 4 | 5 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, masscan, nmap, speculate | | ORG_STUB | 1 | 1 | github_org | speculate | | PASSWORD | 0 | 2 | | credshed, dehashed | | PROTOCOL | 0 | 1 | | fingerprintx | -| SOCIAL | 2 | 1 | github_org, speculate | social | +| SOCIAL | 3 | 1 | github_org, gowitness, speculate | social | | STORAGE_BUCKET | 7 | 5 | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | | TECHNOLOGY | 2 | 4 | asset_inventory, web_report | badsecrets, gowitness, internetdb, wappalyzer | | URL | 19 | 2 | ajaxpro, asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | | URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | | URL_UNVERIFIED | 5 | 13 | filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, postman, robots, urlscan, wayback | -| USERNAME | 0 | 2 | | credshed, dehashed | +| USERNAME | 1 | 2 | speculate | credshed, dehashed | | VHOST | 1 | 1 | web_report | vhost | | VULNERABILITY | 2 | 7 | asset_inventory, web_report | ajaxpro, badsecrets, dastardly, generic_ssrf, internetdb, nuclei, telerik | -| WAF | 0 | 1 | | wafw00f | +| WAF | 1 | 1 | asset_inventory | wafw00f | | WEBSCREENSHOT | 0 | 1 | | gowitness | @@ -87,12 +87,12 @@ BBOT has a sharp 
distinction between Findings and Vulnerabilities: **VULNERABILITY** -* There's a higher standard for what is allowed to be a vulnerability. They should be considered **confirmed** and **actionable​** - no additional confirmation required -* They are always assigned a severity. The possible severities are: LOW, MEDIUM, HIGH, or CRITICAL​ +* There's a higher standard for what is allowed to be a vulnerability. They should be considered **confirmed** and **actionable** - no additional confirmation required +* They are always assigned a severity. The possible severities are: LOW, MEDIUM, HIGH, or CRITICAL -**FINDING​** +**FINDING** -* Findings can range anywhere from "slightly interesting behavior" to "likely, but unconfirmed vulnerability"​ +* Findings can range anywhere from "slightly interesting behavior" to "likely, but unconfirmed vulnerability" * Are often false positives By making this separation, actionable vulnerabilities can be identified quickly in the midst of a large scan diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 40e90038a..1c93bedb3 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -107,28 +107,28 @@ A single module can have multiple flags. 
For example, the `securitytrails` modul ### List of Flags -| Flag | # Modules | Description | Modules | -|------------------|-------------|-----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 75 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, 
bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 39 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 26 | More advanced web scanning functionality | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, 
wappalyzer | -| aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | -| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | -| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | -| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | -| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | 
Modules | +|------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 76 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, filedownload, fingerprintx, fullhunt, git, github_codesearch, github_org, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, nsec, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 57 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, 
certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github_codesearch, github_org, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 47 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 40 | Makes active connections to target systems | ajaxpro, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnszonetransfer, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, newsletters, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| 
aggressive | 19 | Generates a large amount of network traffic | bypass403, dastardly, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | +| cloud-enum | 11 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth, subdomain_hijack | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| slow | 8 | May take a long time to complete | bucket_digitalocean, dastardly, fingerprintx, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | ## Dependencies diff --git a/docs/scanning/output.md 
b/docs/scanning/output.md index 81b4b8ede..af1db4737 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -1,6 +1,6 @@ # Output -By default, BBOT saves its output in TXT, JSON, and CSV formats: +By default, BBOT saves its output in TXT, JSON, and CSV formats. The filenames are logged at the end of each scan: ![bbot output](https://github.com/blacklanternsecurity/bbot/assets/20261699/bb3da441-2682-408f-b955-19b268823b82) Every BBOT scan gets a unique and mildly-entertaining name like **`demonic_jimmy`**. Output for that scan, including scan stats and any web screenshots, etc., are saved to a folder by that name in `~/.bbot/scans`. The most recent 20 scans are kept, and older ones are removed. You can change the location of BBOT's output with `--output`, and you can also pick a custom scan name with `--name`. @@ -135,6 +135,25 @@ output_modules: password: P@ssw0rd ``` +### Splunk + +The `splunk` output module sends [events](events.md) in JSON format to a desired splunk instance via [HEC](https://docs.splunk.com/Documentation/Splunk/9.2.0/Data/UsetheHTTPEventCollector). 
+ +You can customize this output with the following config options: + +```yaml title="~/.bbot/config/bbot.yml" +output_modules: + splunk: + # The full URL with the URI `/services/collector/event` + url: https://localhost:8088/services/collector/event + # Generated from splunk webui + hectoken: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + # Defaults to `main` if not set + index: my-specific-index + # Defaults to `bbot` if not set + source: /my/source.json +``` + ### Asset Inventory The `asset_inventory` module produces a CSV like this: diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 0572bedb2..5e115bcde 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -53,6 +53,24 @@ You can also pair the web spider with subdomain enumeration: bbot -t evilcorp.com -f subdomain-enum -c spider.yml ``` +### Ingesting BBOT Data Into SIEM (Elastic, Splunk) + +If your goal is to feed BBOT data into a SIEM such as Elastic, be sure to enable this option when scanning: + +```bash +bbot -t evilcorp.com -c output_modules.json.siem_friendly=true +``` + +This nests the event's `.data` beneath its event type like so: +```json +{ + "type": "DNS_NAME", + "data": { + "DNS_NAME": "blacklanternsecurity.com" + } +} +``` + ### Custom HTTP Proxy Web pentesters may appreciate BBOT's ability to quickly populate Burp Suite site maps for all subdomains in a target. If your scan includes gowitness, this will capture the traffic as if you manually visited each website in your browser -- including auxiliary web resources and javascript API calls. 
To accomplish this, set the `http_proxy` config option like so: diff --git a/poetry.lock b/poetry.lock index 8cdf62957..ab85c1785 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,13 +38,13 @@ ansible-core = ">=2.14.7,<2.15.0" [[package]] name = "ansible-core" -version = "2.14.13" +version = "2.14.14" description = "Radically simple IT automation" optional = false python-versions = ">=3.9" files = [ - {file = "ansible-core-2.14.13.tar.gz", hash = "sha256:4e1bb334f0c3226ab48c599efe49cd5fe03f25d4558bc06c274ade2ba3e2576a"}, - {file = "ansible_core-2.14.13-py3-none-any.whl", hash = "sha256:65e96d04dce1e5dd415b4681d464f7f9a949d515f623145c4a8bc3468e75f3b0"}, + {file = "ansible-core-2.14.14.tar.gz", hash = "sha256:f06a94a88a372d4db4b3973e465022fbe3545602580864115d21a280accb7ca3"}, + {file = "ansible_core-2.14.14-py3-none-any.whl", hash = "sha256:d1d282b71b9d8fdd515ae045e5909cfa393cfa0e9fecaae2dbbb4d326ab58681"}, ] [package.dependencies] @@ -121,51 +121,54 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" -version = "23.12.1" +version = "24.1.1" description = 
"The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = 
"black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -183,15 +186,26 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -384,13 +398,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "2.1.0.181" +version = "2.2.0.203" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = ">=3.9,<4.0" files = [ - {file = "cloudcheck-2.1.0.181-py3-none-any.whl", hash = "sha256:cb9ebd5d546038974ed13bd7c9edc83fe481af791bf53decf7ecdd9070deb23a"}, - {file = "cloudcheck-2.1.0.181.tar.gz", hash = "sha256:c3b24c72b3f082f87a81bf8542dc2a5177593fe4ea0e983acd2f486a5d127cc9"}, + {file = "cloudcheck-2.2.0.203-py3-none-any.whl", hash = "sha256:8f17944c6183c7ad96b031c4cc3efb1c1addaa23c2d17f3c81ff54c4cd494716"}, + {file = "cloudcheck-2.2.0.203.tar.gz", hash = "sha256:83eb239b024579712a19d736dc9649eff4a3f6e9314db3cd94faa883adbfe46c"}, ] [package.dependencies] @@ -410,63 +424,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = 
"coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = 
"coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = 
"coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = 
"coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = 
"coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -477,47 +491,56 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = 
"sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = 
"cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -551,22 +574,23 @@ files = [ [[package]] name = "dnspython" -version = "2.4.2" +version = "2.5.0" description = "DNS toolkit" optional = false 
-python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, + {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"}, + {file = "dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"}, ] [package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"] doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +idna = ["idna (>=2.1)"] +trio = ["trio (>=0.14)"] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "docutils" @@ -581,13 +605,13 @@ files = [ [[package]] name = "dunamai" -version = "1.19.0" +version = "1.19.1" description = "Dynamic version generation" optional = false -python-versions = ">=3.5,<4.0" +python-versions = ">=3.5" files = [ - {file = "dunamai-1.19.0-py3-none-any.whl", hash = "sha256:1ed948676bbf0812bfaafe315a134634f8d6eb67138513c75aa66e747404b9c6"}, - {file = "dunamai-1.19.0.tar.gz", hash = "sha256:6ad99ae34f7cd290550a2ef1305d2e0292e6e6b5b1b830dfc07ceb7fd35fec09"}, + {file = "dunamai-1.19.1-py3-none-any.whl", hash = "sha256:a6aa0ae3bdb01a12d6f219555d6e230ec02663afb43a7bd37933e6c4fecefc9b"}, + {file = "dunamai-1.19.1.tar.gz", hash = "sha256:c8112efec15cd1a8e0384d5d6f3be97a01bf7bc45b54069533856aaaa33a9b9e"}, ] [package.dependencies] @@ -658,13 +682,13 @@ 
dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.38.1" +version = "0.40.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.38.1-py3-none-any.whl", hash = "sha256:334c79d3b5964ade65c05dfcaf53518c576dedd387aaba5c9fd71212f34f1483"}, - {file = "griffe-0.38.1.tar.gz", hash = "sha256:bd68d7da7f3d87bc57eb9962b250db123efd9bbcc06c11c1a91b6e583b2a9361"}, + {file = "griffe-0.40.1-py3-none-any.whl", hash = "sha256:5b8c023f366fe273e762131fe4bfd141ea56c09b3cb825aa92d06a82681cfd93"}, + {file = "griffe-0.40.1.tar.gz", hash = "sha256:66c48a62e2ce5784b6940e603300fcfb807b6f099b94e7f753f1841661fd5c7c"}, ] [package.dependencies] @@ -783,13 +807,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -949,13 +973,13 @@ source = ["Cython (==0.29.37)"] [[package]] name = "markdown" -version = "3.5.1" +version = "3.5.2" description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.1-py3-none-any.whl", hash = "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc"}, - {file = "Markdown-3.5.1.tar.gz", hash = "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.dependencies] @@ -967,71 +991,71 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = 
"MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = 
"MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file 
= "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1120,13 +1144,13 @@ mkdocs = ">=1.1" [[package]] name = "mkdocs-material" -version = "9.5.3" +version = "9.5.8" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.3-py3-none-any.whl", hash = 
"sha256:76c93a8525cceb0b395b9cedab3428bf518cf6439adef2b940f1c1574b775d89"}, - {file = "mkdocs_material-9.5.3.tar.gz", hash = "sha256:5899219f422f0a6de784232d9d40374416302ffae3c160cacc72969fcc1ee372"}, + {file = "mkdocs_material-9.5.8-py3-none-any.whl", hash = "sha256:14563314bbf97da4bfafc69053772341babfaeb3329cde01d3e63cec03997af8"}, + {file = "mkdocs_material-9.5.8.tar.gz", hash = "sha256:2a429213e83f84eda7a588e2b186316d806aac602b7f93990042f7a1f3d3cf65"}, ] [package.dependencies] @@ -1143,8 +1167,8 @@ regex = ">=2022.4" requests = ">=2.26,<3.0" [package.extras] -git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] -imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] @@ -1301,28 +1325,28 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1368,27 +1392,27 @@ virtualenv = ">=20.10.0" [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = 
"sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -1429,59 +1453,59 @@ files = [ [[package]] name = "pycryptodome" -version = "3.19.1" +version = "3.20.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:694020d2ff985cd714381b9da949a21028c24b86f562526186f6af7c7547e986"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4464b0e8fd5508bff9baf18e6fd4c6548b1ac2ce9862d6965ff6a84ec9cb302a"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:420972f9c62978e852c74055d81c354079ce3c3a2213a92c9d7e37bbc63a26e2"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1bc0c49d986a1491d66d2a56570f12e960b12508b7e71f2423f532e28857f36"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e038ab77fec0956d7aa989a3c647652937fc142ef41c9382c2ebd13c127d5b4a"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-win32.whl", hash = 
"sha256:a991f8ffe8dfe708f86690948ae46442eebdd0fff07dc1b605987939a34ec979"}, - {file = "pycryptodome-3.19.1-cp27-cp27m-win_amd64.whl", hash = "sha256:2c16426ef49d9cba018be2340ea986837e1dfa25c2ea181787971654dd49aadd"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6d0d2b97758ebf2f36c39060520447c26455acb3bcff309c28b1c816173a6ff5"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b8b80ff92049fd042177282917d994d344365ab7e8ec2bc03e853d93d2401786"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd4e7e8bf0fc1ada854688b9b309ee607e2aa85a8b44180f91021a4dd330a928"}, - {file = "pycryptodome-3.19.1-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:8cf5d3d6cf921fa81acd1f632f6cedcc03f5f68fc50c364cd39490ba01d17c49"}, - {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297"}, - {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065"}, - {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = 
"sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17"}, - {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2"}, - {file = "pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03"}, - {file = "pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7"}, - {file = "pycryptodome-3.19.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:ed932eb6c2b1c4391e166e1a562c9d2f020bfff44a0e1b108f67af38b390ea89"}, - {file = "pycryptodome-3.19.1-pp27-pypy_73-win32.whl", hash = "sha256:81e9d23c0316fc1b45d984a44881b220062336bbdc340aa9218e8d0656587934"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37e531bf896b70fe302f003d3be5a0a8697737a8d177967da7e23eff60d6483c"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd4e95b0eb4b28251c825fe7aa941fe077f993e5ca9b855665935b86fbb1cc08"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22c80246c3c880c6950d2a8addf156cee74ec0dc5757d01e8e7067a3c7da015"}, - {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e70f5c839c7798743a948efa2a65d1fe96bb397fe6d7f2bde93d869fe4f0ad69"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6c3df3613592ea6afaec900fd7189d23c8c28b75b550254f4bd33fe94acb84b9"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08b445799d571041765e7d5c9ca09c5d3866c2f22eeb0dd4394a4169285184f4"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:954d156cd50130afd53f8d77f830fe6d5801bd23e97a69d358fed068f433fbfe"}, - {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b7efd46b0b4ac869046e814d83244aeab14ef787f4850644119b1c8b0ec2d637"}, - {file = "pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, + {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, + {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = 
"sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, + {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, + {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, + {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, + {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, + {file = 
"pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, + {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, + {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, + {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = 
"pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -1489,116 +1513,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = 
"pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = 
"pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = 
"pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file 
= "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = 
"pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = 
"pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = 
"sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + 
{file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = 
"pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -1756,18 +1754,18 @@ Werkzeug = ">=2.0.0" [[package]] name = "pytest-httpx" -version = "0.28.0" +version = "0.29.0" description = "Send responses to httpx." optional = false python-versions = ">=3.9" files = [ - {file = "pytest_httpx-0.28.0-py3-none-any.whl", hash = "sha256:045774556a3633688742315a6981aab2806ce93bcbcc8444253ab87bca286800"}, - {file = "pytest_httpx-0.28.0.tar.gz", hash = "sha256:a82505fdf59f19eaaf2853db3f3832b3dee35d3bc58000232db2b65c5fca0614"}, + {file = "pytest_httpx-0.29.0-py3-none-any.whl", hash = "sha256:7d6fd29042e7b98ed98199ded120bc8100c8078ca306952666e89bf8807b95ff"}, + {file = "pytest_httpx-0.29.0.tar.gz", hash = "sha256:ed08ed802e2b315b83cdd16f0b26cbb2b836c29e0fde5c18bc3105f1073e0332"}, ] [package.dependencies] httpx = "==0.26.*" -pytest = "==7.*" +pytest = ">=7,<9" [package.extras] testing = ["pytest-asyncio (==0.23.*)", "pytest-cov (==4.*)"] @@ -1860,6 +1858,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2033,18 +2032,16 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "1.5.1" +version = "2.0.0" description = "File transport adapter for Requests" optional = false python-versions = "*" files = [ - {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, - {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, + {file = "requests-file-2.0.0.tar.gz", hash = "sha256:20c5931629c558fda566cacc10cfe2cd502433e628f568c34c80d96a0cc95972"}, ] [package.dependencies] requests = ">=1.0.0" -six = "*" [[package]] name = "resolvelib" @@ -2101,6 +2098,17 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] +[[package]] +name = "socksio" +version = "1.0.0" +description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"}, + {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"}, +] + [[package]] name = "soupsieve" version = "2.5" @@ -2201,17 +2209,18 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2237,38 +2246,40 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file 
= "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = 
"watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -2423,4 +2434,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "55d756e108a8f225f7974eab2b98b7f1b3feeabfb23aed0107f7f0616b201bc6" +content-hash = 
"e9c476ba44a5968f7bd6c9759ac4c6f8e679384bd6b0dd4f128af873a68a34da" diff --git a/pyproject.toml b/pyproject.toml index c845bbc60..f16540fb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,12 +45,12 @@ aioconsole = "^0.6.2" pydantic = "^2.4.2" httpx = "^0.26.0" cloudcheck = "^2.1.0.181" -pytest-httpx = "^0.28.0" tldextract = "^5.1.1" +cachetools = "^5.3.2" +socksio = "^1.0.0" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" -black = "^23.1.0" pytest-cov = "^4.0.0" poetry-dynamic-versioning = "^0.21.4" pytest-rerunfailures = "^11.1.2" @@ -62,6 +62,8 @@ pytest-env = "^0.8.2" pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" +black = "^24.1.1" +pytest-httpx = "^0.29.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" @@ -89,7 +91,7 @@ extend-exclude = "(test_step_1/test_manager_*)" [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v1.1.5{% if branch == "dev" %}.{{ distance }}rc{% endif %}' +format-jinja = 'v1.1.6{% if branch == "dev" %}.{{ distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"]