diff --git a/bbot/__init__.py b/bbot/__init__.py index 8e016095f..8746d8131 100644 --- a/bbot/__init__.py +++ b/bbot/__init__.py @@ -1,2 +1,4 @@ # version placeholder (replaced by poetry-dynamic-versioning) __version__ = "v0.0.0" + +from .scanner import Scanner, Preset diff --git a/bbot/cli.py b/bbot/cli.py index 5cf2f9cd1..7910d2a36 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -44,8 +44,6 @@ async def _main(): # start by creating a default scan preset preset = Preset(_log=True, name="bbot_cli_main") - # populate preset symlinks - preset.all_presets # parse command line arguments and merge into preset try: preset.parse_args() diff --git a/bbot/core/config/files.py b/bbot/core/config/files.py index 6547d02ec..0f05c0b50 100644 --- a/bbot/core/config/files.py +++ b/bbot/core/config/files.py @@ -49,6 +49,7 @@ def _get_config(self, filename, name="config"): return OmegaConf.create() def get_custom_config(self): + self.ensure_config_file() return self._get_config(self.config_filename, name="config") def get_default_config(self): diff --git a/bbot/core/config/logger.py b/bbot/core/config/logger.py index a187af141..6a213d42d 100644 --- a/bbot/core/config/logger.py +++ b/bbot/core/config/logger.py @@ -172,6 +172,13 @@ def include_logger(self, logger): for handler in self.log_handlers.values(): self.add_log_handler(handler) + def stderr_filter(self, record): + if record.levelno == logging.TRACE and self.log_level > logging.DEBUG: + return False + if record.levelno < self.log_level: + return False + return True + @property def log_handlers(self): if self._log_handlers is None: @@ -189,16 +196,9 @@ def log_handlers(self): f"{log_dir}/bbot.debug.log", when="d", interval=1, backupCount=14 ) - def stderr_filter(record): - if record.levelno == logging.TRACE and self.log_level > logging.DEBUG: - return False - if record.levelno < self.log_level: - return False - return True - # Log to stderr stderr_handler = logging.StreamHandler(sys.stderr) - stderr_handler.addFilter(stderr_filter) + stderr_handler.addFilter(self.stderr_filter) # log to files debug_handler.addFilter(lambda x: x.levelno == logging.TRACE or (x.levelno < logging.VERBOSE)) main_handler.addFilter(lambda x: x.levelno != logging.TRACE and x.levelno >= logging.VERBOSE) diff --git a/bbot/core/core.py b/bbot/core/core.py index 1c43e5035..0a789c0ac 100644 --- a/bbot/core/core.py +++ b/bbot/core/core.py @@ -1,10 +1,10 @@ +import os import logging -import traceback from copy import copy -import multiprocessing from pathlib import Path from omegaconf import OmegaConf + DEFAULT_CONFIG = None @@ -22,27 +22,6 @@ class BBOTCore: - load quickly """ - class BBOTProcess(multiprocessing.Process): - - def __init__(self, *args, **kwargs): - self.logging_queue = kwargs.pop("logging_queue") - self.log_level = kwargs.pop("log_level") - super().__init__(*args, **kwargs) - - def run(self): - log = logging.getLogger("bbot.core.process") - try: - from bbot.core import CORE - - CORE.logger.setup_queue_handler(self.logging_queue, self.log_level) - super().run() - except KeyboardInterrupt: - log.warning(f"Got KeyboardInterrupt in {self.name}") - log.trace(traceback.format_exc()) - except BaseException as e: - log.warning(f"Error in {self.name}: {e}") - log.trace(traceback.format_exc()) - def __init__(self): self._logger = None self._files_config = None @@ -52,10 +31,6 @@ def __init__(self): self._config = None self._custom_config = None - # ensure bbot home dir - if not "home" in self.config: - self.custom_config["home"] = "~/.bbot" - # bare minimum == logging 
self.logger self.log = logging.getLogger("bbot.core") @@ -105,6 +80,9 @@ def default_config(self): global DEFAULT_CONFIG if DEFAULT_CONFIG is None: self.default_config = self.files_config.get_default_config() + # ensure bbot home dir + if not "home" in self.default_config: + self.default_config["home"] = "~/.bbot" return DEFAULT_CONFIG @default_config.setter @@ -166,7 +144,15 @@ def files_config(self): return self._files_config def create_process(self, *args, **kwargs): - process = self.BBOTProcess(*args, logging_queue=self.logger.queue, log_level=self.logger.log_level, **kwargs) + if os.environ.get("BBOT_TESTING", "") == "True": + import threading + + kwargs.pop("custom_name", None) + process = threading.Thread(*args, **kwargs) + else: + from .helpers.process import BBOTProcess + + process = BBOTProcess(*args, **kwargs) process.daemon = True return process diff --git a/bbot/core/engine.py b/bbot/core/engine.py index 24781ab3b..f9b601491 100644 --- a/bbot/core/engine.py +++ b/bbot/core/engine.py @@ -7,27 +7,59 @@ import tempfile import traceback import zmq.asyncio +import multiprocessing from pathlib import Path from contextlib import asynccontextmanager, suppress from bbot.core import CORE +from bbot.errors import BBOTEngineError from bbot.core.helpers.misc import rand_string CMD_EXIT = 1000 -class EngineClient: +error_sentinel = object() + + +class EngineBase: + + ERROR_CLASS = BBOTEngineError + + def __init__(self): + self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}") + + def pickle(self, obj): + try: + return pickle.dumps(obj) + except Exception as e: + self.log.error(f"Error serializing object: {obj}: {e}") + self.log.trace(traceback.format_exc()) + return error_sentinel + + def unpickle(self, binary): + try: + return pickle.loads(binary) + except Exception as e: + self.log.error(f"Error deserializing binary: {e}") + self.log.trace(f"Offending binary: {binary}") + self.log.trace(traceback.format_exc()) + return error_sentinel + + +class EngineClient(EngineBase): SERVER_CLASS = None def __init__(self, **kwargs): + super().__init__() self.name = f"EngineClient {self.__class__.__name__}" + self.process = None + self.process_name = multiprocessing.current_process().name if self.SERVER_CLASS is None: raise ValueError(f"Must set EngineClient SERVER_CLASS, {self.SERVER_CLASS}") self.CMDS = dict(self.SERVER_CLASS.CMDS) for k, v in list(self.CMDS.items()): self.CMDS[v] = k - self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}") self.socket_address = f"zmq_{rand_string(8)}.sock" self.socket_path = Path(tempfile.gettempdir()) / self.socket_address self.server_kwargs = kwargs.pop("server_kwargs", {}) @@ -35,68 +67,93 @@ def __init__(self, **kwargs): self.context = zmq.asyncio.Context() atexit.register(self.cleanup) - async def run_and_return(self, command, **kwargs): + def check_error(self, message): + if isinstance(message, dict) and len(message) == 1 and "_e" in message: + error, trace = message["_e"] + error = self.ERROR_CLASS(error) + error.engine_traceback = trace + raise error + return False + + async def run_and_return(self, command, *args, **kwargs): async with self.new_socket() as socket: - message = self.make_message(command, args=kwargs) - await socket.send(message) - binary = await socket.recv() + try: + message = self.make_message(command, args=args, kwargs=kwargs) + if message is error_sentinel: + return + await socket.send(message) + binary = await socket.recv() + except BaseException: + # -1 == special "cancel" signal + 
cancel_message = pickle.dumps({"c": -1}) + await socket.send(cancel_message) + raise # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") - message = pickle.loads(binary) + message = self.unpickle(binary) self.log.debug(f"{self.name}.{command}({kwargs}) got message: {message}") # error handling if self.check_error(message): return return message - async def run_and_yield(self, command, **kwargs): - message = self.make_message(command, args=kwargs) + async def run_and_yield(self, command, *args, **kwargs): + message = self.make_message(command, args=args, kwargs=kwargs) + if message is error_sentinel: + return async with self.new_socket() as socket: await socket.send(message) while 1: - binary = await socket.recv() - # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") - message = pickle.loads(binary) - self.log.debug(f"{self.name}.{command}({kwargs}) got message: {message}") - # error handling - if self.check_error(message) or self.check_stop(message): - break - yield message - - def check_error(self, message): - if isinstance(message, dict) and len(message) == 1 and "_e" in message: - error, trace = message["_e"] - self.log.error(error) - self.log.trace(trace) - return True - return False + try: + binary = await socket.recv() + # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}") + message = self.unpickle(binary) + self.log.debug(f"{self.name}.{command}({kwargs}) got message: {message}") + # error handling + if self.check_error(message) or self.check_stop(message): + break + yield message + except GeneratorExit: + # -1 == special "cancel" signal + cancel_message = pickle.dumps({"c": -1}) + await socket.send(cancel_message) + raise def check_stop(self, message): if isinstance(message, dict) and len(message) == 1 and "_s" in message: return True return False - def make_message(self, command, args): + def make_message(self, command, args=None, kwargs=None): try: cmd_id = self.CMDS[command] except KeyError: raise KeyError(f'Command "{command}" not found. 
Available commands: {",".join(self.available_commands)}') - return pickle.dumps(dict(c=cmd_id, a=args)) + message = {"c": cmd_id} + if args: + message["a"] = args + if kwargs: + message["k"] = kwargs + return pickle.dumps(message) @property def available_commands(self): return [s for s in self.CMDS if isinstance(s, str)] def start_server(self): - process = CORE.create_process( - target=self.server_process, - args=( - self.SERVER_CLASS, - self.socket_path, - ), - kwargs=self.server_kwargs, - ) - process.start() - return process + if self.process_name == "MainProcess": + self.process = CORE.create_process( + target=self.server_process, + args=( + self.SERVER_CLASS, + self.socket_path, + ), + kwargs=self.server_kwargs, + custom_name="bbot dnshelper", + ) + self.process.start() + return self.process + else: + raise BBOTEngineError(f"Tried to start server from process {self.process_name}") @staticmethod def server_process(server_class, socket_path, **kwargs): @@ -130,12 +187,12 @@ def cleanup(self): self.socket_path.unlink(missing_ok=True) -class EngineServer: +class EngineServer(EngineBase): CMDS = {} def __init__(self, socket_path): - self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}") + super().__init__() self.name = f"EngineServer {self.__class__.__name__}" if socket_path is not None: # create ZeroMQ context @@ -144,51 +201,105 @@ def __init__(self, socket_path): self.socket = self.context.socket(zmq.ROUTER) # create socket file self.socket.bind(f"ipc://{socket_path}") + # task <--> client id mapping + self.tasks = dict() - async def run_and_return(self, client_id, command_fn, **kwargs): - self.log.debug(f"{self.name} run-and-return {command_fn.__name__}({kwargs})") + async def run_and_return(self, client_id, command_fn, *args, **kwargs): try: - result = await command_fn(**kwargs) - except Exception as e: - error = f"Unhandled error in {self.name}.{command_fn.__name__}({kwargs}): {e}" - trace = traceback.format_exc() - result = {"_e": (error, trace)} - await self.send_socket_multipart([client_id, pickle.dumps(result)]) + self.log.debug(f"{self.name} run-and-return {command_fn.__name__}({args}, {kwargs})") + try: + result = await command_fn(*args, **kwargs) + except (asyncio.CancelledError, KeyboardInterrupt): + return + except BaseException as e: + error = f"Error in {self.name}.{command_fn.__name__}({args}, {kwargs}): {e}" + trace = traceback.format_exc() + self.log.debug(error) + self.log.debug(trace) + result = {"_e": (error, trace)} + finally: + self.tasks.pop(client_id, None) + await self.send_socket_multipart(client_id, result) + except BaseException as e: + self.log.critical( + f"Unhandled exception in {self.name}.run_and_return({client_id}, {command_fn}, {args}, {kwargs}): {e}" + ) + self.log.critical(traceback.format_exc()) - async def run_and_yield(self, client_id, command_fn, **kwargs): - self.log.debug(f"{self.name} run-and-yield {command_fn.__name__}({kwargs})") + async def run_and_yield(self, client_id, command_fn, *args, **kwargs): try: - async for _ in command_fn(**kwargs): - await self.send_socket_multipart([client_id, pickle.dumps(_)]) - await self.send_socket_multipart([client_id, pickle.dumps({"_s": None})]) - except Exception as e: - error = f"Unhandled error in {self.name}.{command_fn.__name__}({kwargs}): {e}" - trace = traceback.format_exc() - result = {"_e": (error, trace)} - await self.send_socket_multipart([client_id, pickle.dumps(result)]) + self.log.debug(f"{self.name} run-and-yield {command_fn.__name__}({args}, {kwargs})") + try: + async 
for _ in command_fn(*args, **kwargs): + await self.send_socket_multipart(client_id, _) + await self.send_socket_multipart(client_id, {"_s": None}) + except (asyncio.CancelledError, KeyboardInterrupt): + return + except BaseException as e: + error = f"Error in {self.name}.{command_fn.__name__}({args}, {kwargs}): {e}" + trace = traceback.format_exc() + self.log.debug(error) + self.log.debug(trace) + result = {"_e": (error, trace)} + await self.send_socket_multipart(client_id, result) + finally: + self.tasks.pop(client_id, None) + except BaseException as e: + self.log.critical( + f"Unhandled exception in {self.name}.run_and_yield({client_id}, {command_fn}, {args}, {kwargs}): {e}" + ) + self.log.critical(traceback.format_exc()) - async def send_socket_multipart(self, *args, **kwargs): + async def send_socket_multipart(self, client_id, message): try: - await self.socket.send_multipart(*args, **kwargs) + message = pickle.dumps(message) + await self.socket.send_multipart([client_id, message]) except Exception as e: self.log.warning(f"Error sending ZMQ message: {e}") self.log.trace(traceback.format_exc()) + def check_error(self, message): + if message is error_sentinel: + return True + async def worker(self): try: while 1: client_id, binary = await self.socket.recv_multipart() - message = pickle.loads(binary) + message = self.unpickle(binary) self.log.debug(f"{self.name} got message: {message}") + if self.check_error(message): + continue cmd = message.get("c", None) if not isinstance(cmd, int): self.log.warning(f"No command sent in message: {message}") continue - kwargs = message.get("a", {}) + if cmd == -1: + task = self.tasks.get(client_id, None) + if task is None: + continue + task, _cmd, _args, _kwargs = task + self.log.debug(f"Cancelling client id {client_id} (task: {task})") + task.cancel() + try: + await task + except (KeyboardInterrupt, asyncio.CancelledError): + pass + except BaseException as e: + self.log.error(f"Unhandled error in {_cmd}({_args}, {_kwargs}): {e}") + self.log.trace(traceback.format_exc()) + self.tasks.pop(client_id, None) + continue + + args = message.get("a", ()) + if not isinstance(args, tuple): + self.log.warning(f"{self.name}: received invalid args of type {type(args)}, should be tuple") + continue + kwargs = message.get("k", {}) if not isinstance(kwargs, dict): - self.log.warning(f"{self.name}: received invalid message of type {type(kwargs)}, should be dict") + self.log.warning(f"{self.name}: received invalid kwargs of type {type(kwargs)}, should be dict") continue command_name = self.CMDS[cmd] @@ -199,11 +310,12 @@ async def worker(self): continue if inspect.isasyncgenfunction(command_fn): - coroutine = self.run_and_yield(client_id, command_fn, **kwargs) + coroutine = self.run_and_yield(client_id, command_fn, *args, **kwargs) else: - coroutine = self.run_and_return(client_id, command_fn, **kwargs) + coroutine = self.run_and_return(client_id, command_fn, *args, **kwargs) - asyncio.create_task(coroutine) + task = asyncio.create_task(coroutine) + self.tasks[client_id] = task, command_fn, args, kwargs except Exception as e: self.log.error(f"Error in EngineServer worker: {e}") self.log.trace(traceback.format_exc()) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index c5f4d578b..c715efcba 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1,11 +1,11 @@ import re import json import logging +import datetime import ipaddress import traceback from copy import copy from typing import Optional -from datetime import datetime from contextlib 
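Aside for reviewers: a compact sketch of the wire protocol the EngineClient/EngineServer changes above settle on, inferred from make_message(), check_stop(), check_error() and worker(); the command id and query values are illustrative, not part of the diff.

    import pickle

    # client -> server request: command id plus optional positional/keyword args
    request = {"c": 2, "a": ("www.evilcorp.com",), "k": {"rdtype": "A"}}
    wire = pickle.dumps(request)

    # special messages as this PR defines them:
    cancel = pickle.dumps({"c": -1})                   # client -> server: cancel the task for this client id
    stop = {"_s": None}                                # server -> client: async generator is exhausted
    error = {"_e": ("error text", "traceback text")}   # server -> client: re-raised client-side as ERROR_CLASS
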
import suppress from urllib.parse import urljoin from radixtarget import RadixTarget @@ -15,7 +15,6 @@ from bbot.errors import * from bbot.core.helpers import ( extract_words, - get_file_extension, is_domain, is_subdomain, is_ip, @@ -29,6 +28,7 @@ split_host_port, tagify, validators, + get_file_extension, ) @@ -65,8 +65,8 @@ class BaseEvent: scan (Scanner): The scan object that generated the event. timestamp (datetime.datetime): The time at which the data was discovered. resolved_hosts (list of str): List of hosts to which the event data resolves, applicable for URLs and DNS names. - source (BaseEvent): The source event that led to the discovery of this event. - source_id (str): The `id` attribute of the source event. + parent (BaseEvent): The parent event that led to the discovery of this event. + parent_id (str): The `id` attribute of the parent event. tags (set of str): Descriptive tags for the event, e.g., `mx-record`, `in-scope`. module (BaseModule): The module that discovered the event. module_sequence (str): The sequence of modules that participated in the discovery. @@ -82,7 +82,7 @@ class BaseEvent: "scan": "SCAN:4d786912dbc97be199da13074699c318e2067a7f", "timestamp": 1688526222.723366, "resolved_hosts": ["185.199.108.153"], - "source": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7", + "parent": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7", "tags": ["in-scope", "distance-0", "dir", "ip-185-199-108-153", "status-301", "http-title-301-moved-permanently"], "module": "httpx", "module_sequence": "httpx" @@ -99,8 +99,6 @@ class BaseEvent: _quick_emit = False # Whether this event has been retroactively marked as part of an important discovery chain _graph_important = False - # Exclude from output modules - _omit = False # Disables certain data validations _dummy = False # Data validation, if data is a dictionary @@ -112,7 +110,8 @@ def __init__( self, data, event_type, - source=None, + parent=None, + context=None, module=None, scan=None, scans=None, @@ -131,7 +130,7 @@ def __init__( Attributes: data (str, dict): The primary data for the event. event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. - source (BaseEvent, optional): Source event that led to this event's discovery. Defaults to None. + parent (BaseEvent, optional): Parent event that led to this event's discovery. Defaults to None. module (str, optional): Module that discovered the event. Defaults to None. scan (Scan, optional): BBOT Scan object. Required unless _dummy is True. Defaults to None. scans (list of Scan, optional): BBOT Scan objects, used primarily when unserializing an Event from the database. Defaults to None. @@ -142,13 +141,14 @@ def __init__( _internal (Any, optional): If specified, makes the event internal. Defaults to None. Raises: - ValidationError: If either `scan` or `source` are not specified and `_dummy` is False. + ValidationError: If either `scan` or `parent` are not specified and `_dummy` is False. 
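Since the source-to-parent rename touches every module call site, here is a minimal before/after sketch (names illustrative; assumes a module context where self.make_event is available, mirroring the docstring examples further down in this diff):

    # before
    ip = self.make_event("1.2.3.4", "IP_ADDRESS", source=event)

    # after (this PR), optionally with a discovery-context template;
    # {module} and {event.*} are filled in by the new discovery_context setter
    ip = self.make_event(
        "1.2.3.4",
        "IP_ADDRESS",
        parent=event,
        context="{module} discovered {event.type}: {event.data}",
    )
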
""" self._id = None self._hash = None self.__host = None self._port = None + self._omit = False self.__words = None self._priority = None self._host_original = None @@ -159,7 +159,13 @@ def __init__( # keep track of whether this event has been recorded by the scan self._stats_recorded = False - self.timestamp = datetime.utcnow() + if timestamp is not None: + self.timestamp = timestamp + else: + try: + self.timestamp = datetime.datetime.now(datetime.UTC) + except AttributeError: + self.timestamp = datetime.datetime.utcnow() self._tags = set() if tags is not None: @@ -169,7 +175,7 @@ def __init__( self._type = event_type self.confidence = int(confidence) - # for creating one-off events without enforcing source requirement + # for creating one-off events without enforcing parent requirement self._dummy = _dummy self._internal = False @@ -185,9 +191,6 @@ def __init__( if self.scan: self.scans = list(set([self.scan.id] + self.scans)) - # check type blacklist - self._check_omit() - self._scope_distance = -1 try: @@ -199,20 +202,26 @@ def __init__( if not self.data: raise ValidationError(f'Invalid event data "{data}" for type "{self.type}"') - self._source = None - self._source_id = None - self.source = source - if (not self.source) and (not self._dummy): - raise ValidationError(f"Must specify event source") + self._parent = None + self._parent_id = None + self.parent = parent + if (not self.parent) and (not self._dummy): + raise ValidationError(f"Must specify event parent") # internal events are not ingested by output modules if not self._dummy: # removed this second part because it was making certain sslcert events internal - if _internal: # or source._internal: + if _internal: # or parent._internal: self.internal = True # inherit web spider distance from parent - self.web_spider_distance = getattr(self.source, "web_spider_distance", 0) + self.web_spider_distance = getattr(self.parent, "web_spider_distance", 0) + + if not context: + context = getattr(self.module, "default_discovery_context", "") + self._discovery_context = "" + if context: + self.discovery_context = context @property def data(self): @@ -297,12 +306,12 @@ def host_original(self): @property def port(self): self.host - if getattr(self, "parsed", None): - if self.parsed.port is not None: - return self.parsed.port - elif self.parsed.scheme == "https": + if getattr(self, "parsed_url", None): + if self.parsed_url.port is not None: + return self.parsed_url.port + elif self.parsed_url.scheme == "https": return 443 - elif self.parsed.scheme == "http": + elif self.parsed_url.scheme == "http": return 80 return self._port @@ -317,6 +326,26 @@ def host_stem(self): else: return f"{self.host}" + @property + def discovery_context(self): + return self._discovery_context + + @discovery_context.setter + def discovery_context(self, context): + try: + self._discovery_context = context.format(module=self.module, event=self) + except Exception as e: + log.warning(f"Error formatting discovery context for {self}: {e}") + self._discovery_context = context + + @property + def discovery_path(self): + """ + This event's full discovery context, including those of all its parents + """ + full_event_chain = list(reversed(self.get_parents())) + [self] + return [e.discovery_context for e in full_event_chain if e.type != "SCAN"] + @property def words(self): if self.__words is None: @@ -402,9 +431,9 @@ def scope_distance(self, scope_distance): self.add_tag(f"distance-{new_scope_distance}") self._scope_distance = new_scope_distance # apply recursively to parent 
events - source_scope_distance = getattr(self.source, "scope_distance", -1) - if source_scope_distance >= 0 and self != self.source: - self.source.scope_distance = scope_distance + 1 + parent_scope_distance = getattr(self.parent, "scope_distance", -1) + if parent_scope_distance >= 0 and self != self.parent: + self.parent.scope_distance = scope_distance + 1 @property def scope_description(self): @@ -420,71 +449,73 @@ def scope_description(self): return f"distance-{self.scope_distance}" @property - def source(self): - return self._source + def parent(self): + return self._parent - @source.setter - def source(self, source): + @parent.setter + def parent(self, parent): """ - Setter for the source attribute, ensuring it's a valid event and updating scope distance. + Setter for the parent attribute, ensuring it's a valid event and updating scope distance. - Sets the source of the event and automatically adjusts the scope distance based on the source event's - scope distance. The scope distance is incremented by 1 if the host of the source event is different + Sets the parent of the event and automatically adjusts the scope distance based on the parent event's + scope distance. The scope distance is incremented by 1 if the host of the parent event is different from the current event's host. Parameters: - source (BaseEvent): The new source event to set. Must be a valid event object. + parent (BaseEvent): The new parent event to set. Must be a valid event object. Note: - If an invalid source is provided and the event is not a dummy, a warning will be logged. + If an invalid parent is provided and the event is not a dummy, a warning will be logged. """ - if is_event(source): - self._source = source - hosts_are_same = self.host and (self.host == source.host) - if source.scope_distance >= 0: - new_scope_distance = int(source.scope_distance) + if is_event(parent): + self._parent = parent + hosts_are_same = self.host and (self.host == parent.host) + if parent.scope_distance >= 0: + new_scope_distance = int(parent.scope_distance) # only increment the scope distance if the host changes if self._scope_distance_increment_same_host or not hosts_are_same: new_scope_distance += 1 self.scope_distance = new_scope_distance # inherit certain tags if hosts_are_same: - for t in source.tags: + for t in parent.tags: if t == "affiliate": self.add_tag("affiliate") elif t.startswith("mutation-"): self.add_tag(t) elif not self._dummy: - log.warning(f"Tried to set invalid source on {self}: (got: {source})") + log.warning(f"Tried to set invalid parent on {self}: (got: {parent})") @property - def source_id(self): - source_id = getattr(self.get_source(), "id", None) - if source_id is not None: - return source_id - return self._source_id + def parent_id(self): + parent_id = getattr(self.get_parent(), "id", None) + if parent_id is not None: + return parent_id + return self._parent_id - def get_source(self): + def get_parent(self): """ Takes into account events with the _omit flag """ - if getattr(self.source, "_omit", False): - return self.source.get_source() - return self.source + if getattr(self.parent, "_omit", False): + return self.parent.get_parent() + return self.parent - def get_sources(self, omit=False): - sources = [] + def get_parents(self, omit=False): + parents = [] e = self while 1: if omit: - source = e.get_source() + parent = e.get_parent() else: - source = e.source - if e == source: + parent = e.parent + if parent is None: + break + if e == parent: break - sources.append(source) - e = source - return sources + 
parents.append(parent) + e = parent + return parents def _host(self): return "" @@ -612,11 +643,13 @@ def json(self, mode="json", siem_friendly=False): Returns: dict: JSON-serializable dictionary representation of the event object. """ + # type, ID, scope description j = dict() for i in ("type", "id", "scope_description"): v = getattr(self, i, "") if v: j.update({i: v}) + # event data data_attr = getattr(self, f"data_{mode}", None) if data_attr is not None: data = data_attr @@ -626,30 +659,44 @@ def json(self, mode="json", siem_friendly=False): j["data"] = {self.type: data} else: j["data"] = data + # host, dns children + if self.host: + j["host"] = str(self.host) + j["resolved_hosts"] = sorted(str(h) for h in self.resolved_hosts) + j["dns_children"] = {k: list(v) for k, v in self.dns_children.items()} + # web spider distance web_spider_distance = getattr(self, "web_spider_distance", None) if web_spider_distance is not None: j["web_spider_distance"] = web_spider_distance + # scope distance j["scope_distance"] = self.scope_distance + # scan if self.scan: j["scan"] = self.scan.id + # timestamp j["timestamp"] = self.timestamp.timestamp() - if self.host: - j["resolved_hosts"] = sorted(str(h) for h in self.resolved_hosts) - source_id = self.source_id - if source_id: - j["source"] = source_id + # parent event + parent_id = self.parent_id + if parent_id: + j["parent"] = parent_id + # tags if self.tags: j.update({"tags": list(self.tags)}) + # parent module if self.module: j.update({"module": str(self.module)}) + # sequence of modules that led to discovery if self.module_sequence: j.update({"module_sequence": str(self.module_sequence)}) + # discovery context + j["discovery_context"] = self.discovery_context + j["discovery_path"] = self.discovery_path # normalize non-primitive python objects for k, v in list(j.items()): if k == "data": continue - if type(v) not in (str, int, float, bool, list, type(None)): + if type(v) not in (str, int, float, bool, list, dict, type(None)): try: j[k] = json.dumps(v, sort_keys=True) except Exception: @@ -676,14 +723,14 @@ def module_sequence(self): """ Get a human-friendly string that represents the sequence of modules responsible for generating this event. - Includes the names of omitted source events to provide a complete view of the module sequence leading to this event. + Includes the names of omitted parent events to provide a complete view of the module sequence leading to this event. Returns: str: The module sequence in human-friendly format. 
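How the renamed parent helpers feed the new discovery_path property, per the definitions above (a rough sketch, not code from the diff):

    parents = event.get_parents()   # [parent, grandparent, ...], nearest-first; get_parent() skips _omit'd events
    # discovery_path reverses that chain, appends the event itself, and drops SCAN events,
    # yielding one discovery_context string per hop, oldest first
    path = event.discovery_path
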
""" module_name = getattr(self.module, "name", "") - if getattr(self.source, "_omit", False): - module_name = f"{self.source.module_sequence}->{module_name}" + if getattr(self.parent, "_omit", False): + module_name = f"{self.parent.module_sequence}->{module_name}" return module_name @property @@ -701,10 +748,10 @@ def module_priority(self, priority): def priority(self): if self._priority is None: timestamp = self.timestamp.timestamp() - if self.source.timestamp == self.timestamp: + if self.parent.timestamp == self.timestamp: self._priority = (timestamp,) else: - self._priority = getattr(self.source, "priority", ()) + (timestamp,) + self._priority = getattr(self.parent, "priority", ()) + (timestamp,) return self._priority @@ -717,13 +764,6 @@ def type(self, val): self._type = val self._hash = None self._id = None - self._check_omit() - - def _check_omit(self): - if self.scan is not None: - omit_event_types = self.scan.config.get("omit_event_types", []) - if omit_event_types and self.type in omit_event_types: - self._omit = True def __iter__(self): """ @@ -783,7 +823,7 @@ class DictEvent(BaseEvent): def sanitize_data(self, data): url = data.get("url", "") if url: - self.parsed = validators.validate_url_parsed(url) + self.parsed_url = validators.validate_url_parsed(url) return data def _data_load(self, data): @@ -797,7 +837,7 @@ def _host(self): if isinstance(self.data, dict) and "host" in self.data: return make_ip_type(self.data["host"]) else: - parsed = getattr(self, "parsed") + parsed = getattr(self, "parsed_url", None) if parsed is not None: return make_ip_type(parsed.hostname) @@ -825,7 +865,7 @@ def __init__(self, *args, **kwargs): self.add_tag(f"ipv{ip.version}") if ip.is_private: self.add_tag("private-ip") - self.dns_resolve_distance = getattr(self.source, "dns_resolve_distance", 0) + self.dns_resolve_distance = getattr(self.parent, "dns_resolve_distance", 0) def sanitize_data(self, data): return validators.validate_host(data) @@ -839,14 +879,14 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # prevent runaway DNS entries self.dns_resolve_distance = 0 - source = getattr(self, "source", None) + parent = getattr(self, "parent", None) module = getattr(self, "module", None) module_type = getattr(module, "_type", "") - source_module = getattr(source, "module", None) - source_module_type = getattr(source_module, "_type", "") + parent_module = getattr(parent, "module", None) + parent_module_type = getattr(parent_module, "_type", "") if module_type == "DNS": - self.dns_resolve_distance = getattr(source, "dns_resolve_distance", 0) - if source_module_type == "DNS": + self.dns_resolve_distance = getattr(parent, "dns_resolve_distance", 0) + if parent_module_type == "DNS": self.dns_resolve_distance += 1 # self.add_tag(f"resolve-distance-{self.dns_resolve_distance}") @@ -913,47 +953,42 @@ def __init__(self, *args, **kwargs): # increment the web spider distance if self.type == "URL_UNVERIFIED": self.web_spider_distance += 1 - self.num_redirects = getattr(self.source, "num_redirects", 0) + self.num_redirects = getattr(self.parent, "num_redirects", 0) def sanitize_data(self, data): - self.parsed = validators.validate_url_parsed(data) + self.parsed_url = validators.validate_url_parsed(data) + + # special handling of URL extensions + if self.parsed_url is not None: + url_path = self.parsed_url.path + if url_path: + parsed_path_lower = str(url_path).lower() + extension = get_file_extension(parsed_path_lower) + if extension: + self.url_extension = extension + 
self.add_tag(f"extension-{extension}") # tag as dir or endpoint - if str(self.parsed.path).endswith("/"): + if str(self.parsed_url.path).endswith("/"): self.add_tag("dir") else: self.add_tag("endpoint") - parsed_path_lower = str(self.parsed.path).lower() - - scan = getattr(self, "scan", None) - url_extension_blacklist = getattr(scan, "url_extension_blacklist", []) - url_extension_httpx_only = getattr(scan, "url_extension_httpx_only", []) - - extension = get_file_extension(parsed_path_lower) - if extension: - self.add_tag(f"extension-{extension}") - if extension in url_extension_blacklist: - self.add_tag("blacklisted") - if extension in url_extension_httpx_only: - self.add_tag("httpx-only") - self._omit = True - - data = self.parsed.geturl() + data = self.parsed_url.geturl() return data def with_port(self): netloc_with_port = make_netloc(self.host, self.port) - return self.parsed._replace(netloc=netloc_with_port) + return self.parsed_url._replace(netloc=netloc_with_port) def _words(self): - first_elem = self.parsed.path.lstrip("/").split("/")[0] + first_elem = self.parsed_url.path.lstrip("/").split("/")[0] if not "." in first_elem: return extract_words(first_elem) return set() def _host(self): - return make_ip_type(self.parsed.hostname) + return make_ip_type(self.parsed_url.hostname) def _data_id(self): # consider spider-danger tag when deduping @@ -1027,14 +1062,14 @@ class HTTP_RESPONSE(URL_UNVERIFIED, DictEvent): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # count number of consecutive redirects - self.num_redirects = getattr(self.source, "num_redirects", 0) + self.num_redirects = getattr(self.parent, "num_redirects", 0) if str(self.http_status).startswith("3"): self.num_redirects += 1 def sanitize_data(self, data): url = data.get("url", "") - self.parsed = validators.validate_url_parsed(url) - data["url"] = self.parsed.geturl() + self.parsed_url = validators.validate_url_parsed(url) + data["url"] = self.parsed_url.geturl() header_dict = {} for i in data.get("raw_header", "").splitlines(): @@ -1082,7 +1117,7 @@ def redirect_location(self): # if there's no scheme (i.e. it's a relative redirect) if not scheme: # then join the location with the current url - location = urljoin(self.parsed.geturl(), location) + location = urljoin(self.parsed_url.geturl(), location) return location @@ -1235,7 +1270,8 @@ def _pretty_string(self): def make_event( data, event_type=None, - source=None, + parent=None, + context=None, module=None, scan=None, scans=None, @@ -1254,7 +1290,8 @@ def make_event( Parameters: data (Union[str, dict, BaseEvent]): The primary data for the event or an existing event object. event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. Auto-detected if not provided. - source (BaseEvent, optional): Source event leading to this event's discovery. + parent (BaseEvent, optional): Parent event leading to this event's discovery. + context (str, optional): Description of circumstances leading to event's discovery. module (str, optional): Module that discovered the event. scan (Scan, optional): BBOT Scan object associated with the event. scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization. @@ -1271,11 +1308,11 @@ def make_event( Examples: If inside a module, e.g. 
from within its `handle_event()`: - >>> self.make_event("1.2.3.4", source=event) - IP_ADDRESS("1.2.3.4", module=nmap, tags={'ipv4', 'distance-1'}) + >>> self.make_event("1.2.3.4", parent=event) + IP_ADDRESS("1.2.3.4", module=portscan, tags={'ipv4', 'distance-1'}) If you're outside a module but you have a scan object: - >>> scan.make_event("1.2.3.4", source=scan.root_event) + >>> scan.make_event("1.2.3.4", parent=scan.root_event) IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'}) If you're outside a scan and just messing around: @@ -1303,8 +1340,10 @@ make_event( data.scans = scans if module is not None: data.module = module - if source is not None: - data.source = source + if parent is not None: + data.parent = parent + if context is not None: + data.discovery_context = context if internal == True: data.internal = True if tags: @@ -1346,7 +1385,8 @@ def make_event( return event_class( data, event_type=event_type, - source=source, + parent=parent, + context=context, module=module, scan=scan, scans=scans, @@ -1386,6 +1426,7 @@ def event_from_json(j, siem_friendly=False): "scans": j.get("scans", []), "tags": j.get("tags", []), "confidence": j.get("confidence", 5), + "context": j.get("discovery_context", None), "dummy": True, } if siem_friendly: @@ -1398,11 +1439,11 @@ resolved_hosts = j.get("resolved_hosts", []) event._resolved_hosts = set(resolved_hosts) - event.timestamp = datetime.fromtimestamp(j["timestamp"]) + event.timestamp = datetime.datetime.fromtimestamp(j["timestamp"]) event.scope_distance = j["scope_distance"] - source_id = j.get("source", None) - if source_id is not None: - event._source_id = source_id + parent_id = j.get("parent", None) + if parent_id is not None: + event._parent_id = parent_id return event except KeyError as e: raise ValidationError(f"Event missing required field: {e}") diff --git a/bbot/core/helpers/bloom.py b/bbot/core/helpers/bloom.py new file mode 100644 index 000000000..357c715c0 --- /dev/null +++ b/bbot/core/helpers/bloom.py @@ -0,0 +1,71 @@ +import os +import mmh3 +import mmap + + +class BloomFilter: + """ + Simple bloom filter implementation capable of roughly 400K lookups/s. + + BBOT uses bloom filters in scenarios like DNS brute-forcing, where it's useful to keep track + of which mutations have been tried so far. + + A 100-megabyte bloom filter (800M bits) can store 10M entries with a .01% false-positive rate. + A python hash is 36 bytes. So if you wanted to store these in a set, this would take up + 36 * 10M * 2 (key+value) == 720 megabytes. So we save roughly 7 times the space.
+ """ + + def __init__(self, size=8000000): + self.size = size # total bits + self.byte_size = (size + 7) // 8 # calculate byte size needed for the given number of bits + + # Create an anonymous mmap region, compatible with both Windows and Unix + if os.name == "nt": # Windows + # -1 indicates an anonymous memory map in Windows + self.mmap_file = mmap.mmap(-1, self.byte_size) + else: # Unix/Linux + # Use MAP_ANONYMOUS along with MAP_SHARED + self.mmap_file = mmap.mmap(-1, self.byte_size, prot=mmap.PROT_WRITE, flags=mmap.MAP_ANON | mmap.MAP_SHARED) + + self.clear_all_bits() + + def add(self, item): + for hash_value in self._hashes(item): + index = hash_value // 8 + position = hash_value % 8 + current_byte = self.mmap_file[index] + self.mmap_file[index] = current_byte | (1 << position) + + def check(self, item): + for hash_value in self._hashes(item): + index = hash_value // 8 + position = hash_value % 8 + current_byte = self.mmap_file[index] + if not (current_byte & (1 << position)): + return False + return True + + def clear_all_bits(self): + self.mmap_file.seek(0) + # Write zeros across the entire mmap length + self.mmap_file.write(b"\x00" * self.byte_size) + + def _hashes(self, item): + if not isinstance(item, bytes): + if not isinstance(item, str): + item = str(item) + item = item.encode("utf-8") + return [abs(hash(item)) % self.size, abs(mmh3.hash(item)) % self.size, abs(self._fnv1a_hash(item)) % self.size] + + def _fnv1a_hash(self, data): + hash = 0x811C9DC5 # 2166136261 + for byte in data: + hash ^= byte + hash = (hash * 0x01000193) % 2**32 # 16777619 + return hash + + def __del__(self): + self.mmap_file.close() + + def __contains__(self, item): + return self.check(item) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index 06fc8a91f..a6afaada6 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -2,6 +2,7 @@ import asyncio import logging import traceback +from signal import SIGINT from subprocess import CompletedProcess, CalledProcessError from .misc import smart_decode, smart_encode @@ -9,7 +10,7 @@ log = logging.getLogger("bbot.core.helpers.command") -async def run(self, *command, check=False, text=True, **kwargs): +async def run(self, *command, check=False, text=True, idle_timeout=None, **kwargs): """Runs a command asynchronously and gets its output as a string. This method is a simple helper for executing a command and capturing its output. @@ -20,6 +21,7 @@ async def run(self, *command, check=False, text=True, **kwargs): check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. Defaults to False. text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + idle_timeout (int, optional): Sets a limit on the number of seconds the process can run before throwing a TimeoutError **kwargs (dict): Additional keyword arguments for the subprocess. 
Returns: @@ -36,6 +38,7 @@ async def run(self, *command, check=False, text=True, **kwargs): # proc_tracker optionally keeps track of which processes are running under which modules # this allows for graceful SIGINTing of a module's processes in the case when it's killed proc_tracker = kwargs.pop("_proc_tracker", set()) + log_stderr = kwargs.pop("_log_stderr", True) proc, _input, command = await self._spawn_proc(*command, **kwargs) if proc is not None: proc_tracker.add(proc) @@ -45,7 +48,15 @@ async def run(self, *command, check=False, text=True, **kwargs): _input = b"\n".join(smart_encode(i) for i in _input) + b"\n" else: _input = smart_encode(_input) - stdout, stderr = await proc.communicate(_input) + + try: + if idle_timeout is not None: + stdout, stderr = await asyncio.wait_for(proc.communicate(_input), timeout=idle_timeout) + else: + stdout, stderr = await proc.communicate(_input) + except asyncio.exceptions.TimeoutError: + proc.send_signal(SIGINT) + raise # surface stderr if text: @@ -56,7 +67,7 @@ async def run(self, *command, check=False, text=True, **kwargs): if proc.returncode: if check: raise CalledProcessError(proc.returncode, command, output=stdout, stderr=stderr) - if stderr: + if stderr and log_stderr: command_str = " ".join(command) log.warning(f"Stderr for run({command_str}):\n\t{stderr}") @@ -65,7 +76,7 @@ async def run(self, *command, check=False, text=True, **kwargs): proc_tracker.remove(proc) -async def run_live(self, *command, check=False, text=True, **kwargs): +async def run_live(self, *command, check=False, text=True, idle_timeout=None, **kwargs): """Runs a command asynchronously and iterates through its output line by line in realtime. This method is useful for executing a command and capturing its output on-the-fly, as it is generated. @@ -76,6 +87,7 @@ async def run_live(self, *command, check=False, text=True, **kwargs): check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. Defaults to False. text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + idle_timeout (int, optional): Sets a limit on the number of seconds the process can remain idle (no lines sent to stdout) before throwing a TimeoutError **kwargs (dict): Additional keyword arguments for the subprocess. 
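On the new idle_timeout parameter: for run() it bounds the whole communicate() call, while for run_live() it bounds the gap between output lines; in both cases the child is sent SIGINT and the TimeoutError is re-raised to the caller. A hypothetical module-side use, assuming the usual module wrappers forward kwargs to these helpers (command and helper names illustrative):

    import asyncio

    try:
        result = await self.run_process("massdns", "--help", idle_timeout=30)
    except asyncio.TimeoutError:
        self.warning("command ran for more than 30 seconds and was interrupted")
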
Yields: @@ -92,6 +104,7 @@ async def run_live(self, *command, check=False, text=True, **kwargs): # proc_tracker optionally keeps track of which processes are running under which modules # this allows for graceful SIGINTing of a module's processes in the case when it's killed proc_tracker = kwargs.pop("_proc_tracker", set()) + log_stderr = kwargs.pop("_log_stderr", True) proc, _input, command = await self._spawn_proc(*command, **kwargs) if proc is not None: proc_tracker.add(proc) @@ -102,7 +115,13 @@ async def run_live(self, *command, check=False, text=True, **kwargs): while 1: try: - line = await proc.stdout.readline() + if idle_timeout is not None: + line = await asyncio.wait_for(proc.stdout.readline(), timeout=idle_timeout) + else: + line = await proc.stdout.readline() + except asyncio.exceptions.TimeoutError: + proc.send_signal(SIGINT) + raise except ValueError as e: command_str = " ".join([str(c) for c in command]) log.warning(f"Error executing command {command_str}: {e}") @@ -134,7 +153,7 @@ async def run_live(self, *command, check=False, text=True, **kwargs): if check: raise CalledProcessError(proc.returncode, command, output=stdout, stderr=stderr) # surface stderr - if stderr: + if stderr and log_stderr: command_str = " ".join(command) log.warning(f"Stderr for run_live({command_str}):\n\t{stderr}") finally: diff --git a/bbot/core/helpers/dns/brute.py b/bbot/core/helpers/dns/brute.py new file mode 100644 index 000000000..c34e96610 --- /dev/null +++ b/bbot/core/helpers/dns/brute.py @@ -0,0 +1,180 @@ +import json +import random +import asyncio +import logging +import subprocess + + +class DNSBrute: + """ + Helper for DNS brute-forcing. + + Examples: + >>> domain = "evilcorp.com" + >>> subdomains = ["www", "mail"] + >>> results = await self.helpers.dns.brute(self, domain, subdomains) + """ + + nameservers_url = ( + "https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt" + ) + + def __init__(self, parent_helper): + self.parent_helper = parent_helper + self.log = logging.getLogger("bbot.helper.dns.brute") + self.num_canaries = 100 + self.max_resolvers = self.parent_helper.config.get("dns", {}).get("brute_threads", 1000) + self.devops_mutations = list(self.parent_helper.word_cloud.devops_mutations) + self.digit_regex = self.parent_helper.re.compile(r"\d+") + self._resolver_file = None + self._dnsbrute_lock = asyncio.Lock() + + async def __call__(self, *args, **kwargs): + return await self.dnsbrute(*args, **kwargs) + + async def dnsbrute(self, module, domain, subdomains, type=None): + subdomains = list(subdomains) + + if type is None: + type = "A" + type = str(type).strip().upper() + + domain_wildcard_rdtypes = set() + for _domain, rdtypes in (await self.parent_helper.dns.is_wildcard_domain(domain)).items(): + for rdtype, results in rdtypes.items(): + if results: + domain_wildcard_rdtypes.add(rdtype) + if any([r in domain_wildcard_rdtypes for r in (type, "CNAME")]): + self.log.info( + f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(domain_wildcard_rdtypes)})" + ) + return [] + else: + self.log.trace(f"{domain}: A is not in domain_wildcard_rdtypes:{domain_wildcard_rdtypes}") + + canaries = self.gen_random_subdomains(self.num_canaries) + canaries_list = list(canaries) + canaries_pre = canaries_list[: int(self.num_canaries / 2)] + canaries_post = canaries_list[int(self.num_canaries / 2) :] + # sandwich subdomains between canaries + subdomains = canaries_pre + subdomains + canaries_post + + results = [] + canaries_triggered = 
[] + async for hostname, ip, rdtype in self._massdns(module, domain, subdomains, rdtype=type): + sub = hostname.split(domain)[0] + if sub in canaries: + canaries_triggered.append(sub) + else: + results.append(hostname) + + if len(canaries_triggered) > 5: + self.log.info( + f"Aborting massdns on {domain} due to false positive: ({len(canaries_triggered):,} canaries triggered - {','.join(canaries_triggered)})" + ) + return [] + + # everything checks out + return results + + async def _massdns(self, module, domain, subdomains, rdtype): + """ + { + "name": "www.blacklanternsecurity.com.", + "type": "A", + "class": "IN", + "status": "NOERROR", + "data": { + "answers": [ + { + "ttl": 3600, + "type": "CNAME", + "class": "IN", + "name": "www.blacklanternsecurity.com.", + "data": "blacklanternsecurity.github.io." + }, + { + "ttl": 3600, + "type": "A", + "class": "IN", + "name": "blacklanternsecurity.github.io.", + "data": "185.199.108.153" + } + ] + }, + "resolver": "168.215.165.186:53" + } + """ + resolver_file = await self.resolver_file() + command = ( + "massdns", + "-r", + resolver_file, + "-s", + self.max_resolvers, + "-t", + rdtype, + "-o", + "J", + "-q", + ) + subdomains = self.gen_subdomains(subdomains, domain) + hosts_yielded = set() + async with self._dnsbrute_lock: + async for line in module.run_process_live(*command, stderr=subprocess.DEVNULL, input=subdomains): + try: + j = json.loads(line) + except json.decoder.JSONDecodeError: + self.log.debug(f"Failed to decode line: {line}") + continue + answers = j.get("data", {}).get("answers", []) + if type(answers) == list and len(answers) > 0: + answer = answers[0] + hostname = answer.get("name", "").strip(".").lower() + if hostname.endswith(f".{domain}"): + data = answer.get("data", "") + rdtype = answer.get("type", "").upper() + if data and rdtype: + hostname_hash = hash(hostname) + if hostname_hash not in hosts_yielded: + hosts_yielded.add(hostname_hash) + yield hostname, data, rdtype + + async def gen_subdomains(self, prefixes, domain): + for p in prefixes: + if domain: + p = f"{p}.{domain}" + yield p + + async def resolver_file(self): + if self._resolver_file is None: + self._resolver_file = await self.parent_helper.wordlist( + self.nameservers_url, + cache_hrs=24 * 7, + ) + return self._resolver_file + + def gen_random_subdomains(self, n=50): + delimiters = (".", "-") + lengths = list(range(3, 8)) + for i in range(0, max(0, n - 5)): + d = delimiters[i % len(delimiters)] + l = lengths[i % len(lengths)] + segments = list(random.choice(self.devops_mutations) for _ in range(l)) + segments.append(self.parent_helper.rand_string(length=8, digits=False)) + subdomain = d.join(segments) + yield subdomain + for _ in range(5): + yield self.parent_helper.rand_string(length=8, digits=False) + + def has_excessive_digits(self, d): + """ + Identifies dns names with excessive numbers, e.g.: + - w1-2-3.evilcorp.com + - ptr1234.evilcorp.com + """ + is_ptr = self.parent_helper.is_ptr(d) + digits = self.digit_regex.findall(d) + excessive_digits = len(digits) > 2 + long_digits = any(len(d) > 3 for d in digits) + return is_ptr or excessive_digits or long_digits diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py index 7f775483c..aec01126d 100644 --- a/bbot/core/helpers/dns/dns.py +++ b/bbot/core/helpers/dns/dns.py @@ -4,6 +4,7 @@ import dns.asyncresolver from radixtarget import RadixTarget +from bbot.errors import DNSError from bbot.core.engine import EngineClient from ..misc import clean_dns_record, is_ip, is_domain, is_dns_name @@ -15,6 
+16,7 @@ class DNSHelper(EngineClient): SERVER_CLASS = DNSEngine + ERROR_CLASS = DNSError """Helper class for DNS-related operations within BBOT. @@ -73,6 +75,9 @@ def __init__(self, parent_helper): # TODO: DNS server speed test (start in background task) self.resolver_file = self.parent_helper.tempfile(self.system_resolvers, pipe=False) + # brute force helper + self._brute = None + async def resolve(self, query, **kwargs): return await self.run_and_return("resolve", query=query, **kwargs) @@ -84,6 +89,14 @@ async def resolve_raw_batch(self, queries): async for _ in self.run_and_yield("resolve_raw_batch", queries=queries): yield _ + @property + def brute(self): + if self._brute is None: + from .brute import DNSBrute + + self._brute = DNSBrute(self.parent_helper) + return self._brute + async def is_wildcard(self, query, ips=None, rdtype=None): """ Use this method to check whether a *host* is a wildcard entry diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index 6018e0e3f..62e8cb201 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -80,7 +80,7 @@ def __init__(self, socket_path, config={}): # modules from kicking off wildcard detection for the same domain at the same time self._wildcard_lock = NamedLock() - self._dns_connectivity_lock = asyncio.Lock() + self._dns_connectivity_lock = None self._last_dns_success = None self._last_connectivity_warning = time.time() # keeps track of warnings issued for wildcard detection to prevent duplicate warnings @@ -669,6 +669,12 @@ async def is_wildcard_domain(self, domain, log_info=False): return wildcard_domain_results + @property + def dns_connectivity_lock(self): + if self._dns_connectivity_lock is None: + self._dns_connectivity_lock = asyncio.Lock() + return self._dns_connectivity_lock + async def _connectivity_check(self, interval=5): """ Periodically checks for an active internet connection by attempting DNS resolution. @@ -688,7 +694,7 @@ async def _connectivity_check(self, interval=5): if time.time() - self._last_dns_success < interval: return True dns_server_working = [] - async with self._dns_connectivity_lock: + async with self.dns_connectivity_lock: with suppress(Exception): dns_server_working = await self._catch(self.resolver.resolve, "www.google.com", rdtype="A") if dns_server_working: diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 16afc05cd..4ba21cdc5 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -4,7 +4,6 @@ from pathlib import Path import multiprocessing as mp from functools import partial -from cloudcheck import cloud_providers from concurrent.futures import ProcessPoolExecutor from . 
import misc @@ -82,7 +81,7 @@ def __init__(self, preset): num_processes = max(1, mp.cpu_count() - 1) self.process_pool = ProcessPoolExecutor(max_workers=num_processes) - self.cloud = cloud_providers + self._cloud = None self.re = RegexHelper(self) self.dns = DNSHelper(self) @@ -91,6 +90,19 @@ def __init__(self, preset): self.word_cloud = WordCloud(self) self.dummy_modules = {} + @property + def cloud(self): + if self._cloud is None: + from cloudcheck import cloud_providers + + self._cloud = cloud_providers + return self._cloud + + def bloom_filter(self, size): + from .bloom import BloomFilter + + return BloomFilter(size) + def interactsh(self, *args, **kwargs): return Interactsh(self, *args, **kwargs) @@ -110,8 +122,8 @@ def clean_old_scans(self): _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name) self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter) - def make_target(self, *events): - return Target(*events) + def make_target(self, *events, **kwargs): + return Target(*events, **kwargs) @property def config(self): diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 206fc50f0..9dedd0d28 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -236,6 +236,7 @@ def split_host_port(d): port = match.group(3) if port is None and scheme is not None: + scheme = scheme.lower() if scheme in ("https", "wss"): port = 443 elif scheme in ("http", "ws"): @@ -821,7 +822,7 @@ def truncate_string(s, n): return s -def extract_params_json(json_data): +def extract_params_json(json_data, compare_mode="getparam"): """ Extracts keys from a JSON object and returns them as a set. Used by the `paramminer_headers` module. @@ -851,18 +852,18 @@ def extract_params_json(json_data): current_data = stack.pop() if isinstance(current_data, dict): for key, value in current_data.items(): - keys.add(key) + if _validate_param(key, compare_mode): + keys.add(key) if isinstance(value, (dict, list)): stack.append(value) elif isinstance(current_data, list): for item in current_data: if isinstance(item, (dict, list)): stack.append(item) - return keys -def extract_params_xml(xml_data): +def extract_params_xml(xml_data, compare_mode="getparam"): """ Extracts tags from an XML object and returns them as a set. @@ -892,21 +893,45 @@ def extract_params_xml(xml_data): while stack: current_element = stack.pop() - tags.add(current_element.tag) + if _validate_param(current_element.tag, compare_mode): + tags.add(current_element.tag) for child in current_element: stack.append(child) return tags -def extract_params_html(html_data): +# Define valid characters for each mode based on RFCs +valid_chars = { + "header": set(chr(c) for c in range(33, 127) if chr(c) not in '."(),;:\\'), + "getparam": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="), + "cookie": set(chr(c) for c in range(33, 127) if chr(c) not in ' ",;=\\'), +} + + +def _validate_param(param, compare_mode): + if len(param) > 100: + return False + if compare_mode not in valid_chars: + raise ValueError(f"Invalid compare_mode: {compare_mode}") + allowed_chars = valid_chars[compare_mode] + return set(param).issubset(allowed_chars) + + +def extract_params_html(html_data, compare_mode="getparam"): """ - Extracts parameters from an HTML object, yielding them one at a time. + Extracts parameters from an HTML object, yielding them one at a time. 
This function filters + these parameters based on a specified mode that determines the type of validation + or comparison against rules for headers, GET parameters, or cookies. If no mode is specified, + it defaults to 'getparam', which is the least restrictive. Args: html_data (str): HTML-formatted string. + compare_mode (str, optional): The mode to compare extracted parameter names against. + Defaults to 'getparam'. Valid modes are 'header', 'getparam', 'cookie'. Yields: - str: A string containing the parameter found in HTML object. + str: A string containing the parameter found in the HTML object that meets the + criteria of the specified mode. Examples: >>> html_data = ''' @@ -922,20 +947,27 @@ def extract_params_html(html_data): ... ... ''' >>> list(extract_params_html(html_data)) - ['user', 'param2', 'param3'] + ['user', 'param1', 'param2', 'param3'] """ + + found_params = [] + input_tag = bbot_regexes.input_tag_regex.findall(html_data) for i in input_tag: - log.debug(f"FOUND PARAM ({i}) IN INPUT TAGS") - yield i + if _validate_param(i, compare_mode): + log.debug(f"FOUND PARAM ({i}) IN INPUT TAGS") + found_params.append(i) # check for jquery get parameters jquery_get = bbot_regexes.jquery_get_regex.findall(html_data) - - for i in jquery_get: - log.debug(f"FOUND PARAM ({i}) IN JQUERY GET PARAMS") - yield i + if jquery_get: + for i in jquery_get: + for x in i.split(","): + s = x.split(":")[0].rstrip() + if _validate_param(s, compare_mode): + log.debug(f"FOUND PARAM ({s}) IN A JQUERY GET PARAMS") + found_params.append(s) # check for jquery post parameters jquery_post = bbot_regexes.jquery_post_regex.findall(html_data) @@ -943,13 +975,21 @@ def extract_params_html(html_data): for i in jquery_post: for x in i.split(","): s = x.split(":")[0].rstrip() - log.debug(f"FOUND PARAM ({s}) IN A JQUERY POST PARAMS") - yield s + if _validate_param(s, compare_mode): + log.debug(f"FOUND PARAM ({s}) IN A JQUERY POST PARAMS") + found_params.append(s) a_tag = bbot_regexes.a_tag_regex.findall(html_data) - for s in a_tag: - log.debug(f"FOUND PARAM ({s}) IN A TAG GET PARAMS") - yield s + for tag in a_tag: + a_tag_querystring = tag.split("&") if tag else [] + for s in a_tag_querystring: + if "=" in s: + s0 = s.split("=")[0] + if _validate_param(s0, compare_mode): + log.debug(f"FOUND PARAM ({s0}) IN A TAG GET PARAMS") + found_params.append(s0) + + return found_params def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=100, word_regexes=None): @@ -1391,7 +1431,7 @@ def search_dict_values(d, *regexes): ... ] ... } ... } - >>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\.[^\\s<>"]+') + >>> url_regexes = re.compile(r'https?://[^\\s<>"]+|www\\.[^\\s<>"]+') >>> list(search_dict_values(dict_to_search, url_regexes)) ["https://www.evilcorp.com"] """ @@ -2012,6 +2052,50 @@ def human_to_bytes(filesize): raise ValueError(f'Unable to convert filesize "{filesize}" to bytes') +def integer_to_ordinal(n): + """ + Convert an integer to its ordinal representation. + + Args: + n (int): The integer to convert. + + Returns: + str: The ordinal representation of the integer. 
+ + Examples: + >>> integer_to_ordinal(1) + '1st' + >>> integer_to_ordinal(2) + '2nd' + >>> integer_to_ordinal(3) + '3rd' + >>> integer_to_ordinal(11) + '11th' + >>> integer_to_ordinal(21) + '21st' + >>> integer_to_ordinal(101) + '101st' + """ + # Check the last digit + last_digit = n % 10 + # Check the last two digits for special cases (11th, 12th, 13th) + last_two_digits = n % 100 + + if 10 <= last_two_digits <= 20: + suffix = "th" + else: + if last_digit == 1: + suffix = "st" + elif last_digit == 2: + suffix = "nd" + elif last_digit == 3: + suffix = "rd" + else: + suffix = "th" + + return f"{n}{suffix}" + + def cpu_architecture(): """Return the CPU architecture of the current system. @@ -2076,7 +2160,7 @@ def os_platform_friendly(): tag_filter_regex = re.compile(r"[^a-z0-9]+") -def tagify(s, maxlen=None): +def tagify(s, delimiter=None, maxlen=None): """Sanitize a string into a tag-friendly format. Converts a given string to lowercase and replaces all characters not matching @@ -2095,8 +2179,10 @@ def tagify(s, maxlen=None): >>> tagify("HTTP Web Title", maxlen=8) 'http-web' """ + if delimiter is None: + delimiter = "-" ret = str(s).lower() - return tag_filter_regex.sub("-", ret)[:maxlen].strip("-") + return tag_filter_regex.sub(delimiter, ret)[:maxlen].strip(delimiter) def memory_status(): @@ -2327,6 +2413,27 @@ def get_exception_chain(e): return exception_chain +def in_exception_chain(e, exc_types): + """ + Given an Exception and a list of Exception types, returns whether any of the specified types are contained anywhere in the Exception chain. + + Args: + e (BaseException): The exception to check + exc_types (list[Exception]): Exception types to consider intentional cancellations. Default is KeyboardInterrupt + + Returns: + bool: Whether the error is the result of an intentional cancellaion + + Examples: + >>> try: + ... raise ValueError("This is a value error") + ... except Exception as e: + ... if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): + ... raise + """ + return any([isinstance(_, exc_types) for _ in get_exception_chain(e)]) + + def get_traceback_details(e): """ Retrieves detailed information from the traceback of an exception. @@ -2591,3 +2698,38 @@ def clean_dns_record(record): if not isinstance(record, str): record = str(record.to_text()) return str(record).rstrip(".").lower() + + +def truncate_filename(file_path, max_length=255): + """ + Truncate the filename while preserving the file extension to ensure the total path length does not exceed the maximum length. + + Args: + file_path (str): The original file path. + max_length (int): The maximum allowed length for the total path. Default is 255. + + Returns: + pathlib.Path: A new Path object with the truncated filename. + + Raises: + ValueError: If the directory path is too long to accommodate any filename within the limit. 
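A quick sketch of the new delimiter argument on tagify (assuming the helper is importable from bbot.core.helpers.misc as shown in this diff); the response_to_json helper later in this patch uses delimiter="_" to normalize header names:

from bbot.core.helpers.misc import tagify

tagify("HTTP Web Title")               # 'http-web-title'
tagify("HTTP Web Title", maxlen=8)     # 'http-web'
tagify("Content-Type", delimiter="_")  # 'content_type' (JSON-friendly header key)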
+ + Example: + >>> truncate_filename('/path/to/example_long_filename.txt', 20) + PosixPath('/path/to/example.txt') + """ + p = Path(file_path) + directory, stem, suffix = p.parent, p.stem, p.suffix + + max_filename_length = max_length - len(str(directory)) - len(suffix) - 1 # 1 for the '/' separator + + if max_filename_length <= 0: + raise ValueError("The directory path is too long to accommodate any filename within the limit.") + + if len(stem) > max_filename_length: + truncated_stem = stem[:max_filename_length] + else: + truncated_stem = stem + + new_path = directory / (truncated_stem + suffix) + return new_path diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 3e16b446a..c0a9ef4c3 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -2,6 +2,7 @@ adjectives = [ "abnormal", + "accidental", "acoustic", "acrophobic", "adorable", @@ -9,6 +10,7 @@ "affectionate", "aggravated", "aggrieved", + "almighty", "anal", "atrocious", "awkward", @@ -140,6 +142,7 @@ "medicated", "mediocre", "melodramatic", + "mighty", "moist", "molten", "monstrous", @@ -188,6 +191,7 @@ "rapid_unscheduled", "raving", "reckless", + "reductive", "ripped", "sadistic", "satanic", @@ -233,7 +237,6 @@ "ticklish", "tiny", "tricky", - "tufty", "twitchy", "ugly", "unabated", @@ -578,6 +581,7 @@ "rachel", "radagast", "ralph", + "rambunctious", "randy", "raymond", "rebecca", diff --git a/bbot/core/helpers/process.py b/bbot/core/helpers/process.py new file mode 100644 index 000000000..90843b441 --- /dev/null +++ b/bbot/core/helpers/process.py @@ -0,0 +1,54 @@ +import logging +import traceback +import multiprocessing +from multiprocessing.context import SpawnProcess + +from .misc import in_exception_chain + + +current_process = multiprocessing.current_process() + + +class BBOTProcess(SpawnProcess): + + default_name = "bbot process pool" + + def __init__(self, *args, **kwargs): + self.log_queue = kwargs.pop("log_queue", None) + self.log_level = kwargs.pop("log_level", None) + self.custom_name = kwargs.pop("custom_name", self.default_name) + super().__init__(*args, **kwargs) + self.daemon = True + + def run(self): + """ + A version of Process.run() with BBOT logging and better error handling + """ + log = logging.getLogger("bbot.core.process") + try: + if self.log_level is not None and self.log_queue is not None: + from bbot.core import CORE + + CORE.logger.setup_queue_handler(self.log_queue, self.log_level) + if self.custom_name: + from setproctitle import setproctitle + + setproctitle(str(self.custom_name)) + super().run() + except BaseException as e: + if not in_exception_chain(e, (KeyboardInterrupt,)): + log.warning(f"Error in {self.name}: {e}") + log.trace(traceback.format_exc()) + + +if current_process.name == "MainProcess": + # if this is the main bbot process, set the logger and queue for the first time + from bbot.core import CORE + from functools import partialmethod + + BBOTProcess.__init__ = partialmethod( + BBOTProcess.__init__, log_level=CORE.logger.log_level, log_queue=CORE.logger.queue + ) + +mp_context = multiprocessing.get_context("spawn") +mp_context.Process = BBOTProcess diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 4e2ada0c2..35502e152 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -20,9 +20,24 @@ word_num_regex = re.compile(r"[^\W_]+") num_regex = re.compile(r"\d+") -_ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*" +_ipv4_regex = 
r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}" +ipv4_regex = re.compile(_ipv4_regex, re.I) + +# IPv6 is complicated, so we have accomodate alternative patterns, +# :(:[A-F0-9]{1,4}){1,7} == ::1, ::ffff:1 +# ([A-F0-9]{1,4}:){1,7}: == 2001::, 2001:db8::, 2001:db8:0:1:2:3:: +# ([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4}) == 2001::1, 2001:db8::1, 2001:db8:0:1:2:3::1 +# ([A-F0-9]{1,4}:){7,7}([A-F0-9]{1,4}) == 1:1:1:1:1:1:1:1, ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff + +_ipv6_regex = r"(:(:[A-F0-9]{1,4}){1,7}|([A-F0-9]{1,4}:){1,7}:|([A-F0-9]{1,4}:){1,6}:([A-F0-9]{1,4})|([A-F0-9]{1,4}:){7,7}([A-F0-9]{1,4}))" ipv6_regex = re.compile(_ipv6_regex, re.I) +_ip_range_regexes = ( + _ipv4_regex + r"\/[0-9]{1,2}", + _ipv6_regex + r"\/[0-9]{1,3}", +) +ip_range_regexes = list(re.compile(r, re.I) for r in _ip_range_regexes) + # dns names with periods _dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?" dns_name_regex = re.compile(_dns_name_regex, re.I) @@ -73,6 +88,17 @@ "EMAIL_ADDRESS", (r"^" + _email_regex + r"$",), ), + ( + "IP_ADDRESS", + ( + r"^" + _ipv4_regex + r"$", + r"^" + _ipv6_regex + r"$", + ), + ), + ( + "IP_RANGE", + tuple(r"^" + r + r"$" for r in _ip_range_regexes), + ), ( "OPEN_TCP_PORT", tuple(r"^" + r + r"$" for r in _open_port_regexes), @@ -90,9 +116,9 @@ # For use with extract_params_html helper input_tag_regex = re.compile(r"]+?name=[\"\'](\w+)[\"\']") -jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=") +jquery_get_regex = re.compile(r"\$.get\([\'\"].+[\'\"].+\{(.+)\}") jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") -a_tag_regex = re.compile(r"]*href=[\"\'][^\"\'?>]*\?([^&\"\'=]+)") +a_tag_regex = re.compile(r"]*href=[\"\'][^\"\'#>]*\?([^\"\'#>]+)[\"\']") valid_netloc = r"[^\s!@#$%^&()=/?\\'\";~`<>]+" diff --git a/bbot/core/helpers/web/__init__.py b/bbot/core/helpers/web/__init__.py new file mode 100644 index 000000000..8fcf82abb --- /dev/null +++ b/bbot/core/helpers/web/__init__.py @@ -0,0 +1 @@ +from .web import WebHelper diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py new file mode 100644 index 000000000..76015411c --- /dev/null +++ b/bbot/core/helpers/web/client.py @@ -0,0 +1,91 @@ +import httpx +import logging +from httpx._models import Cookies + +log = logging.getLogger("bbot.core.helpers.web.client") + + +class DummyCookies(Cookies): + def extract_cookies(self, *args, **kwargs): + pass + + +class BBOTAsyncClient(httpx.AsyncClient): + """ + A subclass of httpx.AsyncClient tailored with BBOT-specific configurations and functionalities. + This class provides rate limiting, logging, configurable timeouts, user-agent customization, custom + headers, and proxy settings. Additionally, it allows the disabling of cookies, making it suitable + for use across an entire scan. + + Attributes: + _bbot_scan (object): BBOT scan object containing configuration details. + _persist_cookies (bool): Flag to determine whether cookies should be persisted across requests. 
+ + Examples: + >>> async with BBOTAsyncClient(_bbot_scan=bbot_scan_object) as client: + >>> response = await client.request("GET", "https://example.com") + >>> print(response.status_code) + 200 + """ + + @classmethod + def from_config(cls, config, target, *args, **kwargs): + kwargs["_config"] = config + kwargs["_target"] = target + retries = kwargs.pop("retries", config.get("http_retries", 1)) + ssl_verify = config.get("ssl_verify", False) + if ssl_verify is False: + from .ssl_context import ssl_context_noverify + + ssl_verify = ssl_context_noverify + kwargs["transport"] = httpx.AsyncHTTPTransport(retries=retries, verify=ssl_verify) + kwargs["verify"] = ssl_verify + return cls(*args, **kwargs) + + def __init__(self, *args, **kwargs): + self._config = kwargs.pop("_config") + self._target = kwargs.pop("_target") + + http_debug = self._config.get("http_debug", None) + if http_debug: + log.trace(f"Creating AsyncClient: {args}, {kwargs}") + + self._persist_cookies = kwargs.pop("persist_cookies", True) + + # timeout + http_timeout = self._config.get("http_timeout", 20) + if not "timeout" in kwargs: + kwargs["timeout"] = http_timeout + + # headers + headers = kwargs.get("headers", None) + if headers is None: + headers = {} + # user agent + user_agent = self._config.get("user_agent", "BBOT") + if "User-Agent" not in headers: + headers["User-Agent"] = user_agent + kwargs["headers"] = headers + # proxy + proxies = self._config.get("http_proxy", None) + kwargs["proxies"] = proxies + + super().__init__(*args, **kwargs) + if not self._persist_cookies: + self._cookies = DummyCookies() + + def build_request(self, *args, **kwargs): + request = super().build_request(*args, **kwargs) + # add custom headers if the URL is in-scope + # TODO: re-enable this + if self._target.in_scope(str(request.url)): + for hk, hv in self._config.get("http_headers", {}).items(): + # don't clobber headers + if hk not in request.headers: + request.headers[hk] = hv + return request + + def _merge_cookies(self, cookies): + if self._persist_cookies: + return super()._merge_cookies(cookies) + return cookies diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py new file mode 100644 index 000000000..2bbacbc71 --- /dev/null +++ b/bbot/core/helpers/web/engine.py @@ -0,0 +1,250 @@ +import ssl +import anyio +import httpx +import asyncio +import logging +import traceback +from socksio.exceptions import SOCKSError +from contextlib import asynccontextmanager + +from bbot.core.engine import EngineServer +from bbot.core.helpers.misc import bytes_to_human, human_to_bytes, get_exception_chain + +log = logging.getLogger("bbot.core.helpers.web.engine") + + +class HTTPEngine(EngineServer): + + CMDS = { + 0: "request", + 1: "request_batch", + 2: "request_custom_batch", + 3: "download", + } + + client_only_options = ( + "retries", + "max_redirects", + ) + + def __init__(self, socket_path, target, config={}): + super().__init__(socket_path) + self.target = target + self.config = config + self.http_debug = self.config.get("http_debug", False) + self._ssl_context_noverify = None + self.web_client = self.AsyncClient(persist_cookies=False) + + def AsyncClient(self, *args, **kwargs): + from .client import BBOTAsyncClient + + return BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs) + + async def request(self, *args, **kwargs): + raise_error = kwargs.pop("raise_error", False) + # TODO: use this + cache_for = kwargs.pop("cache_for", None) # noqa + + client = kwargs.get("client", self.web_client) + + # allow vs 
follow, httpx why?? + allow_redirects = kwargs.pop("allow_redirects", None) + if allow_redirects is not None and "follow_redirects" not in kwargs: + kwargs["follow_redirects"] = allow_redirects + + # in case of URL only, assume GET request + if len(args) == 1: + kwargs["url"] = args[0] + args = [] + + url = kwargs.get("url", "") + + if not args and "method" not in kwargs: + kwargs["method"] = "GET" + + client_kwargs = {} + for k in list(kwargs): + if k in self.client_only_options: + v = kwargs.pop(k) + client_kwargs[k] = v + + if client_kwargs: + client = self.AsyncClient(**client_kwargs) + + async with self._acatch(url, raise_error): + if self.http_debug: + logstr = f"Web request: {str(args)}, {str(kwargs)}" + log.trace(logstr) + response = await client.request(*args, **kwargs) + if self.http_debug: + log.trace( + f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}" + ) + return response + + async def request_batch(self, urls, *args, threads=10, **kwargs): + tasks = {} + + urls = list(urls) + + def new_task(): + if urls: + url = urls.pop(0) + task = asyncio.create_task(self.request(url, *args, **kwargs)) + tasks[task] = url + + for _ in range(threads): # Start initial batch of tasks + new_task() + + while tasks: # While there are tasks pending + # Wait for the first task to complete + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + + for task in done: + response = task.result() + url = tasks.pop(task) + yield (url, response) + new_task() + + async def request_custom_batch(self, urls_and_kwargs, threads=10): + tasks = {} + urls_and_kwargs = list(urls_and_kwargs) + + def new_task(): + if urls_and_kwargs: # Ensure there are args to process + url, kwargs, custom_tracker = urls_and_kwargs.pop(0) + task = asyncio.create_task(self.request(url, **kwargs)) + tasks[task] = (url, kwargs, custom_tracker) + + for _ in range(threads): # Start initial batch of tasks + new_task() + + while tasks: # While there are tasks pending + # Wait for the first task to complete + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + + for task in done: + response = task.result() + url, kwargs, custom_tracker = tasks.pop(task) + yield (url, kwargs, custom_tracker, response) + new_task() + + async def download(self, url, **kwargs): + warn = kwargs.pop("warn", True) + filename = kwargs.pop("filename") + raise_error = kwargs.get("raise_error", False) + try: + content, response = await self.stream_request(url, **kwargs) + log.debug(f"Download result: HTTP {response.status_code}") + response.raise_for_status() + with open(filename, "wb") as f: + f.write(content) + return filename + except httpx.HTTPError as e: + log_fn = log.verbose + if warn: + log_fn = log.warning + log_fn(f"Failed to download {url}: {e}") + if raise_error: + raise + + async def stream_request(self, url, **kwargs): + follow_redirects = kwargs.pop("follow_redirects", True) + max_size = kwargs.pop("max_size", None) + raise_error = kwargs.pop("raise_error", False) + if max_size is not None: + max_size = human_to_bytes(max_size) + kwargs["follow_redirects"] = follow_redirects + if not "method" in kwargs: + kwargs["method"] = "GET" + try: + total_size = 0 + chunk_size = 8192 + chunks = [] + + async with self._acatch(url, raise_error=True), self.web_client.stream(url=url, **kwargs) as response: + agen = response.aiter_bytes(chunk_size=chunk_size) + async for chunk in agen: + _chunk_size = len(chunk) + if max_size is not None and total_size + 
_chunk_size > max_size: + log.verbose( + f"Size of response from {url} exceeds {bytes_to_human(max_size)}, file will be truncated" + ) + agen.aclose() + break + total_size += _chunk_size + chunks.append(chunk) + return b"".join(chunks), response + except httpx.HTTPError as e: + self.debug(f"Error requesting {url}: {e}") + if raise_error: + raise + + def ssl_context_noverify(self): + if self._ssl_context_noverify is None: + ssl_context = ssl.create_default_context() + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + ssl_context.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 + ssl_context.set_ciphers("ALL:@SECLEVEL=0") + ssl_context.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option + self._ssl_context_noverify = ssl_context + return self._ssl_context_noverify + + @asynccontextmanager + async def _acatch(self, url, raise_error): + """ + Asynchronous context manager to handle various httpx errors during a request. + + Yields: + None + + Note: + This function is internal and should generally not be used directly. + `url`, `args`, `kwargs`, and `raise_error` should be in the same context as this function. + """ + try: + yield + except httpx.TimeoutException: + if raise_error: + raise + else: + log.verbose(f"HTTP timeout to URL: {url}") + except httpx.ConnectError: + if raise_error: + raise + else: + log.debug(f"HTTP connect failed to URL: {url}") + except httpx.HTTPError as e: + if raise_error: + raise + else: + log.trace(f"Error with request to URL: {url}: {e}") + log.trace(traceback.format_exc()) + except ssl.SSLError as e: + msg = f"SSL error with request to URL: {url}: {e}" + if raise_error: + raise httpx.RequestError(msg) + else: + log.trace(msg) + log.trace(traceback.format_exc()) + except anyio.EndOfStream as e: + msg = f"AnyIO error with request to URL: {url}: {e}" + if raise_error: + raise httpx.RequestError(msg) + else: + log.trace(msg) + log.trace(traceback.format_exc()) + except SOCKSError as e: + msg = f"SOCKS error with request to URL: {url}: {e}" + if raise_error: + raise httpx.RequestError(msg) + else: + log.trace(msg) + log.trace(traceback.format_exc()) + except BaseException as e: + # don't log if the error is the result of an intentional cancellation + if not any(isinstance(_e, asyncio.exceptions.CancelledError) for _e in get_exception_chain(e)): + log.trace(f"Unhandled exception with request to URL: {url}: {e}") + log.trace(traceback.format_exc()) + raise diff --git a/bbot/core/helpers/web/ssl_context.py b/bbot/core/helpers/web/ssl_context.py new file mode 100644 index 000000000..fabe4188f --- /dev/null +++ b/bbot/core/helpers/web/ssl_context.py @@ -0,0 +1,8 @@ +import ssl + +ssl_context_noverify = ssl.create_default_context() +ssl_context_noverify.check_hostname = False +ssl_context_noverify.verify_mode = ssl.CERT_NONE +ssl_context_noverify.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 +ssl_context_noverify.set_ciphers("ALL:@SECLEVEL=0") +ssl_context_noverify.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web/web.py similarity index 62% rename from bbot/core/helpers/web.py rename to bbot/core/helpers/web/web.py index 26773bc9c..161355473 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web/web.py @@ -1,108 +1,30 @@ import re -import ssl -import anyio -import httpx -import asyncio import logging import warnings import traceback from pathlib import Path from bs4 import BeautifulSoup -from contextlib import asynccontextmanager -from httpx._models import Cookies 
-from socksio.exceptions import SOCKSError - -from bbot.errors import WordlistError, CurlError -from bbot.core.helpers.ratelimiter import RateLimiter +from bbot.core.engine import EngineClient +from bbot.core.helpers.misc import truncate_filename +from bbot.errors import WordlistError, CurlError, WebError from bs4 import MarkupResemblesLocatorWarning from bs4.builder import XMLParsedAsHTMLWarning +from .engine import HTTPEngine + warnings.filterwarnings("ignore", category=XMLParsedAsHTMLWarning) warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning) log = logging.getLogger("bbot.core.helpers.web") -class DummyCookies(Cookies): - def extract_cookies(self, *args, **kwargs): - pass - - -class BBOTAsyncClient(httpx.AsyncClient): - """ - A subclass of httpx.AsyncClient tailored with BBOT-specific configurations and functionalities. - This class provides rate limiting, logging, configurable timeouts, user-agent customization, custom - headers, and proxy settings. Additionally, it allows the disabling of cookies, making it suitable - for use across an entire scan. +class WebHelper(EngineClient): - Attributes: - _bbot_scan (object): BBOT scan object containing configuration details. - _rate_limiter (RateLimiter): A rate limiter object to limit web requests. - _persist_cookies (bool): Flag to determine whether cookies should be persisted across requests. + SERVER_CLASS = HTTPEngine + ERROR_CLASS = WebError - Examples: - >>> async with BBOTAsyncClient(_bbot_scan=bbot_scan_object) as client: - >>> response = await client.request("GET", "https://example.com") - >>> print(response.status_code) - 200 - """ - - def __init__(self, *args, **kwargs): - self._preset = kwargs.pop("_preset") - web_requests_per_second = self._preset.config.get("web_requests_per_second", 100) - self._rate_limiter = RateLimiter(web_requests_per_second, "Web") - - http_debug = self._preset.config.get("http_debug", None) - if http_debug: - log.trace(f"Creating AsyncClient: {args}, {kwargs}") - - self._persist_cookies = kwargs.pop("persist_cookies", True) - - # timeout - http_timeout = self._preset.config.get("http_timeout", 20) - if not "timeout" in kwargs: - kwargs["timeout"] = http_timeout - - # headers - headers = kwargs.get("headers", None) - if headers is None: - headers = {} - # user agent - user_agent = self._preset.config.get("user_agent", "BBOT") - if "User-Agent" not in headers: - headers["User-Agent"] = user_agent - kwargs["headers"] = headers - # proxy - proxies = self._preset.config.get("http_proxy", None) - kwargs["proxies"] = proxies - - super().__init__(*args, **kwargs) - if not self._persist_cookies: - self._cookies = DummyCookies() - - async def request(self, *args, **kwargs): - async with self._rate_limiter: - return await super().request(*args, **kwargs) - - def build_request(self, *args, **kwargs): - request = super().build_request(*args, **kwargs) - # add custom headers if the URL is in-scope - if self._preset.in_scope(str(request.url)): - for hk, hv in self._preset.config.get("http_headers", {}).items(): - # don't clobber headers - if hk not in request.headers: - request.headers[hk] = hv - return request - - def _merge_cookies(self, cookies): - if self._persist_cookies: - return super()._merge_cookies(cookies) - return cookies - - -class WebHelper: """ Main utility class for managing HTTP operations in BBOT. It serves as a wrapper around the BBOTAsyncClient, which itself is a subclass of httpx.AsyncClient. 
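From a module author's perspective the helper surface is unchanged even though requests now execute in the separate HTTPEngine process; a hedged sketch of typical usage (the method body and URLs are illustrative, not taken from this diff):

async def handle_event(self, event):
    # single request, proxied to the engine via run_and_return()
    response = await self.helpers.request("https://www.evilcorp.com")
    if response is not None and response.status_code == 200:
        self.hugesuccess(f"Got {len(response.content)} bytes")
    # parallel requests, proxied via run_and_yield()
    urls = ["https://www.evilcorp.com/1", "https://www.evilcorp.com/2"]
    async for url, response in self.helpers.request_batch(urls, headers={"X-Test": "Test"}):
        if response is not None:
            self.info(f"{url} -> {response.status_code}")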
The class provides functionalities to make HTTP requests, @@ -126,26 +48,18 @@ class WebHelper: >>> filename = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt") """ - client_only_options = ( - "retries", - "max_redirects", - ) - def __init__(self, parent_helper): self.parent_helper = parent_helper - self.http_debug = self.parent_helper.config.get("http_debug", False) - self._ssl_context_noverify = None - self.ssl_verify = self.parent_helper.config.get("ssl_verify", False) - if self.ssl_verify is False: - self.ssl_verify = self.ssl_context_noverify() - self.web_client = self.AsyncClient(persist_cookies=False) + self.preset = self.parent_helper.preset + self.config = self.preset.config + self.target = self.preset.target + self.ssl_verify = self.config.get("ssl_verify", False) + super().__init__(server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.radix_only}) def AsyncClient(self, *args, **kwargs): - kwargs["_preset"] = self.parent_helper.preset - retries = kwargs.pop("retries", self.parent_helper.config.get("http_retries", 1)) - kwargs["transport"] = httpx.AsyncHTTPTransport(retries=retries, verify=self.ssl_verify) - kwargs["verify"] = self.ssl_verify - return BBOTAsyncClient(*args, **kwargs) + from .client import BBOTAsyncClient + + return BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs) async def request(self, *args, **kwargs): """ @@ -191,47 +105,49 @@ async def request(self, *args, **kwargs): Note: If the web request fails, it will return None unless `raise_error` is `True`. """ + return await self.run_and_return("request", *args, **kwargs) + + async def request_batch(self, urls, *args, **kwargs): + """ + Given a list of URLs, request them in parallel and yield responses as they come in. - raise_error = kwargs.pop("raise_error", False) - # TODO: use this - cache_for = kwargs.pop("cache_for", None) # noqa + Args: + urls (list[str]): List of URLs to visit + *args: Positional arguments to pass through to httpx + **kwargs: Keyword arguments to pass through to httpx - client = kwargs.get("client", self.web_client) + Examples: + >>> async for url, response in self.helpers.request_batch(urls, headers={"X-Test": "Test"}): + >>> if response is not None and response.status_code == 200: + >>> self.hugesuccess(response) + """ + async for _ in self.run_and_yield("request_batch", urls, *args, **kwargs): + yield _ - # allow vs follow, httpx why?? - allow_redirects = kwargs.pop("allow_redirects", None) - if allow_redirects is not None and "follow_redirects" not in kwargs: - kwargs["follow_redirects"] = allow_redirects + async def request_custom_batch(self, urls_and_kwargs): + """ + Make web requests in parallel with custom options for each request. Yield responses as they come in. - # in case of URL only, assume GET request - if len(args) == 1: - kwargs["url"] = args[0] - args = [] + Similar to `request_batch` except it allows individual arguments for each URL. - url = kwargs.get("url", "") + Args: + urls_and_kwargs (list[tuple]): List of tuples in the format: (url, kwargs, custom_tracker) + where custom_tracker is an optional value for your own internal use. You may use it to + help correlate requests, etc. 
- if not args and "method" not in kwargs: - kwargs["method"] = "GET" - - client_kwargs = {} - for k in list(kwargs): - if k in self.client_only_options: - v = kwargs.pop(k) - client_kwargs[k] = v - - if client_kwargs: - client = self.AsyncClient(**client_kwargs) - - async with self._acatch(url, raise_error): - if self.http_debug: - logstr = f"Web request: {str(args)}, {str(kwargs)}" - log.trace(logstr) - response = await client.request(*args, **kwargs) - if self.http_debug: - log.trace( - f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}" - ) - return response + Examples: + >>> urls_and_kwargs = [ + >>> ("http://evilcorp.com/1", {"method": "GET"}, "request-1"), + >>> ("http://evilcorp.com/2", {"method": "POST"}, "request-2"), + >>> ] + >>> async for url, kwargs, custom_tracker, response in self.helpers.request_custom_batch( + >>> urls_and_kwargs + >>> ): + >>> if response is not None and response.status_code == 200: + >>> self.hugesuccess(response) + """ + async for _ in self.run_and_yield("request_custom_batch", urls_and_kwargs): + yield _ async def download(self, url, **kwargs): """ @@ -258,56 +174,21 @@ async def download(self, url, **kwargs): """ success = False filename = kwargs.pop("filename", self.parent_helper.cache_filename(url)) - follow_redirects = kwargs.pop("follow_redirects", True) + filename = truncate_filename(Path(filename).resolve()) + kwargs["filename"] = filename max_size = kwargs.pop("max_size", None) - warn = kwargs.pop("warn", True) - raise_error = kwargs.pop("raise_error", False) if max_size is not None: max_size = self.parent_helper.human_to_bytes(max_size) + kwargs["max_size"] = max_size cache_hrs = float(kwargs.pop("cache_hrs", -1)) - total_size = 0 - chunk_size = 8192 - log.debug(f"Downloading file from {url} with cache_hrs={cache_hrs}") if cache_hrs > 0 and self.parent_helper.is_cached(url): log.debug(f"{url} is cached at {self.parent_helper.cache_filename(url)}") success = True else: - # kwargs["raise_error"] = True - # kwargs["stream"] = True - kwargs["follow_redirects"] = follow_redirects - if not "method" in kwargs: - kwargs["method"] = "GET" - try: - async with self._acatch(url, raise_error=True), self.AsyncClient().stream( - url=url, **kwargs - ) as response: - status_code = getattr(response, "status_code", 0) - log.debug(f"Download result: HTTP {status_code}") - if status_code != 0: - response.raise_for_status() - with open(filename, "wb") as f: - agen = response.aiter_bytes(chunk_size=chunk_size) - async for chunk in agen: - if max_size is not None and total_size + chunk_size > max_size: - log.verbose( - f"Filesize of {url} exceeds {self.parent_helper.bytes_to_human(max_size)}, file will be truncated" - ) - agen.aclose() - break - total_size += chunk_size - f.write(chunk) - success = True - except httpx.HTTPError as e: - log_fn = log.verbose - if warn: - log_fn = log.warning - log_fn(f"Failed to download {url}: {e}") - if raise_error: - raise - return + success = await self.run_and_return("download", url, **kwargs) if success: - return filename.resolve() + return filename async def wordlist(self, path, lines=None, **kwargs): """ @@ -538,7 +419,7 @@ async def curl(self, *args, **kwargs): output = (await self.parent_helper.run(curl_command)).stdout return output - def is_spider_danger(self, source_event, url): + def is_spider_danger(self, parent_event, url): """ Determines whether visiting a URL could potentially trigger a web-spider-like happening. 
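A hypothetical call site for this check (the tag name and emit logic are illustrative; they assume the helper is reachable via self.helpers like the other web methods):

tags = []
if self.helpers.is_spider_danger(event, url):
    tags.append("spider-danger")
await self.emit_event(url, "URL_UNVERIFIED", parent=event, tags=tags)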
@@ -547,7 +428,7 @@ def is_spider_danger(self, source_event, url): the function returns True, indicating a possible web-spider risk. Args: - source_event: The source event object that discovered the URL. + parent_event: The parent event object that discovered the URL. url (str): The URL to evaluate for web-spider risk. Returns: @@ -557,15 +438,15 @@ def is_spider_danger(self, source_event, url): - Write tests for this function Examples: - >>> is_spider_danger(source_event_obj, "https://example.com/subpage") + >>> is_spider_danger(parent_event_obj, "https://example.com/subpage") True - >>> is_spider_danger(source_event_obj, "https://example.com/") + >>> is_spider_danger(parent_event_obj, "https://example.com/") False """ url_depth = self.parent_helper.url_depth(url) web_spider_depth = self.parent_helper.config.get("web_spider_depth", 1) - spider_distance = getattr(source_event, "web_spider_distance", 0) + 1 + spider_distance = getattr(parent_event, "web_spider_distance", 0) + 1 web_spider_distance = self.parent_helper.config.get("web_spider_distance", 0) if (url_depth > web_spider_depth) or (spider_distance > web_spider_distance): return True @@ -629,120 +510,102 @@ def beautifulsoup( log.debug(f"Error parsing beautifulsoup: {e}") return False - def ssl_context_noverify(self): - if self._ssl_context_noverify is None: - ssl_context = ssl.create_default_context() - ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE - ssl_context.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 - ssl_context.set_ciphers("ALL:@SECLEVEL=0") - ssl_context.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option - self._ssl_context_noverify = ssl_context - return self._ssl_context_noverify - - @asynccontextmanager - async def _acatch(self, url, raise_error): - """ - Asynchronous context manager to handle various httpx errors during a request. + user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]] + pass_keywords = [re.compile(r, re.I) for r in ["pass"]] - Yields: - None - - Note: - This function is internal and should generally not be used directly. - `url`, `args`, `kwargs`, and `raise_error` should be in the same context as this function. 
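The _acatch context manager (moved into the engine above) is what converts httpx, SSL, and SOCKS errors into either a raised exception or a quiet log line; roughly, the engine wraps every request like this (a simplified sketch of the pattern shown in engine.py, not a verbatim excerpt):

response = None
async with self._acatch(url, raise_error=False):
    response = await self.web_client.request(method="GET", url=url)
# on a handled error (timeout, connect failure, SSL, SOCKS...), the exception is
# logged and suppressed, so response simply stays None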
+ def is_login_page(self, html): """ - try: - yield - except httpx.TimeoutException: - if raise_error: - raise - else: - log.verbose(f"HTTP timeout to URL: {url}") - except httpx.ConnectError: - if raise_error: - raise - else: - log.debug(f"HTTP connect failed to URL: {url}") - except httpx.HTTPError as e: - if raise_error: - raise - else: - log.trace(f"Error with request to URL: {url}: {e}") - log.trace(traceback.format_exc()) - except ssl.SSLError as e: - msg = f"SSL error with request to URL: {url}: {e}" - if raise_error: - raise httpx.RequestError(msg) - else: - log.trace(msg) - log.trace(traceback.format_exc()) - except anyio.EndOfStream as e: - msg = f"AnyIO error with request to URL: {url}: {e}" - if raise_error: - raise httpx.RequestError(msg) - else: - log.trace(msg) - log.trace(traceback.format_exc()) - except SOCKSError as e: - msg = f"SOCKS error with request to URL: {url}: {e}" - if raise_error: - raise httpx.RequestError(msg) - else: - log.trace(msg) - log.trace(traceback.format_exc()) - except BaseException as e: - # don't log if the error is the result of an intentional cancellation - if not any( - isinstance(_e, asyncio.exceptions.CancelledError) for _e in self.parent_helper.get_exception_chain(e) - ): - log.trace(f"Unhandled exception with request to URL: {url}: {e}") - log.trace(traceback.format_exc()) - raise - + Determines if the provided HTML content contains a login page. -user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]] -pass_keywords = [re.compile(r, re.I) for r in ["pass"]] + This function parses the HTML to search for forms with input fields typically used for + authentication. If it identifies password fields or a combination of username and password + fields, it returns True. + Args: + html (str): The HTML content to analyze. -def is_login_page(html): - """ - Determines if the provided HTML content contains a login page. + Returns: + bool: True if the HTML contains a login page, otherwise False. - This function parses the HTML to search for forms with input fields typically used for - authentication. If it identifies password fields or a combination of username and password - fields, it returns True. + Examples: + >>> is_login_page('
') + True - Args: - html (str): The HTML content to analyze. + >>> is_login_page('
') + False + """ + try: + soup = BeautifulSoup(html, "html.parser") + except Exception as e: + log.debug(f"Error parsing html: {e}") + return False - Returns: - bool: True if the HTML contains a login page, otherwise False. + forms = soup.find_all("form") - Examples: - >>> is_login_page('
') - True + # first, check for obvious password fields + for form in forms: + if form.find_all("input", {"type": "password"}): + return True - >>> is_login_page('
') - False - """ - try: - soup = BeautifulSoup(html, "html.parser") - except Exception as e: - log.debug(f"Error parsing html: {e}") + # next, check for forms that have both a user-like and password-like field + for form in forms: + user_fields = sum(bool(form.find_all("input", {"name": r})) for r in self.user_keywords) + pass_fields = sum(bool(form.find_all("input", {"name": r})) for r in self.pass_keywords) + if user_fields and pass_fields: + return True return False - forms = soup.find_all("form") - - # first, check for obvious password fields - for form in forms: - if form.find_all("input", {"type": "password"}): - return True + def response_to_json(self, response): + """ + Convert web response to JSON object, similar to the output of `httpx -irr -json` + """ - # next, check for forms that have both a user-like and password-like field - for form in forms: - user_fields = sum(bool(form.find_all("input", {"name": r})) for r in user_keywords) - pass_fields = sum(bool(form.find_all("input", {"name": r})) for r in pass_keywords) - if user_fields and pass_fields: - return True - return False + if response is None: + return + + import mmh3 + from datetime import datetime + from hashlib import md5, sha256 + from bbot.core.helpers.misc import tagify, urlparse, split_host_port, smart_decode + + request = response.request + url = str(request.url) + parsed_url = urlparse(url) + netloc = parsed_url.netloc + scheme = parsed_url.scheme.lower() + host, port = split_host_port(f"{scheme}://{netloc}") + + raw_headers = "\r\n".join([f"{k}: {v}" for k, v in response.headers.items()]) + raw_headers_encoded = raw_headers.encode() + + headers = {} + for k, v in response.headers.items(): + k = tagify(k, delimiter="_") + headers[k] = v + + j = { + "timestamp": datetime.now().isoformat(), + "hash": { + "body_md5": md5(response.content).hexdigest(), + "body_mmh3": mmh3.hash(response.content), + "body_sha256": sha256(response.content).hexdigest(), + # "body_simhash": "TODO", + "header_md5": md5(raw_headers_encoded).hexdigest(), + "header_mmh3": mmh3.hash(raw_headers_encoded), + "header_sha256": sha256(raw_headers_encoded).hexdigest(), + # "header_simhash": "TODO", + }, + "header": headers, + "body": smart_decode(response.content), + "content_type": headers.get("content_type", "").split(";")[0].strip(), + "url": url, + "host": str(host), + "port": port, + "scheme": scheme, + "method": response.request.method, + "path": parsed_url.path, + "raw_header": raw_headers, + "status_code": response.status_code, + } + + return j diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index 5eafb00c5..fbd4e7593 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -451,7 +451,7 @@ def add_word(self, word): class DNSMutator(Mutator): """ - DNS-specific mutator used by the `massdns` module to generate target-specific subdomain mutations. + DNS-specific mutator used by the `dnsbrute_mutations` module to generate target-specific subdomain mutations. This class extends the Mutator base class to add DNS-specific logic for generating subdomain mutations based on input words. 
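Returning to the response_to_json helper added above, a rough usage sketch from a module's perspective (it assumes the web helper is reachable via self.helpers.web; the logging call is illustrative):

response = await self.helpers.request("https://www.evilcorp.com")
j = self.helpers.web.response_to_json(response)
if j is not None:
    # keys mirror httpx -irr -json output: url, method, status_code, content_type, header/body hashes, etc.
    self.info(f'{j["method"]} {j["url"]} -> {j["status_code"]} ({j["content_type"]})')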
It utilizes custom word extraction patterns diff --git a/bbot/core/modules.py b/bbot/core/modules.py index b9ae83af5..c5eb8f902 100644 --- a/bbot/core/modules.py +++ b/bbot/core/modules.py @@ -487,7 +487,7 @@ def recommend_dependencies(self, modules): """ resolve_choices = {} # step 1: build a dictionary containing event types and their associated modules - # {"IP_ADDRESS": set("nmap", "ipneighbor", ...)} + # {"IP_ADDRESS": set("masscan", "ipneighbor", ...)} watched = {} produced = {} for modname in modules: @@ -560,11 +560,11 @@ def modules_table(self, modules=None, mod_type=None): str: A formatted table string. Examples: - >>> print(modules_table(["nmap"])) + >>> print(modules_table(["portscan"])) +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ | Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +==========+========+=================+==============================+===============================+======================+===================+ - | nmap | scan | No | Execute port scans with nmap | active, aggressive, portscan, | DNS_NAME, IP_ADDRESS | OPEN_TCP_PORT | + | portscan | scan | No | Execute port scans | active, aggressive, portscan, | DNS_NAME, IP_ADDRESS | OPEN_TCP_PORT | | | | | | web-thorough | | | +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ """ diff --git a/bbot/core/shared_deps.py b/bbot/core/shared_deps.py index 751117752..ebc3759e4 100644 --- a/bbot/core/shared_deps.py +++ b/bbot/core/shared_deps.py @@ -111,6 +111,36 @@ }, ] +MASSCAN = [ + { + "name": "install dev tools", + "package": {"name": ["gcc", "git", "make"], "state": "present"}, + "become": True, + "ignore_errors": True, + }, + { + "name": "Download masscan source code", + "git": { + "repo": "https://github.com/robertdavidgraham/masscan.git", + "dest": "#{BBOT_TEMP}/masscan", + "single_branch": True, + "version": "master", + }, + }, + { + "name": "Build masscan", + "command": { + "chdir": "#{BBOT_TEMP}/masscan", + "cmd": "make -j", + "creates": "#{BBOT_TEMP}/masscan/bin/masscan", + }, + }, + { + "name": "Install masscan", + "copy": {"src": "#{BBOT_TEMP}/masscan/bin/masscan", "dest": "#{BBOT_TOOLS}/", "mode": "u+x,g+x,o+x"}, + }, +] + # shared module dependencies -- ffuf, massdns, chromium, etc. 
SHARED_DEPS = {} for var, val in list(locals().items()): diff --git a/bbot/defaults.yml b/bbot/defaults.yml index 2625b795e..b47c8b4b8 100644 --- a/bbot/defaults.yml +++ b/bbot/defaults.yml @@ -24,6 +24,10 @@ deps: ffuf: version: "2.1.0" +dns: + # Number of concurrent massdns lookups (-s) + brute_threads: 1000 + ### WEB SPIDER ### # Set the maximum number of HTTP links that can be followed in a row (0 == no spidering allowed) diff --git a/bbot/errors.py b/bbot/errors.py index e50e581cd..30948835d 100644 --- a/bbot/errors.py +++ b/bbot/errors.py @@ -72,3 +72,15 @@ class PresetConditionError(BBOTError): class PresetAbortError(PresetConditionError): pass + + +class BBOTEngineError(BBOTError): + pass + + +class WebError(BBOTEngineError): + pass + + +class DNSError(BBOTEngineError): + pass diff --git a/bbot/modules/ajaxpro.py b/bbot/modules/ajaxpro.py index ba3e0eb3e..dda98ad2b 100644 --- a/bbot/modules/ajaxpro.py +++ b/bbot/modules/ajaxpro.py @@ -11,7 +11,11 @@ class ajaxpro(BaseModule): watched_events = ["HTTP_RESPONSE", "URL"] produced_events = ["VULNERABILITY", "FINDING"] flags = ["active", "safe", "web-thorough"] - meta = {"description": "Check for potentially vulnerable Ajaxpro instances"} + meta = { + "description": "Check for potentially vulnerable Ajaxpro instances", + "created_date": "2024-01-18", + "author": "@liquidsec", + } async def handle_event(self, event): if event.type == "URL": @@ -33,6 +37,7 @@ async def handle_event(self, event): }, "FINDING", event, + context="{module} discovered Ajaxpro instance ({event.type}) at {event.data}", ) elif event.type == "HTTP_RESPONSE": @@ -49,4 +54,5 @@ async def handle_event(self, event): }, "FINDING", event, + context="{module} discovered Ajaxpro instance ({event.type}) at {event.data}", ) diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index bf4c88e93..f95adde0b 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -5,7 +5,11 @@ class anubisdb(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query jldc.me's database for subdomains"} + meta = { + "description": "Query jldc.me's database for subdomains", + "created_date": "2022-10-04", + "author": "@TheTechromancer", + } options = {"limit": 1000} options_desc = { "limit": "Limit the number of subdomains returned per query (increasing this may slow the scan due to garbage results from this API)" diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index a3d6ad6ba..9b09eaf5f 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -5,7 +5,11 @@ class azure_realm(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "passive", "safe"] - meta = {"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm'} + meta = { + "description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm', + "created_date": "2023-07-12", + "author": "@TheTechromancer", + } async def setup(self): self.processed = set() @@ -19,10 +23,13 @@ async def handle_event(self, event): auth_url = await self.getuserrealm(domain) if auth_url: url_event = self.make_event( - auth_url, "URL_UNVERIFIED", source=event, tags=["affiliate", "ms-auth-url"] + auth_url, "URL_UNVERIFIED", parent=event, tags=["affiliate", "ms-auth-url"] ) url_event.source_domain = domain - await self.emit_event(url_event) + await self.emit_event( + 
url_event, + context="{module} queried login.microsoftonline.com for user realm and found {event.type}: {event.data}", + ) async def getuserrealm(self, domain): url = f"https://login.microsoftonline.com/getuserrealm.srf?login=test@{domain}" diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index a15bbb68f..bd4a9b3dc 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -8,7 +8,11 @@ class azure_tenant(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "passive", "safe"] - meta = {"description": "Query Azure for tenant sister domains"} + meta = { + "description": "Query Azure for tenant sister domains", + "created_date": "2024-07-04", + "author": "@TheTechromancer", + } base_url = "https://autodiscover-s.outlook.com" in_scope_only = True @@ -34,17 +38,30 @@ async def handle_event(self, event): self.verbose(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}') for domain in domains: if domain != query: - await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"]) + await self.emit_event( + domain, + "DNS_NAME", + parent=event, + tags=["affiliate", "azure-tenant"], + context=f'{{module}} queried Outlook autodiscover for "{query}" and found {{event.type}}: {{event.data}}', + ) # tenant names if domain.lower().endswith(".onmicrosoft.com"): tenantname = domain.split(".")[0].lower() if tenantname: tenant_names.add(tenantname) - event_data = {"tenant-names": sorted(tenant_names), "domains": sorted(domains)} + tenant_names = sorted(tenant_names) + event_data = {"tenant-names": tenant_names, "domains": sorted(domains)} + tenant_names_str = ",".join(tenant_names) if tenant_id is not None: event_data["tenant-id"] = tenant_id - await self.emit_event(event_data, "AZURE_TENANT", source=event) + await self.emit_event( + event_data, + "AZURE_TENANT", + parent=event, + context=f'{{module}} queried Outlook autodiscover for "{query}" and found {{event.type}}: {tenant_names_str}', + ) async def query(self, domain): url = f"{self.base_url}/autodiscover/autodiscover.svc" diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py index 992ae5c0d..4331578a8 100644 --- a/bbot/modules/baddns.py +++ b/bbot/modules/baddns.py @@ -10,14 +10,18 @@ class baddns(BaseModule): watched_events = ["DNS_NAME", "DNS_NAME_UNRESOLVED"] produced_events = ["FINDING", "VULNERABILITY"] flags = ["active", "safe", "web-basic", "baddns", "cloud-enum", "subdomain-hijack"] - meta = {"description": "Check hosts for domain/subdomain takeovers"} + meta = { + "description": "Check hosts for domain/subdomain takeovers", + "created_date": "2024-01-18", + "author": "@liquidsec", + } options = {"custom_nameservers": [], "only_high_confidence": False} options_desc = { "custom_nameservers": "Force BadDNS to use a list of custom nameservers", "only_high_confidence": "Do not emit low-confidence or generic detections", } max_event_handlers = 8 - deps_pip = ["baddns~=1.1.0"] + deps_pip = ["baddns~=1.1.789"] def select_modules(self): selected_modules = [] @@ -62,7 +66,11 @@ async def handle_event(self, event): "host": str(event.host), } await self.emit_event( - data, "VULNERABILITY", event, tags=[f"baddns-{module_instance.name.lower()}"] + data, + "VULNERABILITY", + event, + tags=[f"baddns-{module_instance.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}', ) elif r_dict["confidence"] in 
["UNLIKELY", "POSSIBLE"] and not self.only_high_confidence: @@ -71,7 +79,11 @@ async def handle_event(self, event): "host": str(event.host), } await self.emit_event( - data, "FINDING", event, tags=[f"baddns-{module_instance.name.lower()}"] + data, + "FINDING", + event, + tags=[f"baddns-{module_instance.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {r_dict["description"]}', ) else: self.warning(f"Got unrecognized confidence level: {r['confidence']}") @@ -80,5 +92,9 @@ async def handle_event(self, event): if found_domains: for found_domain in found_domains: await self.emit_event( - found_domain, "DNS_NAME", event, tags=[f"baddns-{module_instance.name.lower()}"] + found_domain, + "DNS_NAME", + event, + tags=[f"baddns-{module_instance.name.lower()}"], + context=f'{{module}}\'s "{r_dict["module"]}" module found {{event.type}}: {{event.data}}', ) diff --git a/bbot/modules/baddns_zone.py b/bbot/modules/baddns_zone.py index ac0fc3c57..aa4e2979d 100644 --- a/bbot/modules/baddns_zone.py +++ b/bbot/modules/baddns_zone.py @@ -6,14 +6,18 @@ class baddns_zone(baddns_module): watched_events = ["DNS_NAME"] produced_events = ["FINDING", "VULNERABILITY"] flags = ["active", "safe", "subdomain-enum", "baddns", "cloud-enum"] - meta = {"description": "Check hosts for DNS zone transfers and NSEC walks"} + meta = { + "description": "Check hosts for DNS zone transfers and NSEC walks", + "created_date": "2024-01-29", + "author": "@liquidsec", + } options = {"custom_nameservers": [], "only_high_confidence": False} options_desc = { "custom_nameservers": "Force BadDNS to use a list of custom nameservers", "only_high_confidence": "Do not emit low-confidence or generic detections", } max_event_handlers = 8 - deps_pip = ["baddns~=1.1.0"] + deps_pip = ["baddns~=1.1.789"] def select_modules(self): selected_modules = [] @@ -24,6 +28,6 @@ def select_modules(self): # minimize nsec records feeding back into themselves async def filter_event(self, event): - if "baddns-nsec" in event.tags or "baddns-nsec" in event.source.tags: + if "baddns-nsec" in event.tags or "baddns-nsec" in event.parent.tags: return False return True diff --git a/bbot/modules/badsecrets.py b/bbot/modules/badsecrets.py index 01cc36ed8..8f8201ada 100644 --- a/bbot/modules/badsecrets.py +++ b/bbot/modules/badsecrets.py @@ -1,7 +1,6 @@ import multiprocessing - +from pathlib import Path from .base import BaseModule - from badsecrets.base import carve_all_modules @@ -9,9 +8,29 @@ class badsecrets(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING", "VULNERABILITY", "TECHNOLOGY"] flags = ["active", "safe", "web-basic"] - meta = {"description": "Library for detecting known or weak secrets across many web frameworks"} + meta = { + "description": "Library for detecting known or weak secrets across many web frameworks", + "created_date": "2022-11-19", + "author": "@liquidsec", + } + options = {"custom_secrets": None} + options_desc = { + "custom_secrets": "Include custom secrets loaded from a local file", + } deps_pip = ["badsecrets~=0.4.490"] + async def setup(self): + self.custom_secrets = None + custom_secrets = self.config.get("custom_secrets", None) + if custom_secrets: + if Path(custom_secrets).is_file(): + self.custom_secrets = custom_secrets + self.info(f"Successfully loaded secrets file [{custom_secrets}]") + else: + self.warning(f"custom secrets file [{custom_secrets}] is not valid") + return None, "Custom secrets file not valid" + return True + @property def _max_event_handlers(self): 
return max(1, multiprocessing.cpu_count() - 1) @@ -39,6 +58,7 @@ async def handle_event(self, event): headers=resp_headers, cookies=resp_cookies, url=event.data.get("url", None), + custom_resource=self.custom_secrets, ) except Exception as e: self.warning(f"Error processing {event}: {e}") @@ -52,14 +72,21 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - await self.emit_event(data, "VULNERABILITY", event) + await self.emit_event( + data, + "VULNERABILITY", + event, + context=f'{{module}}\'s "{r["detecting_module"]}" module found known {r["description"]["product"]} secret ({{event.type}}): "{r["secret"]}"', + ) elif r["type"] == "IdentifyOnly": # There is little value to presenting a non-vulnerable asp.net viewstate, as it is not crackable without a Matrioshka brain. Just emit a technology instead. if r["detecting_module"] == "ASPNET_Viewstate": + technology = "microsoft asp.net" await self.emit_event( - {"technology": "microsoft asp.net", "url": event.data["url"], "host": str(event.host)}, + {"technology": technology, "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event, + context=f"{{module}} identified {{event.type}}: {technology}", ) else: data = { @@ -67,4 +94,9 @@ async def handle_event(self, event): "url": event.data["url"], "host": str(event.host), } - await self.emit_event(data, "FINDING", event) + await self.emit_event( + data, + "FINDING", + event, + context=f'{{module}} identified cryptographic product ({{event.type}}): "{r["description"]["product"]}"', + ) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 1cb827ccb..5cb4cc6fe 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -105,6 +105,8 @@ class BaseModule: batch_wait = 10 failed_request_abort_threshold = 5 + default_discovery_context = "{module} discovered {event.type}: {event.data}" + _preserve_graph = False _stats_exclude = False _qsize = 1000 @@ -112,6 +114,7 @@ class BaseModule: _name = "base" _type = "scan" _intercept = False + _shuffle_incoming_queue = True def __init__(self, scan): """Initializes a module instance. @@ -414,7 +417,7 @@ def make_event(self, *args, **kwargs): raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False. Examples: - >>> new_event = self.make_event("1.2.3.4", source=event) + >>> new_event = self.make_event("1.2.3.4", parent=event) >>> await self.emit_event(new_event) Returns: @@ -424,6 +427,10 @@ def make_event(self, *args, **kwargs): ValidationError: If the event could not be validated and raise_error is True. 
""" raise_error = kwargs.pop("raise_error", False) + module = kwargs.pop("module", None) + if module is None: + if (not args) or getattr(args[0], "module", None) is None: + kwargs["module"] = self try: event = self.scan.make_event(*args, **kwargs) except ValidationError as e: @@ -431,8 +438,6 @@ def make_event(self, *args, **kwargs): raise self.warning(f"{e}") return - if not event.module: - event.module = self return event async def emit_event(self, *args, **kwargs): @@ -453,9 +458,9 @@ async def emit_event(self, *args, **kwargs): ``` Examples: - >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + >>> await self.emit_event("www.evilcorp.com", parent=event, tags=["affiliate"]) - >>> new_event = self.make_event("1.2.3.4", source=event) + >>> new_event = self.make_event("1.2.3.4", parent=event) >>> await self.emit_event(new_event) Returns: @@ -632,7 +637,8 @@ async def _worker(self): else: self.debug(f"Not accepting {event} because {reason}") except asyncio.CancelledError: - self.log.trace("Worker cancelled") + # this trace was used for debugging leaked CancelledErrors from inside httpx + # self.log.trace("Worker cancelled") raise self.log.trace(f"Worker stopped") @@ -684,6 +690,7 @@ def _event_precheck(self, event): if self.target_only: if "target" not in event.tags: return False, "it did not meet target_only filter criteria" + # exclude certain URLs (e.g. javascript): # TODO: revisit this after httpx rework if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags: @@ -703,7 +710,7 @@ async def _event_postcheck(self, event): # check duplicates is_incoming_duplicate, reason = self.is_incoming_duplicate(event, add=True) if is_incoming_duplicate and not self.accept_dupes: - return False, f"module has already seen {event}" + (f" ({reason})" if reason else "") + return False, f"module has already seen it" + (f" ({reason})" if reason else "") return acceptable, reason @@ -729,13 +736,6 @@ async def _event_postcheck_inner(self, event): if self._is_graph_important(event): return True, "event is critical to the graph" - # don't send out-of-scope targets to active modules (excluding portscanners, because they can handle it) - # this only takes effect if your target and whitelist are different - # TODO: the logic here seems incomplete, it could probably use some work. 
- if "active" in self.flags and "portscan" not in self.flags: - if "target" in event.tags and event not in self.scan.whitelist: - return False, "it is not in whitelist and module has active flag" - # check scope distance filter_result, reason = self._scope_distance_check(event) if not filter_result: @@ -743,7 +743,12 @@ async def _event_postcheck_inner(self, event): # custom filtering async with self.scan._acatch(context=self.filter_event): - filter_result = await self.filter_event(event) + try: + filter_result = await self.filter_event(event) + except Exception as e: + msg = f"Unhandled exception in {self.name}.filter_event({event}): {e}" + self.error(msg) + return False, msg msg = str(self._custom_filter_criteria_msg) with suppress(ValueError, TypeError): filter_result, reason = filter_result @@ -891,7 +896,12 @@ def is_incoming_duplicate(self, event, add=False): if event.type in ("FINISHED",): return False, "" reason = "" - event_hash = self._incoming_dedup_hash(event) + try: + event_hash = self._incoming_dedup_hash(event) + except Exception as e: + msg = f"Unhandled exception in {self.name}._incoming_dedup_hash({event}): {e}" + self.error(msg) + return True, msg with suppress(TypeError, ValueError): event_hash, reason = event_hash is_dup = event_hash in self._incoming_dup_tracker @@ -952,7 +962,7 @@ def get_per_hostport_hash(self, event): >>> event = self.make_event("https://example.com:8443") >>> self.get_per_hostport_hash(event) """ - parsed = getattr(event, "parsed", None) + parsed = getattr(event, "parsed_url", None) if parsed is None: to_hash = self.helpers.make_netloc(event.host, event.port) else: @@ -1095,7 +1105,10 @@ def config(self): @property def incoming_event_queue(self): if self._incoming_event_queue is None: - self._incoming_event_queue = ShuffleQueue() + if self._shuffle_incoming_queue: + self._incoming_event_queue = ShuffleQueue() + else: + self._incoming_event_queue = asyncio.Queue() return self._incoming_event_queue @property @@ -1465,8 +1478,13 @@ async def _worker(self): await self.forward_event(event, kwargs) except asyncio.CancelledError: - self.log.trace("Worker cancelled") + # this trace was used for debugging leaked CancelledErrors from inside httpx + # self.log.trace("Worker cancelled") raise + except BaseException as e: + self.critical(f"Critical failure in intercept module {self.name}: {e}") + self.critical(traceback.format_exc()) + self.scan.stop() self.log.trace(f"Worker stopped") async def get_incoming_event(self): @@ -1486,7 +1504,7 @@ async def queue_outgoing_event(self, event, **kwargs): Used by emit_event() to raise new events to the scan """ # if this was a normal module, we'd put it in the outgoing queue - # but because it's a intercept module, we need to queue it with the first intercept module + # but because it's an intercept module, we need to queue it at the scan's ingress await self.scan.ingress_module.queue_event(event, kwargs) async def queue_event(self, event, kwargs=None): diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index bbf339b08..3926d90b3 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -9,7 +9,12 @@ class bevigil(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME", "URL_UNVERIFIED"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Retrieve OSINT data from mobile applications using BeVigil", "auth_required": True} + meta = { + "description": "Retrieve OSINT data from mobile applications using BeVigil", + "created_date": "2022-10-26", + 
"author": "@alt-glitch", + "auth_required": True, + } options = {"api_key": "", "urls": False} options_desc = {"api_key": "BeVigil OSINT API Key", "urls": "Emit URLs in addition to DNS_NAMEs"} @@ -29,13 +34,23 @@ async def handle_event(self, event): subdomains = await self.query(query, request_fn=self.request_subdomains, parse_fn=self.parse_subdomains) if subdomains: for subdomain in subdomains: - await self.emit_event(subdomain, "DNS_NAME", source=event) + await self.emit_event( + subdomain, + "DNS_NAME", + parent=event, + context=f'{{module}} queried BeVigil\'s API for "{query}" and discovered {{event.type}}: {{event.data}}', + ) if self.urls: urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls) if urls: for parsed_url in await self.helpers.run_in_executor_mp(self.helpers.validators.collapse_urls, urls): - await self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event) + await self.emit_event( + parsed_url.geturl(), + "URL_UNVERIFIED", + parent=event, + context=f'{{module}} queried BeVigil\'s API for "{query}" and discovered {{event.type}}: {{event.data}}', + ) async def request_subdomains(self, query): url = f"{self.base_url}/{self.helpers.quote(query)}/subdomains/" diff --git a/bbot/modules/binaryedge.py b/bbot/modules/binaryedge.py index 64970c861..15d195c01 100644 --- a/bbot/modules/binaryedge.py +++ b/bbot/modules/binaryedge.py @@ -5,7 +5,12 @@ class binaryedge(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the BinaryEdge API", "auth_required": True} + meta = { + "description": "Query the BinaryEdge API", + "created_date": "2024-08-18", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": "", "max_records": 1000} options_desc = { "api_key": "BinaryEdge API key", diff --git a/bbot/modules/bucket_amazon.py b/bbot/modules/bucket_amazon.py index 3e17b186a..7829606f7 100644 --- a/bbot/modules/bucket_amazon.py +++ b/bbot/modules/bucket_amazon.py @@ -5,7 +5,11 @@ class bucket_amazon(bucket_template): watched_events = ["DNS_NAME", "STORAGE_BUCKET"] produced_events = ["STORAGE_BUCKET", "FINDING"] flags = ["active", "safe", "cloud-enum", "web-basic"] - meta = {"description": "Check for S3 buckets related to target"} + meta = { + "description": "Check for S3 buckets related to target", + "created_date": "2022-11-04", + "author": "@TheTechromancer", + } options = {"permutations": False} options_desc = { "permutations": "Whether to try permutations", diff --git a/bbot/modules/bucket_azure.py b/bbot/modules/bucket_azure.py index 6c828afed..be9b81643 100644 --- a/bbot/modules/bucket_azure.py +++ b/bbot/modules/bucket_azure.py @@ -5,7 +5,11 @@ class bucket_azure(bucket_template): watched_events = ["DNS_NAME", "STORAGE_BUCKET"] produced_events = ["STORAGE_BUCKET", "FINDING"] flags = ["active", "safe", "cloud-enum", "web-basic"] - meta = {"description": "Check for Azure storage blobs related to target"} + meta = { + "description": "Check for Azure storage blobs related to target", + "created_date": "2022-11-04", + "author": "@TheTechromancer", + } options = {"permutations": False} options_desc = { "permutations": "Whether to try permutations", diff --git a/bbot/modules/bucket_digitalocean.py b/bbot/modules/bucket_digitalocean.py index c467fde6e..8e1e008fc 100644 --- a/bbot/modules/bucket_digitalocean.py +++ b/bbot/modules/bucket_digitalocean.py @@ -5,7 +5,11 @@ class bucket_digitalocean(bucket_template): 
watched_events = ["DNS_NAME", "STORAGE_BUCKET"] produced_events = ["STORAGE_BUCKET", "FINDING"] flags = ["active", "safe", "slow", "cloud-enum", "web-thorough"] - meta = {"description": "Check for DigitalOcean spaces related to target"} + meta = { + "description": "Check for DigitalOcean spaces related to target", + "created_date": "2022-11-08", + "author": "@TheTechromancer", + } options = {"permutations": False} options_desc = { "permutations": "Whether to try permutations", diff --git a/bbot/modules/bucket_file_enum.py b/bbot/modules/bucket_file_enum.py index facaa021e..a8c06996a 100644 --- a/bbot/modules/bucket_file_enum.py +++ b/bbot/modules/bucket_file_enum.py @@ -10,7 +10,9 @@ class bucket_file_enum(BaseModule): watched_events = ["STORAGE_BUCKET"] produced_events = ["URL_UNVERIFIED"] meta = { - "description": "Works in conjunction with the filedownload module to download files from open storage buckets. Currently supported cloud providers: AWS" + "description": "Works in conjunction with the filedownload module to download files from open storage buckets. Currently supported cloud providers: AWS", + "created_date": "2023-11-14", + "author": "@TheTechromancer", } flags = ["passive", "safe", "cloud-enum"] options = { @@ -42,7 +44,14 @@ async def handle_aws(self, event): bucket_file = url + "/" + key file_extension = self.helpers.get_file_extension(key) if file_extension not in self.scan.url_extension_blacklist: - await self.emit_event(bucket_file, "URL_UNVERIFIED", source=event, tags="filedownload") + extension_upper = file_extension.upper() + await self.emit_event( + bucket_file, + "URL_UNVERIFIED", + parent=event, + tags="filedownload", + context=f"{{module}} enumerate files in bucket and discovered {extension_upper} file at {{event.type}}: {{event.data}}", + ) urls_emitted += 1 if urls_emitted >= self.file_limit: return diff --git a/bbot/modules/bucket_firebase.py b/bbot/modules/bucket_firebase.py index 01b1fc213..6743a5a22 100644 --- a/bbot/modules/bucket_firebase.py +++ b/bbot/modules/bucket_firebase.py @@ -5,7 +5,11 @@ class bucket_firebase(bucket_template): watched_events = ["DNS_NAME", "STORAGE_BUCKET"] produced_events = ["STORAGE_BUCKET", "FINDING"] flags = ["active", "safe", "cloud-enum", "web-basic"] - meta = {"description": "Check for open Firebase databases related to target"} + meta = { + "description": "Check for open Firebase databases related to target", + "created_date": "2023-03-20", + "author": "@TheTechromancer", + } options = {"permutations": False} options_desc = { "permutations": "Whether to try permutations", diff --git a/bbot/modules/bucket_google.py b/bbot/modules/bucket_google.py index 9e63ddc8b..0251eec24 100644 --- a/bbot/modules/bucket_google.py +++ b/bbot/modules/bucket_google.py @@ -9,7 +9,11 @@ class bucket_google(bucket_template): watched_events = ["DNS_NAME", "STORAGE_BUCKET"] produced_events = ["STORAGE_BUCKET", "FINDING"] flags = ["active", "safe", "cloud-enum", "web-basic"] - meta = {"description": "Check for Google object storage related to target"} + meta = { + "description": "Check for Google object storage related to target", + "created_date": "2022-11-04", + "author": "@TheTechromancer", + } options = {"permutations": False} options_desc = { "permutations": "Whether to try permutations", diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py index 0b5793657..e51bb5db8 100644 --- a/bbot/modules/builtwith.py +++ b/bbot/modules/builtwith.py @@ -17,7 +17,12 @@ class builtwith(subdomain_enum_apikey): watched_events = ["DNS_NAME"] 
produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "passive", "safe"] - meta = {"description": "Query Builtwith.com for subdomains", "auth_required": True} + meta = { + "description": "Query Builtwith.com for subdomains", + "created_date": "2022-08-23", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": "", "redirects": True} options_desc = {"api_key": "Builtwith API key", "redirects": "Also look up inbound and outbound redirects"} base_url = "https://api.builtwith.com" @@ -33,14 +38,25 @@ async def handle_event(self, event): if subdomains: for s in subdomains: if s != event: - await self.emit_event(s, "DNS_NAME", source=event) + await self.emit_event( + s, + "DNS_NAME", + parent=event, + context=f'{{module}} queried the BuiltWith API for "{query}" and found {{event.type}}: {{event.data}}', + ) # redirects if self.config.get("redirects", True): redirects = await self.query(query, parse_fn=self.parse_redirects, request_fn=self.request_redirects) if redirects: for r in redirects: if r != event: - await self.emit_event(r, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event( + r, + "DNS_NAME", + parent=event, + tags=["affiliate"], + context=f'{{module}} queried the BuiltWith redirect API for "{query}" and found redirect to {{event.type}}: {{event.data}}', + ) async def request_domains(self, query): url = f"{self.base_url}/v20/api.json?KEY={self.api_key}&LOOKUP={query}&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes" diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 0ce3df899..4f3b51789 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -79,7 +79,7 @@ class bypass403(BaseModule): watched_events = ["URL"] produced_events = ["FINDING"] flags = ["active", "aggressive", "web-thorough"] - meta = {"description": "Check 403 pages for common bypasses"} + meta = {"description": "Check 403 pages for common bypasses", "created_date": "2022-07-05", "author": "@liquidsec"} in_scope_only = True async def do_checks(self, compare_helper, event, collapse_threshold): @@ -146,14 +146,16 @@ async def handle_event(self, event): "url": event.data, }, "FINDING", - source=event, + parent=event, + context=f"{{module}} discovered multiple potential 403 bypasses ({{event.type}}) for {event.data}", ) else: for description in results: await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", - source=event, + parent=event, + context=f"{{module}} discovered potential 403 bypass ({{event.type}}) for {event.data}", ) # When a WAF-check helper is available in the future, we will convert to HTTP_RESPONSE and check for the WAF string here. 
@@ -164,10 +166,10 @@ async def filter_event(self, event): def format_signature(self, sig, event): if sig[3] == True: - cleaned_path = event.parsed.path.strip("/") + cleaned_path = event.parsed_url.path.strip("/") else: - cleaned_path = event.parsed.path.lstrip("/") - kwargs = {"scheme": event.parsed.scheme, "netloc": event.parsed.netloc, "path": cleaned_path} + cleaned_path = event.parsed_url.path.lstrip("/") + kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path} formatted_url = sig[1].format(**kwargs) if sig[2] != None: formatted_headers = {k: v.format(**kwargs) for k, v in sig[2].items()} diff --git a/bbot/modules/c99.py b/bbot/modules/c99.py index 8e05a1c4b..8db5775cb 100644 --- a/bbot/modules/c99.py +++ b/bbot/modules/c99.py @@ -5,7 +5,12 @@ class c99(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the C99 API for subdomains", "auth_required": True} + meta = { + "description": "Query the C99 API for subdomains", + "created_date": "2022-07-08", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "c99.nl API key"} diff --git a/bbot/modules/censys.py b/bbot/modules/censys.py index b8609adf8..cb8a7c956 100644 --- a/bbot/modules/censys.py +++ b/bbot/modules/censys.py @@ -9,7 +9,12 @@ class censys(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the Censys API", "auth_required": True} + meta = { + "description": "Query the Censys API", + "created_date": "2022-08-04", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_id": "", "api_secret": "", "max_pages": 5} options_desc = { "api_id": "Censys.io API ID", diff --git a/bbot/modules/certspotter.py b/bbot/modules/certspotter.py index 4441b9d98..9d7e00d87 100644 --- a/bbot/modules/certspotter.py +++ b/bbot/modules/certspotter.py @@ -5,7 +5,11 @@ class certspotter(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query Certspotter's API for subdomains"} + meta = { + "description": "Query Certspotter's API for subdomains", + "created_date": "2022-07-28", + "author": "@TheTechromancer", + } base_url = "https://api.certspotter.com/v1" diff --git a/bbot/modules/chaos.py b/bbot/modules/chaos.py index 3eb763573..e98fedd84 100644 --- a/bbot/modules/chaos.py +++ b/bbot/modules/chaos.py @@ -5,7 +5,12 @@ class chaos(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query ProjectDiscovery's Chaos API for subdomains", "auth_required": True} + meta = { + "description": "Query ProjectDiscovery's Chaos API for subdomains", + "created_date": "2022-08-14", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "Chaos API key"} diff --git a/bbot/modules/code_repository.py b/bbot/modules/code_repository.py new file mode 100644 index 000000000..ef76954a9 --- /dev/null +++ b/bbot/modules/code_repository.py @@ -0,0 +1,56 @@ +import re +from bbot.modules.base import BaseModule + + +class code_repository(BaseModule): + watched_events = ["URL_UNVERIFIED"] + produced_events = ["CODE_REPOSITORY"] + meta = { + "description": "Look for code repository 
links in webpages", + "created_date": "2024-05-15", + "author": "@domwhewell-sage", + } + flags = ["passive", "safe", "code-enum"] + + # platform name : (regex, case_sensitive) + code_repositories = { + "git": [ + (r"github.com/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + (r"gitlab.(?:com|org)/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + ], + "docker": (r"hub.docker.com/r/[a-zA-Z0-9_-]+/[a-zA-Z0-9_-]+", False), + } + + scope_distance_modifier = 1 + + async def setup(self): + self.compiled_regexes = {} + for k, v in self.code_repositories.items(): + if isinstance(v, list): + self.compiled_regexes[k] = [(re.compile(pattern), c) for pattern, c in v] + else: + pattern, c = v + self.compiled_regexes[k] = (re.compile(pattern), c) + return True + + async def handle_event(self, event): + for platform, regexes in self.compiled_regexes.items(): + if not isinstance(regexes, list): + regexes = [regexes] + for regex, case_sensitive in regexes: + for match in regex.finditer(event.data): + url = match.group() + if not case_sensitive: + url = url.lower() + url = f"https://{url}" + repo_event = self.make_event( + {"url": url}, + "CODE_REPOSITORY", + tags=platform, + parent=event, + ) + repo_event.scope_distance = event.scope_distance + await self.emit_event( + repo_event, + context=f"{{module}} detected {platform} {{event.type}} at {url}", + ) diff --git a/bbot/modules/columbus.py b/bbot/modules/columbus.py index 2e8901359..4c09f3a5d 100644 --- a/bbot/modules/columbus.py +++ b/bbot/modules/columbus.py @@ -5,7 +5,11 @@ class columbus(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query the Columbus Project API for subdomains"} + meta = { + "description": "Query the Columbus Project API for subdomains", + "created_date": "2023-06-01", + "author": "@TheTechromancer", + } base_url = "https://columbus.elmasy.com/api/lookup" diff --git a/bbot/modules/credshed.py b/bbot/modules/credshed.py index 382644007..3630646a6 100644 --- a/bbot/modules/credshed.py +++ b/bbot/modules/credshed.py @@ -1,14 +1,16 @@ from contextlib import suppress -from bbot.modules.base import BaseModule +from bbot.modules.templates.subdomain_enum import subdomain_enum -class credshed(BaseModule): +class credshed(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME", "EMAIL_ADDRESS"] flags = ["passive", "safe"] meta = { "description": "Send queries to your own credshed server to check for known credentials of your targets", + "created_date": "2023-10-12", + "author": "@SpamFaux", "auth_required": True, } options = {"username": "", "password": "", "credshed_url": ""} @@ -41,7 +43,7 @@ async def setup(self): return await super().setup() async def handle_event(self, event): - query = event.data + query = self.make_query(event) cs_query = await self.helpers.request( f"{self.base_url}/api/search", method="POST", @@ -75,13 +77,33 @@ async def handle_event(self, event): if src: tags = [f"credshed-source-{src}"] - email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) + email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=tags) if email_event is not None: - await self.emit_event(email_event) + await self.emit_event( + email_event, context=f'{{module}} searched for "{query}" and found {{event.type}}: {{event.data}}' + ) if user: - await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + await self.emit_event( + f"{email}:{user}", + 
"USERNAME", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) if pw: - await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) + await self.emit_event( + f"{email}:{pw}", + "PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) for h_pw in hashes: if h_pw: - await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event( + f"{email}:{h_pw}", + "HASHED_PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) diff --git a/bbot/modules/crobat.py b/bbot/modules/crobat.py index 7ece44fdb..f7baacf37 100644 --- a/bbot/modules/crobat.py +++ b/bbot/modules/crobat.py @@ -6,5 +6,5 @@ class crobat(subdomain_enum): produced_events = ["DNS_NAME"] # tag "subdomain-enum" removed 2023-02-24 because API is offline flags = ["passive", "safe"] - meta = {"description": "Query Project Crobat for subdomains"} + meta = {"description": "Query Project Crobat for subdomains", "created_date": "2022-06-03", "author": "@j3tj3rk"} base_url = "https://sonar.omnisint.io" diff --git a/bbot/modules/crt.py b/bbot/modules/crt.py index 9773f72d4..27d088a0d 100644 --- a/bbot/modules/crt.py +++ b/bbot/modules/crt.py @@ -5,7 +5,11 @@ class crt(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query crt.sh (certificate transparency) for subdomains"} + meta = { + "description": "Query crt.sh (certificate transparency) for subdomains", + "created_date": "2022-05-13", + "author": "@TheTechromancer", + } base_url = "https://crt.sh" reject_wildcards = False diff --git a/bbot/modules/deadly/dastardly.py b/bbot/modules/deadly/dastardly.py index 73b9dc008..4476b99ab 100644 --- a/bbot/modules/deadly/dastardly.py +++ b/bbot/modules/deadly/dastardly.py @@ -6,12 +6,18 @@ class dastardly(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING", "VULNERABILITY"] flags = ["active", "aggressive", "slow", "web-thorough"] - meta = {"description": "Lightweight web application security scanner"} + meta = { + "description": "Lightweight web application security scanner", + "created_date": "2023-12-11", + "author": "@domwhewell-sage", + } deps_pip = ["lxml~=4.9.2"] deps_common = ["docker"] per_hostport_only = True + default_discovery_context = "{module} performed a light web scan against {event.parent.data['url']} and discovered {event.data['description']} at {event.data['url']}" + async def setup(self): await self.run_process("systemctl", "start", "docker", sudo=True) await self.run_process("docker", "pull", "public.ecr.aws/portswigger/dastardly:latest", sudo=True) @@ -27,7 +33,7 @@ async def filter_event(self, event): return True async def handle_event(self, event): - host = event.parsed._replace(path="/").geturl() + host = event.parsed_url._replace(path="/").geturl() self.verbose(f"Running Dastardly scan against {host}") command, output_file = self.construct_command(host) finished_proc = await self.run_process(command, sudo=True) @@ -46,6 +52,7 @@ async def handle_event(self, event): }, "FINDING", event, + context=f"{{module}} executed web scan against {host} and identified {{event.type}}: {failure.instance}", ) else: await self.emit_event( @@ -57,6 +64,7 @@ async def handle_event(self, event): }, "VULNERABILITY", event, + context=f"{{module}} executed web 
scan against {host} and identified {failure.severity.lower()} {{event.type}}: {failure.instance}", ) def construct_command(self, target): diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index a56c73506..ece21076a 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -11,7 +11,7 @@ class ffuf(BaseModule): watched_events = ["URL"] produced_events = ["URL_UNVERIFIED"] flags = ["aggressive", "active"] - meta = {"description": "A fast web fuzzer written in Go"} + meta = {"description": "A fast web fuzzer written in Go", "created_date": "2022-04-10", "author": "@pmueller"} options = { "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/Web-Content/raft-small-directories.txt", @@ -57,7 +57,7 @@ async def handle_event(self, event): return # only FFUF against a directory - if "." in event.parsed.path.split("/")[-1]: + if "." in event.parsed_url.path.split("/")[-1]: self.debug("Aborting FFUF as period was detected in right-most path segment (likely a file)") return else: @@ -71,7 +71,13 @@ async def handle_event(self, event): filters = await self.baseline_ffuf(fixed_url, exts=exts) async for r in self.execute_ffuf(self.tempfile, fixed_url, exts=exts, filters=filters): - await self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], + "URL_UNVERIFIED", + parent=event, + tags=[f"status-{r['status']}"], + context=f"{{module}} brute-forced {event.data} and found {{event.type}}: {{event.data}}", + ) async def filter_event(self, event): if "endpoint" in event.tags: diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 9234f7494..228a65898 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -8,7 +8,11 @@ class nuclei(BaseModule): watched_events = ["URL"] produced_events = ["FINDING", "VULNERABILITY", "TECHNOLOGY"] flags = ["active", "aggressive"] - meta = {"description": "Fast and customisable vulnerability scanner"} + meta = { + "description": "Fast and customisable vulnerability scanner", + "created_date": "2022-03-12", + "author": "@TheTechromancer", + } options = { "version": "3.2.0", @@ -140,19 +144,20 @@ async def handle_batch(self, *events): async for severity, template, tags, host, url, name, extracted_results in self.execute_nuclei(nuclei_input): # this is necessary because sometimes nuclei is inconsistent about the data returned in the host field cleaned_host = temp_target.get(host) - source_event = self.correlate_event(events, cleaned_host) + parent_event = self.correlate_event(events, cleaned_host) - if not source_event: + if not parent_event: continue if url == "": - url = str(source_event.data) + url = str(parent_event.data) if severity == "INFO" and "tech" in tags: await self.emit_event( - {"technology": str(name).lower(), "url": url, "host": str(source_event.host)}, + {"technology": str(name).lower(), "url": url, "host": str(parent_event.host)}, "TECHNOLOGY", - source_event, + parent_event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {str(name).lower()}", ) continue @@ -163,30 +168,32 @@ async def handle_batch(self, *events): if severity in ["INFO", "UNKNOWN"]: await self.emit_event( { - "host": str(source_event.host), + "host": str(parent_event.host), "url": url, "description": description_string, }, "FINDING", - source_event, + parent_event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {description_string}", ) else: await 
self.emit_event( { "severity": severity, - "host": str(source_event.host), + "host": str(parent_event.host), "url": url, "description": description_string, }, "VULNERABILITY", - source_event, + parent_event, + context=f"{{module}} scanned {url} and identified {severity.lower()} {{event.type}}: {description_string}", ) def correlate_event(self, events, host): for event in events: if host in event: return event - self.verbose(f"Failed to correlate nuclei result for {host}. Possible source events:") + self.verbose(f"Failed to correlate nuclei result for {host}. Possible parent events:") for event in events: self.verbose(f" - {event.data}") diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index cf7be1f67..98991c53d 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -8,7 +8,7 @@ class vhost(ffuf): watched_events = ["URL"] produced_events = ["VHOST", "DNS_NAME"] flags = ["active", "aggressive", "slow"] - meta = {"description": "Fuzz for virtual hosts"} + meta = {"description": "Fuzz for virtual hosts", "created_date": "2022-05-02", "author": "@liquidsec"} special_vhost_list = ["127.0.0.1", "localhost", "host.docker.internal"] options = { @@ -33,7 +33,7 @@ async def setup(self): async def handle_event(self, event): if not self.helpers.is_ip(event.host) or self.config.get("force_basehost"): - host = f"{event.parsed.scheme}://{event.parsed.netloc}" + host = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}" if host in self.scanned_hosts.keys(): return else: @@ -44,7 +44,7 @@ async def handle_event(self, event): if self.config.get("force_basehost"): basehost = self.config.get("force_basehost") else: - basehost = self.helpers.parent_domain(event.parsed.netloc) + basehost = self.helpers.parent_domain(event.parsed_url.netloc) self.debug(f"Using basehost: {basehost}") async for vhost in self.ffuf_vhost(host, f".{basehost}", event): @@ -55,7 +55,7 @@ async def handle_event(self, event): # check existing host for mutations self.verbose("Checking for vhost mutations on main host") async for vhost in self.ffuf_vhost( - host, f".{basehost}", event, wordlist=self.mutations_check(event.parsed.netloc.split(".")[0]) + host, f".{basehost}", event, wordlist=self.mutations_check(event.parsed_url.netloc.split(".")[0]) ): pass @@ -80,11 +80,23 @@ async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=F wordlist, host, exts=[""], suffix=basehost, filters=filters, mode="hostheader" ): found_vhost_b64 = r["input"]["FUZZ"] - vhost_dict = {"host": str(event.host), "url": host, "vhost": base64.b64decode(found_vhost_b64).decode()} - if f"{vhost_dict['vhost']}{basehost}" != event.parsed.netloc: - await self.emit_event(vhost_dict, "VHOST", source=event) + vhost_str = base64.b64decode(found_vhost_b64).decode() + vhost_dict = {"host": str(event.host), "url": host, "vhost": vhost_str} + if f"{vhost_dict['vhost']}{basehost}" != event.parsed_url.netloc: + await self.emit_event( + vhost_dict, + "VHOST", + parent=event, + context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {vhost_str}", + ) if skip_dns_host == False: - await self.emit_event(f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", source=event, tags=["vhost"]) + await self.emit_event( + f"{vhost_dict['vhost']}{basehost}", + "DNS_NAME", + parent=event, + tags=["vhost"], + context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {{event.data}}", + ) yield vhost_dict["vhost"] @@ -102,13 +114,13 @@ async def finish(self): for 
host, event in self.scanned_hosts.items(): if host not in self.wordcloud_tried_hosts: - event.parsed = urlparse(host) + event.parsed_url = urlparse(host) self.verbose("Checking main host with wordcloud") if self.config.get("force_basehost"): basehost = self.config.get("force_basehost") else: - basehost = self.helpers.parent_domain(event.parsed.netloc) + basehost = self.helpers.parent_domain(event.parsed_url.netloc) async for vhost in self.ffuf_vhost(host, f".{basehost}", event, wordlist=tempfile): pass diff --git a/bbot/modules/dehashed.py b/bbot/modules/dehashed.py index caa5fb662..37202fbae 100644 --- a/bbot/modules/dehashed.py +++ b/bbot/modules/dehashed.py @@ -1,13 +1,18 @@ from contextlib import suppress -from bbot.modules.base import BaseModule +from bbot.modules.templates.subdomain_enum import subdomain_enum -class dehashed(BaseModule): +class dehashed(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["PASSWORD", "HASHED_PASSWORD", "USERNAME"] flags = ["passive", "safe", "email-enum"] - meta = {"description": "Execute queries against dehashed.com for exposed credentials", "auth_required": True} + meta = { + "description": "Execute queries against dehashed.com for exposed credentials", + "created_date": "2023-10-12", + "author": "@SpamFaux", + "auth_required": True, + } options = {"username": "", "api_key": ""} options_desc = {"username": "Email Address associated with your API key", "api_key": "DeHashed API Key"} target_only = True @@ -29,7 +34,8 @@ async def setup(self): return await super().setup() async def handle_event(self, event): - async for entries in self.query(event): + query = self.make_query(event) + async for entries in self.query(query): for entry in entries: # we have to clean up the email field because dehashed does a poor job of it email_str = entry.get("email", "").replace("\\", "") @@ -48,18 +54,39 @@ async def handle_event(self, event): if db_name: tags = [f"db-{db_name}"] if email: - email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=tags) + email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=tags) if email_event is not None: - await self.emit_event(email_event) + await self.emit_event( + email_event, + context=f'{{module}} searched API for "{query}" and found {{event.type}}: {{event.data}}', + ) if user: - await self.emit_event(f"{email}:{user}", "USERNAME", source=email_event, tags=tags) + await self.emit_event( + f"{email}:{user}", + "USERNAME", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) if pw: - await self.emit_event(f"{email}:{pw}", "PASSWORD", source=email_event, tags=tags) + await self.emit_event( + f"{email}:{pw}", + "PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) if h_pw: - await self.emit_event(f"{email}:{h_pw}", "HASHED_PASSWORD", source=email_event, tags=tags) + await self.emit_event( + f"{email}:{h_pw}", + "HASHED_PASSWORD", + parent=email_event, + tags=tags, + context=f"{{module}} found {email} with {{event.type}}: {{event.data}}", + ) - async def query(self, event): - query = f"domain:{event.data}" + async def query(self, domain): + query = f"domain:{domain}" url = f"{self.base_url}?query={query}&size=10000&page=" + "{page}" page = 0 num_entries = 0 @@ -81,7 +108,7 @@ async def query(self, event): ) elif (page >= 3) and (total > num_entries): self.info( - f"{event.data} has {total:,} results in Dehashed. 
The API can only process the first 30,000 results. Please check dehashed.com to get the remaining results." + f"{domain} has {total:,} results in Dehashed. The API can only process the first 30,000 results. Please check dehashed.com to get the remaining results." ) agen.aclose() break diff --git a/bbot/modules/digitorus.py b/bbot/modules/digitorus.py index 0da487744..48c060346 100644 --- a/bbot/modules/digitorus.py +++ b/bbot/modules/digitorus.py @@ -7,7 +7,11 @@ class digitorus(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query certificatedetails.com for subdomains"} + meta = { + "description": "Query certificatedetails.com for subdomains", + "created_date": "2023-07-25", + "author": "@TheTechromancer", + } base_url = "https://certificatedetails.com" diff --git a/bbot/modules/dnsbrute.py b/bbot/modules/dnsbrute.py new file mode 100644 index 000000000..8ed05e2f0 --- /dev/null +++ b/bbot/modules/dnsbrute.py @@ -0,0 +1,55 @@ +from bbot.modules.templates.subdomain_enum import subdomain_enum + + +class dnsbrute(subdomain_enum): + flags = ["subdomain-enum", "passive", "aggressive"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + meta = {"description": "Brute-force subdomains with massdns + static wordlist"} + options = { + "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", + "max_depth": 5, + } + options_desc = { + "wordlist": "Subdomain wordlist URL", + "max_depth": "How many subdomains deep to brute force, i.e. 5.4.3.2.1.evilcorp.com", + } + deps_common = ["massdns"] + reject_wildcards = "strict" + dedup_strategy = "lowest_parent" + _qsize = 10000 + + async def setup(self): + self.max_depth = max(1, self.config.get("max_depth", 5)) + self.subdomain_file = await self.helpers.wordlist(self.config.get("wordlist")) + self.subdomain_list = set(self.helpers.read_file(self.subdomain_file)) + self.wordlist_size = len(self.subdomain_list) + return await super().setup() + + async def filter_event(self, event): + eligible, reason = await super().filter_event(event) + query = self.make_query(event) + + # limit brute force depth + subdomain_depth = self.helpers.subdomain_depth(query) + 1 + if subdomain_depth > self.max_depth: + eligible = False + reason = f"subdomain depth of *.{query} ({subdomain_depth}) > max_depth ({self.max_depth})" + + # don't brute-force things that look like autogenerated PTRs + if self.helpers.dns.brute.has_excessive_digits(query): + eligible = False + reason = f'"{query}" looks like an autogenerated PTR' + + return eligible, reason + + async def handle_event(self, event): + query = self.make_query(event) + self.info(f"Brute-forcing {self.wordlist_size:,} subdomains for {query} (source: {event.data})") + for hostname in await self.helpers.dns.brute(self, query, self.subdomain_list): + await self.emit_event( + hostname, + "DNS_NAME", + parent=event, + context=f'{{module}} tried {self.wordlist_size:,} subdomains against "{query}" and found {{event.type}}: {{event.data}}', + ) diff --git a/bbot/modules/dnsbrute_mutations.py b/bbot/modules/dnsbrute_mutations.py new file mode 100644 index 000000000..bb78114cf --- /dev/null +++ b/bbot/modules/dnsbrute_mutations.py @@ -0,0 +1,137 @@ +from bbot.modules.base import BaseModule + + +class dnsbrute_mutations(BaseModule): + flags = ["subdomain-enum", "passive", "aggressive", "slow"] + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + 
meta = {"description": "Brute-force subdomains with massdns + target-specific mutations"} + options = { + "max_mutations": 100, + } + options_desc = { + "max_mutations": "Maximum number of target-specific mutations to try per subdomain", + } + deps_common = ["massdns"] + _qsize = 10000 + + async def setup(self): + self.found = {} + self.parent_events = self.helpers.make_target() + self.max_mutations = self.config.get("max_mutations", 500) + # 800M bits == 100MB bloom filter == 10M entries before false positives start emerging + self.mutations_tried = self.helpers.bloom_filter(800000000) + self._mutation_run_counter = {} + return True + + async def handle_event(self, event): + # here we don't brute-force, we just add the subdomain to our end-of-scan TODO + self.add_found(event) + + def add_found(self, event): + self.parent_events.add(event) + host = str(event.host) + if self.helpers.is_subdomain(host): + subdomain, domain = host.split(".", 1) + if not self.helpers.dns.brute.has_excessive_digits(subdomain): + try: + self.found[domain].add(subdomain) + except KeyError: + self.found[domain] = {subdomain} + + async def finish(self): + found = sorted(self.found.items(), key=lambda x: len(x[-1]), reverse=True) + # if we have a lot of rounds to make, don't try mutations on less-populated domains + trimmed_found = [] + if found: + avg_subdomains = sum([len(subdomains) for domain, subdomains in found[:50]]) / len(found[:50]) + for i, (domain, subdomains) in enumerate(found): + # accept domains that are in the top 50 or have more than 5 percent of the average number of subdomains + if i < 50 or (len(subdomains) > 1 and len(subdomains) >= (avg_subdomains * 0.05)): + trimmed_found.append((domain, subdomains)) + else: + self.verbose( + f"Skipping mutations on {domain} because it only has {len(subdomains):,} subdomain(s) (avg: {avg_subdomains:,})" + ) + + base_mutations = set() + try: + for i, (domain, subdomains) in enumerate(trimmed_found): + self.verbose(f"{domain} has {len(subdomains):,} subdomains") + # keep looping as long as we're finding things + while 1: + query = domain + + mutations = set(base_mutations) + + def add_mutation(m): + h = f"{m}.{domain}" + if h not in self.mutations_tried: + self.mutations_tried.add(h) + mutations.add(m) + + # try every subdomain everywhere else + for _domain, _subdomains in found: + if _domain == domain: + continue + for s in _subdomains: + first_segment = s.split(".")[0] + # skip stuff with lots of numbers (e.g. 
PTRs) + if self.helpers.dns.brute.has_excessive_digits(first_segment): + continue + add_mutation(first_segment) + for word in self.helpers.extract_words( + first_segment, word_regexes=self.helpers.word_cloud.dns_mutator.extract_word_regexes + ): + add_mutation(word) + + # numbers + devops mutations + for mutation in self.helpers.word_cloud.mutations( + subdomains, cloud=False, numbers=3, number_padding=1 + ): + for delimiter in ("", ".", "-"): + m = delimiter.join(mutation).lower() + add_mutation(m) + + # special dns mutator + for subdomain in self.helpers.word_cloud.dns_mutator.mutations( + subdomains, max_mutations=self.max_mutations + ): + add_mutation(subdomain) + + if mutations: + self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(trimmed_found)})") + results = await self.helpers.dns.brute(self, query, mutations) + try: + mutation_run = self._mutation_run_counter[domain] + except KeyError: + self._mutation_run_counter[domain] = mutation_run = 1 + self._mutation_run_counter[domain] += 1 + for hostname in results: + parent_event = self.parent_events.get_host(hostname) + if parent_event is None: + self.warning(f"Could not correlate parent event from: {hostname}") + parent_event = self.scan.root_event + mutation_run_ordinal = self.helpers.integer_to_ordinal(mutation_run) + await self.emit_event( + hostname, + "DNS_NAME", + parent=parent_event, + tags=[f"mutation-{mutation_run}"], + abort_if=self.abort_if, + context=f'{{module}} found a mutated subdomain of "{domain}" on its {mutation_run_ordinal} run: {{event.type}}: {{event.data}}', + ) + if results: + continue + break + except AssertionError as e: + self.warning(e) + + def abort_if(self, event): + if not event.scope_distance == 0: + return True, "event is not in scope" + if "wildcard" in event.tags: + return True, "event is a wildcard" + if "unresolved" in event.tags: + return True, "event is unresolved" + return False, "" diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py index eef8e2d8c..790ff8e65 100644 --- a/bbot/modules/dnscommonsrv.py +++ b/bbot/modules/dnscommonsrv.py @@ -1,4 +1,4 @@ -from bbot.modules.base import BaseModule +from bbot.modules.templates.subdomain_enum import subdomain_enum # the following are the result of a 1-day internet survey to find the top SRV records # the scan resulted in 36,282 SRV records. the count for each one is shown. 
@@ -147,35 +147,23 @@ "_imap", # 1 "_iax", # 1 ] +num_srvs = len(common_srvs) -class dnscommonsrv(BaseModule): +class dnscommonsrv(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Check for common SRV records"} - options = {"top": 50, "max_event_handlers": 10} - options_desc = { - "top": "How many of the top SRV records to check", - "max_event_handlers": "How many instances of the module to run concurrently", - } - _max_event_handlers = 10 - - def _incoming_dedup_hash(self, event): - # dedupe by parent - parent_domain = self.helpers.parent_domain(event.data) - return hash(parent_domain), "already processed parent domain" - - async def filter_event(self, event): - # skip SRV wildcards - if "SRV" in await self.helpers.is_wildcard(event.host): - return False - return True + meta = {"description": "Check for common SRV records", "created_date": "2022-05-15", "author": "@TheTechromancer"} + dedup_strategy = "lowest_parent" async def handle_event(self, event): - top = int(self.config.get("top", 50)) - parent_domain = self.helpers.parent_domain(event.data) - queries = [f"{srv}.{parent_domain}" for srv in common_srvs[:top]] - async for query, results in self.helpers.resolve_batch(queries, type="srv"): - if results: - await self.emit_event(query, "DNS_NAME", tags=["srv-record"], source=event) + query = self.make_query(event) + self.verbose(f'Brute-forcing {num_srvs:,} SRV records for "{query}"') + for hostname in await self.helpers.dns.brute(self, query, common_srvs, type="SRV"): + await self.emit_event( + hostname, + "DNS_NAME", + parent=event, + context=f'{{module}} tried {num_srvs:,} common SRV records against "{query}" and found {{event.type}}: {{event.data}}', + ) diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index 06986f216..a1d981565 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -7,7 +7,11 @@ class dnsdumpster(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query dnsdumpster for subdomains"} + meta = { + "description": "Query dnsdumpster for subdomains", + "created_date": "2022-03-12", + "author": "@TheTechromancer", + } base_url = "https://dnsdumpster.com" diff --git a/bbot/modules/docker_pull.py b/bbot/modules/docker_pull.py index bcb731f4d..0ddd6578b 100644 --- a/bbot/modules/docker_pull.py +++ b/bbot/modules/docker_pull.py @@ -9,7 +9,11 @@ class docker_pull(BaseModule): watched_events = ["CODE_REPOSITORY"] produced_events = ["FILESYSTEM"] flags = ["passive", "safe", "slow"] - meta = {"description": "Download images from a docker repository"} + meta = { + "description": "Download images from a docker repository", + "created_date": "2024-03-24", + "author": "@domwhewell-sage", + } options = {"all_tags": False, "output_folder": ""} options_desc = { "all_tags": "Download all tags from each registry (Default False)", @@ -50,10 +54,15 @@ async def handle_event(self, event): if repo_path: self.verbose(f"Downloaded docker repository {repo_url} to {repo_path}") codebase_event = self.make_event( - {"path": str(repo_path)}, "FILESYSTEM", tags=["docker", "tarball"], source=event + {"path": str(repo_path), "description": f"Docker image repository: {repo_url}"}, + "FILESYSTEM", + tags=["docker", "tarball"], + parent=event, ) codebase_event.scope_distance = event.scope_distance - await self.emit_event(codebase_event) + await self.emit_event( + 
codebase_event, context=f"{{module}} downloaded Docker image to {{event.type}}: {repo_path}" + ) def get_registry_and_repository(self, repository_url): """Function to get the registry and repository from a html repository URL.""" diff --git a/bbot/modules/dockerhub.py b/bbot/modules/dockerhub.py index aedaf7290..c9c206d7a 100644 --- a/bbot/modules/dockerhub.py +++ b/bbot/modules/dockerhub.py @@ -5,7 +5,11 @@ class dockerhub(BaseModule): watched_events = ["SOCIAL", "ORG_STUB"] produced_events = ["SOCIAL", "CODE_REPOSITORY", "URL_UNVERIFIED"] flags = ["passive", "safe", "code-enum"] - meta = {"description": "Search for docker repositories of discovered orgs/usernames"} + meta = { + "description": "Search for docker repositories of discovered orgs/usernames", + "created_date": "2024-03-12", + "author": "@domwhewell-sage", + } site_url = "https://hub.docker.com" api_url = f"{site_url}/v2" @@ -36,19 +40,26 @@ async def handle_org_stub(self, event): site_url = f"{self.site_url}/u/{p}" # emit social event await self.emit_event( - {"platform": "docker", "url": site_url, "profile_name": p}, "SOCIAL", source=event + {"platform": "docker", "url": site_url, "profile_name": p}, + "SOCIAL", + parent=event, + context=f"{{module}} tried {event.type} {event.data} and found docker profile ({{event.type}}) at {p}", ) async def handle_social(self, event): username = event.data.get("profile_name", "") if not username: return - # emit API endpoint to be visited by httpx (for url/email extraction, etc.) - await self.emit_event(f"{self.api_url}/users/{username}", "URL_UNVERIFIED", source=event, tags="httpx-safe") self.verbose(f"Searching for docker images belonging to {username}") repos = await self.get_repos(username) for repo in repos: - await self.emit_event({"url": repo}, "CODE_REPOSITORY", tags="docker", source=event) + await self.emit_event( + {"url": repo}, + "CODE_REPOSITORY", + tags="docker", + parent=event, + context=f"{{module}} found docker image {{event.type}}: {repo}", + ) async def get_repos(self, username): repos = [] diff --git a/bbot/modules/dotnetnuke.py b/bbot/modules/dotnetnuke.py index cd3753dc2..2207600e2 100644 --- a/bbot/modules/dotnetnuke.py +++ b/bbot/modules/dotnetnuke.py @@ -20,7 +20,11 @@ class dotnetnuke(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["VULNERABILITY", "TECHNOLOGY"] flags = ["active", "aggressive", "web-thorough"] - meta = {"description": "Scan for critical DotNetNuke (DNN) vulnerabilities"} + meta = { + "description": "Scan for critical DotNetNuke (DNN) vulnerabilities", + "created_date": "2023-11-21", + "author": "@liquidsec", + } async def setup(self): self.event_dict = {} @@ -44,15 +48,18 @@ async def interactsh_callback(self, r): event = self.interactsh_subdomain_tags.get(full_id.split(".")[0]) if not event: return + url = event.data["url"] + description = "DotNetNuke Blind-SSRF (CVE 2017-0929)" await self.emit_event( { "severity": "MEDIUM", "host": str(event.host), - "url": event.data["url"], - "description": f"DotNetNuke Blind-SSRF (CVE 2017-0929)", + "url": url, + "description": description, }, "VULNERABILITY", event, + context=f"{{module}} scanned {url} and found medium {{event.type}}: {description}", ) else: # this is likely caused by something trying to resolve the base domain first and can be ignored @@ -65,10 +72,12 @@ async def handle_event(self, event): if raw_headers: for header_signature in self.DNN_signatures_header: if header_signature in raw_headers: + url = event.data["url"] await self.emit_event( - {"technology": "DotNetNuke", 
"url": event.data["url"], "host": str(event.host)}, + {"technology": "DotNetNuke", "url": url, "host": str(event.host)}, "TECHNOLOGY", event, + context=f"{{module}} scanned {url} and found {{event.type}}: DotNetNuke", ) detected = True break @@ -80,6 +89,7 @@ async def handle_event(self, event): {"technology": "DotNetNuke", "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event, + context=f"{{module}} scanned {event.data['url']} and found {{event.type}}: DotNetNuke", ) detected = True break @@ -90,15 +100,17 @@ async def handle_event(self, event): result = await self.helpers.request(probe_url, cookies=self.exploit_probe) if result: if "for 16-bit app support" in result.text and "[extensions]" in result.text: + description = "DotNetNuke Personalization Cookie Deserialization" await self.emit_event( { "severity": "CRITICAL", - "description": "DotNetNuke Personalization Cookie Deserialization", + "description": description, "host": str(event.host), "url": probe_url, }, "VULNERABILITY", event, + context=f"{{module}} scanned {probe_url} and found critical {{event.type}}: {description}", ) if "endpoint" not in event.tags: @@ -109,15 +121,17 @@ async def handle_event(self, event): ) if result: if "" in result.text: + description = "DotNetNuke dnnUI_NewsArticlesSlider Module Arbitrary File Read" await self.emit_event( { "severity": "CRITICAL", - "description": "DotNetNuke dnnUI_NewsArticlesSlider Module Arbitrary File Read", + "description": description, "host": str(event.host), "url": f'{event.data["url"]}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx', }, "VULNERABILITY", event, + context=f'{{module}} scanned {event.data["url"]} and found critical {{event.type}}: {description}', ) # DNNArticle GetCSS.ashx File Read @@ -126,15 +140,17 @@ async def handle_event(self, event): ) if result: if "" in result.text: + description = "DotNetNuke DNNArticle Module GetCSS.ashx Arbitrary File Read" await self.emit_event( { "severity": "CRITICAL", - "description": "DotNetNuke DNNArticle Module GetCSS.ashx Arbitrary File Read", + "description": description, "host": str(event.host), "url": f'{event.data["url"]}/Desktopmodules/DNNArticle/GetCSS.ashx/?CP=%2fweb.config', }, "VULNERABILITY", event, + context=f'{{module}} scanned {event.data["url"]} and found critical {{event.type}}: {description}', ) # InstallWizard SuperUser Privilege Escalation @@ -144,15 +160,17 @@ async def handle_event(self, event): f'{event.data["url"]}/Install/InstallWizard.aspx?__viewstate=1' ) if result_confirm.status_code == 500: + description = "DotNetNuke InstallWizard SuperUser Privilege Escalation" await self.emit_event( { "severity": "CRITICAL", - "description": "DotNetNuke InstallWizard SuperUser Privilege Escalation", + "description": description, "host": str(event.host), "url": f'{event.data["url"]}/Install/InstallWizard.aspx', }, "VULNERABILITY", event, + context=f'{{module}} scanned {event.data["url"]} and found critical {{event.type}}: {description}', ) return diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 31cff1468..c7161070a 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -5,7 +5,11 @@ class emailformat(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] - meta = {"description": "Query email-format.com for email addresses"} + meta = { + "description": "Query email-format.com for email addresses", + "created_date": "2022-07-11", + "author": "@TheTechromancer", + } 
in_scope_only = False per_domain_only = True @@ -19,4 +23,9 @@ async def handle_event(self, event): return for email in await self.helpers.re.extract_emails(r.text): if email.endswith(query): - await self.emit_event(email, "EMAIL_ADDRESS", source=event) + await self.emit_event( + email, + "EMAIL_ADDRESS", + parent=event, + context=f'{{module}} searched email-format.com for "{query}" and found {{event.type}}: {{event.data}}', + ) diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py index cfc58cba4..6fcb90f0f 100644 --- a/bbot/modules/ffuf_shortnames.py +++ b/bbot/modules/ffuf_shortnames.py @@ -33,7 +33,11 @@ class ffuf_shortnames(ffuf): watched_events = ["URL_HINT"] produced_events = ["URL_UNVERIFIED"] flags = ["aggressive", "active", "iis-shortnames", "web-thorough"] - meta = {"description": "Use ffuf in combination IIS shortnames"} + meta = { + "description": "Use ffuf in combination with IIS shortnames", + "created_date": "2022-07-05", + "author": "@liquidsec", + } options = { "wordlist": "", # default is defined within setup function @@ -92,7 +96,7 @@ async def setup(self): def build_extension_list(self, event): used_extensions = [] - extension_hint = event.parsed.path.rsplit(".", 1)[1].lower().strip() + extension_hint = event.parsed_url.path.rsplit(".", 1)[1].lower().strip() if len(extension_hint) == 3: with open(self.wordlist_extensions) as f: for l in f: @@ -112,24 +116,24 @@ def find_delimiter(self, hint): return None async def filter_event(self, event): - if event.source.type != "URL": - return False, "its source event is not of type URL" + if event.parent.type != "URL": + return False, "its parent event is not of type URL" return True async def handle_event(self, event): - filename_hint = re.sub(r"~\d", "", event.parsed.path.rsplit(".", 1)[0].split("/")[-1]).lower() + filename_hint = re.sub(r"~\d", "", event.parsed_url.path.rsplit(".", 1)[0].split("/")[-1]).lower() - host = f"{event.source.parsed.scheme}://{event.source.parsed.netloc}/" + host = f"{event.parent.parsed_url.scheme}://{event.parent.parsed_url.netloc}/" if host not in self.per_host_collection.keys(): - self.per_host_collection[host] = [(filename_hint, event.source.data)] + self.per_host_collection[host] = [(filename_hint, event.parent.data)] else: - self.per_host_collection[host].append((filename_hint, event.source.data)) + self.per_host_collection[host].append((filename_hint, event.parent.data)) self.shortname_to_event[filename_hint] = event - root_stub = "/".join(event.parsed.path.split("/")[:-1]) - root_url = f"{event.parsed.scheme}://{event.parsed.netloc}{root_stub}/" + root_stub = "/".join(event.parsed_url.path.split("/")[:-1]) + root_url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{root_stub}/" if "shortname-file" in event.tags: used_extensions = self.build_extension_list(event) @@ -148,12 +152,24 @@ async def handle_event(self, event): if "shortname-file" in event.tags: for ext in used_extensions: async for r in self.execute_ffuf(tempfile, root_url, suffix=f".{ext}"): - await self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r["url"], + "URL_UNVERIFIED", + parent=event, + tags=[f"status-{r['status']}"], + context=f"{{module}} brute-forced {ext.upper()} files at {root_url} and found {{event.type}}: {{event.data}}", + ) elif "shortname-directory" in event.tags: async for r in self.execute_ffuf(tempfile, root_url, exts=["/"]): r_url = f"{r['url'].rstrip('/')}/" - await self.emit_event(r_url, "URL_UNVERIFIED",
source=event, tags=[f"status-{r['status']}"]) + await self.emit_event( + r_url, + "URL_UNVERIFIED", + parent=event, + tags=[f"status-{r['status']}"], + context=f"{{module}} brute-forced directories at {r_url} and found {{event.type}}: {{event.data}}", + ) if self.config.get("find_delimiters"): if "shortname-directory" in event.tags: @@ -162,8 +178,15 @@ async def handle_event(self, event): delimiter, prefix, partial_hint = delimiter_r self.verbose(f"Detected delimiter [{delimiter}] in hint [{filename_hint}]") tempfile, tempfile_len = self.generate_templist(prefix=partial_hint) - async for r in self.execute_ffuf(tempfile, root_url, prefix=f"{prefix}{delimiter}", exts=["/"]): - await self.emit_event(r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"]) + ffuf_prefix = f"{prefix}{delimiter}" + async for r in self.execute_ffuf(tempfile, root_url, prefix=ffuf_prefix, exts=["/"]): + await self.emit_event( + r["url"], + "URL_UNVERIFIED", + parent=event, + tags=[f"status-{r['status']}"], + context=f'{{module}} brute-forced directories with detected prefix "{ffuf_prefix}" and found {{event.type}}: {{event.data}}', + ) elif "shortname-file" in event.tags: for ext in used_extensions: @@ -172,11 +195,14 @@ async def handle_event(self, event): delimiter, prefix, partial_hint = delimiter_r self.verbose(f"Detected delimiter [{delimiter}] in hint [{filename_hint}]") tempfile, tempfile_len = self.generate_templist(prefix=partial_hint) - async for r in self.execute_ffuf( - tempfile, root_url, prefix=f"{prefix}{delimiter}", suffix=f".{ext}" - ): + ffuf_prefix = f"{prefix}{delimiter}" + async for r in self.execute_ffuf(tempfile, root_url, prefix=ffuf_prefix, suffix=f".{ext}"): await self.emit_event( - r["url"], "URL_UNVERIFIED", source=event, tags=[f"status-{r['status']}"] + r["url"], + "URL_UNVERIFIED", + parent=event, + tags=[f"status-{r['status']}"], + context=f'{{module}} brute-forced {ext.upper()} files with detected prefix "{ffuf_prefix}" and found {{event.type}}: {{event.data}}', ) async def finish(self): @@ -208,8 +234,9 @@ async def finish(self): await self.emit_event( r["url"], "URL_UNVERIFIED", - source=self.shortname_to_event[hint], + parent=self.shortname_to_event[hint], tags=[f"status-{r['status']}"], + context=f'{{module}} brute-forced directories with common prefix "{prefix}" and found {{event.type}}: {{event.data}}', ) elif "shortname-file" in self.shortname_to_event[hint].tags: used_extensions = self.build_extension_list(self.shortname_to_event[hint]) @@ -224,6 +251,7 @@ async def finish(self): await self.emit_event( r["url"], "URL_UNVERIFIED", - source=self.shortname_to_event[hint], + parent=self.shortname_to_event[hint], tags=[f"status-{r['status']}"], + context=f'{{module}} brute-forced {ext.upper()} files with common prefix "{prefix}" and found {{event.type}}: {{event.data}}', ) diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py index 22cddabee..cb454c442 100644 --- a/bbot/modules/filedownload.py +++ b/bbot/modules/filedownload.py @@ -15,7 +15,11 @@ class filedownload(BaseModule): watched_events = ["URL_UNVERIFIED", "HTTP_RESPONSE"] produced_events = [] flags = ["active", "safe", "web-basic"] - meta = {"description": "Download common filetypes such as PDF, DOCX, PPTX, etc."} + meta = { + "description": "Download common filetypes such as PDF, DOCX, PPTX, etc.", + "created_date": "2023-10-11", + "author": "@TheTechromancer", + } options = { "extensions": [ "bak", # Backup File diff --git a/bbot/modules/fingerprintx.py b/bbot/modules/fingerprintx.py 
index 41efa75b6..cac87c5c3 100644 --- a/bbot/modules/fingerprintx.py +++ b/bbot/modules/fingerprintx.py @@ -7,7 +7,11 @@ class fingerprintx(BaseModule): watched_events = ["OPEN_TCP_PORT"] produced_events = ["PROTOCOL"] flags = ["active", "safe", "service-enum", "slow"] - meta = {"description": "Fingerprint exposed services like RDP, SSH, MySQL, etc."} + meta = { + "description": "Fingerprint exposed services like RDP, SSH, MySQL, etc.", + "created_date": "2023-01-30", + "author": "@TheTechromancer", + } options = {"version": "1.1.4"} options_desc = {"version": "fingerprintx version"} _batch_size = 10 @@ -38,18 +42,24 @@ async def handle_batch(self, *events): ip = j.get("ip", "") host = j.get("host", ip) port = str(j.get("port", "")) + protocol = j.get("protocol", "").upper() + if not (host and port and protocol): + continue banner = j.get("metadata", {}).get("banner", "").strip() - if port: - port_data = f"{host}:{port}" - protocol = j.get("protocol", "") + port_data = f"{host}:{port}" tags = set() if host and ip: tags.add(f"ip-{ip}") - if host and port and protocol: - source_event = _input.get(port_data) - protocol_data = {"host": host, "protocol": protocol.upper()} - if port: - protocol_data["port"] = port - if banner: - protocol_data["banner"] = banner - await self.emit_event(protocol_data, "PROTOCOL", source=source_event, tags=tags) + parent_event = _input.get(port_data) + protocol_data = {"host": host, "protocol": protocol} + if port: + protocol_data["port"] = port + if banner: + protocol_data["banner"] = banner + await self.emit_event( + protocol_data, + "PROTOCOL", + parent=parent_event, + tags=tags, + context=f"{{module}} probed {port_data} and detected {{event.type}}: {protocol}", + ) diff --git a/bbot/modules/fullhunt.py b/bbot/modules/fullhunt.py index 1485dc6b5..90f83bcfc 100644 --- a/bbot/modules/fullhunt.py +++ b/bbot/modules/fullhunt.py @@ -5,7 +5,12 @@ class fullhunt(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the fullhunt.io API for subdomains", "auth_required": True} + meta = { + "description": "Query the fullhunt.io API for subdomains", + "created_date": "2022-08-24", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "FullHunt API Key"} diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index 42efa5050..0aa61a3e5 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -44,7 +44,7 @@ def __init__(self, parent_module): self.test_paths = self.create_paths() def set_base_url(self, event): - return f"{event.parsed.scheme}://{event.parsed.netloc}" + return f"{event.parsed_url.scheme}://{event.parsed_url.netloc}" def create_paths(self): return self.paths @@ -140,7 +140,7 @@ async def test(self, event): ]> &{rand_entity};""" - test_url = f"{event.parsed.scheme}://{event.parsed.netloc}/" + test_url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/" r = await self.parent_module.helpers.curl( url=test_url, method="POST", raw_body=post_body, headers={"Content-type": "application/xml"} ) @@ -152,7 +152,7 @@ class generic_ssrf(BaseModule): watched_events = ["URL"] produced_events = ["VULNERABILITY"] flags = ["active", "aggressive", "web-thorough"] - meta = {"description": "Check for generic SSRFs"} + meta = {"description": "Check for generic SSRFs", "created_date": "2022-07-30", "author": "@liquidsec"} in_scope_only = True deps_apt = ["curl"] @@ -209,6
+209,7 @@ async def interactsh_callback(self, r): }, "VULNERABILITY", matched_event, + context=f"{{module}} scanned {matched_event.data} and detected {{event.type}}: {matched_technique}", ) else: # this is likely caused by something trying to resolve the base domain first and can be ignored diff --git a/bbot/modules/git.py b/bbot/modules/git.py index 5ffb91331..9a180bc11 100644 --- a/bbot/modules/git.py +++ b/bbot/modules/git.py @@ -7,7 +7,11 @@ class git(BaseModule): watched_events = ["URL"] produced_events = ["FINDING"] flags = ["active", "safe", "web-basic", "code-enum"] - meta = {"description": "Check for exposed .git repositories"} + meta = { + "description": "Check for exposed .git repositories", + "created_date": "2023-05-30", + "author": "@TheTechromancer", + } in_scope_only = True @@ -20,19 +24,16 @@ async def handle_event(self, event): self.helpers.urljoin(base_url, ".git/config"), self.helpers.urljoin(f"{base_url}/", ".git/config"), } - tasks = [self.get_url(u) for u in urls] - async for task in self.helpers.as_completed(tasks): - result, url = await task - text = getattr(result, "text", "") + async for url, response in self.helpers.request_batch(urls): + text = getattr(response, "text", "") if not text: text = "" if text: - if getattr(result, "status_code", 0) == 200 and "[core]" in text and not self.fp_regex.match(text): + if getattr(response, "status_code", 0) == 200 and "[core]" in text and not self.fp_regex.match(text): + description = f"Exposed .git config at {url}" await self.emit_event( - {"host": str(event.host), "url": url, "description": f"Exposed .git config at {url}"}, + {"host": str(event.host), "url": url, "description": description}, "FINDING", event, + context="{module} detected {event.type}: {description}", ) - - async def get_url(self, url): - return (await self.helpers.request(url), url) diff --git a/bbot/modules/git_clone.py b/bbot/modules/git_clone.py index 2778efdde..6fcee1b34 100644 --- a/bbot/modules/git_clone.py +++ b/bbot/modules/git_clone.py @@ -1,4 +1,5 @@ from pathlib import Path +from subprocess import CalledProcessError from bbot.modules.templates.github import github @@ -6,7 +7,11 @@ class git_clone(github): watched_events = ["CODE_REPOSITORY"] produced_events = ["FILESYSTEM"] flags = ["passive", "safe", "slow"] - meta = {"description": "Clone code github repositories"} + meta = { + "description": "Clone code github repositories", + "created_date": "2024-03-08", + "author": "@domwhewell-sage", + } options = {"api_key": "", "output_folder": ""} options_desc = {"api_key": "Github token", "output_folder": "Folder to clone repositories to"} @@ -35,10 +40,13 @@ async def handle_event(self, event): if repo_path: self.verbose(f"Cloned {repo_url} to {repo_path}") codebase_event = self.make_event( - {"path": str(repo_path)}, "FILESYSTEM", tags=["git", "folder"], source=event + {"path": str(repo_path)}, "FILESYSTEM", tags=["git", "folder"], parent=event ) codebase_event.scope_distance = event.scope_distance - await self.emit_event(codebase_event) + await self.emit_event( + codebase_event, + context=f"{{module}} downloaded git repo at {repo_url} to {{event.type}}: {repo_path}", + ) async def clone_git_repository(self, repository_url): if self.api_key: @@ -46,9 +54,11 @@ async def clone_git_repository(self, repository_url): else: url = repository_url command = ["git", "-C", self.output_dir, "clone", url] - output = await self.run_process(command) - if output.returncode == 0: - folder_name = output.stderr.split("Cloning into '")[1].split("'")[0] - return 
self.output_dir / folder_name - else: - return None + try: + output = await self.run_process(command, env={"GIT_TERMINAL_PROMPT": "0"}, check=True) + except CalledProcessError as e: + self.debug(f"Error cloning {url}. STDERR: {repr(e.stderr)}") + return + + folder_name = output.stderr.split("Cloning into '")[1].split("'")[0] + return self.output_dir / folder_name diff --git a/bbot/modules/github_codesearch.py b/bbot/modules/github_codesearch.py index 634b38f58..ddafb025f 100644 --- a/bbot/modules/github_codesearch.py +++ b/bbot/modules/github_codesearch.py @@ -1,11 +1,17 @@ from bbot.modules.templates.github import github +from bbot.modules.templates.subdomain_enum import subdomain_enum -class github_codesearch(github): +class github_codesearch(github, subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["CODE_REPOSITORY", "URL_UNVERIFIED"] flags = ["passive", "subdomain-enum", "safe", "code-enum"] - meta = {"description": "Query Github's API for code containing the target domain name", "auth_required": True} + meta = { + "description": "Query Github's API for code containing the target domain name", + "created_date": "2023-12-14", + "author": "@domwhewell-sage", + "auth_required": True, + } options = {"api_key": "", "limit": 100} options_desc = {"api_key": "Github token", "limit": "Limit code search to this many results"} @@ -18,16 +24,21 @@ async def setup(self): async def handle_event(self, event): query = self.make_query(event) for repo_url, raw_urls in (await self.query(query)).items(): - repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", tags="git", source=event) + repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", tags="git", parent=event) if repo_event is None: continue - await self.emit_event(repo_event) + await self.emit_event( + repo_event, + context=f'{{module}} searched github.com for "{query}" and found {{event.type}} with matching content at {repo_url}', + ) for raw_url in raw_urls: - url_event = self.make_event(raw_url, "URL_UNVERIFIED", source=repo_event, tags=["httpx-safe"]) + url_event = self.make_event(raw_url, "URL_UNVERIFIED", parent=repo_event, tags=["httpx-safe"]) if not url_event: continue url_event.scope_distance = repo_event.scope_distance - await self.emit_event(url_event) + await self.emit_event( + url_event, context=f'file matching query "{query}" is at {{event.type}}: {raw_url}' + ) async def query(self, query): repos = {} diff --git a/bbot/modules/github_org.py b/bbot/modules/github_org.py index cf836af3e..1d115b925 100644 --- a/bbot/modules/github_org.py +++ b/bbot/modules/github_org.py @@ -5,7 +5,11 @@ class github_org(github): watched_events = ["ORG_STUB", "SOCIAL"] produced_events = ["CODE_REPOSITORY"] flags = ["passive", "subdomain-enum", "safe", "code-enum"] - meta = {"description": "Query Github's API for organization and member repositories"} + meta = { + "description": "Query Github's API for organization and member repositories", + "created_date": "2023-12-14", + "author": "@domwhewell-sage", + } options = {"api_key": "", "include_members": True, "include_member_repos": False} options_desc = { "api_key": "Github token", @@ -55,21 +59,28 @@ async def handle_event(self, event): self.verbose(f"Searching for repos belonging to user {user}") repos = await self.query_user_repos(user) for repo_url in repos: - repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", tags="git", source=event) + repo_event = self.make_event({"url": repo_url}, "CODE_REPOSITORY", tags="git", parent=event) if not 
repo_event: continue repo_event.scope_distance = event.scope_distance - await self.emit_event(repo_event) + await self.emit_event( + repo_event, + context=f"{{module}} listed repos for GitHub profile and discovered {{event.type}}: {repo_url}", + ) # find members from org (SOCIAL --> SOCIAL) if is_org and self.include_members: self.verbose(f"Searching for any members belonging to {user}") org_members = await self.query_org_members(user) for member in org_members: - event_data = {"platform": "github", "profile_name": member, "url": f"https://github.com/{member}"} - member_event = self.make_event(event_data, "SOCIAL", tags="github-org-member", source=event) + member_url = f"https://github.com/{member}" + event_data = {"platform": "github", "profile_name": member, "url": member_url} + member_event = self.make_event(event_data, "SOCIAL", tags="github-org-member", parent=event) if member_event: - await self.emit_event(member_event) + await self.emit_event( + member_event, + context=f"{{module}} listed members of GitHub organization and discovered {{event.type}}: {member_url}", + ) # find valid orgs from stub (ORG_STUB --> SOCIAL) elif event.type == "ORG_STUB": @@ -82,11 +93,15 @@ async def handle_event(self, event): self.verbose(f"Unable to validate that {user} is in-scope, skipping...") return - event_data = {"platform": "github", "profile_name": user, "url": f"https://github.com/{user}"} - github_org_event = self.make_event(event_data, "SOCIAL", tags="github-org", source=event) + user_url = f"https://github.com/{user}" + event_data = {"platform": "github", "profile_name": user, "url": user_url} + github_org_event = self.make_event(event_data, "SOCIAL", tags="github-org", parent=event) if github_org_event: github_org_event.scope_distance = event.scope_distance - await self.emit_event(github_org_event) + await self.emit_event( + github_org_event, + context=f'{{module}} tried "{user}" as GitHub profile and discovered {{event.type}}: {user_url}', + ) async def query_org_repos(self, query): repos = [] diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py new file mode 100644 index 000000000..76ed2d5ff --- /dev/null +++ b/bbot/modules/github_workflows.py @@ -0,0 +1,153 @@ +import zipfile +import fnmatch + +from bbot.modules.templates.github import github + + +class github_workflows(github): + watched_events = ["CODE_REPOSITORY"] + produced_events = ["FILESYSTEM"] + flags = ["passive", "safe"] + meta = { + "description": "Download a github repositories workflow logs", + "created_date": "2024-04-29", + "author": "@domwhewell-sage", + } + options = {"api_key": "", "num_logs": 1} + options_desc = { + "api_key": "Github token", + "num_logs": "For each workflow fetch the last N successful runs logs (max 100)", + } + + scope_distance_modifier = 2 + + async def setup(self): + self.num_logs = int(self.config.get("num_logs", 1)) + if self.num_logs > 100: + self.log.error("num_logs option is capped at 100") + return False + self.output_dir = self.scan.home / "workflow_logs" + self.helpers.mkdir(self.output_dir) + return await super().setup() + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if "git" not in event.tags and "github" not in event.data.get("url", ""): + return False, "event is not a git repository" + return True + + async def handle_event(self, event): + repo_url = event.data.get("url") + owner = repo_url.split("/")[-2] + repo = repo_url.split("/")[-1] + for workflow in await self.get_workflows(owner, repo): + workflow_name = 
workflow.get("name") + workflow_id = workflow.get("id") + self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}") + for run in await self.get_workflow_runs(owner, repo, workflow_id): + run_id = run.get("id") + self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}") + for log in await self.download_run_logs(owner, repo, run_id): + workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}" + logfile_event = self.make_event( + { + "path": str(log), + "description": f"Workflow run logs from {workflow_url}", + }, + "FILESYSTEM", + tags=["textfile"], + parent=event, + ) + logfile_event.scope_distance = event.scope_distance + await self.emit_event( + logfile_event, + context=f"{{module}} downloaded workflow run logs from {workflow_url} to {{event.type}}: {log}", + ) + + async def get_workflows(self, owner, repo): + workflows = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows?per_page=100&page=" + "{page}" + agen = self.helpers.api_page_iter(url, headers=self.headers, json=False) + try: + async for r in agen: + if r is None: + break + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + break + if status_code != 200: + break + try: + j = r.json().get("workflows", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + break + if not j: + break + for item in j: + workflows.append(item) + finally: + agen.aclose() + return workflows + + async def get_workflow_runs(self, owner, repo, workflow_id): + runs = [] + url = f"{self.base_url}/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs?status=success&per_page={self.num_logs}" + r = await self.helpers.request(url, headers=self.headers) + if r is None: + return runs + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + return runs + if status_code != 200: + return runs + try: + j = r.json().get("workflow_runs", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return runs + if not j: + return runs + for item in j: + runs.append(item) + return runs + + async def download_run_logs(self, owner, repo, run_id): + folder = self.output_dir / owner / repo + self.helpers.mkdir(folder) + filename = f"run_{run_id}.zip" + file_destination = folder / filename + try: + await self.helpers.download( + f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/logs", + filename=file_destination, + headers=self.headers, + raise_error=True, + warn=False, + ) + self.info(f"Downloaded logs for {owner}/{repo}/{run_id} to {file_destination}") + except Exception as e: + file_destination = None + response = getattr(e, "response", None) + status_code = getattr(response, "status_code", 0) + if status_code == 403: + self.warning( + f"The current access key does not have access to workflow {owner}/{repo}/{run_id} (status: {status_code})" + ) + else: + self.info( + f"The logs for {owner}/{repo}/{run_id} have expired and are no longer available (status: {status_code})" + ) + # Secrets are duplicated in the individual workflow steps so just extract the main log files from the top folder + if file_destination: + main_logs = [] + with zipfile.ZipFile(file_destination, "r") as logzip: + for name in logzip.namelist(): + if fnmatch.fnmatch(name, "*.txt") and not "/" in name: + logzip.extract(name, folder) + 
main_logs.append(folder / name) + return main_logs + else: + return [] diff --git a/bbot/modules/gitlab.py b/bbot/modules/gitlab.py index 6f4892580..3404f3ba3 100644 --- a/bbot/modules/gitlab.py +++ b/bbot/modules/gitlab.py @@ -5,7 +5,11 @@ class gitlab(BaseModule): watched_events = ["HTTP_RESPONSE", "TECHNOLOGY", "SOCIAL"] produced_events = ["TECHNOLOGY", "SOCIAL", "CODE_REPOSITORY", "FINDING"] flags = ["active", "safe", "code-enum"] - meta = {"description": "Detect GitLab instances and query them for repositories"} + meta = { + "description": "Detect GitLab instances and query them for repositories", + "created_date": "2024-03-11", + "author": "@TheTechromancer", + } options = {"api_key": ""} options_desc = {"api_key": "Gitlab access token"} @@ -47,14 +51,19 @@ async def handle_http_response(self, event): # HTTP_RESPONSE --> FINDING headers = event.data.get("header", {}) if "x_gitlab_meta" in headers: - url = event.parsed._replace(path="/").geturl() + url = event.parsed_url._replace(path="/").geturl() await self.emit_event( - {"host": str(event.host), "technology": "GitLab", "url": url}, "TECHNOLOGY", source=event + {"host": str(event.host), "technology": "GitLab", "url": url}, + "TECHNOLOGY", + parent=event, + context=f"{{module}} detected {{event.type}}: GitLab at {url}", ) + description = f"GitLab server at {event.host}" await self.emit_event( - {"host": str(event.host), "description": f"GitLab server at {event.host}"}, + {"host": str(event.host), "description": description}, "FINDING", - source=event, + parent=event, + context=f"{{module}} detected {{event.type}}: {description}", ) async def handle_technology(self, event): @@ -89,9 +98,11 @@ async def handle_projects_url(self, projects_url, event): for project in await self.gitlab_json_request(projects_url): project_url = project.get("web_url", "") if project_url: - code_event = self.make_event({"url": project_url}, "CODE_REPOSITORY", tags="git", source=event) + code_event = self.make_event({"url": project_url}, "CODE_REPOSITORY", tags="git", parent=event) code_event.scope_distance = event.scope_distance - await self.emit_event(code_event) + await self.emit_event( + code_event, context=f"{{module}} enumerated projects and found {{event.type}} at {project_url}" + ) namespace = project.get("namespace", {}) if namespace: await self.handle_namespace(namespace, event) @@ -120,10 +131,13 @@ async def handle_namespace(self, namespace, event): social_event = self.make_event( {"platform": "gitlab", "profile_name": namespace_path, "url": namespace_url}, "SOCIAL", - source=event, + parent=event, ) social_event.scope_distance = event.scope_distance - await self.emit_event(social_event) + await self.emit_event( + social_event, + context=f'{{module}} found GitLab namespace ({{event.type}}) "{namespace_name}" at {namespace_url}', + ) def get_base_url(self, event): base_url = event.data.get("url", "") diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index ea8663bb7..2d7c3679a 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -1,4 +1,6 @@ +import asyncio import sqlite3 +import multiprocessing from pathlib import Path from contextlib import suppress from shutil import copyfile, copymode @@ -10,24 +12,26 @@ class gowitness(BaseModule): watched_events = ["URL", "SOCIAL"] produced_events = ["WEBSCREENSHOT", "URL", "URL_UNVERIFIED", "TECHNOLOGY"] flags = ["active", "safe", "web-screenshots"] - meta = {"description": "Take screenshots of webpages"} + meta = {"description": "Take screenshots of webpages", 
"created_date": "2022-07-08", "author": "@TheTechromancer"} options = { "version": "2.4.2", - "threads": 4, + "threads": 0, "timeout": 10, "resolution_x": 1440, "resolution_y": 900, "output_path": "", - "social": True, + "social": False, + "idle_timeout": 1800, } options_desc = { - "version": "gowitness version", - "threads": "threads used to run", - "timeout": "preflight check timeout", - "resolution_x": "screenshot resolution x", - "resolution_y": "screenshot resolution y", - "output_path": "where to save screenshots", + "version": "Gowitness version", + "threads": "How many gowitness threads to spawn (default is number of CPUs x 2)", + "timeout": "Preflight check timeout", + "resolution_x": "Screenshot resolution x", + "resolution_y": "Screenshot resolution y", + "output_path": "Where to save screenshots", "social": "Whether to screenshot social media webpages", + "idle_timeout": "Skip the current gowitness batch if it stalls for longer than this many seconds", } deps_common = ["chromium"] deps_ansible = [ @@ -45,8 +49,13 @@ class gowitness(BaseModule): scope_distance_modifier = 2 async def setup(self): + num_cpus = multiprocessing.cpu_count() + default_thread_count = min(20, num_cpus * 2) self.timeout = self.config.get("timeout", 10) - self.threads = self.config.get("threads", 4) + self.idle_timeout = self.config.get("idle_timeout", 1800) + self.threads = self.config.get("threads", 0) + if not self.threads: + self.threads = default_thread_count self.proxy = self.scan.config.get("http_proxy", "") self.resolution_x = self.config.get("resolution_x") self.resolution_y = self.config.get("resolution_y") @@ -118,8 +127,13 @@ async def handle_batch(self, *events): event_dict[key] = e stdin = "\n".join(list(event_dict)) - async for line in self.run_process_live(self.command, input=stdin): - self.debug(line) + try: + async for line in self.run_process_live(self.command, input=stdin, idle_timeout=self.idle_timeout): + self.debug(line) + except asyncio.exceptions.TimeoutError: + urls_str = ",".join(event_dict) + self.warning(f"Gowitness timed out while visiting the following URLs: {urls_str}", trace=False) + return # emit web screenshots for filename, screenshot in self.new_screenshots.items(): @@ -127,8 +141,13 @@ async def handle_batch(self, *events): final_url = screenshot["final_url"] filename = screenshot["filename"] webscreenshot_data = {"filename": filename, "url": final_url} - source_event = event_dict[url] - await self.emit_event(webscreenshot_data, "WEBSCREENSHOT", source=source_event) + parent_event = event_dict[url] + await self.emit_event( + webscreenshot_data, + "WEBSCREENSHOT", + parent=parent_event, + context=f"{{module}} visited {final_url} and saved {{event.type}} to {filename}", + ) # emit URLs for url, row in self.new_network_logs.items(): @@ -137,21 +156,32 @@ async def handle_batch(self, *events): tags = [f"status-{status_code}", f"ip-{ip}"] _id = row["url_id"] - source_url = self.screenshots_taken[_id] - source_event = event_dict[source_url] - if self.helpers.is_spider_danger(source_event, url): + parent_url = self.screenshots_taken[_id] + parent_event = event_dict[parent_url] + if self.helpers.is_spider_danger(parent_event, url): tags.append("spider-danger") if url and url.startswith("http"): - await self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) + await self.emit_event( + url, + "URL_UNVERIFIED", + parent=parent_event, + tags=tags, + context=f"{{module}} visited {{event.type}}: {url}", + ) # emit technologies for _, row in 
self.new_technologies.items(): - source_id = row["url_id"] - source_url = self.screenshots_taken[source_id] - source_event = event_dict[source_url] + parent_id = row["url_id"] + parent_url = self.screenshots_taken[parent_id] + parent_event = event_dict[parent_url] technology = row["value"] - tech_data = {"technology": technology, "url": source_url, "host": str(source_event.host)} - await self.emit_event(tech_data, "TECHNOLOGY", source=source_event) + tech_data = {"technology": technology, "url": parent_url, "host": str(parent_event.host)} + await self.emit_event( + tech_data, + "TECHNOLOGY", + parent=parent_event, + context=f"{{module}} visited {parent_url} and found {{event.type}}: {technology}", + ) def construct_command(self): # base executable @@ -175,6 +205,8 @@ def construct_command(self): command += ["file", "-f", "-"] # threads command += ["--threads", str(self.threads)] + # timeout + command += ["--timeout", str(self.timeout)] return command @property diff --git a/bbot/modules/hackertarget.py b/bbot/modules/hackertarget.py index d23f5c6cf..aee94ccd3 100644 --- a/bbot/modules/hackertarget.py +++ b/bbot/modules/hackertarget.py @@ -5,7 +5,11 @@ class hackertarget(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the hackertarget.com API for subdomains"} + meta = { + "description": "Query the hackertarget.com API for subdomains", + "created_date": "2022-07-28", + "author": "@TheTechromancer", + } base_url = "https://api.hackertarget.com" diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index 3e0f8069f..5e2393a47 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -6,7 +6,11 @@ class host_header(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] flags = ["active", "aggressive", "web-thorough"] - meta = {"description": "Try common HTTP Host header spoofing techniques"} + meta = { + "description": "Try common HTTP Host header spoofing techniques", + "created_date": "2022-07-27", + "author": "@liquidsec", + } in_scope_only = True per_hostport_only = True @@ -40,14 +44,16 @@ async def interactsh_callback(self, r): matched_event = match[0] matched_technique = match[1] + protocol = r.get("protocol").upper() await self.emit_event( { "host": str(matched_event.host), "url": matched_event.data["url"], - "description": f"Spoofed Host header ({matched_technique}) [{r.get('protocol').upper()}] interaction", + "description": f"Spoofed Host header ({matched_technique}) [{protocol}] interaction", }, "FINDING", matched_event, + context=f"{{module}} spoofed host header and induced {{event.type}}: {protocol} interaction", ) else: # this is likely caused by something trying to resolve the base domain first and can be ignored @@ -74,6 +80,7 @@ async def cleanup(self): async def handle_event(self, event): # get any set-cookie responses from the response and add them to the request + url = event.data["url"] added_cookies = {} @@ -93,7 +100,7 @@ async def handle_event(self, event): subdomain_tag = self.rand_string(4, digits=False) self.subdomain_tags[subdomain_tag] = (event, technique_description) output = await self.helpers.curl( - url=event.data["url"], + url=url, headers={"Host": f"{subdomain_tag}.{self.domain}"}, ignore_bbot_global_settings=True, cookies=added_cookies, @@ -107,8 +114,8 @@ async def handle_event(self, event): subdomain_tag = self.rand_string(4, digits=False) self.subdomain_tags[subdomain_tag] = (event, 
technique_description) output = await self.helpers.curl( - url=event.data["url"], - path_override=event.data["url"], + url=url, + path_override=url, cookies=added_cookies, ) @@ -118,7 +125,7 @@ async def handle_event(self, event): # duplicate host header tolerance technique_description = "duplicate host header tolerance" output = await self.helpers.curl( - url=event.data["url"], + url=url, # Sending a blank HOST first as a hack to trick curl. This makes it no longer an "internal header", thereby allowing for duplicates # The fact that it's accepting two host headers is rare enough to note on its own, and not too noisy. Having the 3rd header be an interactsh would result in false negatives for the slightly less interesting cases. headers={"Host": ["", str(event.host), str(event.host)]}, @@ -128,14 +135,16 @@ async def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: + description = f"Duplicate Host Header Tolerated" await self.emit_event( { "host": str(event.host), - "url": event.data["url"], - "description": f"Duplicate Host Header Tolerated", + "url": url, + "description": description, }, "FINDING", event, + context=f"{{module}} scanned {event.data['url']} and identified {{event.type}}: {description}", ) # host header overrides @@ -159,7 +168,7 @@ async def handle_event(self, event): override_headers[oh] = f"{subdomain_tag}.{self.domain}" output = await self.helpers.curl( - url=event.data["url"], + url=url, headers=override_headers, cookies=added_cookies, ) @@ -168,12 +177,14 @@ async def handle_event(self, event): # emit all the domain reflections we found for dr in domain_reflections: + description = f"Possible Host header injection. Injection technique: {dr}" await self.emit_event( { "host": str(event.host), - "url": event.data["url"], - "description": f"Possible Host header injection. Injection technique: {dr}", + "url": url, + "description": description, }, "FINDING", event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {description}", ) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 0f74fbcfc..9d77f04a5 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -4,14 +4,17 @@ import subprocess from pathlib import Path from bbot.modules.base import BaseModule -from bbot.core.helpers.web import is_login_page class httpx(BaseModule): watched_events = ["OPEN_TCP_PORT", "URL_UNVERIFIED", "URL"] produced_events = ["URL", "HTTP_RESPONSE"] flags = ["active", "safe", "web-basic", "social-enum", "subdomain-enum", "cloud-enum"] - meta = {"description": "Visit webpages. Many other modules rely on httpx"} + meta = { + "description": "Visit webpages. 
Many other modules rely on httpx", + "created_date": "2022-07-08", + "author": "@TheTechromancer", + } options = { "threads": 50, @@ -23,7 +26,7 @@ class httpx(BaseModule): } options_desc = { "threads": "Number of httpx threads to use", - "in_scope_only": "Only visit web resources that are in scope.", + "in_scope_only": "Only visit web reparents that are in scope.", "version": "httpx version", "max_response_size": "Max response size in bytes", "store_responses": "Save raw HTTP responses to scan folder", @@ -84,7 +87,7 @@ async def handle_batch(self, *events): if e.type.startswith("URL"): # we NEED the port, otherwise httpx will try HTTPS even for HTTP URLs url = e.with_port().geturl() - if e.parsed.path == "/": + if e.parsed_url.path == "/": url_hash = hash((e.host, e.port)) else: url = str(e.data) @@ -145,15 +148,15 @@ async def handle_batch(self, *events): self.debug(f'No HTTP status code for "{url}"') continue - source_event = stdin.get(j.get("input", ""), None) + parent_event = stdin.get(j.get("input", ""), None) - if source_event is None: - self.warning(f"Unable to correlate source event from: {line}") + if parent_event is None: + self.warning(f"Unable to correlate parent event from: {line}") continue # discard 404s from unverified URLs path = j.get("path", "/") - if source_event.type == "URL_UNVERIFIED" and status_code in (404,) and path != "/": + if parent_event.type == "URL_UNVERIFIED" and status_code in (404,) and path != "/": self.debug(f'Discarding 404 from "{url}"') continue @@ -163,18 +166,38 @@ async def handle_batch(self, *events): if httpx_ip: tags.append(f"ip-{httpx_ip}") # detect login pages - if is_login_page(j.get("body", "")): + if self.helpers.web.is_login_page(j.get("body", "")): tags.append("login-page") # grab title title = self.helpers.tagify(j.get("title", ""), maxlen=30) if title: tags.append(f"http-title-{title}") - url_event = self.make_event(url, "URL", source_event, tags=tags) + + url_context = "{module} visited {event.parent.data} and got status code {event.http_status}" + if parent_event.type == "OPEN_TCP_PORT": + url_context += " at {event.data}" + + url_event = self.make_event( + url, + "URL", + parent_event, + tags=tags, + context=url_context, + ) if url_event: - if url_event != source_event: + if url_event != parent_event: await self.emit_event(url_event) # HTTP response - await self.emit_event(j, "HTTP_RESPONSE", url_event, tags=url_event.tags) + content_type = j.get("header", {}).get("content_type", "unspecified").split(";")[0] + content_length = j.get("content_length", 0) + content_length = self.helpers.bytes_to_human(content_length) + await self.emit_event( + j, + "HTTP_RESPONSE", + url_event, + tags=url_event.tags, + context=f"HTTP_RESPONSE was {content_length} with {content_type} content type", + ) for tempdir in Path(tempfile.gettempdir()).iterdir(): if tempdir.is_dir() and self.httpx_tempdir_regex.match(tempdir.name): diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index 0a759f2cf..026e82f53 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -274,7 +274,11 @@ class hunt(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] flags = ["active", "safe", "web-thorough"] - meta = {"description": "Watch for commonly-exploitable HTTP parameters"} + meta = { + "description": "Watch for commonly-exploitable HTTP parameters", + "created_date": "2022-07-20", + "author": "@liquidsec", + } # accept all events regardless of scope distance scope_distance_modifier = None @@ -288,4 +292,9 @@ async def 
handle_event(self, event): url = event.data.get("url", "") if url: data["url"] = url - await self.emit_event(data, "FINDING", event) + await self.emit_event( + data, + "FINDING", + event, + context=f'{{module}} analyzed HTTP_RESPONSE from {url} and identified {{event.type}}: potential {k.upper()} parameter "{p}"', + ) diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 792ca6d98..7396df481 100644 --- a/bbot/modules/hunterio.py +++ b/bbot/modules/hunterio.py @@ -5,7 +5,12 @@ class hunterio(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS", "DNS_NAME", "URL_UNVERIFIED"] flags = ["passive", "email-enum", "subdomain-enum", "safe"] - meta = {"description": "Query hunter.io for emails", "auth_required": True} + meta = { + "description": "Query hunter.io for emails", + "created_date": "2022-04-25", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "Hunter.IO API key"} @@ -26,14 +31,27 @@ async def handle_event(self, event): if email: email_event = self.make_event(email, "EMAIL_ADDRESS", event) if email_event: - await self.emit_event(email_event) + await self.emit_event( + email_event, + context=f'{{module}} queried Hunter.IO API for "{query}" and found {{event.type}}: {{event.data}}', + ) for source in sources: domain = source.get("domain", "") if domain: - await self.emit_event(domain, "DNS_NAME", email_event) + await self.emit_event( + domain, + "DNS_NAME", + email_event, + context=f"{{module}} originally found {email} at {{event.type}}: {{event.data}}", + ) url = source.get("uri", "") if url: - await self.emit_event(url, "URL_UNVERIFIED", email_event) + await self.emit_event( + url, + "URL_UNVERIFIED", + email_event, + context=f"{{module}} originally found {email} at {{event.type}}: {{event.data}}", + ) async def query(self, query): emails = [] diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index 94d325df8..9db4a135b 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -17,7 +17,11 @@ class iis_shortnames(BaseModule): watched_events = ["URL"] produced_events = ["URL_HINT"] flags = ["active", "safe", "web-basic", "iis-shortnames"] - meta = {"description": "Check for IIS shortname vulnerability"} + meta = { + "description": "Check for IIS shortname vulnerability", + "created_date": "2022-04-15", + "author": "@pmueller", + } options = {"detect_only": True, "max_node_count": 50} options_desc = { "detect_only": "Only detect the vulnerability and do not run the shortname scanner", @@ -34,10 +38,22 @@ async def detect(self, target): control_url = f"{target}{random_string}*~1*/a.aspx" test_url = f"{target}*~1*/a.aspx" + urls_and_kwargs = [] for method in ["GET", "POST", "OPTIONS", "DEBUG", "HEAD", "TRACE"]: - control = await self.helpers.request(method=method, url=control_url, allow_redirects=False, timeout=10) - test = await self.helpers.request(method=method, url=test_url, allow_redirects=False, timeout=10) - if (control != None) and (test != None): + kwargs = dict(method=method, allow_redirects=False, timeout=10) + urls_and_kwargs.append((control_url, kwargs, method)) + urls_and_kwargs.append((test_url, kwargs, method)) + + results = {} + async for url, kwargs, method, response in self.helpers.request_custom_batch(urls_and_kwargs): + try: + results[method][url] = response + except KeyError: + results[method] = {url: response} + for method, result in results.items(): + control = results[method].get(control_url, None) + 
test = results[method].get(test_url, None) + if (control != None) and (test != None): if control.status_code != test.status_code: technique = f"{str(control.status_code)}/{str(test.status_code)} HTTP Code" detections.append((method, test.status_code, technique)) @@ -63,11 +79,10 @@ async def directory_confirm(self, target, method, url_hint, affirmative_status_c directory_confirm_result = await self.helpers.request( method=method, url=url, allow_redirects=False, retries=2, timeout=10 ) - - if directory_confirm_result.status_code == affirmative_status_code: - return True - else: - return False + if directory_confirm_result is not None: + if directory_confirm_result.status_code == affirmative_status_code: + return True + return False async def duplicate_check(self, target, method, url_hint, affirmative_status_code): duplicates = [] @@ -109,32 +124,23 @@ async def threaded_request(self, method, url, affirmative_status_code, c): async def solve_valid_chars(self, method, target, affirmative_status_code): confirmed_chars = [] confirmed_exts = [] - tasks = [] suffix = "/a.aspx" + urls_and_kwargs = [] + kwargs = dict(method=method, allow_redirects=False, retries=2, timeout=10) for c in valid_chars: - payload = encode_all(f"*{c}*~1*") - url = f"{target}{payload}{suffix}" - task = self.threaded_request(method, url, affirmative_status_code, c) - tasks.append(task) - - async for task in self.helpers.as_completed(tasks): - result, c = await task - if result: - confirmed_chars.append(c) - - tasks = [] - - for c in valid_chars: - payload = encode_all(f"*~1*{c}*") - url = f"{target}{payload}{suffix}" - task = self.threaded_request(method, url, affirmative_status_code, c) - tasks.append(task) + for file_part in ("stem", "ext"): + payload = encode_all(f"*{c}*~1*" if file_part == "stem" else f"*~1*{c}*") + url = f"{target}{payload}{suffix}" + urls_and_kwargs.append((url, kwargs, (c, file_part))) - async for task in self.helpers.as_completed(tasks): - result, c = await task - if result: - confirmed_exts.append(c) + async for url, kwargs, (c, file_part), response in self.helpers.request_custom_batch(urls_and_kwargs): + if response is not None: + if response.status_code == affirmative_status_code: + if file_part == "stem": + confirmed_chars.append(c) + elif file_part == "ext": + confirmed_exts.append(c) return confirmed_chars, confirmed_exts @@ -153,53 +159,55 @@ async def solve_shortname_recursive( url_hint_list = [] found_results = False - tasks = [] cl = ext_char_list if extension_mode == True else char_list + urls_and_kwargs = [] + for c in cl: suffix = "/a.aspx" wildcard = "*" if extension_mode else "*~1*" payload = encode_all(f"{prefix}{c}{wildcard}") url = f"{target}{payload}{suffix}" - task = self.threaded_request(method, url, affirmative_status_code, c) - tasks.append(task) - - async for task in self.helpers.as_completed(tasks): - result, c = await task - if result: - found_results = True - node_count += 1 - safety_counter.counter += 1 - if safety_counter.counter > 3000: - raise IISShortnamesError(f"Exceeded safety counter threshold ({safety_counter.counter})") - self.verbose(f"node_count: {str(node_count)} for node: {target}") - if node_count > self.config.get("max_node_count"): - self.warning( - f"iis_shortnames: max_node_count ({str(self.config.get('max_node_count'))}) exceeded for node: {target}. Affected branch will be terminated."
+ kwargs = dict(method=method) + urls_and_kwargs.append((url, kwargs, c)) + + async for url, kwargs, c, response in self.helpers.request_custom_batch(urls_and_kwargs): + if response is not None: + if response.status_code == affirmative_status_code: + found_results = True + node_count += 1 + safety_counter.counter += 1 + if safety_counter.counter > 3000: + raise IISShortnamesError(f"Exceeded safety counter threshold ({safety_counter.counter})") + self.verbose(f"node_count: {str(node_count)} for node: {target}") + if node_count > self.config.get("max_node_count"): + self.warning( + f"iis_shortnames: max_node_count ({str(self.config.get('max_node_count'))}) exceeded for node: {target}. Affected branch will be terminated." + ) + return url_hint_list + + # check to make sure the file isn't shorter than 6 characters + wildcard = "~1*" + payload = encode_all(f"{prefix}{c}{wildcard}") + url = f"{target}{payload}{suffix}" + r = await self.helpers.request( + method=method, url=url, allow_redirects=False, retries=2, timeout=10 + ) + if r is not None: + if r.status_code == affirmative_status_code: + url_hint_list.append(f"{prefix}{c}") + + url_hint_list += await self.solve_shortname_recursive( + safety_counter, + method, + target, + f"{prefix}{c}", + affirmative_status_code, + char_list, + ext_char_list, + extension_mode, + node_count=node_count, ) - return url_hint_list - - # check to make sure the file isn't shorter than 6 characters - wildcard = "~1*" - payload = encode_all(f"{prefix}{c}{wildcard}") - url = f"{target}{payload}{suffix}" - r = await self.helpers.request(method=method, url=url, allow_redirects=False, retries=2, timeout=10) - if r is not None: - if r.status_code == affirmative_status_code: - url_hint_list.append(f"{prefix}{c}") - - url_hint_list += await self.solve_shortname_recursive( - safety_counter, - method, - target, - f"{prefix}{c}", - affirmative_status_code, - char_list, - ext_char_list, - extension_mode, - node_count=node_count, - ) if len(prefix) > 0 and found_results == False: url_hint_list.append(f"{prefix}") self.verbose(f"Found new (possibly partial) URL_HINT: {prefix} from node {target}") @@ -225,6 +233,7 @@ class safety_counter_obj: {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, + context=f"{{module}} detected low {{event.type}}: IIS shortname enumeration", ) if not self.config.get("detect_only"): for detection in detections: @@ -314,7 +323,13 @@ class safety_counter_obj: hint_type = "shortname-file" else: hint_type = "shortname-directory" - await self.emit_event(f"{normalized_url}/{url_hint}", "URL_HINT", event, tags=[hint_type]) + await self.emit_event( + f"{normalized_url}/{url_hint}", + "URL_HINT", + event, + tags=[hint_type], + context=f"{{module}} enumerated shortnames at {normalized_url} and found {{event.type}}: {url_hint}", + ) async def filter_event(self, event): if "dir" in event.tags: diff --git a/bbot/modules/internal/aggregate.py b/bbot/modules/internal/aggregate.py index b1f11b04e..54e3a52cc 100644 --- a/bbot/modules/internal/aggregate.py +++ b/bbot/modules/internal/aggregate.py @@ -3,7 +3,11 @@ class aggregate(BaseReportModule): flags = ["passive", "safe"] - meta = {"description": "Summarize statistics at the end of a scan"} + meta = { + "description": "Summarize statistics at the end of a scan", + "created_date": "2022-07-25", + "author": "@TheTechromancer", + } async def report(self): self.log_table(*self.scan.stats._make_table(), table_name="scan-stats") diff --git 
a/bbot/modules/internal/cloud.py b/bbot/modules/internal/cloud.py index 7939487fd..6b2da62e4 100644 --- a/bbot/modules/internal/cloud.py +++ b/bbot/modules/internal/cloud.py @@ -8,18 +8,23 @@ class cloud(InterceptModule): _priority = 3 async def setup(self): + self.dummy_modules = None + return True + + def make_dummy_modules(self): self.dummy_modules = {} for provider_name, provider in self.helpers.cloud.providers.items(): self.dummy_modules[provider_name] = self.scan._make_dummy_module(f"cloud_{provider_name}", _type="scan") - return True - async def filter_event(self, event): if (not event.host) or (event.type in ("IP_RANGE",)): return False, "event does not have host attribute" return True async def handle_event(self, event, kwargs): + # don't hold up the event loop loading cloud IPs etc. + if self.dummy_modules is None: + self.make_dummy_modules() # cloud tagging by hosts hosts_to_check = set(str(s) for s in event.resolved_hosts) hosts_to_check.add(str(event.host_original)) @@ -34,7 +39,7 @@ async def handle_event(self, event, kwargs): for provider in self.helpers.cloud.providers.values(): provider_name = provider.name.lower() base_kwargs = dict( - source=event, tags=[f"{provider.provider_type}-{provider_name}"], _provider=provider_name + parent=event, tags=[f"{provider.provider_type}-{provider_name}"], _provider=provider_name ) # loop through the provider's regex signatures, if any for event_type, sigs in provider.signatures.items(): @@ -62,9 +67,11 @@ async def handle_event(self, event, kwargs): if event_type == "STORAGE_BUCKET": bucket_name, bucket_domain = match + bucket_url = f"https://{bucket_name}.{bucket_domain}" _kwargs["data"] = { "name": bucket_name, - "url": f"https://{bucket_name}.{bucket_domain}", + "url": bucket_url, + "context": f"{{module}} analyzed {event.type} and found {{event.type}}: {bucket_url}", } await self.emit_event(**_kwargs) diff --git a/bbot/modules/internal/dns.py b/bbot/modules/internal/dns.py index b96b9b19c..276ebc50c 100644 --- a/bbot/modules/internal/dns.py +++ b/bbot/modules/internal/dns.py @@ -54,14 +54,12 @@ async def handle_event(self, event, kwargs): dns_children = dict() event_whitelisted = False event_blacklisted = False + emit_children = False event_host = str(event.host) event_host_hash = hash(str(event.host)) event_is_ip = self.helpers.is_ip(event.host) - # only emit DNS children if we haven't seen this host before - emit_children = self.dns_resolution and event_host_hash not in self._event_cache - # we do DNS resolution inside a lock to make sure we don't duplicate work # once the resolution happens, it will be cached so it doesn't need to happen again async with self._event_cache_locks.lock(event_host_hash): @@ -123,6 +121,9 @@ async def handle_event(self, event, kwargs): except ValueError: continue + # only emit DNS children if we haven't seen this host before + emit_children = self.dns_resolution and event_host_hash not in self._event_cache + # store results in cache self._event_cache[event_host_hash] = dns_tags, dns_children, event_whitelisted, event_blacklisted @@ -164,23 +165,31 @@ async def handle_event(self, event, kwargs): event.add_tag(tag) # If the event is unresolved, change its type to DNS_NAME_UNRESOLVED - if event.type == "DNS_NAME" and "unresolved" in event.tags and not "target" in event.tags: + if event.type == "DNS_NAME" and "unresolved" in event.tags: event.type = "DNS_NAME_UNRESOLVED" # speculate DNS_NAMES and IP_ADDRESSes from other event types - source_event = event + parent_event = event if ( event.host and 
event.type not in ("DNS_NAME", "DNS_NAME_UNRESOLVED", "IP_ADDRESS", "IP_RANGE") and not ((event.type in ("OPEN_TCP_PORT", "URL_UNVERIFIED") and str(event.module) == "speculate")) ): - source_event = self.scan.make_event(event.host, "DNS_NAME", module=self.host_module, source=event) + parent_event = self.scan.make_event( + event.host, + "DNS_NAME", + module=self.host_module, + parent=event, + ) # only emit the event if it's not already in the parent chain - if source_event is not None and (source_event.always_emit or source_event not in event.get_sources()): - source_event.scope_distance = event.scope_distance + if parent_event is not None and (parent_event.always_emit or parent_event not in event.get_parents()): + parent_event.scope_distance = event.scope_distance if "target" in event.tags: - source_event.add_tag("target") - await self.emit_event(source_event) + parent_event.add_tag("target") + await self.emit_event( + parent_event, + context="{event.parent.type} has host {event.type}: {event.host}", + ) # emit DNS children if emit_children: @@ -189,10 +198,12 @@ async def handle_event(self, event, kwargs): if dns_children: for rdtype, records in dns_children.items(): module = self.scan._make_dummy_module_dns(rdtype) - module._priority = 4 for record in records: try: - child_event = self.scan.make_event(record, "DNS_NAME", module=module, source=source_event) + child_event = self.scan.make_event(record, "DNS_NAME", module=module, parent=parent_event) + child_event.discovery_context = ( + f"{rdtype} record for {event.host} contains {child_event.type}: {child_event.host}" + ) # if it's a hostname and it's only one hop away, mark it as affiliate if child_event.type == "DNS_NAME" and child_event.scope_distance == 1: child_event.add_tag("affiliate") @@ -200,7 +211,7 @@ async def handle_event(self, event, kwargs): dns_child_events.append(child_event) except ValidationError as e: self.warning( - f'Event validation failed for DNS child of {source_event}: "{record}" ({rdtype}): {e}' + f'Event validation failed for DNS child of {parent_event}: "{record}" ({rdtype}): {e}' ) for child_event in dns_child_events: self.debug(f"Queueing DNS child for {event}: {child_event}") @@ -220,7 +231,7 @@ async def handle_wildcard_event(self, event): event.add_tag(f"{rdtype.lower()}-{wildcard_tag}") # wildcard event modification (www.evilcorp.com --> _wildcard.evilcorp.com) - if wildcard_rdtypes: + if wildcard_rdtypes and not "target" in event.tags: # these are the rdtypes that successfully resolve resolved_rdtypes = set([c.upper() for c in event.dns_children]) # these are the rdtypes that have wildcards diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index d8d69f2eb..eef4ab26a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -52,7 +52,13 @@ async def search(self, content, event, **kwargs): await self.report(domain, event, **kwargs) async def report(self, domain, event, **kwargs): - await self.excavate.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + await self.excavate.emit_event( + domain, + "DNS_NAME", + parent=event, + tags=["affiliate"], + context=f"{{module}}'s CSP extractor found {{event.type}}: {{event.data}} in CSP header", + ) class HostnameExtractor(BaseExtractor): @@ -64,7 +70,13 @@ def __init__(self, excavate): super().__init__(excavate) async def report(self, result, name, event, **kwargs): - await self.excavate.emit_event(result, "DNS_NAME", source=event) + context = kwargs.get("discovery_context", "") + await 
self.excavate.emit_event( + result, + "DNS_NAME", + parent=event, + context=f"{{module}}'s hostname extractor found {{event.type}}: {{event.data}} from {context} using regex derived from target domain", + ) class URLExtractor(BaseExtractor): @@ -122,7 +134,7 @@ async def search(self, content, event, **kwargs): urls_found += 1 async def _search(self, content, event, **kwargs): - parsed = getattr(event, "parsed", None) + parsed = getattr(event, "parsed_url", None) for name, regex in self.compiled_regexes.items(): # yield to event loop await self.excavate.helpers.sleep(0) @@ -142,7 +154,7 @@ async def _search(self, content, event, **kwargs): continue if not self.compiled_regexes["fullurl"].match(path): - source_url = event.parsed.geturl() + source_url = event.parsed_url.geturl() result = urljoin(source_url, path) # this is necessary to weed out mailto: and such if not self.compiled_regexes["fullurl"].match(result): @@ -153,6 +165,7 @@ async def _search(self, content, event, **kwargs): yield result, name async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") parsed_uri = None try: parsed_uri = self.excavate.helpers.urlparse(result) @@ -167,14 +180,15 @@ async def report(self, result, name, event, **kwargs): # these findings are pretty mundane so don't bother with them if they aren't in scope abort_if = lambda e: e.scope_distance > 0 event_data = {"host": str(host), "description": f"Non-HTTP URI: {result}"} - parsed_url = getattr(event, "parsed", None) + parsed_url = getattr(event, "parsed_url", None) if parsed_url: event_data["url"] = parsed_url.geturl() await self.excavate.emit_event( event_data, "FINDING", - source=event, + parent=event, abort_if=abort_if, + context=f"{{module}}'s URL extractor found {{event.type}}: Non-HTTP URI {{event.data}} in {context}", ) protocol_data = {"protocol": parsed_uri.scheme, "host": str(host)} if port: @@ -182,12 +196,18 @@ async def report(self, result, name, event, **kwargs): await self.excavate.emit_event( protocol_data, "PROTOCOL", - source=event, + parent=event, abort_if=abort_if, + context=f"{{module}}'s URL extractor found {{event.type}}: {{event.data}} in {result} while searching {context}", ) return - return self.excavate.make_event(result, "URL_UNVERIFIED", source=event) + return self.excavate.make_event( + result, + "URL_UNVERIFIED", + parent=event, + context=f"{{module}}'s URL extractor ({name} regex) found {{event.type}}: {{event.data}} in {context}", + ) class EmailExtractor(BaseExtractor): @@ -195,11 +215,17 @@ class EmailExtractor(BaseExtractor): tld_blacklist = ["png", "jpg", "jpeg", "bmp", "ico", "gif", "svg", "css", "ttf", "woff", "woff2"] async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") result = result.lower() tld = result.split(".")[-1] if tld not in self.tld_blacklist: self.excavate.debug(f"Found email address [{result}] from parsing [{event.data.get('url')}]") - await self.excavate.emit_event(result, "EMAIL_ADDRESS", source=event) + await self.excavate.emit_event( + result, + "EMAIL_ADDRESS", + parent=event, + context=f"{{module}}'s email extractor found {{event.type}}: {{event.data}} in {context}", + ) class ErrorExtractor(BaseExtractor): @@ -221,12 +247,14 @@ class ErrorExtractor(BaseExtractor): } async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") self.excavate.debug(f"Found error message from parsing [{event.data.get('url')}] with regex [{name}]") description = f"Error message 
Detected at Error Type: {name}" await self.excavate.emit_event( {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", - source=event, + parent=event, + context=f"{{module}}'s error message extractor detected {{event.type}}: {name} error message in {context}", ) @@ -234,6 +262,7 @@ class JWTExtractor(BaseExtractor): regexes = {"JWT": r"eyJ(?:[\w-]*\.)(?:[\w-]*\.)[\w-]*"} async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") self.excavate.debug(f"Found JWT candidate [{result}]") try: j.decode(result, options={"verify_signature": False}) @@ -247,6 +276,7 @@ async def report(self, result, name, event, **kwargs): "FINDING", event, tags=tags, + context=f"{{module}}'s JWT extractor found {{event.type}}: {name} in {context}", ) except j.exceptions.DecodeError: @@ -264,9 +294,13 @@ class SerializationExtractor(BaseExtractor): } async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") description = f"{name} serialized object found: [{self.excavate.helpers.truncate_string(result,2000)}]" await self.excavate.emit_event( - {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event + {"host": str(event.host), "url": event.data.get("url"), "description": description}, + "FINDING", + event, + context=f"{{module}}'s serialization extractor found {{event.type}}: {name} serialized object in {context}", ) @@ -277,9 +311,13 @@ class FunctionalityExtractor(BaseExtractor): } async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") description = f"{name} found" await self.excavate.emit_event( - {"host": str(event.host), "url": event.data.get("url"), "description": description}, "FINDING", event + {"host": str(event.host), "url": event.data.get("url"), "description": description}, + "FINDING", + event, + context=f"{{module}}'s functionality extractor discovered {{event.type}}: {name} in {context}", ) @@ -319,6 +357,7 @@ class JavascriptExtractor(BaseExtractor): } async def report(self, result, name, event, **kwargs): + context = kwargs.get("discovery_context", "") # ensure that basic auth matches aren't false positives if name == "authorization_basic": try: @@ -331,7 +370,10 @@ async def report(self, result, name, event, **kwargs): self.excavate.debug(f"Found Possible Secret in Javascript [{result}]") description = f"Possible secret in JS [{result}] Signature [{name}]" await self.excavate.emit_event( - {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, "FINDING", event + {"host": str(event.host), "url": event.data.get("url", ""), "description": description}, + "FINDING", + event, + context=f"{{module}}'s Javascript extractor discovered {{event.type}}: {description} in {context}", ) @@ -339,7 +381,11 @@ class excavate(BaseInternalModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["URL_UNVERIFIED"] flags = ["passive"] - meta = {"description": "Passively extract juicy tidbits from scan data"} + meta = { + "description": "Passively extract juicy tidbits from scan data", + "created_date": "2022-06-27", + "author": "@liquidsec", + } scope_distance_modifier = None @@ -381,9 +427,12 @@ async def handle_event(self, event): url_event = self.make_event(location, "URL_UNVERIFIED", event, tags="affiliate") if url_event is not None: # inherit web spider distance from parent (don't increment) - source_web_spider_distance = getattr(event, 
"web_spider_distance", 0) - url_event.web_spider_distance = source_web_spider_distance - await self.emit_event(url_event) + parent_web_spider_distance = getattr(event, "web_spider_distance", 0) + url_event.web_spider_distance = parent_web_spider_distance + await self.emit_event( + url_event, + context='{module} looked in "Location" header and found {event.type}: {event.data}', + ) else: self.verbose(f"Exceeded max HTTP redirects ({self.max_redirects}): {location}") @@ -403,6 +452,7 @@ async def handle_event(self, event): ], event, consider_spider_danger=True, + discovery_context="HTTP response body", ) headers = await self.helpers.re.recursive_decode(event.data.get("raw_header", "")) @@ -411,6 +461,7 @@ async def handle_event(self, event): [self.hostname, self.url, self.email, self.error_extractor, self.jwt, self.serialization, self.csp], event, consider_spider_danger=False, + discovery_context="HTTP response headers", ) else: diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 9f39598d2..93278cc73 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -26,7 +26,11 @@ class speculate(BaseInternalModule): ] produced_events = ["DNS_NAME", "OPEN_TCP_PORT", "IP_ADDRESS", "FINDING", "ORG_STUB"] flags = ["passive"] - meta = {"description": "Derive certain event types from others by common sense"} + meta = { + "description": "Derive certain event types from others by common sense", + "created_date": "2022-05-03", + "author": "@liquidsec", + } options = {"max_hosts": 65536, "ports": "80,443"} options_desc = { @@ -36,6 +40,8 @@ class speculate(BaseInternalModule): scope_distance_modifier = 1 _priority = 4 + default_discovery_context = "speculated {event.type}: {event.data}" + async def setup(self): scan_modules = [m for m in self.scan.modules.values() if m._type == "scan"] self.open_port_consumers = any(["OPEN_TCP_PORT" in m.watched_events for m in scan_modules]) @@ -63,7 +69,7 @@ async def setup(self): self.hugewarning( f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation" ) - self.hugewarning(f"Enabling a port scanner (nmap or masscan) module is highly recommended") + self.hugewarning(f'Enabling the "portscan" module is highly recommended') self.range_to_ip = False return True @@ -75,13 +81,21 @@ async def handle_event(self, event): ips = list(net) random.shuffle(ips) for ip in ips: - await self.emit_event(ip, "IP_ADDRESS", source=event, internal=True) + await self.emit_event( + ip, + "IP_ADDRESS", + parent=event, + internal=True, + context=f"speculate converted range into indivudal IP_ADDRESS: {ip}", + ) # parent domains - if event.type == "DNS_NAME": + if event.type.startswith("DNS_NAME"): parent = self.helpers.parent_domain(event.data) if parent != event.data: - await self.emit_event(parent, "DNS_NAME", source=event, internal=True) + await self.emit_event( + parent, "DNS_NAME", parent=event, context=f"speculated parent {{event.type}}: {{event.data}}" + ) # we speculate on distance-1 stuff too, because distance-1 open ports are needed by certain modules like sslcert event_in_scope_distance = event.scope_distance <= (self.scan.scope_search_distance + 1) @@ -94,37 +108,45 @@ async def handle_event(self, event): await self.emit_event( self.helpers.make_netloc(event.host, event.port), "OPEN_TCP_PORT", - source=event, + parent=event, internal=True, quick=(event.type == "URL"), + context=f"speculated {{event.type}} from {event.type}: {{event.data}}", ) # speculate sub-directory 
URLS from URLS if event.type == "URL": url_parents = self.helpers.url_parents(event.data) for up in url_parents: - url_event = self.make_event(f"{up}/", "URL_UNVERIFIED", source=event) + url_event = self.make_event(f"{up}/", "URL_UNVERIFIED", parent=event) if url_event is not None: # inherit web spider distance from parent (don't increment) - source_web_spider_distance = getattr(event, "web_spider_distance", 0) - url_event.web_spider_distance = source_web_spider_distance - await self.emit_event(url_event) + parent_web_spider_distance = getattr(event, "web_spider_distance", 0) + url_event.web_spider_distance = parent_web_spider_distance + await self.emit_event(url_event, context="speculated web sub-directory {event.type}: {event.data}") # speculate URL_UNVERIFIED from URL or any event with "url" attribute event_is_url = event.type == "URL" event_has_url = isinstance(event.data, dict) and "url" in event.data + event_tags = ["httpx-safe"] if event.type in ("CODE_REPOSITORY", "SOCIAL") else [] if event_is_url or event_has_url: if event_is_url: url = event.data else: url = event.data["url"] - if not any(e.type == "URL_UNVERIFIED" and e.data == url for e in event.get_sources()): - tags = None - if self.helpers.is_spider_danger(event.source, url): - tags = ["spider-danger"] - await self.emit_event(url, "URL_UNVERIFIED", tags=tags, source=event) + # only emit the url if it's not already in the event's history + if not any(e.type == "URL_UNVERIFIED" and e.data == url for e in event.get_parents()): + if self.helpers.is_spider_danger(event.parent, url): + event_tags.append("spider-danger") + await self.emit_event( + url, + "URL_UNVERIFIED", + tags=event_tags, + parent=event, + context="speculated {event.type}: {event.data}", + ) - # from hosts + # IP_ADDRESS / DNS_NAME --> OPEN_TCP_PORT if speculate_open_ports: # don't act on unresolved DNS_NAMEs usable_dns = False @@ -137,9 +159,10 @@ async def handle_event(self, event): await self.emit_event( self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", - source=event, + parent=event, internal=True, quick=True, + context="speculated {event.type}: {event.data}", ) # ORG_STUB from TLD, SOCIAL, AZURE_TENANT @@ -164,23 +187,17 @@ async def handle_event(self, event): stub_hash = hash(stub) if stub_hash not in self.org_stubs_seen: self.org_stubs_seen.add(stub_hash) - stub_event = self.make_event(stub, "ORG_STUB", source=event) + stub_event = self.make_event(stub, "ORG_STUB", parent=event) if stub_event: if event.scope_distance > 0: stub_event.scope_distance = event.scope_distance - await self.emit_event(stub_event) + await self.emit_event(stub_event, context="speculated {event.type}: {event.data}") # USERNAME --> EMAIL if event.type == "USERNAME": email = event.data.split(":", 1)[-1] if validators.soft_validate(email, "email"): - email_event = self.make_event(email, "EMAIL_ADDRESS", source=event, tags=["affiliate"]) + email_event = self.make_event(email, "EMAIL_ADDRESS", parent=event, tags=["affiliate"]) if email_event: email_event.scope_distance = event.scope_distance - await self.emit_event(email_event) - - async def filter_event(self, event): - # don't accept errored DNS_NAMEs - if any(t in event.tags for t in ("unresolved", "a-error", "aaaa-error")): - return False, "there were errors resolving this hostname" - return True + await self.emit_event(email_event, context="detected {event.type}: {event.data}") diff --git a/bbot/modules/internetdb.py b/bbot/modules/internetdb.py index 847db0c7a..69976ebaf 100644 --- a/bbot/modules/internetdb.py +++ 
b/bbot/modules/internetdb.py @@ -38,7 +38,11 @@ class internetdb(BaseModule): watched_events = ["IP_ADDRESS", "DNS_NAME"] produced_events = ["TECHNOLOGY", "VULNERABILITY", "FINDING", "OPEN_TCP_PORT", "DNS_NAME"] flags = ["passive", "safe", "portscan", "subdomain-enum"] - meta = {"description": "Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities"} + meta = { + "description": "Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities", + "created_date": "2023-12-22", + "author": "@TheTechromancer", + } _qsize = 500 @@ -75,7 +79,7 @@ async def handle_event(self, event): return if data: if r.status_code == 200: - await self._parse_response(data=data, event=event) + await self._parse_response(data=data, event=event, ip=ip) elif r.status_code == 404: detail = data.get("detail", "") if detail: @@ -85,25 +89,44 @@ async def handle_event(self, event): err_msg = data.get("msg", "") self.verbose(f"Shodan error for {ip}: {err_data}: {err_msg}") - async def _parse_response(self, data: dict, event): + async def _parse_response(self, data: dict, event, ip): """Handles emitting events from returned JSON""" data: dict # has keys: cpes, hostnames, ip, ports, tags, vulns + ip = str(ip) + query_host = ip if event.data == ip else f"{event.data} ({ip})" # ip is a string, ports is a list of ports, the rest is a list of strings for hostname in data.get("hostnames", []): - await self.emit_event(hostname, "DNS_NAME", source=event) + if hostname != event.data: + await self.emit_event( + hostname, + "DNS_NAME", + parent=event, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}', + ) for cpe in data.get("cpes", []): - await self.emit_event({"technology": cpe, "host": str(event.host)}, "TECHNOLOGY", source=event) + await self.emit_event( + {"technology": cpe, "host": str(event.host)}, + "TECHNOLOGY", + parent=event, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}', + ) for port in data.get("ports", []): await self.emit_event( - self.helpers.make_netloc(event.data, port), "OPEN_TCP_PORT", source=event, internal=True, quick=True + self.helpers.make_netloc(event.data, port), + "OPEN_TCP_PORT", + parent=event, + internal=True, + quick=True, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found {{event.type}}: {{event.data}}', ) vulns = data.get("vulns", []) if vulns: vulns_str = ", ".join([str(v) for v in vulns]) await self.emit_event( - {"description": f"Shodan reported verified vulnerabilities: {vulns_str}", "host": str(event.host)}, + {"description": f"Shodan reported possible vulnerabilities: {vulns_str}", "host": str(event.host)}, "FINDING", - source=event, + parent=event, + context=f'{{module}} queried Shodan\'s InternetDB API for "{query_host}" and found potential {{event.type}}: {vulns_str}', ) def get_ip(self, event): diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index e192b2abb..af7dd5d94 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -9,7 +9,12 @@ class IP2Location(BaseModule): watched_events = ["IP_ADDRESS"] produced_events = ["GEOLOCATION"] flags = ["passive", "safe"] - meta = {"description": "Query IP2location.io's API for geolocation information. ", "auth_required": True} + meta = { + "description": "Query IP2location.io's API for geolocation information. 
", + "created_date": "2023-09-12", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": "", "lang": ""} options_desc = { "api_key": "IP2location.io API Key", @@ -57,4 +62,15 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - await self.emit_event(geo_data, "GEOLOCATION", event) + country = geo_data.get("country_name", "unknown country") + region = geo_data.get("region_name", "unknown region") + city = geo_data.get("city_name", "unknown city") + lat = geo_data.get("latitude", "") + long = geo_data.get("longitude", "") + description = f"{city}, {region}, {country} ({lat}, {long})" + await self.emit_event( + geo_data, + "GEOLOCATION", + event, + context=f'{{module}} queried IP2Location API for "{event.data}" and found {{event.type}}: {description}', + ) diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index fe9673f5b..3aab345f2 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -7,7 +7,11 @@ class ipneighbor(BaseModule): watched_events = ["IP_ADDRESS"] produced_events = ["IP_ADDRESS"] flags = ["passive", "subdomain-enum", "aggressive"] - meta = {"description": "Look beside IPs in their surrounding subnet"} + meta = { + "description": "Look beside IPs in their surrounding subnet", + "created_date": "2022-06-08", + "author": "@TheTechromancer", + } options = {"num_bits": 4} options_desc = {"num_bits": "Netmask size (in CIDR notation) to check. Default is 4 bits (16 hosts)"} scope_distance_modifier = 1 @@ -35,4 +39,7 @@ async def handle_event(self, event): if ip_event: # keep the scope distance low to give it one more hop for DNS resolution # ip_event.scope_distance = max(1, event.scope_distance) - await self.emit_event(ip_event) + await self.emit_event( + ip_event, + context="{module} produced {event.type}: {event.data}", + ) diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index 98f139505..115a620ba 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -10,7 +10,12 @@ class Ipstack(BaseModule): watched_events = ["IP_ADDRESS"] produced_events = ["GEOLOCATION"] flags = ["passive", "safe"] - meta = {"description": "Query IPStack's GeoIP API", "auth_required": True} + meta = { + "description": "Query IPStack's GeoIP API", + "created_date": "2022-11-26", + "author": "@tycoonslive", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "IPStack GeoIP API Key"} scope_distance_modifier = 1 @@ -47,4 +52,15 @@ async def handle_event(self, event): if error_msg: self.warning(error_msg) elif geo_data: - await self.emit_event(geo_data, "GEOLOCATION", event) + country = geo_data.get("country_name", "unknown country") + region = geo_data.get("region_name", "unknown region") + city = geo_data.get("city", "unknown city") + lat = geo_data.get("latitude", "") + long = geo_data.get("longitude", "") + description = f"{city}, {region}, {country} ({lat}, {long})" + await self.emit_event( + geo_data, + "GEOLOCATION", + event, + context=f'{{module}} queried ipstack.com\'s API for "{event.data}" and found {{event.type}}: {description}', + ) diff --git a/bbot/modules/leakix.py b/bbot/modules/leakix.py index 45053755a..4bc88d3c7 100644 --- a/bbot/modules/leakix.py +++ b/bbot/modules/leakix.py @@ -8,7 +8,11 @@ class leakix(subdomain_enum_apikey): options = {"api_key": ""} # NOTE: API key is not required (but having one will get you more results) options_desc = {"api_key": "LeakIX API Key"} - meta = {"description": "Query leakix.net for subdomains"} 
+ meta = { + "description": "Query leakix.net for subdomains", + "created_date": "2022-07-11", + "author": "@TheTechromancer", + } base_url = "https://leakix.net" diff --git a/bbot/modules/masscan.py b/bbot/modules/masscan.py deleted file mode 100644 index ddeb76163..000000000 --- a/bbot/modules/masscan.py +++ /dev/null @@ -1,278 +0,0 @@ -import json -from contextlib import suppress - -from bbot.modules.templates.portscanner import portscanner - - -class masscan(portscanner): - flags = ["active", "portscan", "aggressive"] - watched_events = ["IP_ADDRESS", "IP_RANGE"] - produced_events = ["OPEN_TCP_PORT"] - meta = {"description": "Port scan with masscan. By default, scans top 100 ports."} - options = { - "top_ports": 100, - "ports": "", - # ping scan at 600 packets/s ~= entire private IP space in 8 hours - "rate": 600, - "wait": 5, - "ping_first": False, - "ping_only": False, - "use_cache": False, - } - options_desc = { - "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')", - "ports": "Ports to scan", - "rate": "Rate in packets per second", - "wait": "Seconds to wait for replies after scan is complete", - "ping_first": "Only portscan hosts that reply to pings", - "ping_only": "Ping sweep only, no portscan", - "use_cache": "Instead of scanning, use the results from the previous scan", - } - deps_ansible = [ - { - "name": "install dev tools", - "package": {"name": ["gcc", "git", "make"], "state": "present"}, - "become": True, - "ignore_errors": True, - }, - { - "name": "Download masscan source code", - "git": { - "repo": "https://github.com/robertdavidgraham/masscan.git", - "dest": "#{BBOT_TEMP}/masscan", - "single_branch": True, - "version": "master", - }, - }, - { - "name": "Build masscan", - "command": { - "chdir": "#{BBOT_TEMP}/masscan", - "cmd": "make -j", - "creates": "#{BBOT_TEMP}/masscan/bin/masscan", - }, - }, - { - "name": "Install masscan", - "copy": {"src": "#{BBOT_TEMP}/masscan/bin/masscan", "dest": "#{BBOT_TOOLS}/", "mode": "u+x,g+x,o+x"}, - }, - ] - batch_size = 1000000 - - async def setup(self): - self.top_ports = self.config.get("top_ports", 100) - self.rate = self.config.get("rate", 600) - self.wait = self.config.get("wait", 10) - self.ping_first = self.config.get("ping_first", False) - self.ping_only = self.config.get("ping_only", False) - self.use_cache = self.config.get("use_cache", False) - self.ports = self.config.get("ports", "") - if self.ports: - try: - self.helpers.parse_port_string(self.ports) - except ValueError as e: - return False, f"Error parsing ports: {e}" - self.alive_hosts = dict() - - _, invalid_targets = self._build_targets(self.scan.target) - if invalid_targets > 0: - self.warning( - f"Masscan can only accept IP addresses or IP ranges as target ({invalid_targets:,} targets were hostnames)" - ) - - self.run_time = self.helpers.make_date() - self.ping_cache = self.scan.home / f"masscan_ping.txt" - self.syn_cache = self.scan.home / f"masscan_syn.txt" - if self.use_cache: - files_exist = self.ping_cache.is_file() or self.syn_cache.is_file() - files_empty = self.helpers.filesize(self.ping_cache) == 0 and self.helpers.filesize(self.syn_cache) == 0 - if not files_exist: - return ( - False, - f"use_cache is True but could not find cache file at {self.ping_cache} or {self.syn_cache}", - ) - if files_empty: - return ( - False, - f"use_cache is True but could cached files {self.ping_cache} and {self.syn_cache} are empty", - ) - else: - self.helpers.depsinstaller.ensure_root(message="Masscan requires root privileges") - self.ping_cache_fd = 
None - self.syn_cache_fd = None - - return await super().setup() - - async def handle_batch(self, *events): - if self.use_cache: - await self.emit_from_cache() - else: - targets = [str(e.data) for e in events] - if not targets: - self.warning("No targets specified") - return - - # ping scan - if self.ping_first or self.ping_only: - self.verbose("Starting masscan (ping scan)") - - await self.masscan(targets, result_callback=self.append_alive_host, ping=True) - targets = ",".join(str(h) for h in self.alive_hosts) - if not targets: - self.warning("No hosts responded to pings") - return - - # TCP SYN scan - if not self.ping_only: - self.verbose("Starting masscan (TCP SYN scan)") - await self.masscan(targets, result_callback=self.emit_open_tcp_port) - else: - self.verbose("Only ping sweep was requested, skipping TCP SYN scan") - # save memory - self.alive_hosts.clear() - - async def masscan(self, targets, result_callback, ping=False): - target_file = self.helpers.tempfile(targets, pipe=False) - command = self._build_masscan_command(target_file, ping=ping) - stats_file = self.helpers.tempfile_tail(callback=self.verbose) - try: - with open(stats_file, "w") as stats_fh: - async for line in self.run_process_live(command, sudo=True, stderr=stats_fh): - await self.process_output(line, result_callback=result_callback) - finally: - for file in (stats_file, target_file): - file.unlink() - - def _build_masscan_command(self, target_file=None, dry_run=False, ping=False): - command = ( - "masscan", - "--excludefile", - str(self.exclude_file), - "--rate", - self.rate, - "--wait", - self.wait, - "--open-only", - "-oJ", - "-", - ) - if target_file is not None: - command += ("-iL", str(target_file)) - if ping: - command += ("--ping",) - else: - if self.ports: - command += ("-p", self.ports) - else: - command += ("--top-ports", str(self.top_ports)) - if dry_run: - command += ("--echo",) - return command - - async def process_output(self, line, result_callback): - try: - j = json.loads(line) - except Exception: - return - ip = j.get("ip", "") - if not ip: - return - ports = j.get("ports", []) - if not ports: - return - for p in ports: - proto = p.get("proto", "") - port_number = p.get("port", "") - if proto == "" or port_number == "": - continue - result = str(ip) - source = None - with suppress(KeyError): - source = self.alive_hosts[ip] - if proto != "icmp": - result = self.helpers.make_netloc(result, port_number) - if source is None: - source = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) - if not source: - continue - await self.emit_event(source) - await result_callback(result, source=source) - - async def append_alive_host(self, host, source): - host_event = self.make_event(host, "IP_ADDRESS", source=self.get_source_event(host)) - if host_event: - self.alive_hosts[host] = host_event - self._write_ping_result(host) - await self.emit_event(host_event) - - async def emit_open_tcp_port(self, data, source): - self._write_syn_result(data) - await self.emit_event(data, "OPEN_TCP_PORT", source=source) - - async def emit_from_cache(self): - ip_events = {} - # ping scan - if self.ping_cache.is_file(): - cached_pings = list(self.helpers.read_file(self.ping_cache)) - if cached_pings: - self.success(f"{len(cached_pings):,} hosts loaded from previous ping scan") - else: - self.verbose(f"No hosts cached from previous ping scan") - for ip in cached_pings: - if self.scan.stopping: - break - ip_event = self.make_event(ip, "IP_ADDRESS", source=self.get_source_event(ip)) - if ip_event: - ip_events[ip] = 
ip_event - await self.emit_event(ip_event) - # syn scan - if self.syn_cache.is_file(): - cached_syns = list(self.helpers.read_file(self.syn_cache)) - if cached_syns: - self.success(f"{len(cached_syns):,} hosts loaded from previous SYN scan") - else: - self.warning(f"No hosts cached from previous SYN scan") - for line in cached_syns: - if self.scan.stopping: - break - host, port = self.helpers.split_host_port(line) - host = str(host) - source_event = ip_events.get(host) - if source_event is None: - self.verbose(f"Source event not found for {line}") - source_event = self.make_event(line, "IP_ADDRESS", source=self.get_source_event(line)) - if not source_event: - continue - await self.emit_event(source_event) - await self.emit_event(line, "OPEN_TCP_PORT", source=source_event) - - def get_source_event(self, host): - source_event = self.scan.target.get(host) - if source_event is None: - source_event = self.scan.whitelist.get(host) - if source_event is None: - source_event = self.scan.root_event - return source_event - - async def cleanup(self): - if self.ping_first: - with suppress(Exception): - self.ping_cache_fd.close() - with suppress(Exception): - self.syn_cache_fd.close() - with suppress(Exception): - self.exclude_file.unlink() - - def _write_ping_result(self, host): - if self.ping_cache_fd is None: - self.helpers.backup_file(self.ping_cache) - self.ping_cache_fd = open(self.ping_cache, "w") - self.ping_cache_fd.write(f"{host}\n") - self.ping_cache_fd.flush() - - def _write_syn_result(self, data): - if self.syn_cache_fd is None: - self.helpers.backup_file(self.syn_cache) - self.syn_cache_fd = open(self.syn_cache, "w") - self.syn_cache_fd.write(f"{data}\n") - self.syn_cache_fd.flush() diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py deleted file mode 100644 index ffacb8c64..000000000 --- a/bbot/modules/massdns.py +++ /dev/null @@ -1,417 +0,0 @@ -import json -import random -import subprocess -import regex as re - -from bbot.modules.templates.subdomain_enum import subdomain_enum - - -class massdns(subdomain_enum): - """ - This is BBOT's flagship subdomain enumeration module. - - It uses massdns to brute-force subdomains. - At the end of a scan, it will leverage BBOT's word cloud to recursively discover target-specific subdomain mutations. - - Each subdomain discovered via mutations is tagged with the "mutation" tag. This tag indicates the depth at which - the mutation was found. I.e. the first mutation will be tagged "mutation-1". The second one (a mutation of a - mutation) will be "mutation-2". Mutations of mutations of mutations will be "mutation-3", etc. - - This is especially use for bug bounties because it enables you to recognize distant/rare subdomains at a glance. - Subdomains with higher mutation levels are more likely to be distant/rare or never-before-seen. - """ - - flags = ["subdomain-enum", "passive", "aggressive"] - watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] - meta = {"description": "Brute-force subdomains with massdns (highly effective)"} - options = { - "wordlist": "https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/subdomains-top1million-5000.txt", - "max_resolvers": 1000, - "max_mutations": 500, - "max_depth": 5, - } - options_desc = { - "wordlist": "Subdomain wordlist URL", - "max_resolvers": "Number of concurrent massdns resolvers", - "max_mutations": "Max number of smart mutations per subdomain", - "max_depth": "How many subdomains deep to brute force, i.e. 
5.4.3.2.1.evilcorp.com", - } - subdomain_file = None - deps_common = ["massdns"] - reject_wildcards = "strict" - _qsize = 10000 - - digit_regex = re.compile(r"\d+") - - async def setup(self): - self.found = dict() - self.mutations_tried = set() - self.source_events = self.helpers.make_target() - self.subdomain_file = await self.helpers.wordlist(self.config.get("wordlist")) - self.subdomain_list = set(self.helpers.read_file(self.subdomain_file)) - - ms_on_prem_string_file = self.helpers.wordlist_dir / "ms_on_prem_subdomains.txt" - ms_on_prem_strings = set(self.helpers.read_file(ms_on_prem_string_file)) - self.subdomain_list.update(ms_on_prem_strings) - - self.max_resolvers = self.config.get("max_resolvers", 1000) - self.max_mutations = self.config.get("max_mutations", 500) - self.max_depth = max(1, self.config.get("max_depth", 5)) - nameservers_url = ( - "https://raw.githubusercontent.com/blacklanternsecurity/public-dns-servers/master/nameservers.txt" - ) - self.resolver_file = await self.helpers.wordlist( - nameservers_url, - cache_hrs=24 * 7, - ) - self.devops_mutations = list(self.helpers.word_cloud.devops_mutations) - self._mutation_run = 1 - - return await super().setup() - - async def filter_event(self, event): - query = self.make_query(event) - eligible, reason = await self.eligible_for_enumeration(event) - - # limit brute force depth - subdomain_depth = self.helpers.subdomain_depth(query) + 1 - if subdomain_depth > self.max_depth: - eligible = False - reason = f"subdomain depth of *.{query} ({subdomain_depth}) > max_depth ({self.max_depth})" - - # don't brute-force things that look like autogenerated PTRs - if self.helpers.is_ptr(query): - eligible = False - reason = f'"{query}" looks like an autogenerated PTR' - - if eligible: - self.add_found(event) - # reject if already processed - if self.already_processed(query): - return False, f'Query "{query}" was already processed' - - if eligible: - self.processed.add(hash(query)) - return True, reason - return False, reason - - async def handle_event(self, event): - query = self.make_query(event) - self.source_events.add_target(event) - self.info(f"Brute-forcing subdomains for {query} (source: {event.data})") - for hostname in await self.massdns(query, self.subdomain_list): - await self.emit_result(hostname, event, query) - - def abort_if(self, event): - if not event.scope_distance == 0: - return True, "event is not in scope" - if "wildcard" in event.tags: - return True, "event is a wildcard" - if "unresolved" in event.tags: - return True, "event is unresolved" - return False, "" - - async def emit_result(self, result, source_event, query, tags=None): - if not result == source_event: - kwargs = {"abort_if": self.abort_if} - if tags is not None: - kwargs["tags"] = tags - await self.emit_event(result, "DNS_NAME", source_event, **kwargs) - - def already_processed(self, hostname): - if hash(hostname) in self.processed: - return True - return False - - async def massdns(self, domain, subdomains): - subdomains = list(subdomains) - - domain_wildcard_rdtypes = set() - for _domain, rdtypes in (await self.helpers.is_wildcard_domain(domain)).items(): - for rdtype, results in rdtypes.items(): - if results: - domain_wildcard_rdtypes.add(rdtype) - if any([r in domain_wildcard_rdtypes for r in ("A", "CNAME")]): - self.info( - f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(domain_wildcard_rdtypes)})" - ) - self.found.pop(domain, None) - return [] - else: - self.log.trace(f"{domain}: A is not in 
domain_wildcard_rdtypes:{domain_wildcard_rdtypes}") - - # before we start, do a canary check for wildcards - abort_msg = f"Aborting massdns on {domain} due to false positive" - canary_result = await self._canary_check(domain) - if canary_result: - self.info(abort_msg + f": {canary_result}") - return [] - else: - self.log.trace(f"Canary result for {domain}: {canary_result}") - - results = [] - async for hostname, ip, rdtype in self._massdns(domain, subdomains): - # allow brute-forcing of wildcard domains - # this is dead code but it's kinda cool so it can live here - if rdtype in domain_wildcard_rdtypes: - # skip wildcard checking on multi-level subdomains for performance reasons - stem = hostname.split(domain)[0].strip(".") - if "." in stem: - self.debug(f"Skipping {hostname}:A because it may be a wildcard (reason: performance)") - continue - wildcard_rdtypes = await self.helpers.is_wildcard(hostname, ips=(ip,), rdtype=rdtype) - if rdtype in wildcard_rdtypes: - self.debug(f"Skipping {hostname}:{rdtype} because it's a wildcard") - continue - results.append(hostname) - - # do another canary check for good measure - if len(results) > 50: - canary_result = await self._canary_check(domain) - if canary_result: - self.info(abort_msg + f": {canary_result}") - return [] - else: - self.log.trace(f"Canary result for {domain}: {canary_result}") - - # abort if there are a suspiciously high number of results - # (the results are over 2000, and this is more than 20 percent of the input size) - if len(results) > 2000: - if len(results) / len(subdomains) > 0.2: - self.info( - f"Aborting because the number of results ({len(results):,}) is suspiciously high for the length of the wordlist ({len(subdomains):,})" - ) - return [] - else: - self.info( - f"{len(results):,} results returned from massdns against {domain} (wordlist size = {len(subdomains):,})" - ) - - # everything checks out - return results - - async def _canary_check(self, domain, num_checks=50): - random_subdomains = list(self.gen_random_subdomains(num_checks)) - self.verbose(f"Testing {len(random_subdomains):,} canaries against {domain}") - canary_results = [h async for h, d, r in self._massdns(domain, random_subdomains)] - self.log.trace(f"canary results for {domain}: {canary_results}") - resolved_canaries = self.helpers.resolve_batch(canary_results) - self.log.trace(f"resolved canary results for {domain}: {canary_results}") - async for query, result in resolved_canaries: - if result: - await resolved_canaries.aclose() - result = f"{query}:{result}" - self.log.trace(f"Found false positive: {result}") - return result - self.log.trace(f"Passed canary check for {domain}") - return False - - async def _massdns(self, domain, subdomains): - """ - { - "name": "www.blacklanternsecurity.com.", - "type": "A", - "class": "IN", - "status": "NOERROR", - "data": { - "answers": [ - { - "ttl": 3600, - "type": "CNAME", - "class": "IN", - "name": "www.blacklanternsecurity.com.", - "data": "blacklanternsecurity.github.io." 
- }, - { - "ttl": 3600, - "type": "A", - "class": "IN", - "name": "blacklanternsecurity.github.io.", - "data": "185.199.108.153" - } - ] - }, - "resolver": "168.215.165.186:53" - } - """ - if self.scan.stopping: - return - - command = ( - "massdns", - "-r", - self.resolver_file, - "-s", - self.max_resolvers, - "-t", - "A", - "-o", - "J", - "-q", - ) - subdomains = self.gen_subdomains(subdomains, domain) - hosts_yielded = set() - async for line in self.run_process_live(command, stderr=subprocess.DEVNULL, input=subdomains): - try: - j = json.loads(line) - except json.decoder.JSONDecodeError: - self.debug(f"Failed to decode line: {line}") - continue - answers = j.get("data", {}).get("answers", []) - if type(answers) == list and len(answers) > 0: - answer = answers[0] - hostname = answer.get("name", "").strip(".").lower() - if hostname.endswith(f".{domain}"): - data = answer.get("data", "") - rdtype = answer.get("type", "").upper() - # avoid garbage answers like this: - # 8AAAA queries have been locally blocked by dnscrypt-proxy/Set block_ipv6 to false to disable this feature - if data and rdtype and not " " in data: - hostname_hash = hash(hostname) - if hostname_hash not in hosts_yielded: - hosts_yielded.add(hostname_hash) - yield hostname, data, rdtype - - async def finish(self): - found = sorted(self.found.items(), key=lambda x: len(x[-1]), reverse=True) - # if we have a lot of rounds to make, don't try mutations on less-populated domains - trimmed_found = [] - if found: - avg_subdomains = sum([len(subdomains) for domain, subdomains in found[:50]]) / len(found[:50]) - for i, (domain, subdomains) in enumerate(found): - # accept domains that are in the top 50 or have more than 5 percent of the average number of subdomains - if i < 50 or (len(subdomains) > 1 and len(subdomains) >= (avg_subdomains * 0.05)): - trimmed_found.append((domain, subdomains)) - else: - self.verbose( - f"Skipping mutations on {domain} because it only has {len(subdomains):,} subdomain(s) (avg: {avg_subdomains:,})" - ) - - base_mutations = set() - found_mutations = False - try: - for i, (domain, subdomains) in enumerate(trimmed_found): - self.verbose(f"{domain} has {len(subdomains):,} subdomains") - # keep looping as long as we're finding things - while 1: - max_mem_percent = 90 - mem_status = self.helpers.memory_status() - # abort if we don't have the memory - mem_percent = mem_status.percent - if mem_percent > max_mem_percent: - free_memory = mem_status.available - free_memory_human = self.helpers.bytes_to_human(free_memory) - assert ( - False - ), f"Cannot proceed with DNS mutations because system memory is at {mem_percent:.1f}% ({free_memory_human} remaining)" - - query = domain - domain_hash = hash(domain) - if self.scan.stopping: - return - - mutations = set(base_mutations) - - def add_mutation(_domain_hash, m): - h = hash((_domain_hash, m)) - if h not in self.mutations_tried: - self.mutations_tried.add(h) - mutations.add(m) - - # try every subdomain everywhere else - for _domain, _subdomains in found: - if _domain == domain: - continue - for s in _subdomains: - first_segment = s.split(".")[0] - # skip stuff with lots of numbers (e.g. 
PTRs) - digits = self.digit_regex.findall(first_segment) - excessive_digits = len(digits) > 2 - long_digits = any(len(d) > 3 for d in digits) - if excessive_digits or long_digits: - continue - add_mutation(domain_hash, first_segment) - for word in self.helpers.extract_words( - first_segment, word_regexes=self.helpers.word_cloud.dns_mutator.extract_word_regexes - ): - add_mutation(domain_hash, word) - - # numbers + devops mutations - for mutation in self.helpers.word_cloud.mutations( - subdomains, cloud=False, numbers=3, number_padding=1 - ): - for delimiter in ("", ".", "-"): - m = delimiter.join(mutation).lower() - add_mutation(domain_hash, m) - - # special dns mutator - for subdomain in self.helpers.word_cloud.dns_mutator.mutations( - subdomains, max_mutations=self.max_mutations - ): - add_mutation(domain_hash, subdomain) - - if mutations: - self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") - results = list(await self.massdns(query, mutations)) - for hostname in results: - source_event = self.source_events.get(hostname) - if source_event is None: - self.warning(f"Could not correlate source event from: {hostname}") - source_event = self.scan.root_event - await self.emit_result( - hostname, source_event, query, tags=[f"mutation-{self._mutation_run}"] - ) - if results: - found_mutations = True - continue - break - except AssertionError as e: - self.warning(e) - - if found_mutations: - self._mutation_run += 1 - - def add_found(self, host): - if not isinstance(host, str): - host = host.data - if self.helpers.is_subdomain(host): - subdomain, domain = host.split(".", 1) - is_ptr = self.helpers.is_ptr(subdomain) - in_scope = self.scan.in_scope(domain) - if in_scope and not is_ptr: - try: - self.found[domain].add(subdomain) - except KeyError: - self.found[domain] = set((subdomain,)) - - async def gen_subdomains(self, prefixes, domain): - for p in prefixes: - d = f"{p}.{domain}" - yield d - - def gen_random_subdomains(self, n=50): - delimiters = (".", "-") - lengths = list(range(3, 8)) - for i in range(0, max(0, n - 5)): - d = delimiters[i % len(delimiters)] - l = lengths[i % len(lengths)] - segments = list(random.choice(self.devops_mutations) for _ in range(l)) - segments.append(self.helpers.rand_string(length=8, digits=False)) - subdomain = d.join(segments) - yield subdomain - for _ in range(5): - yield self.helpers.rand_string(length=8, digits=False) - - def has_excessive_digits(self, d): - """ - Identifies dns names with excessive numbers, e.g.: - - w1-2-3.evilcorp.com - - ptr1234.evilcorp.com - """ - digits = self.digit_regex.findall(d) - excessive_digits = len(digits) > 2 - long_digits = any(len(d) > 3 for d in digits) - if excessive_digits or long_digits: - return True - return False diff --git a/bbot/modules/myssl.py b/bbot/modules/myssl.py index a08c885ed..047ced928 100644 --- a/bbot/modules/myssl.py +++ b/bbot/modules/myssl.py @@ -5,7 +5,11 @@ class myssl(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query myssl.com's API for subdomains"} + meta = { + "description": "Query myssl.com's API for subdomains", + "created_date": "2023-07-10", + "author": "@TheTechromancer", + } base_url = "https://myssl.com/api/v1/discover_sub_domain" diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 726d633fe..5f2bac729 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -19,7 +19,11 @@ class newsletters(BaseModule): 
watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] flags = ["active", "safe"] - meta = {"description": "Searches for Newsletter Submission Entry Fields on Websites"} + meta = { + "description": "Searches for Newsletter Submission Entry Fields on Websites", + "created_date": "2024-02-02", + "author": "@stryker2k2", + } # Parse through Website to find a Text Entry Box of 'type = email' # and ensure that there is placeholder text within it. @@ -48,4 +52,9 @@ async def handle_event(self, event): if result: description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" data = {"host": str(_event.host), "description": description, "url": _event.data["url"]} - await self.emit_event(data, "FINDING", _event) + await self.emit_event( + data, + "FINDING", + _event, + context="{module} searched HTTP_RESPONSE and identified {event.type}: a Newsletter Submission Form that could be used for email bombing attacks", + ) diff --git a/bbot/modules/nmap.py b/bbot/modules/nmap.py deleted file mode 100644 index ccdb0974e..000000000 --- a/bbot/modules/nmap.py +++ /dev/null @@ -1,138 +0,0 @@ -from lxml import etree -from bbot.modules.templates.portscanner import portscanner - - -class nmap(portscanner): - watched_events = ["IP_ADDRESS", "DNS_NAME", "IP_RANGE"] - produced_events = ["OPEN_TCP_PORT"] - flags = ["active", "portscan", "aggressive", "web-thorough"] - meta = {"description": "Port scan with nmap. By default, scans top 100 ports."} - options = { - "top_ports": 100, - "ports": "", - "timing": "T4", - "skip_host_discovery": True, - } - options_desc = { - "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')", - "ports": "Ports to scan", - "timing": "-T<0-5>: Set timing template (higher is faster)", - "skip_host_discovery": "skip host discovery (-Pn)", - } - _max_event_handlers = 2 - batch_size = 256 - _priority = 2 - - deps_apt = ["nmap"] - deps_pip = ["lxml~=4.9.2"] - - async def setup(self): - self.helpers.depsinstaller.ensure_root(message="Nmap requires root privileges") - self.ports = self.config.get("ports", "") - self.timing = self.config.get("timing", "T4") - self.top_ports = self.config.get("top_ports", 100) - self.skip_host_discovery = self.config.get("skip_host_discovery", True) - return await super().setup() - - async def handle_batch(self, *events): - target = self.helpers.make_target(*events) - targets = list(set(str(e.data) for e in events)) - command, output_file = self.construct_command(targets) - try: - await self.run_process(command, sudo=True) - for host in self.parse_nmap_xml(output_file): - source_event = None - for h in [host.address] + host.hostnames: - source_event = target.get(h) - if source_event is not None: - break - if source_event is None: - self.warning(f"Failed to correlate source event from {host}") - source_event = self.scan.root_event - for port in host.open_ports: - port_number = int(port.split("/")[0]) - netloc = self.helpers.make_netloc(host.address, port_number) - await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) - for hostname in host.hostnames: - netloc = self.helpers.make_netloc(hostname, port_number) - await self.emit_event(netloc, "OPEN_TCP_PORT", source=source_event) - finally: - output_file.unlink(missing_ok=True) - - def construct_command(self, targets): - ports = self.config.get("ports", "") - top_ports = self.config.get("top_ports", "") - temp_filename = self.helpers.temp_filename(extension="xml") - command = [ - "nmap", - "--noninteractive", - "--excludefile", - 
str(self.exclude_file), - "-n", - "--resolve-all", - f"-{self.timing}", - "-oX", - temp_filename, - ] - if self.skip_host_discovery: - command += ["-Pn"] - if ports: - command += ["-p", ports] - else: - command += ["--top-ports", top_ports] - command += targets - return command, temp_filename - - def parse_nmap_xml(self, xml_file): - try: - with open(xml_file, "rb") as f: - et = etree.parse(f) - for host in et.iter("host"): - yield NmapHost(host) - except Exception as e: - self.warning(f"Error parsing Nmap XML at {xml_file}: {e}") - - async def cleanup(self): - resume_file = self.helpers.current_dir / "resume.cfg" - resume_file.unlink(missing_ok=True) - - -class NmapHost(str): - def __init__(self, xml): - self.etree = xml - - # convenient host information - self.status = self.etree.find("status").attrib.get("state", "down") - self.address = self.etree.find("address").attrib.get("addr", "") - self.hostnames = [] - for hostname in self.etree.findall("hostnames/hostname"): - hostname = hostname.attrib.get("name") - if hostname and not hostname in self.hostnames: - self.hostnames.append(hostname) - - # convenient port information - self.scripts = dict() - self.open_ports = [] - self.closed_ports = [] - self.filtered_ports = [] - for port in self.etree.findall("ports/port"): - port_name = port.attrib.get("portid", "0") + "/" + port.attrib.get("protocol", "tcp").lower() - port_status = port.find("state").attrib.get("state", "closed") - if port_status in ("open", "closed", "filtered"): - getattr(self, f"{port_status}_ports").append(port_name) - for script in port.iter("script"): - script_name = script.attrib.get("id", "") - script_output = script.attrib.get("output", "") - if script_name: - try: - self.scripts[port_name][script_name] = script_output - except KeyError: - self.scripts[port_name] = {script_name: script_output} - - def __str__(self): - address = self.address + (" " if self.address else "") - hostnames = "(" + ", ".join(self.hostnames) + ")" if self.hostnames else "" - return f"{address}{hostnames}" - - def __repr__(self): - return str(self) diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 93f622566..1c7d4cd89 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -69,22 +69,30 @@ class ntlm(BaseModule): watched_events = ["URL", "HTTP_RESPONSE"] produced_events = ["FINDING", "DNS_NAME"] flags = ["active", "safe", "web-basic"] - meta = {"description": "Watch for HTTP endpoints that support NTLM authentication"} + meta = { + "description": "Watch for HTTP endpoints that support NTLM authentication", + "created_date": "2022-07-25", + "author": "@liquidsec", + } options = {"try_all": False} options_desc = {"try_all": "Try every NTLM endpoint"} in_scope_only = True async def setup(self): - self.processed = set() self.found = set() self.try_all = self.config.get("try_all", False) return True async def handle_event(self, event): found_hash = hash(f"{event.host}:{event.port}") + if event.type == "URL": + url = event.data + else: + url = event.data["url"] + agen = self.handle_url(url, event) if found_hash not in self.found: - for result, request_url in await self.handle_url(event): + async for result, request_url, num_urls in agen: if result and request_url: self.found.add(found_hash) await self.emit_event( @@ -94,11 +102,13 @@ async def handle_event(self, event): "description": f"NTLM AUTH: {result}", }, "FINDING", - source=event, + parent=event, + context=f"{{module}} tried {num_urls:,} NTLM endpoints against {url} and identified NTLM auth ({{event.type}}): {result}", 
) fqdn = result.get("FQDN", "") if fqdn: - await self.emit_event(fqdn, "DNS_NAME", source=event) + await self.emit_event(fqdn, "DNS_NAME", parent=event) + await agen.aclose() break async def filter_event(self, event): @@ -111,41 +121,22 @@ async def filter_event(self, event): return True return False - async def handle_url(self, event): - if event.type == "URL": - urls = { - event.data, - } - else: - urls = { - event.data["url"], - } + async def handle_url(self, url, event): + urls = {url} if self.try_all: for endpoint in ntlm_discovery_endpoints: - urls.add(f"{event.parsed.scheme}://{event.parsed.netloc}/{endpoint}") - - tasks = [] - for url in urls: - url_hash = hash(url) - if url_hash in self.processed: - continue - self.processed.add(url_hash) - tasks.append(self.helpers.create_task(self.check_ntlm(url))) - - return await self.helpers.gather(*tasks) + urls.add(f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/{endpoint}") - async def check_ntlm(self, test_url): - # use lower timeout value - http_timeout = self.config.get("httpx_timeout", 5) - r = await self.helpers.request(test_url, headers=NTLM_test_header, allow_redirects=False, timeout=http_timeout) - ntlm_resp = r.headers.get("WWW-Authenticate", "") - if ntlm_resp: - ntlm_resp_b64 = max(ntlm_resp.split(","), key=lambda x: len(x)).split()[-1] - try: - ntlm_resp_decoded = self.helpers.ntlm.ntlmdecode(ntlm_resp_b64) - if ntlm_resp_decoded: - return ntlm_resp_decoded, test_url - except NTLMError as e: - self.verbose(str(e)) - return None, test_url - return None, test_url + num_urls = len(urls) + async for url, response in self.helpers.request_batch( + urls, headers=NTLM_test_header, allow_redirects=False, timeout=self.http_timeout + ): + ntlm_resp = response.headers.get("WWW-Authenticate", "") + if ntlm_resp: + ntlm_resp_b64 = max(ntlm_resp.split(","), key=lambda x: len(x)).split()[-1] + try: + ntlm_resp_decoded = self.helpers.ntlm.ntlmdecode(ntlm_resp_b64) + if ntlm_resp_decoded: + yield ntlm_resp_decoded, url, num_urls + except NTLMError as e: + self.verbose(str(e)) diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index fd6188acd..d4dfcfb4e 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -7,7 +7,11 @@ class OAUTH(BaseModule): watched_events = ["DNS_NAME", "URL_UNVERIFIED"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "active", "safe"] - meta = {"description": "Enumerate OAUTH and OpenID Connect services"} + meta = { + "description": "Enumerate OAUTH and OpenID Connect services", + "created_date": "2023-07-12", + "author": "@TheTechromancer", + } options = {"try_all": False} options_desc = {"try_all": "Check for OAUTH/IODC on every subdomain and URL."} @@ -63,37 +67,53 @@ async def handle_event(self, event): "url": url, }, "FINDING", - source=event, + parent=event, ) if finding_event: finding_event.source_domain = source_domain - await self.emit_event(finding_event) + await self.emit_event( + finding_event, + context=f'{{module}} identified {{event.type}}: OpenID Connect Endpoint for "{source_domain}" at {url}', + ) url_event = self.make_event( - token_endpoint, "URL_UNVERIFIED", source=event, tags=["affiliate", "oauth-token-endpoint"] + token_endpoint, "URL_UNVERIFIED", parent=event, tags=["affiliate", "oauth-token-endpoint"] ) if url_event: url_event.source_domain = source_domain - await self.emit_event(url_event) + await self.emit_event( + url_event, + context=f'{{module}} identified OpenID Connect Endpoint for "{source_domain}" at 
{{event.type}}: {url}', + ) for result in oidc_results: if result not in (domain, event.data): event_type = "URL_UNVERIFIED" if self.helpers.is_url(result) else "DNS_NAME" - await self.emit_event(result, event_type, source=event, tags=["affiliate"]) + await self.emit_event( + result, + event_type, + parent=event, + tags=["affiliate"], + context=f'{{module}} analyzed OpenID configuration for "{source_domain}" and found {{event.type}}: {{event.data}}', + ) for oauth_task in oauth_tasks: url = await oauth_task if url: + description = f"Potentially Sprayable OAUTH Endpoint (domain: {source_domain}) at {url}" oauth_finding = self.make_event( { - "description": f"Potentially Sprayable OAUTH Endpoint (domain: {source_domain}) at {url}", + "description": description, "host": event.host, "url": url, }, "FINDING", - source=event, + parent=event, ) if oauth_finding: oauth_finding.source_domain = source_domain - await self.emit_event(oauth_finding) + await self.emit_event( + oauth_finding, + context=f"{{module}} identified {{event.type}}: {description}", + ) def url_and_base(self, url): yield url diff --git a/bbot/modules/otx.py b/bbot/modules/otx.py index 72f2e1d5b..5a2319e3d 100644 --- a/bbot/modules/otx.py +++ b/bbot/modules/otx.py @@ -5,7 +5,11 @@ class otx(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query otx.alienvault.com for subdomains"} + meta = { + "description": "Query otx.alienvault.com for subdomains", + "created_date": "2022-08-24", + "author": "@TheTechromancer", + } base_url = "https://otx.alienvault.com" diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index 0372dc1bd..a150c029d 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -33,7 +33,11 @@ class asset_inventory(CSV): "HTTP_RESPONSE", ] produced_events = ["IP_ADDRESS", "OPEN_TCP_PORT"] - meta = {"description": "Merge hosts, open ports, technologies, findings, etc. into a single asset inventory CSV"} + meta = { + "description": "Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV", + "created_date": "2022-09-30", + "author": "@liquidsec", + } options = {"output_file": "", "use_previous": False, "recheck": False, "summary_netmask": 16} options_desc = { "output_file": "Set a custom output file", @@ -179,25 +183,35 @@ async def finish(self): self.add_custom_headers(list(asset.custom_fields)) if not is_ip(asset.host): host_event = self.make_event( - asset.host, "DNS_NAME", source=self.scan.root_event, raise_error=True + asset.host, "DNS_NAME", parent=self.scan.root_event, raise_error=True + ) + await self.emit_event( + host_event, context="{module} emitted previous result: {event.type}: {event.data}" ) - await self.emit_event(host_event) for port in asset.ports: netloc = self.helpers.make_netloc(asset.host, port) - open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=host_event) + open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", parent=host_event) if open_port_event: - await self.emit_event(open_port_event) + await self.emit_event( + open_port_event, + context="{module} emitted previous result: {event.type}: {event.data}", + ) else: for ip in asset.ip_addresses: ip_event = self.make_event( - ip, "IP_ADDRESS", source=self.scan.root_event, raise_error=True + ip, "IP_ADDRESS", parent=self.scan.root_event, raise_error=True + ) + await self.emit_event( + ip_event, context="{module} emitted previous result: {event.type}: {event.data}" ) - await self.emit_event(ip_event) for port in asset.ports: netloc = self.helpers.make_netloc(ip, port) - open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", source=ip_event) + open_port_event = self.make_event(netloc, "OPEN_TCP_PORT", parent=ip_event) if open_port_event: - await self.emit_event(open_port_event) + await self.emit_event( + open_port_event, + context="{module} emitted previous result: {event.type}: {event.data}", + ) else: self.warning( f"use_previous=True was set but no previous asset inventory was found at {self.output_file}" diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index 9459a880f..8a6eba9eb 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -8,6 +8,7 @@ class BaseOutputModule(BaseModule): _type = "output" scope_distance_modifier = None _stats_exclude = True + _shuffle_incoming_queue = False def human_event_str(self, event): event_type = f"[{event.type}]" @@ -29,19 +30,30 @@ def _event_precheck(self, event): if self.target_only: if "target" not in event.tags: return False, "it did not meet target_only filter criteria" - # exclude certain URLs (e.g. javascript): - if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags: - return False, "its extension was listed in url_extension_httpx_only" - # output module specific stuff - # omitted events such as HTTP_RESPONSE etc. - if event._omit and not event.type in self.get_watched_events(): - return False, "_omit is True" + ### begin output-module specific ### # force-output certain events to the graph if self._is_graph_important(event): return True, "event is critical to the graph" + # exclude certain URLs (e.g. 
javascript): + # TODO: revisit this after httpx rework + if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags: + return False, (f"Omitting {event} from output because it's marked as httpx-only") + + if event._omit: + return False, "_omit is True" + + # omit certain event types + if event.type in self.scan.omitted_event_types: + if "target" in event.tags: + self.debug(f"Allowing omitted event: {event} because it's a target") + elif event.type in self.get_watched_events(): + self.debug(f"Allowing omitted event: {event} because its type is explicitly in watched_events") + else: + return False, "its type is omitted in the config" + # internal events like those from speculate, ipneighbor # or events that are over our report distance if event._internal: diff --git a/bbot/modules/output/csv.py b/bbot/modules/output/csv.py index fe8af7e89..d48e9cd1d 100644 --- a/bbot/modules/output/csv.py +++ b/bbot/modules/output/csv.py @@ -6,11 +6,19 @@ class CSV(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to CSV"} + meta = {"description": "Output to CSV", "created_date": "2022-04-07", "author": "@TheTechromancer"} options = {"output_file": ""} options_desc = {"output_file": "Output to CSV file"} - header_row = ["Event type", "Event data", "IP Address", "Source Module", "Scope Distance", "Event Tags"] + header_row = [ + "Event type", + "Event data", + "IP Address", + "Source Module", + "Scope Distance", + "Event Tags", + "Discovery Path", + ] filename = "output.csv" accept_dupes = False @@ -56,6 +64,7 @@ async def handle_event(self, event): "Source Module": str(getattr(event, "module_sequence", "")), "Scope Distance": str(getattr(event, "scope_distance", "")), "Event Tags": ",".join(sorted(list(getattr(event, "tags", [])))), + "Discovery Path": " --> ".join(getattr(event, "discovery_path", [])), } ) diff --git a/bbot/modules/output/discord.py b/bbot/modules/output/discord.py index 06684bd30..3a921a900 100644 --- a/bbot/modules/output/discord.py +++ b/bbot/modules/output/discord.py @@ -3,7 +3,11 @@ class Discord(WebhookOutputModule): watched_events = ["*"] - meta = {"description": "Message a Discord channel when certain events are encountered"} + meta = { + "description": "Message a Discord channel when certain events are encountered", + "created_date": "2023-08-14", + "author": "@TheTechromancer", + } options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"} options_desc = { "webhook_url": "Discord webhook URL", diff --git a/bbot/modules/output/emails.py b/bbot/modules/output/emails.py index ae86f4656..60d9a153c 100644 --- a/bbot/modules/output/emails.py +++ b/bbot/modules/output/emails.py @@ -4,7 +4,12 @@ class Emails(TXT): watched_events = ["EMAIL_ADDRESS"] - meta = {"description": "Output any email addresses found belonging to the target domain"} + flags = ["email-enum"] + meta = { + "description": "Output any email addresses found belonging to the target domain", + "created_date": "2023-12-23", + "author": "@domwhewell-sage", + } options = {"output_file": ""} options_desc = {"output_file": "Output to file"} in_scope_only = True diff --git a/bbot/modules/output/http.py b/bbot/modules/output/http.py index 18182056a..e4bc79562 100644 --- a/bbot/modules/output/http.py +++ b/bbot/modules/output/http.py @@ -1,11 +1,14 @@ -from httpx import RequestError - +from bbot.errors import WebError from bbot.modules.output.base import BaseOutputModule class HTTP(BaseOutputModule): watched_events = ["*"] - meta = 
{"description": "Send every event to a custom URL via a web request"} + meta = { + "description": "Send every event to a custom URL via a web request", + "created_date": "2022-04-13", + "author": "@TheTechromancer", + } options = { "url": "", "method": "POST", @@ -59,6 +62,6 @@ async def handle_event(self, event): raise_error=True, ) break - except RequestError as e: + except WebError as e: self.warning(f"Error sending {event}: {e}, retrying...") await self.helpers.sleep(1) diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index 96905209d..a35fa6aed 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -6,7 +6,11 @@ class JSON(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to Newline-Delimited JSON (NDJSON)"} + meta = { + "description": "Output to Newline-Delimited JSON (NDJSON)", + "created_date": "2022-04-07", + "author": "@TheTechromancer", + } options = {"output_file": "", "siem_friendly": False} options_desc = { "output_file": "Output to file", diff --git a/bbot/modules/output/neo4j.py b/bbot/modules/output/neo4j.py index 2b0548ea9..87220d26d 100644 --- a/bbot/modules/output/neo4j.py +++ b/bbot/modules/output/neo4j.py @@ -26,7 +26,7 @@ class neo4j(BaseOutputModule): """ watched_events = ["*"] - meta = {"description": "Output to Neo4j"} + meta = {"description": "Output to Neo4j", "created_date": "2022-04-07", "author": "@TheTechromancer"} options = {"uri": "bolt://localhost:7687", "username": "neo4j", "password": "bbotislife"} options_desc = { "uri": "Neo4j server + port", @@ -53,7 +53,7 @@ async def setup(self): async def handle_event(self, event): # create events - src_id = await self.merge_event(event.get_source(), id_only=True) + src_id = await self.merge_event(event.get_parent(), id_only=True) dst_id = await self.merge_event(event) # create relationship cypher = f""" diff --git a/bbot/modules/output/python.py b/bbot/modules/output/python.py index 3aebfeb52..81ceb360e 100644 --- a/bbot/modules/output/python.py +++ b/bbot/modules/output/python.py @@ -3,7 +3,7 @@ class python(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output via Python API"} + meta = {"description": "Output via Python API", "created_date": "2022-09-13", "author": "@TheTechromancer"} async def _worker(self): pass diff --git a/bbot/modules/output/slack.py b/bbot/modules/output/slack.py index 1dda46942..438ef4973 100644 --- a/bbot/modules/output/slack.py +++ b/bbot/modules/output/slack.py @@ -5,7 +5,11 @@ class Slack(WebhookOutputModule): watched_events = ["*"] - meta = {"description": "Message a Slack channel when certain events are encountered"} + meta = { + "description": "Message a Slack channel when certain events are encountered", + "created_date": "2023-08-14", + "author": "@TheTechromancer", + } options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"} options_desc = { "webhook_url": "Discord webhook URL", diff --git a/bbot/modules/output/splunk.py b/bbot/modules/output/splunk.py index 00d70876b..0c0a0dd80 100644 --- a/bbot/modules/output/splunk.py +++ b/bbot/modules/output/splunk.py @@ -1,11 +1,14 @@ -from httpx import RequestError - +from bbot.errors import WebError from bbot.modules.output.base import BaseOutputModule class Splunk(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Send every event to a splunk instance through HTTP Event Collector"} + meta = { + "description": "Send every event to a splunk instance through HTTP Event Collector", + 
"created_date": "2024-02-17", + "author": "@w0Tx", + } options = { "url": "", "hectoken": "", @@ -54,6 +57,6 @@ async def handle_event(self, event): raise_error=True, ) break - except RequestError as e: + except WebError as e: self.warning(f"Error sending {event}: {e}, retrying...") await self.helpers.sleep(1) diff --git a/bbot/modules/output/subdomains.py b/bbot/modules/output/subdomains.py index 7bcbb847c..6c2bfb0b0 100644 --- a/bbot/modules/output/subdomains.py +++ b/bbot/modules/output/subdomains.py @@ -4,7 +4,12 @@ class Subdomains(TXT): watched_events = ["DNS_NAME", "DNS_NAME_UNRESOLVED"] - meta = {"description": "Output only resolved, in-scope subdomains"} + flags = ["subdomain-enum"] + meta = { + "description": "Output only resolved, in-scope subdomains", + "created_date": "2023-07-31", + "author": "@TheTechromancer", + } options = {"output_file": "", "include_unresolved": False} options_desc = {"output_file": "Output to file", "include_unresolved": "Include unresolved subdomains in output"} accept_dupes = False diff --git a/bbot/modules/output/teams.py b/bbot/modules/output/teams.py index 3f018038d..e4f87fb71 100644 --- a/bbot/modules/output/teams.py +++ b/bbot/modules/output/teams.py @@ -3,7 +3,11 @@ class Teams(WebhookOutputModule): watched_events = ["*"] - meta = {"description": "Message a Teams channel when certain events are encountered"} + meta = { + "description": "Message a Teams channel when certain events are encountered", + "created_date": "2023-08-14", + "author": "@TheTechromancer", + } options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"} options_desc = { "webhook_url": "Discord webhook URL", diff --git a/bbot/modules/output/web_report.py b/bbot/modules/output/web_report.py index 793f26c32..92ff98289 100644 --- a/bbot/modules/output/web_report.py +++ b/bbot/modules/output/web_report.py @@ -5,7 +5,11 @@ class web_report(BaseOutputModule): watched_events = ["URL", "TECHNOLOGY", "FINDING", "VULNERABILITY", "VHOST"] - meta = {"description": "Create a markdown report with web assets"} + meta = { + "description": "Create a markdown report with web assets", + "created_date": "2023-02-08", + "author": "@liquidsec", + } options = { "output_file": "", "css_theme_file": "https://cdnjs.cloudflare.com/ajax/libs/github-markdown-css/5.1.0/github-markdown.min.css", @@ -34,36 +38,36 @@ async def setup(self): async def handle_event(self, event): if event.type == "URL": - parsed = event.parsed + parsed = event.parsed_url host = f"{parsed.scheme}://{parsed.netloc}/" if host not in self.web_assets.keys(): self.web_assets[host] = {"URL": []} - source_chain = [] + parent_chain = [] - current_parent = event.source + current_parent = event.parent while not current_parent.type == "SCAN": - source_chain.append( + parent_chain.append( f" ({current_parent.module})---> [{current_parent.type}]:{html.escape(current_parent.pretty_string)}" ) - current_parent = current_parent.source + current_parent = current_parent.parent - source_chain.reverse() - source_chain_text = ( - "".join(source_chain) + parent_chain.reverse() + parent_chain_text = ( + "".join(parent_chain) + f" ({event.module})---> " + f"[{event.type}]:{html.escape(event.pretty_string)}" ) - self.web_assets[host]["URL"].append(f"**{html.escape(event.data)}**: {source_chain_text}") + self.web_assets[host]["URL"].append(f"**{html.escape(event.data)}**: {parent_chain_text}") else: - current_parent = event.source + current_parent = event.parent parsed = None while 1: if current_parent.type == "URL": - 
parsed = current_parent.parsed + parsed = current_parent.parsed_url break - current_parent = current_parent.source - if current_parent.source.type == "SCAN": + current_parent = current_parent.parent + if current_parent.parent.type == "SCAN": break if parsed: host = f"{parsed.scheme}://{parsed.netloc}/" diff --git a/bbot/modules/output/websocket.py b/bbot/modules/output/websocket.py index 76edcba4b..c8f54097a 100644 --- a/bbot/modules/output/websocket.py +++ b/bbot/modules/output/websocket.py @@ -7,7 +7,7 @@ class Websocket(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to websockets"} + meta = {"description": "Output to websockets", "created_date": "2022-04-15", "author": "@TheTechromancer"} options = {"url": "", "token": "", "preserve_graph": True} options_desc = { "url": "Web URL", diff --git a/bbot/modules/paramminer_cookies.py b/bbot/modules/paramminer_cookies.py index 251931f58..6a113f7f7 100644 --- a/bbot/modules/paramminer_cookies.py +++ b/bbot/modules/paramminer_cookies.py @@ -11,6 +11,8 @@ class paramminer_cookies(paramminer_headers): flags = ["active", "aggressive", "slow", "web-paramminer"] meta = { "description": "Smart brute-force to check for common HTTP cookie parameters", + "created_date": "2022-06-27", + "author": "@liquidsec", } options = { "wordlist": "", # default is defined within setup function diff --git a/bbot/modules/paramminer_getparams.py b/bbot/modules/paramminer_getparams.py index 7891e05cf..c596dd0ec 100644 --- a/bbot/modules/paramminer_getparams.py +++ b/bbot/modules/paramminer_getparams.py @@ -9,7 +9,11 @@ class paramminer_getparams(paramminer_headers): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] flags = ["active", "aggressive", "slow", "web-paramminer"] - meta = {"description": "Use smart brute-force to check for common HTTP GET parameters"} + meta = { + "description": "Use smart brute-force to check for common HTTP GET parameters", + "created_date": "2022-06-28", + "author": "@liquidsec", + } scanned_hosts = [] options = { "wordlist": "", # default is defined within setup function diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 561a05fe2..336ce002a 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -11,7 +11,11 @@ class paramminer_headers(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] flags = ["active", "aggressive", "slow", "web-paramminer"] - meta = {"description": "Use smart brute-force to check for common HTTP header parameters"} + meta = { + "description": "Use smart brute-force to check for common HTTP header parameters", + "created_date": "2022-04-15", + "author": "@pmueller", + } options = { "wordlist": "", # default is defined within setup function "http_extract": True, @@ -126,11 +130,13 @@ async def process_results(self, event, results): if reflection: tags = ["http_reflection"] description = f"[Paramminer] {self.compare_mode.capitalize()}: [{result}] Reasons: [{reasons}] Reflection: [{str(reflection)}]" + reflected = "reflected " if reflection else "" await self.emit_event( {"host": str(event.host), "url": url, "description": description}, "FINDING", event, tags=tags, + context=f'{{module}} scanned {url} and identified {{event.type}}: {reflected}{self.compare_mode} parameter: "{result}"', ) async def handle_event(self, event): @@ -199,11 +205,11 @@ async def load_extracted_words(self, body, content_type): if not body: return None if content_type and "json" in content_type.lower(): - 
return extract_params_json(body) + return extract_params_json(body, self.compare_mode) elif content_type and "xml" in content_type.lower(): - return extract_params_xml(body) + return extract_params_xml(body, self.compare_mode) else: - return set(await self.helpers.re.extract_params_html(body)) + return set(await self.helpers.re.extract_params_html(body, self.compare_mode)) async def binary_search(self, compare_helper, url, group, reasons=None, reflection=False): if reasons is None: @@ -239,7 +245,7 @@ async def finish(self): compare_helper = self.helpers.http_compare(url) except HttpCompareError as e: self.debug(f"Error initializing compare helper: {e}") - return + continue untested_matches_copy = untested_matches.copy() for i in untested_matches: h = hash(i + url) @@ -249,4 +255,5 @@ async def finish(self): results = await self.do_mining(untested_matches_copy, url, batch_size, compare_helper) except HttpCompareError as e: self.debug(f"Encountered HttpCompareError: [{e}] for URL [{url}]") + continue await self.process_results(event, results) diff --git a/bbot/modules/passivetotal.py b/bbot/modules/passivetotal.py index dfed92939..e22dfc3e5 100644 --- a/bbot/modules/passivetotal.py +++ b/bbot/modules/passivetotal.py @@ -5,7 +5,12 @@ class passivetotal(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the PassiveTotal API for subdomains", "auth_required": True} + meta = { + "description": "Query the PassiveTotal API for subdomains", + "created_date": "2022-08-08", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"username": "", "api_key": ""} options_desc = {"username": "RiskIQ Username", "api_key": "RiskIQ API Key"} diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index 78becbf0e..0c53c2ad4 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -5,11 +5,18 @@ class pgp(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] - meta = {"description": "Query common PGP servers for email addresses"} + meta = { + "description": "Query common PGP servers for email addresses", + "created_date": "2022-08-10", + "author": "@TheTechromancer", + } + # TODO: scan for Web Key Directory (/.well-known/openpgpkey/) options = { "search_urls": [ "https://keyserver.ubuntu.com/pks/lookup?fingerprint=on&op=vindex&search=<query>", "http://the.earth.li:11371/pks/lookup?fingerprint=on&op=vindex&search=<query>", + "https://pgpkeys.eu/pks/lookup?search=<query>&op=index", + "https://pgp.mit.edu/pks/lookup?search=<query>&op=index", ] } options_desc = {"search_urls": "PGP key servers to search"} @@ -18,18 +25,25 @@ async def handle_event(self, event): query = self.make_query(event) results = await self.query(query) if results: - for hostname in results: - if not hostname == event: - await self.emit_event(hostname, "EMAIL_ADDRESS", event, abort_if=self.abort_if) + for email, keyserver in results: + await self.emit_event( + email, + "EMAIL_ADDRESS", + event, + abort_if=self.abort_if, + context=f'{{module}} queried PGP keyserver {keyserver} for "{query}" and found {{event.type}}: {{event.data}}', + ) async def query(self, query): results = set() - for url in self.config.get("search_urls", []): - url = url.replace("<query>", self.helpers.quote(query)) + urls = self.config.get("search_urls", []) + urls = [url.replace("<query>", self.helpers.quote(query)) for url in urls] + async for url, response in self.helpers.request_batch(urls): + keyserver = 
self.helpers.urlparse(url).netloc - response = await self.helpers.request(url) if response is not None: for email in await self.helpers.re.extract_emails(response.text): email = email.lower() if email.endswith(query): - results.add(email) + results.add((email, keyserver)) return results diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py new file mode 100644 index 000000000..7a18306e9 --- /dev/null +++ b/bbot/modules/portscan.py @@ -0,0 +1,250 @@ +import json +import ipaddress +from contextlib import suppress +from radixtarget import RadixTarget + +from bbot.modules.base import BaseModule + + +class portscan(BaseModule): + flags = ["active", "portscan", "safe"] + watched_events = ["IP_ADDRESS", "IP_RANGE", "DNS_NAME"] + produced_events = ["OPEN_TCP_PORT"] + meta = { + "description": "Port scan with masscan. By default, scans top 100 ports.", + "created_date": "2024-05-15", + "author": "@TheTechromancer", + } + options = { + "top_ports": 100, + "ports": "", + # ping scan at 600 packets/s ~= private IP space in 8 hours + "rate": 300, + "wait": 5, + "ping_first": False, + "ping_only": False, + "adapter": "", + "adapter_ip": "", + "adapter_mac": "", + "router_mac": "", + } + options_desc = { + "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')", + "ports": "Ports to scan", + "rate": "Rate in packets per second", + "wait": "Seconds to wait for replies after scan is complete", + "ping_first": "Only portscan hosts that reply to pings", + "ping_only": "Ping sweep only, no portscan", + "adapter": 'Manually specify a network interface, such as "eth0" or "tun0". If not specified, the first network interface found with a default gateway will be used.', + "adapter_ip": "Send packets using this IP address. Not needed unless masscan's autodetection fails", + "adapter_mac": "Send packets using this as the source MAC address. Not needed unless masscan's autodetection fails", + "router_mac": "Send packets to this MAC address as the destination. Not needed unless masscan's autodetection fails", + } + deps_shared = ["masscan"] + batch_size = 1000000 + _shuffle_incoming_queue = False + + async def setup(self): + self.top_ports = self.config.get("top_ports", 100) + self.rate = self.config.get("rate", 300) + self.wait = self.config.get("wait", 10) + self.ping_first = self.config.get("ping_first", False) + self.ping_only = self.config.get("ping_only", False) + self.adapter = self.config.get("adapter", "") + self.adapter_ip = self.config.get("adapter_ip", "") + self.adapter_mac = self.config.get("adapter_mac", "") + self.router_mac = self.config.get("router_mac", "") + self.ports = self.config.get("ports", "") + if self.ports: + try: + self.helpers.parse_port_string(self.ports) + except ValueError as e: + return False, f"Error parsing ports: {e}" + self.alive_hosts = dict() + self.scanned_tracker = RadixTarget() + self.prep_blacklist() + self.helpers.depsinstaller.ensure_root(message="Masscan requires root privileges") + # check if we're set up for IPv6 + self.ipv6_support = True + ipv6_result = await self.run_process( + ["masscan", "-p1", "--wait", "0", "-iL", self.helpers.tempfile(["::1"], pipe=False)], + sudo=True, + _log_stderr=False, + ) + if ipv6_result.returncode and "failed to detect IPv6 address" in ipv6_result.stderr: + self.warning(f"It looks like you are not set up for IPv6. 
IPv6 targets will not be scanned.") + self.ipv6_support = False + return True + + async def handle_batch(self, *events): + targets = [str(h) for h in self.make_targets(events)] + + # ping scan + if self.ping_first or self.ping_only: + new_targets = [] + async for alive_host, _ in self.masscan(targets, ping=True): + parent_event = self.scanned_tracker.search(alive_host) + # masscan gets the occasional junk result + # this seems to be a side effect of it having its own TCP stack + # see https://github.com/robertdavidgraham/masscan/issues/397 + if parent_event is None: + self.debug(f"Failed to correlate {alive_host} to targets") + continue + await self.emit_event( + alive_host, + "DNS_NAME", + source=parent_event, + context=f"{{module}} pinged {parent_event.data} and got a response: {{event.type}}: {{event.data}}", + ) + new_targets.append(ipaddress.ip_network(alive_host, strict=False)) + targets = new_targets + + # TCP SYN scan + if not self.ping_only: + async for host, port in self.masscan(targets): + parent_event = self.scanned_tracker.search(host) + if parent_event is None: + self.debug(f"Failed to correlate {host} to targets") + continue + if parent_event.type == "DNS_NAME": + host = parent_event.host + netloc = self.helpers.make_netloc(host, port) + await self.emit_event( + netloc, + "OPEN_TCP_PORT", + parent=parent_event, + context=f"{{module}} executed a TCP SYN scan against {parent_event.data} and found: {{event.type}}: {{event.data}}", + ) + else: + self.verbose("Only ping sweep was requested, skipping TCP SYN scan") + + async def masscan(self, targets, ping=False): + scan_type = "ping" if ping else "SYN" + self.verbose(f"Starting masscan {scan_type} scan") + if not targets: + self.verbose("No targets specified, aborting.") + return + + target_file = self.helpers.tempfile(targets, pipe=False) + command = self._build_masscan_command(target_file, ping=ping) + stats_file = self.helpers.tempfile_tail(callback=self.log_masscan_status) + try: + with open(stats_file, "w") as stats_fh: + async for line in self.run_process_live(command, sudo=True, stderr=stats_fh): + for host, port in self.parse_json_line(line): + yield host, port + finally: + for file in (stats_file, target_file): + file.unlink() + + def log_masscan_status(self, s): + if "FAIL" in s: + self.warning(s) + self.warning( + f'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.' 
+ ) + else: + self.verbose(s) + + def _build_masscan_command(self, target_file=None, ping=False, dry_run=False): + command = ( + "masscan", + "--excludefile", + str(self.exclude_file), + "--rate", + self.rate, + "--wait", + self.wait, + "--open-only", + "-oJ", + "-", + ) + if target_file is not None: + command += ("-iL", str(target_file)) + if dry_run: + command += ("-p1", "--wait", "0") + else: + if self.adapter: + command += ("--adapter", self.adapter) + if self.adapter_ip: + command += ("--adapter-ip", self.adapter_ip) + if self.adapter_mac: + command += ("--adapter-mac", self.adapter_mac) + if self.router_mac: + command += ("--router-mac", self.router_mac) + if ping: + command += ("--ping",) + else: + if self.ports: + command += ("-p", self.ports) + else: + command += ("--top-ports", str(self.top_ports)) + return command + + def make_targets(self, events): + # convert events into a list of targets, skipping ones that have already been scanned + targets = set() + for e in events: + # skip events without host + if not e.host: + continue + # skip events that we already scanned + if self.scanned_tracker.search(e.host): + self.debug(f"Skipping {e.host} because it was already scanned") + continue + try: + # first assume it's an ip address / ip range + host = ipaddress.ip_network(e.host, strict=False) + targets.add(host) + self.scanned_tracker.insert(host, e) + except Exception: + # if it's a hostname, get its IPs from resolved_hosts + hosts = set() + for h in e.resolved_hosts: + try: + h = ipaddress.ip_network(h, strict=False) + hosts.add(h) + except Exception: + continue + for h in hosts: + targets.add(h) + self.scanned_tracker.insert(h, e) + # remove IPv6 addresses if we're not scanning IPv6 + if not self.ipv6_support: + targets = [t for t in targets if t.version != 6] + return targets + + def parse_json_line(self, line): + try: + j = json.loads(line) + except Exception: + return + ip = j.get("ip", "") + if not ip: + return + ports = j.get("ports", []) + if not ports: + return + for p in ports: + proto = p.get("proto", "") + port_number = p.get("port", 0) + if proto == "" or port_number == "": + continue + yield ip, port_number + + def prep_blacklist(self): + exclude = [] + for t in self.scan.blacklist: + t = self.helpers.make_ip_type(t.data) + if not isinstance(t, str): + if self.helpers.is_ip(t): + exclude.append(str(ipaddress.ip_network(t))) + else: + exclude.append(str(t)) + if not exclude: + exclude = ["255.255.255.255/32"] + self.exclude_file = self.helpers.tempfile(exclude, pipe=False) + + async def cleanup(self): + with suppress(Exception): + self.exclude_file.unlink() diff --git a/bbot/modules/postman.py b/bbot/modules/postman.py index 348d0dc28..e736bec1a 100644 --- a/bbot/modules/postman.py +++ b/bbot/modules/postman.py @@ -5,7 +5,11 @@ class postman(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] flags = ["passive", "subdomain-enum", "safe", "code-enum"] - meta = {"description": "Query Postman's API for related workspaces, collections, requests"} + meta = { + "description": "Query Postman's API for related workspaces, collections, requests", + "created_date": "2023-12-23", + "author": "@domwhewell-sage", + } base_url = "https://www.postman.com/_api" @@ -25,8 +29,8 @@ class postman(subdomain_enum): async def handle_event(self, event): query = self.make_query(event) self.verbose(f"Searching for any postman workspaces, collections, requests belonging to {query}") - for url in await self.query(query): - await self.emit_event(url, 
"URL_UNVERIFIED", source=event, tags="httpx-safe") + for url, context in await self.query(query): + await self.emit_event(url, "URL_UNVERIFIED", parent=event, tags="httpx-safe", context=context) async def query(self, query): interesting_urls = [] @@ -74,16 +78,46 @@ async def query(self, query): tldextract = self.helpers.tldextract(query) if tldextract.domain.lower() in name.lower(): self.verbose(f"Discovered workspace {name} ({id})") - interesting_urls.append(f"{self.base_url}/workspace/{id}") + workspace_url = f"{self.base_url}/workspace/{id}" + interesting_urls.append( + ( + workspace_url, + f'{{module}} searched postman.com for "{query}" and found matching workspace "{name}" at {{event.type}}: {workspace_url}', + ) + ) environments, collections = await self.search_workspace(id) - interesting_urls.append(f"{self.base_url}/workspace/{id}/globals") + globals_url = f"{self.base_url}/workspace/{id}/globals" + interesting_urls.append( + ( + globals_url, + f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, and found globals at {{event.type}}: {globals_url}', + ) + ) for e_id in environments: - interesting_urls.append(f"{self.base_url}/environment/{e_id}") + env_url = f"{self.base_url}/environment/{e_id}" + interesting_urls.append( + ( + env_url, + f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, enumerated environments, and found {{event.type}}: {env_url}', + ) + ) for c_id in collections: - interesting_urls.append(f"{self.base_url}/collection/{c_id}") + collection_url = f"{self.base_url}/collection/{c_id}" + interesting_urls.append( + ( + collection_url, + f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, enumerated collections, and found {{event.type}}: {collection_url}', + ) + ) requests = await self.search_collections(id) for r_id in requests: - interesting_urls.append(f"{self.base_url}/request/{r_id}") + request_url = f"{self.base_url}/request/{r_id}" + interesting_urls.append( + ( + request_url, + f'{{module}} searched postman.com for "{query}", found matching workspace "{name}" at {workspace_url}, enumerated requests, and found {{event.type}}: {request_url}', + ) + ) else: self.verbose(f"Skipping workspace {name} ({id}) as it does not appear to be in scope") return interesting_urls diff --git a/bbot/modules/rapiddns.py b/bbot/modules/rapiddns.py index 088288ddb..934beb829 100644 --- a/bbot/modules/rapiddns.py +++ b/bbot/modules/rapiddns.py @@ -5,13 +5,17 @@ class rapiddns(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query rapiddns.io for subdomains"} + meta = { + "description": "Query rapiddns.io for subdomains", + "created_date": "2022-08-24", + "author": "@TheTechromancer", + } base_url = "https://rapiddns.io" async def request_url(self, query): url = f"{self.base_url}/subdomain/{self.helpers.quote(query)}?full=1#result" - response = await self.request_with_fail_count(url) + response = await self.request_with_fail_count(url, timeout=self.http_timeout + 10) return response def parse_results(self, r, query): diff --git a/bbot/modules/report/affiliates.py b/bbot/modules/report/affiliates.py index bb6323664..73b2867e2 100644 --- a/bbot/modules/report/affiliates.py +++ b/bbot/modules/report/affiliates.py @@ -5,7 +5,11 @@ class affiliates(BaseReportModule): watched_events = ["*"] produced_events = [] flags = ["passive", "safe", 
"affiliates"] - meta = {"description": "Summarize affiliate domains at the end of a scan"} + meta = { + "description": "Summarize affiliate domains at the end of a scan", + "created_date": "2022-07-25", + "author": "@TheTechromancer", + } scope_distance_modifier = None accept_dupes = True diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index 982c76584..61e51a725 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -5,7 +5,11 @@ class asn(BaseReportModule): watched_events = ["IP_ADDRESS"] produced_events = ["ASN"] flags = ["passive", "subdomain-enum", "safe"] - meta = {"description": "Query ripe and bgpview.io for ASNs"} + meta = { + "description": "Query ripe and bgpview.io for ASNs", + "created_date": "2022-07-25", + "author": "@TheTechromancer", + } scope_distance_modifier = 1 # we accept dupes to avoid missing data # because sometimes IP addresses are re-emitted with lower scope distances @@ -34,19 +38,31 @@ async def filter_event(self, event): async def handle_event(self, event): host = event.host if self.cache_get(host) == False: - asns = await self.get_asn(host) + asns, source = await self.get_asn(host) if not asns: self.cache_put(self.unknown_asn) else: for asn in asns: emails = asn.pop("emails", []) self.cache_put(asn) - asn_event = self.make_event(asn, "ASN", source=event) + asn_event = self.make_event(asn, "ASN", parent=event) + asn_number = asn.get("asn", "") + asn_desc = asn.get("description", "") + asn_name = asn.get("name", "") + asn_subnet = asn.get("subnet", "") if not asn_event: continue - await self.emit_event(asn_event) + await self.emit_event( + asn_event, + context=f"{{module}} checked {event.data} against {source} API and got {{event.type}}: AS{asn_number} ({asn_name}, {asn_desc}, {asn_subnet})", + ) for email in emails: - await self.emit_event(email, "EMAIL_ADDRESS", source=asn_event) + await self.emit_event( + email, + "EMAIL_ADDRESS", + parent=asn_event, + context=f"{{module}} retrieved details for AS{asn_number} and found {{event.type}}: {{event.data}}", + ) async def report(self): asn_data = sorted(self.asn_cache.items(), key=lambda x: self.asn_counts[x[0]], reverse=True) @@ -100,9 +116,9 @@ async def get_asn(self, ip, retries=1): self.sources.append(self.sources.pop(i)) self.verbose(f"Failed to contact {source}, retrying") continue - return res + return res, source self.warning(f"Error retrieving ASN for {ip}") - return [] + return [], "" async def get_asn_ripe(self, ip): url = f"https://stat.ripe.net/data/network-info/data.json?resource={ip}" diff --git a/bbot/modules/riddler.py b/bbot/modules/riddler.py index d525acbad..dbc86e6e0 100644 --- a/bbot/modules/riddler.py +++ b/bbot/modules/riddler.py @@ -5,7 +5,11 @@ class riddler(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query riddler.io for subdomains"} + meta = { + "description": "Query riddler.io for subdomains", + "created_date": "2022-08-25", + "author": "@TheTechromancer", + } base_url = "https://riddler.io" diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index d801a755e..ff71709b3 100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -5,7 +5,7 @@ class robots(BaseModule): watched_events = ["URL"] produced_events = ["URL_UNVERIFIED"] flags = ["active", "safe", "web-basic"] - meta = {"description": "Look for and parse robots.txt"} + meta = {"description": "Look for and parse robots.txt", "created_date": "2023-02-01", "author": 
"@liquidsec"} options = {"include_sitemap": False, "include_allow": True, "include_disallow": True} options_desc = { @@ -21,7 +21,7 @@ async def setup(self): return True async def handle_event(self, event): - host = f"{event.parsed.scheme}://{event.parsed.netloc}/" + host = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/" result = None url = f"{host}robots.txt" result = await self.helpers.request(url) @@ -48,4 +48,10 @@ async def handle_event(self, event): tags = [] if self.helpers.is_spider_danger(event, unverified_url): tags.append("spider-danger") - await self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) + await self.emit_event( + unverified_url, + "URL_UNVERIFIED", + parent=event, + tags=tags, + context=f"{{module}} found robots.txt at {url} and extracted {{event.type}}: {{event.data}}", + ) diff --git a/bbot/modules/secretsdb.py b/bbot/modules/secretsdb.py index d94a3b0a2..2d70e538d 100644 --- a/bbot/modules/secretsdb.py +++ b/bbot/modules/secretsdb.py @@ -8,7 +8,11 @@ class secretsdb(BaseModule): watched_events = ["HTTP_RESPONSE"] produced_events = ["FINDING"] flags = ["active", "safe", "web-basic"] - meta = {"description": "Detect common secrets with secrets-patterns-db"} + meta = { + "description": "Detect common secrets with secrets-patterns-db", + "created_date": "2023-03-17", + "author": "@TheTechromancer", + } options = { "min_confidence": 99, "signatures": "https://raw.githubusercontent.com/blacklanternsecurity/secrets-patterns-db/master/db/rules-stable.yml", @@ -51,13 +55,14 @@ async def handle_event(self, event): matches = [m.string[m.start() : m.end()] for m in matches] description = f"Possible secret ({name}): {matches}" event_data = {"host": str(event.host), "description": description} - parsed_url = getattr(event, "parsed", None) + parsed_url = getattr(event, "parsed_url", None) if parsed_url: event_data["url"] = parsed_url.geturl() await self.emit_event( event_data, "FINDING", - source=event, + parent=event, + context=f"{{module}} searched HTTP response and found {{event.type}}: {description}", ) def search_data(self, resp_body, resp_headers): diff --git a/bbot/modules/securitytrails.py b/bbot/modules/securitytrails.py index 8d1d1b6a0..a91db2912 100644 --- a/bbot/modules/securitytrails.py +++ b/bbot/modules/securitytrails.py @@ -5,7 +5,12 @@ class securitytrails(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query the SecurityTrails API for subdomains", "auth_required": True} + meta = { + "description": "Query the SecurityTrails API for subdomains", + "created_date": "2022-07-03", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "SecurityTrails API key"} diff --git a/bbot/modules/shodan_dns.py b/bbot/modules/shodan_dns.py index 4d19b237e..2336e0243 100644 --- a/bbot/modules/shodan_dns.py +++ b/bbot/modules/shodan_dns.py @@ -5,7 +5,12 @@ class shodan_dns(shodan): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query Shodan for subdomains", "auth_required": True} + meta = { + "description": "Query Shodan for subdomains", + "created_date": "2022-07-03", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "Shodan API key"} diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index e6571ea85..fe9015027 
100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -5,7 +5,11 @@ class sitedossier(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query sitedossier.com for subdomains"} + meta = { + "description": "Query sitedossier.com for subdomains", + "created_date": "2023-08-04", + "author": "@TheTechromancer", + } base_url = "http://www.sitedossier.com/parentdomain" max_pages = 10 @@ -19,7 +23,13 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event( + hostname, + "DNS_NAME", + event, + abort_if=self.abort_if, + context=f'{{module}} searched sitedossier.com for "{query}" and found {{event.type}}: {{event.data}}', + ) async def query(self, query, parse_fn=None, request_fn=None): results = set() diff --git a/bbot/modules/skymem.py b/bbot/modules/skymem.py index 35c010ea1..4aac709b3 100644 --- a/bbot/modules/skymem.py +++ b/bbot/modules/skymem.py @@ -7,9 +7,14 @@ class skymem(emailformat): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] - meta = {"description": "Query skymem.info for email addresses"} + meta = { + "description": "Query skymem.info for email addresses", + "created_date": "2022-07-11", + "author": "@TheTechromancer", + } base_url = "https://www.skymem.info" + _qsize = 1 async def setup(self): self.next_page_regex = self.helpers.re.compile(r'= last_page: - break + if domain_ids: + domain_id = domain_ids[0] + for page in range(2, 22): + r2 = await self.request_with_fail_count(f"{self.base_url}/domain/{domain_id}?p={page}") + if not r2: + continue + responses.append(r2) + pages = re.findall(r"/domain/" + domain_id + r"\?p=(\d+)", r2.text) + if not pages: + break + last_page = max([int(p) for p in pages]) + if page >= last_page: + break + + for i, r in enumerate(responses): + for email in await self.helpers.re.extract_emails(r.text): + await self.emit_event( + email, + "EMAIL_ADDRESS", + parent=event, + context=f'{{module}} searched skymem.info for "{query}" and found {{event.type}} on page {i+1}: {{event.data}}', + ) diff --git a/bbot/modules/smuggler.py b/bbot/modules/smuggler.py index 8ee344452..357fec188 100644 --- a/bbot/modules/smuggler.py +++ b/bbot/modules/smuggler.py @@ -12,7 +12,7 @@ class smuggler(BaseModule): watched_events = ["URL"] produced_events = ["FINDING"] flags = ["active", "aggressive", "slow", "web-thorough"] - meta = {"description": "Check for HTTP smuggling"} + meta = {"description": "Check for HTTP smuggling", "created_date": "2022-07-06", "author": "@liquidsec"} in_scope_only = True per_hostport_only = True @@ -42,5 +42,6 @@ async def handle_event(self, event): await self.emit_event( {"host": str(event.host), "url": event.data, "description": description}, "FINDING", - source=event, + parent=event, + context=f"{{module}} scanned {event.data} and found HTTP smuggling ({{event.type}}): {text}", ) diff --git a/bbot/modules/social.py b/bbot/modules/social.py index 3688d7402..b80f6c18a 100644 --- a/bbot/modules/social.py +++ b/bbot/modules/social.py @@ -5,7 +5,11 @@ class social(BaseModule): watched_events = ["URL_UNVERIFIED"] produced_events = ["SOCIAL"] - meta = {"description": "Look for social media links in webpages"} + meta = { + "description": "Look for social media links in webpages", + 
"created_date": "2023-03-28", + "author": "@TheTechromancer", + } flags = ["passive", "safe", "social-enum"] # platform name : (regex, case_sensitive) @@ -37,10 +41,14 @@ async def handle_event(self, event): if not case_sensitive: url = url.lower() profile_name = profile_name.lower() + url = f"https://{url}" social_event = self.make_event( - {"platform": platform, "url": f"https://{url}", "profile_name": profile_name}, + {"platform": platform, "url": url, "profile_name": profile_name}, "SOCIAL", - source=event, + parent=event, ) social_event.scope_distance = event.scope_distance - await self.emit_event(social_event) + await self.emit_event( + social_event, + context=f"{{module}} detected {platform} {{event.type}} at {url}", + ) diff --git a/bbot/modules/sslcert.py b/bbot/modules/sslcert.py index 42f34d23e..8312ee2d8 100644 --- a/bbot/modules/sslcert.py +++ b/bbot/modules/sslcert.py @@ -5,6 +5,7 @@ from bbot.errors import ValidationError from bbot.modules.base import BaseModule from bbot.core.helpers.async_helpers import NamedLock +from bbot.core.helpers.web.ssl_context import ssl_context_noverify class sslcert(BaseModule): @@ -13,6 +14,8 @@ class sslcert(BaseModule): flags = ["affiliates", "subdomain-enum", "email-enum", "active", "safe", "web-basic"] meta = { "description": "Visit open ports and retrieve SSL certificates", + "created_date": "2022-03-30", + "author": "@TheTechromancer", } options = {"timeout": 5.0, "skip_non_ssl": True} options_desc = {"timeout": "Socket connect timeout in seconds", "skip_non_ssl": "Don't try common non-SSL ports"} @@ -29,7 +32,7 @@ async def setup(self): # sometimes we run into a server with A LOT of SANs # these are usually stupid and useless, so we abort based on a different threshold - # depending on whether the source event is in scope + # depending on whether the parent event is in scope self.in_scope_abort_threshold = 50 self.out_of_scope_abort_threshold = 10 @@ -72,26 +75,30 @@ async def handle_event(self, event): f"Skipping Subject Alternate Names (SANs) on {netloc} because number of hostnames ({len(dns_names):,}) exceeds threshold ({abort_threshold})" ) dns_names = dns_names[:1] + [n for n in dns_names[1:] if self.scan.in_scope(n)] - for event_type, results in (("DNS_NAME", dns_names), ("EMAIL_ADDRESS", emails)): + for event_type, results in (("DNS_NAME", set(dns_names)), ("EMAIL_ADDRESS", emails)): for event_data in results: if event_data is not None and event_data != event: self.debug(f"Discovered new {event_type} via SSL certificate parsing: [{event_data}]") try: - ssl_event = self.make_event(event_data, event_type, source=event, raise_error=True) - source_event = ssl_event.get_source() - if source_event.scope_distance == 0: + ssl_event = self.make_event(event_data, event_type, parent=event, raise_error=True) + parent_event = ssl_event.get_parent() + if parent_event.scope_distance == 0: tags = ["affiliate"] else: tags = None if ssl_event: - await self.emit_event(ssl_event, tags=tags) + await self.emit_event( + ssl_event, + tags=tags, + context=f"{{module}} parsed SSL certificate at {event.data} and found {{event.type}}: {{event.data}}", + ) except ValidationError as e: self.hugeinfo(f'Malformed {event_type} "{event_data}" at {event.data}') self.debug(f"Invalid data at {host}:{port}: {e}") def on_success_callback(self, event): - source_scope_distance = event.get_source().scope_distance - if source_scope_distance == 0 and event.scope_distance > 0: + parent_scope_distance = event.get_parent().scope_distance + if parent_scope_distance == 0 and 
event.scope_distance > 0: event.add_tag("affiliate") async def visit_host(self, host, port): @@ -109,17 +116,12 @@ async def visit_host(self, host, port): host = str(host) - # Create an SSL context - try: - ssl_context = self.helpers.ssl_context_noverify() - except Exception as e: - self.warning(f"Error creating SSL context: {e}") - return [], [], (host, port) - # Connect to the host try: transport, _ = await asyncio.wait_for( - self.helpers.loop.create_connection(lambda: asyncio.Protocol(), host, port, ssl=ssl_context), + self.helpers.loop.create_connection( + lambda: asyncio.Protocol(), host, port, ssl=ssl_context_noverify + ), timeout=self.timeout, ) except asyncio.TimeoutError: diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py index c5c69293c..9fdce8c49 100644 --- a/bbot/modules/subdomaincenter.py +++ b/bbot/modules/subdomaincenter.py @@ -5,7 +5,11 @@ class subdomaincenter(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] - meta = {"description": "Query subdomain.center's API for subdomains"} + meta = { + "description": "Query subdomain.center's API for subdomains", + "created_date": "2023-07-26", + "author": "@TheTechromancer", + } base_url = "https://api.subdomain.center" retries = 2 diff --git a/bbot/modules/sublist3r.py b/bbot/modules/sublist3r.py index 3c13cf308..502352bc9 100644 --- a/bbot/modules/sublist3r.py +++ b/bbot/modules/sublist3r.py @@ -8,6 +8,8 @@ class sublist3r(subdomain_enum): flags = ["passive", "safe"] meta = { "description": "Query sublist3r's API for subdomains", + "created_date": "2022-03-29", + "author": "@Want-EyeTea", } base_url = "https://api.sublist3r.com/search.php" diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index c86da8897..a64ef3c6f 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -1,4 +1,3 @@ -import asyncio from sys import executable from urllib.parse import urlparse @@ -9,7 +8,11 @@ class telerik(BaseModule): watched_events = ["URL", "HTTP_RESPONSE"] produced_events = ["VULNERABILITY", "FINDING"] flags = ["active", "aggressive", "web-thorough"] - meta = {"description": "Scan for critical Telerik vulnerabilities"} + meta = { + "description": "Scan for critical Telerik vulnerabilities", + "created_date": "2022-04-10", + "author": "@liquidsec", + } telerikVersions = [ "2007.1423", @@ -215,6 +218,7 @@ async def handle_event(self, event): {"host": str(event.host), "url": f"{event.data}{webresource}", "description": description}, "FINDING", event, + context=f"{{module}} scanned {event.data} and identified {{event.type}}: Telerik RAU AXD Handler", ) if self.config.get("exploit_RAU_crypto") == True: hostname = urlparse(event.data).netloc @@ -246,34 +250,29 @@ async def handle_event(self, event): }, "VULNERABILITY", event, + context=f"{{module}} scanned {event.data} and identified critical {{event.type}}: {description}", ) break - tasks = [] + urls = {} for dh in self.DialogHandlerUrls: - tasks.append(self.helpers.create_task(self.test_detector(event.data, f"{dh}?dp=1"))) + url = self.create_url(event.data, f"{dh}?dp=1") + urls[url] = dh + gen = self.helpers.request_batch(list(urls)) fail_count = 0 - gen = self.helpers.as_completed(tasks) - async for task in gen: - try: - result, dh = await task - except asyncio.CancelledError: - continue - + async for url, response in gen: # cancel if we run into timeouts etc. 
- if result is None: + if response is None: fail_count += 1 # tolerate some random errors if fail_count < 2: continue self.debug(f"Cancelling run against {event.data} due to failed request") - await self.helpers.cancel_tasks(tasks) await gen.aclose() else: - if "Cannot deserialize dialog parameters" in result.text: - await self.helpers.cancel_tasks(tasks) + if "Cannot deserialize dialog parameters" in response.text: self.debug(f"Detected Telerik UI instance ({dh})") description = f"Telerik DialogHandler detected" await self.emit_event( @@ -284,8 +283,6 @@ async def handle_event(self, event): # Once we have a match we need to stop, because the basic handler (Telerik.Web.UI.DialogHandler.aspx) usually works with a path wildcard await gen.aclose() - await self.helpers.cancel_tasks(tasks) - spellcheckhandler = "Telerik.Web.UI.SpellCheckHandler.axd" result, _ = await self.test_detector(event.data, spellcheckhandler) status_code = getattr(result, "status_code", 0) @@ -306,6 +303,7 @@ async def handle_event(self, event): }, "FINDING", event, + context=f"{{module}} scanned {event.data} and identified {{event.type}}: Telerik SpellCheckHandler", ) chartimagehandler = "ChartImage.axd?ImageName=bqYXJAqm315eEd6b%2bY4%2bGqZpe7a1kY0e89gfXli%2bjFw%3d" @@ -324,40 +322,48 @@ async def handle_event(self, event): }, "FINDING", event, + context=f"{{module}} scanned {event.data} and identified {{event.type}}: Telerik ChartImage AXD Handler", ) elif event.type == "HTTP_RESPONSE": resp_body = event.data.get("body", None) + url = event.data["url"] if resp_body: if '":{"SerializedParameters":"' in resp_body: await self.emit_event( { "host": str(event.host), - "url": event.data["url"], + "url": url, "description": "Telerik DialogHandler [SerializedParameters] Detected in HTTP Response", }, "FINDING", event, + context=f"{{module}} searched HTTP_RESPONSE and identified {{event.type}}: Telerik DialogHandler [SerializedParameters]", ) elif '"_serializedConfiguration":"' in resp_body: await self.emit_event( { "host": str(event.host), - "url": event.data["url"], + "url": url, "description": "Telerik AsyncUpload [serializedConfiguration] Detected in HTTP Response", }, "FINDING", event, + context=f"{{module}} searched HTTP_RESPONSE and identified {{event.type}}: Telerik AsyncUpload", ) # Check for RAD Controls in URL - async def test_detector(self, baseurl, detector): - result = None - if "/" != baseurl[-1]: + def create_url(self, baseurl, detector): + if not baseurl.endswith("/"): url = f"{baseurl}/{detector}" else: url = f"{baseurl}{detector}" + return url + + async def test_detector(self, baseurl, detector): + result = None + url = self.create_url(baseurl, detector) result = await self.helpers.request(url, timeout=self.timeout) return result, detector diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py index c1fece0a6..9bfb1a3ae 100644 --- a/bbot/modules/templates/bucket.py +++ b/bbot/modules/templates/bucket.py @@ -52,8 +52,14 @@ async def handle_dns_name(self, event): for d in self.delimiters: bucket_name = d.join(split) buckets.add(bucket_name) - async for bucket_name, url, tags in self.brute_buckets(buckets, permutations=self.permutations): - await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + async for bucket_name, url, tags, num_buckets in self.brute_buckets(buckets, permutations=self.permutations): + await self.emit_event( + {"name": bucket_name, "url": url}, + "STORAGE_BUCKET", + parent=event, + tags=tags, + context=f"{{module}} tried 
{num_buckets:,} bucket variations of {event.data} and found {{event.type}} at {url}", + ) async def handle_storage_bucket(self, event): url = event.data["url"] @@ -62,12 +68,24 @@ async def handle_storage_bucket(self, event): description, tags = await self._check_bucket_open(bucket_name, url) if description: event_data = {"host": event.host, "url": url, "description": description} - await self.emit_event(event_data, "FINDING", source=event, tags=tags) - - async for bucket_name, url, tags in self.brute_buckets( + await self.emit_event( + event_data, + "FINDING", + parent=event, + tags=tags, + context=f"{{module}} scanned {event.type} and identified {{event.type}}: {description}", + ) + + async for bucket_name, new_url, tags, num_buckets in self.brute_buckets( [bucket_name], permutations=self.permutations, omit_base=True ): - await self.emit_event({"name": bucket_name, "url": url}, "STORAGE_BUCKET", source=event, tags=tags) + await self.emit_event( + {"name": bucket_name, "url": new_url}, + "STORAGE_BUCKET", + parent=event, + tags=tags, + context=f"{{module}} tried {num_buckets:,} variations of {url} and found {{event.type}} at {new_url}", + ) async def brute_buckets(self, buckets, permutations=False, omit_base=False): buckets = set(buckets) @@ -80,6 +98,7 @@ async def brute_buckets(self, buckets, permutations=False, omit_base=False): if omit_base: new_buckets = new_buckets - buckets new_buckets = [b for b in new_buckets if self.valid_bucket_name(b)] + num_buckets = len(new_buckets) tasks = [] for base_domain in self.base_domains: for region in self.regions: @@ -89,7 +108,7 @@ async def brute_buckets(self, buckets, permutations=False, omit_base=False): async for task in self.helpers.as_completed(tasks): existent_bucket, tags, bucket_name, url = await task if existent_bucket: - yield bucket_name, url, tags + yield bucket_name, url, tags, num_buckets async def _check_bucket_exists(self, bucket_name, url): self.debug(f'Checking if bucket exists: "{bucket_name}"') diff --git a/bbot/modules/templates/github.py b/bbot/modules/templates/github.py index fcfeb0934..4169647ce 100644 --- a/bbot/modules/templates/github.py +++ b/bbot/modules/templates/github.py @@ -1,7 +1,7 @@ -from bbot.modules.templates.subdomain_enum import subdomain_enum +from bbot.modules.base import BaseModule -class github(subdomain_enum): +class github(BaseModule): """ A template module for use of the GitHub API Inherited by several other github modules. diff --git a/bbot/modules/templates/portscanner.py b/bbot/modules/templates/portscanner.py deleted file mode 100644 index 5d1662d81..000000000 --- a/bbot/modules/templates/portscanner.py +++ /dev/null @@ -1,55 +0,0 @@ -import ipaddress - -from bbot.modules.base import BaseModule - - -class portscanner(BaseModule): - """ - A portscanner containing useful methods for nmap, masscan, etc. - """ - - async def setup(self): - self.ip_ranges = [e.host for e in self.scan.target.events if e.type == "IP_RANGE"] - exclude, invalid_exclude = self._build_targets(self.scan.blacklist) - if not exclude: - exclude = ["255.255.255.255/32"] - self.exclude_file = self.helpers.tempfile(exclude, pipe=False) - if invalid_exclude > 0: - self.warning( - f"Port scanner can only accept IP addresses or IP ranges as blacklist ({invalid_exclude:,} blacklisted were hostnames)" - ) - return True - - async def filter_event(self, event): - """ - The purpose of this filter_event is to decide whether we should accept individual IP_ADDRESS - events that reside inside our target subnets (IP_RANGE), if any. 
- - This prevents scanning the same IP twice. - """ - # if we are emitting hosts from a previous asset_inventory, this is a special case - # in this case we want to accept the individual IPs even if they overlap with our target ranges - asset_inventory_module = self.scan.modules.get("asset_inventory", None) - asset_inventory_config = getattr(asset_inventory_module, "config", {}) - asset_inventory_use_previous = asset_inventory_config.get("use_previous", False) - if event.type == "IP_ADDRESS" and not asset_inventory_use_previous: - for net in self.helpers.ip_network_parents(event.data, include_self=True): - if net in self.ip_ranges: - return False, f"skipping {event.host} because it is already included in {net}" - elif event.type == "IP_RANGE" and asset_inventory_use_previous: - return False, f"skipping IP_RANGE {event.host} because asset_inventory.use_previous=True" - return True - - def _build_targets(self, target): - invalid_targets = 0 - targets = [] - for t in target: - t = self.helpers.make_ip_type(t.data) - if isinstance(t, str): - invalid_targets += 1 - else: - if self.helpers.is_ip(t): - targets.append(str(ipaddress.ip_network(t))) - else: - targets.append(str(t)) - return targets, invalid_targets diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 790b35515..95c7995d3 100644 --- a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -16,15 +16,29 @@ class subdomain_enum(BaseModule): # set module error state after this many failed requests in a row abort_after_failures = 5 + # whether to reject wildcard DNS_NAMEs reject_wildcards = "strict" - # this helps combat rate limiting by ensuring that a query doesn't execute + + # set qsize to 1. this helps combat rate limiting by ensuring that a query doesn't execute # until the queue is ready to receive its results _qsize = 1 - async def setup(self): - self.processed = set() - return True + # how to deduplicate incoming events + # options: + # "highest_parent": dedupe by highest parent (highest parent of www.api.test.evilcorp.com is evilcorp.com) + # "lowest_parent": dedupe by lowest parent (lowest parent of www.api.test.evilcorp.com is api.test.evilcorp.com) + dedup_strategy = "highest_parent" + + @property + def source_pretty_name(self): + return f"{self.__class__.__name__} API" + + def _incoming_dedup_hash(self, event): + """ + Determines the criteria for what is considered to be a duplicate event if `accept_dupes` is False. 
+ """ + return hash(self.make_query(event)), f"dedup_strategy={self.dedup_strategy}" async def handle_event(self, event): query = self.make_query(event) @@ -38,18 +52,36 @@ async def handle_event(self, event): self.verbose(e) continue if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - await self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + await self.emit_event( + hostname, + "DNS_NAME", + event, + abort_if=self.abort_if, + context=f'{{module}} searched {self.source_pretty_name} for "{query}" and found {{event.type}}: {{event.data}}', + ) async def request_url(self, query): url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" return await self.request_with_fail_count(url) def make_query(self, event): - if "target" in event.tags: - query = str(event.data) + query = event.data + parents = list(self.helpers.domain_parents(event.data)) + if self.dedup_strategy == "highest_parent": + parents = list(reversed(parents)) + elif self.dedup_strategy == "lowest_parent": + pass else: - query = self.helpers.parent_domain(event.data).lower() - return ".".join([s for s in query.split(".") if s != "_wildcard"]) + raise ValueError('self.dedup_strategy attribute must be set to either "highest_parent" or "lowest_parent"') + for p in parents: + if self.scan.in_scope(p): + query = p + break + try: + return ".".join([s for s in query.split(".") if s != "_wildcard"]) + except Exception: + self.critical(query) + raise def parse_results(self, r, query=None): json = r.json() @@ -74,7 +106,7 @@ async def query(self, query, parse_fn=None, request_fn=None): self.info( f'Error parsing results for query "{query}" (status code {response.status_code})', trace=True ) - self.log.trace(response.text) + self.log.trace(repr(response.text)) else: self.info(f'Error parsing results for "{query}": {e}', trace=True) return @@ -92,20 +124,6 @@ async def _is_wildcard(self, query): return False async def filter_event(self, event): - """ - This filter_event is used across many modules - """ - query = self.make_query(event) - # reject if already processed - if self.already_processed(query): - return False, "Event was already processed" - eligible, reason = await self.eligible_for_enumeration(event) - if eligible: - self.processed.add(hash(query)) - return True, reason - return False, reason - - async def eligible_for_enumeration(self, event): query = self.make_query(event) # check if wildcard is_wildcard = await self._is_wildcard(query) @@ -128,12 +146,6 @@ async def eligible_for_enumeration(self, event): return False, "Event is both a cloud resource and a wildcard domain" return True, "" - def already_processed(self, hostname): - for parent in self.helpers.domain_parents(hostname, include_self=True): - if hash(parent) in self.processed: - return True - return False - async def abort_if(self, event): # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains if "in-scope" not in event.tags: diff --git a/bbot/modules/threatminer.py b/bbot/modules/threatminer.py index bbc1e23c3..b4ccad4b8 100644 --- a/bbot/modules/threatminer.py +++ b/bbot/modules/threatminer.py @@ -7,6 +7,8 @@ class threatminer(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] meta = { "description": "Query threatminer's API for subdomains", + "created_date": "2022-07-28", + "author": "@TheTechromancer", } base_url = "https://api.threatminer.org/v2" diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index d9c27b239..e32352d2d 100644 --- 
a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -6,10 +6,14 @@ class trufflehog(BaseModule): watched_events = ["FILESYSTEM"] produced_events = ["FINDING", "VULNERABILITY"] flags = ["passive", "safe", "code-enum"] - meta = {"description": "TruffleHog is a tool for finding credentials"} + meta = { + "description": "TruffleHog is a tool for finding credentials", + "created_date": "2024-03-12", + "author": "@domwhewell-sage", + } options = { - "version": "3.69.0", + "version": "3.75.1", "only_verified": True, "concurrency": 8, } @@ -37,18 +41,15 @@ async def setup(self): self.concurrency = int(self.config.get("concurrency", 8)) return True - async def filter_event(self, event): - if event.type == "FILESYSTEM": - if "git" not in event.tags and "docker" not in event.tags: - return False, "event is not a git repository or a docker image" - return True - async def handle_event(self, event): path = event.data["path"] + description = event.data.get("description", "") if "git" in event.tags: module = "git" elif "docker" in event.tags: module = "docker" + else: + module = "filesystem" async for decoder_name, detector_name, raw_result, verified, source_metadata in self.execute_trufflehog( module, path ): @@ -56,15 +57,29 @@ async def handle_event(self, event): data = { "severity": "High", "description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", - "host": str(event.source.host), + "host": str(event.parent.host), } - await self.emit_event(data, "VULNERABILITY", event) + if description: + data["description"] += f" Description: [{description}]" + await self.emit_event( + data, + "VULNERABILITY", + event, + context=f'{{module}} searched {event.type} using "{module}" method and found verified secret ({{event.type}}): {raw_result}', + ) else: data = { "description": f"Potential Secret Found. 
Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", - "host": str(event.source.host), + "host": str(event.parent.host), } - await self.emit_event(data, "FINDING", event) + if description: + data["description"] += f" Description: [{description}]" + await self.emit_event( + data, + "FINDING", + event, + context=f'{{module}} searched {event.type} using "{module}" method and found possible secret ({{event.type}}): {raw_result}', + ) async def execute_trufflehog(self, module, path): command = [ @@ -80,6 +95,9 @@ async def execute_trufflehog(self, module, path): elif module == "docker": command.append("docker") command.append("--image=file://" + path) + elif module == "filesystem": + command.append("filesystem") + command.append(path) stats_file = self.helpers.tempfile_tail(callback=self.log_trufflehog_status) try: diff --git a/bbot/modules/url_manipulation.py b/bbot/modules/url_manipulation.py index 74b702eaa..ef7cfe7f3 100644 --- a/bbot/modules/url_manipulation.py +++ b/bbot/modules/url_manipulation.py @@ -6,7 +6,11 @@ class url_manipulation(BaseModule): watched_events = ["URL"] produced_events = ["FINDING"] flags = ["active", "aggressive", "web-thorough"] - meta = {"description": "Attempt to identify URL parsing/routing based vulnerabilities"} + meta = { + "description": "Attempt to identify URL parsing/routing based vulnerabilities", + "created_date": "2022-09-27", + "author": "@liquidsec", + } in_scope_only = True options = {"allow_redirects": True} @@ -77,7 +81,8 @@ async def handle_event(self, event): await self.emit_event( {"description": description, "host": str(event.host), "url": event.data}, "FINDING", - source=event, + parent=event, + context=f"{{module}} probed {event.data} and identified {{event.type}}: {description}", ) else: self.debug(f"Status code changed to {str(subject_response.status_code)}, ignoring") @@ -94,10 +99,10 @@ async def filter_event(self, event): def format_signature(self, sig, event): if sig[2] == True: - cleaned_path = event.parsed.path.strip("/") + cleaned_path = event.parsed_url.path.strip("/") else: - cleaned_path = event.parsed.path.lstrip("/") + cleaned_path = event.parsed_url.path.lstrip("/") - kwargs = {"scheme": event.parsed.scheme, "netloc": event.parsed.netloc, "path": cleaned_path} + kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path} formatted_url = sig[1].format(**kwargs) return (sig[0], formatted_url) diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py index 4c3811af0..8b9b53bc8 100644 --- a/bbot/modules/urlscan.py +++ b/bbot/modules/urlscan.py @@ -7,6 +7,8 @@ class urlscan(subdomain_enum): produced_events = ["DNS_NAME", "URL_UNVERIFIED"] meta = { "description": "Query urlscan.io for subdomains", + "created_date": "2022-06-09", + "author": "@TheTechromancer", } options = {"urls": False} options_desc = {"urls": "Emit URLs in addition to DNS_NAMEs"} @@ -20,22 +22,34 @@ async def setup(self): async def handle_event(self, event): query = self.make_query(event) for domain, url in await self.query(query): - source_event = event + parent_event = event if domain and domain != query: - domain_event = self.make_event(domain, "DNS_NAME", source=event) + domain_event = self.make_event(domain, "DNS_NAME", parent=event) if domain_event: if str(domain_event.host).endswith(query) and not str(domain_event.host) == str(event.host): - await self.emit_event(domain_event, abort_if=self.abort_if) - source_event = domain_event + await 
self.emit_event( + domain_event, + abort_if=self.abort_if, + context=f'{{module}} searched urlscan.io API for "{query}" and found {{event.type}}: {{event.data}}', + ) + parent_event = domain_event if url: - url_event = self.make_event(url, "URL_UNVERIFIED", source=source_event) + url_event = self.make_event(url, "URL_UNVERIFIED", parent=parent_event) if url_event: if str(url_event.host).endswith(query): if self.urls: - await self.emit_event(url_event, abort_if=self.abort_if) + await self.emit_event( + url_event, + abort_if=self.abort_if, + context=f'{{module}} searched urlscan.io API for "{query}" and found {{event.type}}: {{event.data}}', + ) else: await self.emit_event( - str(url_event.host), "DNS_NAME", source=event, abort_if=self.abort_if + str(url_event.host), + "DNS_NAME", + parent=event, + abort_if=self.abort_if, + context=f'{{module}} searched urlscan.io API for "{query}" and found {{event.type}}: {{event.data}}', ) else: self.debug(f"{url_event.host} does not match {query}") diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index 0d996bf09..96fd6fe94 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -13,6 +13,8 @@ class viewdns(BaseModule): flags = ["affiliates", "passive", "safe"] meta = { "description": "Query viewdns.info's reverse whois for related domains", + "created_date": "2022-07-04", + "author": "@TheTechromancer", } base_url = "https://viewdns.info" in_scope_only = True @@ -26,7 +28,13 @@ async def setup(self): async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) for domain, _ in await self.query(query): - await self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + await self.emit_event( + domain, + "DNS_NAME", + parent=event, + tags=["affiliate"], + context=f'{{module}} searched viewdns.info for "{query}" and found {{event.type}}: {{event.data}}', + ) async def query(self, query): results = set() diff --git a/bbot/modules/virustotal.py b/bbot/modules/virustotal.py index 8e0c03934..6f0ba5e82 100644 --- a/bbot/modules/virustotal.py +++ b/bbot/modules/virustotal.py @@ -5,7 +5,12 @@ class virustotal(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] - meta = {"description": "Query VirusTotal's API for subdomains", "auth_required": True} + meta = { + "description": "Query VirusTotal's API for subdomains", + "created_date": "2022-08-25", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": ""} options_desc = {"api_key": "VirusTotal API Key"} diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py index 4584cdecc..ac408765c 100644 --- a/bbot/modules/wafw00f.py +++ b/bbot/modules/wafw00f.py @@ -16,7 +16,11 @@ class wafw00f(BaseModule): watched_events = ["URL"] produced_events = ["WAF"] flags = ["active", "aggressive"] - meta = {"description": "Web Application Firewall Fingerprinting Tool"} + meta = { + "description": "Web Application Firewall Fingerprinting Tool", + "created_date": "2023-02-15", + "author": "@liquidsec", + } deps_pip = ["wafw00f~=2.2.0"] @@ -32,13 +36,21 @@ async def filter_event(self, event): return False, f"Invalid HTTP status code: {http_status}" return True, "" + def _incoming_dedup_hash(self, event): + return hash(f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/") + async def handle_event(self, event): - url = f"{event.parsed.scheme}://{event.parsed.netloc}/" + url = f"{event.parsed_url.scheme}://{event.parsed_url.netloc}/" WW = await 
self.helpers.run_in_executor(wafw00f_main.WAFW00F, url, followredirect=False) waf_detections = await self.helpers.run_in_executor(WW.identwaf) if waf_detections: for waf in waf_detections: - await self.emit_event({"host": str(event.host), "url": url, "waf": waf}, "WAF", source=event) + await self.emit_event( + {"host": str(event.host), "url": url, "waf": waf}, + "WAF", + parent=event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {waf}", + ) else: if self.config.get("generic_detect") == True: generic = await self.helpers.run_in_executor(WW.genericdetect) @@ -51,5 +63,6 @@ async def handle_event(self, event): "info": WW.knowledge["generic"]["reason"], }, "WAF", - source=event, + parent=event, + context=f"{{module}} scanned {url} and identified {{event.type}}: {waf}", ) diff --git a/bbot/modules/wappalyzer.py b/bbot/modules/wappalyzer.py index 24fa54bcf..474b5c8d9 100644 --- a/bbot/modules/wappalyzer.py +++ b/bbot/modules/wappalyzer.py @@ -16,6 +16,8 @@ class wappalyzer(BaseModule): flags = ["active", "safe", "web-basic"] meta = { "description": "Extract technologies from web responses", + "created_date": "2022-04-15", + "author": "@liquidsec", } deps_pip = ["python-Wappalyzer~=0.3.1", "aiohttp~=3.9.0b0"] # accept all events regardless of scope distance @@ -28,8 +30,12 @@ async def setup(self): async def handle_event(self, event): for res in await self.helpers.run_in_executor(self.wappalyze, event.data): + res = res.lower() await self.emit_event( - {"technology": res.lower(), "url": event.data["url"], "host": str(event.host)}, "TECHNOLOGY", event + {"technology": res, "url": event.data["url"], "host": str(event.host)}, + "TECHNOLOGY", + event, + context=f"{{module}} analyzed HTTP_RESPONSE and identified {{event.type}}: {res}", ) def wappalyze(self, data): diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index 526e0b3eb..647ea342f 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -9,6 +9,8 @@ class wayback(subdomain_enum): produced_events = ["URL_UNVERIFIED", "DNS_NAME"] meta = { "description": "Query archive.org's API for subdomains", + "created_date": "2022-04-01", + "author": "@pmueller", } options = {"urls": False, "garbage_threshold": 10} options_desc = { @@ -27,7 +29,13 @@ async def setup(self): async def handle_event(self, event): query = self.make_query(event) for result, event_type in await self.query(query): - await self.emit_event(result, event_type, event, abort_if=self.abort_if) + await self.emit_event( + result, + event_type, + event, + abort_if=self.abort_if, + context=f'{{module}} queried archive.org for "{query}" and found {{event.type}}: {{event.data}}', + ) async def query(self, query): results = set() @@ -56,6 +64,8 @@ async def query(self, query): dns_names = set() collapsed_urls = 0 start_time = datetime.now() + # we consolidate URLs to cut down on garbage data + # this is CPU-intensive, so we do it in its own core. 
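The comment above describes the standard pattern of pushing CPU-bound work off the event loop into a separate process. A rough sketch of that pattern using only the standard library (collapse() and consolidate() are illustrative placeholders, not the actual BBOT helper implementation):

    import asyncio
    from concurrent.futures import ProcessPoolExecutor

    def collapse(urls):
        # placeholder for a CPU-heavy function such as validators.collapse_urls()
        return sorted(set(urls))

    async def consolidate(urls):
        loop = asyncio.get_running_loop()
        # run the heavy work in another process so it cannot block the event loop
        with ProcessPoolExecutor(max_workers=1) as pool:
            return await loop.run_in_executor(pool, collapse, urls)

    # asyncio.run(consolidate(["https://a/1", "https://a/1", "https://a/2"]))
    # (guard the call with `if __name__ == "__main__":` if the spawn start method is used)
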
parsed_urls = await self.helpers.run_in_executor_mp( self.helpers.validators.collapse_urls, urls, diff --git a/bbot/modules/zoomeye.py b/bbot/modules/zoomeye.py index ec921bc25..fc4cfbfee 100644 --- a/bbot/modules/zoomeye.py +++ b/bbot/modules/zoomeye.py @@ -5,7 +5,12 @@ class zoomeye(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "passive", "safe"] - meta = {"description": "Query ZoomEye's API for subdomains", "auth_required": True} + meta = { + "description": "Query ZoomEye's API for subdomains", + "created_date": "2022-08-03", + "author": "@TheTechromancer", + "auth_required": True, + } options = {"api_key": "", "max_pages": 20, "include_related": False} options_desc = { "api_key": "ZoomEye API key", @@ -36,7 +41,13 @@ async def handle_event(self, event): tags = [] if not hostname.endswith(f".{query}"): tags = ["affiliate"] - await self.emit_event(hostname, "DNS_NAME", event, tags=tags) + await self.emit_event( + hostname, + "DNS_NAME", + event, + tags=tags, + context=f'{{module}} searched ZoomEye API for "{query}" and found {{event.type}}: {{event.data}}', + ) async def query(self, query): results = set() diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 37d20b4ac..56459cc67 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -50,10 +50,11 @@ async def init_events(self, events=None): event._dummy = False event.web_spider_distance = 0 event.scan = self.scan - if event.source is None: - event.source = self.scan.root_event + if event.parent is None: + event.parent = self.scan.root_event if event.module is None: event.module = self.scan._make_dummy_module(name="TARGET", _type="TARGET") + event.discovery_context = f"Scan {self.scan.name} seeded with " + "{event.type}: {event.data}" self.verbose(f"Target: {event}") await self.queue_event(event, {}) await asyncio.sleep(0.1) @@ -64,21 +65,45 @@ async def handle_event(self, event, kwargs): if event._dummy: return False, "cannot emit dummy event" - # don't accept events with self as source - if (not event.type == "SCAN") and (event == event.get_source()): - return False, "event's source is itself" + # don't accept events with self as parent + if not event.type == "SCAN": + if event == event.get_parent(): + return False, "event's parent is itself" + if not event.discovery_context: + self.warning(f"Event {event} has no discovery context") # don't accept duplicates - if (not event._graph_important) and self.is_incoming_duplicate(event, add=True): - return False, "event was already emitted by its module" + if self.is_incoming_duplicate(event, add=True): + if not event._graph_important: + return False, "event was already emitted by its module" + else: + self.debug( + f"Event {event} was already emitted by its module, but it's graph-important so it gets a pass" + ) # update event's scope distance based on its parent - event.scope_distance = event.source.scope_distance + 1 - - # blacklist rejections + event.scope_distance = event.parent.scope_distance + 1 + + # special handling of URL extensions + url_extension = getattr(event, "url_extension", None) + if url_extension is not None: + if url_extension in self.scan.url_extension_httpx_only: + event.add_tag("httpx-only") + event._omit = True + + # blacklist by extension + if url_extension in self.scan.url_extension_blacklist: + self.debug( + f"Blacklisting {event} because its extension (.{url_extension}) is blacklisted in the config" + ) + event.add_tag("blacklisted") + + # main scan 
blacklist event_blacklisted = self.scan.blacklisted(event) + + # reject all blacklisted events if event_blacklisted or "blacklisted" in event.tags: - return False, f"Omitting blacklisted event: {event}" + return False, "event is blacklisted" # Scope shepherding # here is where we make sure in-scope events are set to their proper scope distance @@ -159,11 +184,6 @@ def priority(self): # we are the lowest priority return 99 - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - # track outgoing duplicates (for `accept_dupes` attribute of modules) - self.outgoing_dup_tracker = set() - async def handle_event(self, event, kwargs): abort_if = kwargs.pop("abort_if", None) on_success_callback = kwargs.pop("on_success_callback", None) @@ -179,15 +199,15 @@ async def handle_event(self, event, kwargs): # if we discovered something interesting from an internal event, # make sure we preserve its chain of parents - source = event.source - if source.internal and ((not event.internal) or event._graph_important): - source_in_report_distance = source.scope_distance <= self.scan.scope_report_distance - if source_in_report_distance: - source.internal = False - if not source._graph_important: - source._graph_important = True - log.debug(f"Re-queuing internal event {source} with parent {event}") - await self.emit_event(source) + parent = event.parent + if parent.internal and ((not event.internal) or event._graph_important): + parent_in_report_distance = parent.scope_distance <= self.scan.scope_report_distance + if parent_in_report_distance: + parent.internal = False + if not parent._graph_important: + parent._graph_important = True + log.debug(f"Re-queuing internal event {parent} with parent {event}") + await self.emit_event(parent) abort_result = False if callable(abort_if): @@ -209,31 +229,11 @@ async def forward_event(self, event, kwargs): """ Queue event with modules """ - is_outgoing_duplicate = self.is_outgoing_duplicate(event) - if is_outgoing_duplicate: - self.verbose(f"{event.module}: Duplicate event: {event}") # absorb event into the word cloud if it's in scope - if not is_outgoing_duplicate and -1 < event.scope_distance < 1: + if -1 < event.scope_distance < 1: self.scan.word_cloud.absorb_event(event) for mod in self.scan.modules.values(): # don't distribute events to intercept modules - if mod._intercept: - continue - acceptable_dup = (not is_outgoing_duplicate) or mod.accept_dupes - graph_important = mod._is_graph_important(event) - if acceptable_dup or graph_important: + if not mod._intercept: await mod.queue_event(event) - - def is_outgoing_duplicate(self, event, add=False): - """ - Calculate whether an event is a duplicate in the context of the whole scan, - This will return True if the same event (irregardless of its source module) has been emitted before. - - TODO: Allow modules to use this for custom deduplication such as on a per-host or per-domain basis. 
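The removed is_outgoing_duplicate() amounted to a scan-wide set of event hashes, checked regardless of which module emitted the event. Condensed into a standalone sketch for clarity (DedupTracker is an illustrative name only):

    class DedupTracker:
        # remembers the hash of every event seen so far, across all modules
        def __init__(self):
            self._seen = set()

        def is_duplicate(self, event, add=False):
            event_hash = hash(event)
            is_dup = event_hash in self._seen
            if add:
                self._seen.add(event_hash)
            return is_dup

With this gone, outgoing events are no longer filtered in forward_event(); deduplication happens on the incoming side instead, via is_incoming_duplicate() and each module's _incoming_dedup_hash().
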
- """ - event_hash = hash(event) - is_dup = event_hash in self.outgoing_dup_tracker - if add: - self.outgoing_dup_tracker.add(event_hash) - return is_dup diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index 506b1c6bf..bbeda381f 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -28,7 +28,7 @@ class BBOTArgs: ( "Subdomains + port scan + web screenshots", "Port-scan every subdomain, screenshot every webpage, output to current directory", - "bbot -t evilcorp.com -p subdomain-enum -m nmap gowitness -n my_scan -o .", + "bbot -t evilcorp.com -p subdomain-enum -m portscan gowitness -n my_scan -o .", ), ( "Subdomains + basic web scan", @@ -145,6 +145,7 @@ def preset_from_args(self): args_preset.scan_name = self.parsed.name args_preset.output_dir = self.parsed.output_dir args_preset.force_start = self.parsed.force + args_preset.core.merge_custom({"http_headers": self.parsed.custom_headers}) # CLI config options (dot-syntax) for config_arg in self.parsed.config: @@ -172,7 +173,7 @@ def create_parser(self, *args, **kwargs): "-w", "--whitelist", nargs="+", - default=[], + default=None, help="What's considered in-scope (by default it's the same as --targets)", ) target.add_argument("-b", "--blacklist", nargs="+", default=[], help="Don't touch these things") @@ -299,6 +300,13 @@ def create_parser(self, *args, **kwargs): misc = p.add_argument_group(title="Misc") misc.add_argument("--version", action="store_true", help="show BBOT version and exit") + misc.add_argument( + "-H", + "--custom-headers", + nargs="+", + default=[], + help="List of custom headers as key value pairs (header=value).", + ) return p def sanitize_args(self): @@ -312,9 +320,10 @@ def sanitize_args(self): self.parsed.targets = chain_lists( self.parsed.targets, try_files=True, msg="Reading targets from file: {filename}" ) - self.parsed.whitelist = chain_lists( - self.parsed.whitelist, try_files=True, msg="Reading whitelist from file: {filename}" - ) + if self.parsed.whitelist is not None: + self.parsed.whitelist = chain_lists( + self.parsed.whitelist, try_files=True, msg="Reading whitelist from file: {filename}" + ) self.parsed.blacklist = chain_lists( self.parsed.blacklist, try_files=True, msg="Reading blacklist from file: {filename}" ) @@ -323,6 +332,22 @@ def sanitize_args(self): self.parsed.require_flags = chain_lists(self.parsed.require_flags) self.parsed.event_types = [t.upper() for t in chain_lists(self.parsed.event_types)] + # Custom Header Parsing / Validation + custom_headers_dict = {} + custom_header_example = "Example: --custom-headers foo=bar foo2=bar2" + + for i in self.parsed.custom_headers: + parts = i.split("=", 1) + if len(parts) != 2: + raise ValidationError(f"Custom headers not formatted correctly (missing '='). {custom_header_example}") + k, v = parts + if not k or not v: + raise ValidationError( + f"Custom headers not formatted correctly (missing header name or value). 
{custom_header_example}" + ) + custom_headers_dict[k] = v + self.parsed.custom_headers = custom_headers_dict + def validate(self): # validate config options sentinel = object() @@ -338,4 +363,4 @@ def validate(self): if self.exclude_from_validation.match(c): continue # otherwise, ensure it exists as a module option - raise ValidationError(get_closest_match(c, all_options, msg="module option")) + raise ValidationError(get_closest_match(c, all_options, msg="config option")) diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py index 7d6fce394..599ae8591 100644 --- a/bbot/scanner/preset/preset.py +++ b/bbot/scanner/preset/preset.py @@ -11,7 +11,6 @@ from bbot.errors import * from bbot.core import CORE -from bbot.core.event.base import make_event from bbot.core.helpers.misc import make_table, mkdir, get_closest_match @@ -234,19 +233,11 @@ def __init__( self._module_dirs = set() self.module_dirs = module_dirs - self.strict_scope = strict_scope - # target / whitelist / blacklist - from bbot.scanner.target import Target + from bbot.scanner.target import BBOTTarget - self.target = Target(*targets, strict_scope=self.strict_scope) - if not whitelist: - self.whitelist = self.target.copy() - else: - self.whitelist = Target(*whitelist, strict_scope=self.strict_scope) - if not blacklist: - blacklist = [] - self.blacklist = Target(*blacklist) + self.strict_scope = strict_scope + self.target = BBOTTarget(targets, whitelist=whitelist, blacklist=blacklist, strict_scope=self.strict_scope) # include other presets if include and not isinstance(include, (list, tuple, set)): @@ -267,6 +258,14 @@ def __init__( def bbot_home(self): return Path(self.config.get("home", "~/.bbot")).expanduser().resolve() + @property + def whitelist(self): + return self.target.whitelist + + @property + def blacklist(self): + return self.target.blacklist + @property def preset_dir(self): return self.bbot_home / "presets" @@ -300,15 +299,15 @@ def merge(self, other): other (Preset): The preset to merge into this one. 
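To make the intent of the --custom-headers parsing above concrete: each pair is split on the first "=" only, so a header value may itself contain "=" characters. A small standalone sketch (parse_custom_headers is a hypothetical helper name; the expected output matches the CLI test further down):

    def parse_custom_headers(pairs):
        headers = {}
        for pair in pairs:
            parts = pair.split("=", 1)  # split on the first "=" only
            if len(parts) != 2 or not parts[0] or not parts[1]:
                raise ValueError(f"bad custom header: {pair!r} (expected header=value)")
            headers[parts[0]] = parts[1]
        return headers

    parse_custom_headers(["foo=bar", "foo2=bar2", "foo3=bar=3"])
    # -> {"foo": "bar", "foo2": "bar2", "foo3": "bar=3"}
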
Examples: - >>> preset1 = Preset(modules=["nmap"]) + >>> preset1 = Preset(modules=["portscan"]) >>> preset1.scan_modules - ['nmap'] + ['portscan'] >>> preset2 = Preset(modules=["sslcert"]) >>> preset2.scan_modules ['sslcert'] >>> preset1.merge(preset2) >>> preset1.scan_modules - ['nmap', 'sslcert'] + ['portscan', 'sslcert'] """ self.log_debug(f'Merging preset "{other.name}" into "{self.name}"') # config @@ -324,13 +323,8 @@ def merge(self, other): self.explicit_scan_modules.update(other.explicit_scan_modules) self.explicit_output_modules.update(other.explicit_output_modules) self.flags.update(other.flags) - # scope - self.target.add_target(other.target) - self.whitelist.add_target(other.whitelist) - self.blacklist.add_target(other.blacklist) - self.strict_scope = self.strict_scope or other.strict_scope - for t in (self.target, self.whitelist): - t.strict_scope = self.strict_scope + # target / scope + self.target.merge(other.target) # log verbosity if other.silent: self.silent = other.silent @@ -365,6 +359,8 @@ def bake(self): baked_preset = copy(self) # copy core baked_preset.core = self.core.copy() + # copy target + baked_preset.target = self.target.copy() # copy module loader baked_preset._module_loader = self.module_loader.copy() # prepare os environment @@ -377,6 +373,10 @@ def bake(self): os.environ.clear() os.environ.update(os_environ) + # ensure whitelist + if baked_preset.target.whitelist is None: + baked_preset.target.whitelist = baked_preset.target.seeds.copy() + # validate flags, config options baked_preset.validate() @@ -565,58 +565,13 @@ def args(self): return self._args def in_scope(self, host): - """ - Check whether a hostname, url, IP, etc. is in scope. - Accepts either events or string data. - - Checks whitelist and blacklist. - If `host` is an event and its scope distance is zero, it will automatically be considered in-scope. - - Examples: - Check if a URL is in scope: - >>> preset.in_scope("http://www.evilcorp.com") - True - """ - try: - e = make_event(host, dummy=True) - except ValidationError: - return False - in_scope = e.scope_distance == 0 or self.whitelisted(e) - return in_scope and not self.blacklisted(e) + return self.target.in_scope(host) def blacklisted(self, host): - """ - Check whether a hostname, url, IP, etc. is blacklisted. - - Note that `host` can be a hostname, IP address, CIDR, email address, or any BBOT `Event` with the `host` attribute. - - Args: - host (str or IPAddress or Event): The host to check against the blacklist - - Examples: - Check if a URL's host is blacklisted: - >>> preset.blacklisted("http://www.evilcorp.com") - True - """ - e = make_event(host, dummy=True) - return e in self.blacklist + return self.target.blacklisted(host) def whitelisted(self, host): - """ - Check whether a hostname, url, IP, etc. is whitelisted. - - Note that `host` can be a hostname, IP address, CIDR, email address, or any BBOT `Event` with the `host` attribute. 
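Since --whitelist now defaults to None rather than an empty list, a preset only falls back to using its seeds as the whitelist when no whitelist was supplied; an explicit whitelist replaces the seeds as the definition of in-scope. Roughly, under the new BBOTTarget-backed scope handling (hostnames here are illustrative):

    from bbot import Preset

    p1 = Preset("one.one.one.one")
    p1.whitelisted("one.one.one.one")    # True - no whitelist given, so the seeds act as the whitelist

    p2 = Preset("one.one.one.one", whitelist=["192.168.0.1"])
    p2.whitelisted("one.one.one.one")    # False - the explicit whitelist replaces the seeds
    p2.in_scope("192.168.0.1")           # True
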
- - Args: - host (str or IPAddress or Event): The host to check against the whitelist - - Examples: - Check if a URL's host is whitelisted: - >>> preset.whitelisted("http://www.evilcorp.com") - True - """ - e = make_event(host, dummy=True) - return e in self.whitelist + return self.target.whitelisted(host) @classmethod def from_dict(cls, preset_dict, name=None, _exclude=None, _log=False): @@ -633,7 +588,7 @@ def from_dict(cls, preset_dict, name=None, _exclude=None, _log=False): Preset: The loaded preset Examples: - >>> preset = Preset.from_dict({"target": "evilcorp.com", "modules": ["nmap}]}) + >>> preset = Preset.from_dict({"target": "evilcorp.com", "modules": ["portscan}]}) """ new_preset = cls( *preset_dict.get("target", []), @@ -716,9 +671,9 @@ def to_dict(self, include_target=False, full_config=False): dict: The preset in dictionary form Examples: - >>> preset = Preset(flags=["subdomain-enum"], modules=["nmap"]) + >>> preset = Preset(flags=["subdomain-enum"], modules=["portscan"]) >>> preset.to_dict() - {"flags": ["subdomain-enum"], "modules": ["nmap"]} + {"flags": ["subdomain-enum"], "modules": ["portscan"]} """ preset_dict = {} @@ -733,9 +688,11 @@ def to_dict(self, include_target=False, full_config=False): # scope if include_target: - target = sorted(str(t.data) for t in self.target) - whitelist = sorted(str(t.data) for t in self.whitelist) - blacklist = sorted(str(t.data) for t in self.blacklist) + target = sorted(str(t.data) for t in self.target.seeds) + whitelist = set() + if self.target.whitelist is not None: + whitelist = sorted(str(t.data) for t in self.target.whitelist) + blacklist = sorted(str(t.data) for t in self.target.blacklist) if target: preset_dict["target"] = target if whitelist and whitelist != target: @@ -792,12 +749,12 @@ def to_yaml(self, include_target=False, full_config=False, sort_keys=False): str: The preset in the form of a YAML string Examples: - >>> preset = Preset(flags=["subdomain-enum"], modules=["nmap"]) + >>> preset = Preset(flags=["subdomain-enum"], modules=["portscan"]) >>> print(preset.to_yaml()) flags: - subdomain-enum modules: - - nmap + - portscan """ preset_dict = self.to_dict(include_target=include_target, full_config=full_config) return yaml.dump(preset_dict, sort_keys=sort_keys) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 06f55c340..74e457eee 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -11,10 +11,6 @@ from bbot import __version__ - -from .preset import Preset -from .stats import ScanStats -from .dispatcher import Dispatcher from bbot.core.event import make_event from .manager import ScanIngress, ScanEgress from bbot.core.helpers.misc import sha1, rand_string @@ -30,11 +26,11 @@ class Scanner: Examples: Create scan with multiple targets: - >>> my_scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["nmap", "sslcert", "httpx"]) + >>> my_scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["portscan", "sslcert", "httpx"]) Create scan with custom config: - >>> config = {"http_proxy": "http://127.0.0.1:8080", "modules": {"nmap": {"top_ports": 2000}}} - >>> my_scan = Scanner("www.evilcorp.com", modules=["nmap", "httpx"], config=config) + >>> config = {"http_proxy": "http://127.0.0.1:8080", "modules": {"portscan": {"top_ports": 2000}}} + >>> my_scan = Scanner("www.evilcorp.com", modules=["portscan", "httpx"], config=config) Start the scan, iterating over events as they're discovered (synchronous): >>> for event in my_scan.start(): @@ -125,6 +121,9 @@ def __init__( preset = kwargs.pop("preset", 
None) kwargs["_log"] = True + + from .preset import Preset + if preset is None: preset = Preset(*targets, **kwargs) else: @@ -168,11 +167,15 @@ def __init__( self.dummy_modules = {} if dispatcher is None: + from .dispatcher import Dispatcher + self.dispatcher = Dispatcher() else: self.dispatcher = dispatcher self.dispatcher.set_scan(self) + from .stats import ScanStats + self.stats = ScanStats(self) # scope distance @@ -197,6 +200,7 @@ def __init__( self._finished_init = False self._new_activity = False self._cleanedup = False + self._omitted_event_types = None self.__loop = None self._manager_worker_loop_tasks = [] @@ -334,8 +338,7 @@ async def async_start(self): failed = False except BaseException as e: - exception_chain = self.helpers.get_exception_chain(e) - if any(isinstance(exc, (KeyboardInterrupt, asyncio.CancelledError)) for exc in exception_chain): + if self.helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)): self.stop() failed = False else: @@ -543,7 +546,7 @@ def queued_event_types(self): queues.add(module.outgoing_event_queue) for q in queues: - for item in q._queue: + for item in getattr(q, "_queue", []): try: event, _ = item except ValueError: @@ -861,6 +864,12 @@ def aborting(self): def status(self): return self._status + @property + def omitted_event_types(self): + if self._omitted_event_types is None: + self._omitted_event_types = self.config.get("omit_event_types", []) + return self._omitted_event_types + @status.setter def status(self, status): """ @@ -899,7 +908,7 @@ def root_event(self): "scope_distance": 0, "scan": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", "timestamp": 1694548779.616255, - "source": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "parent": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", "tags": [ "distance-0" ], @@ -911,7 +920,7 @@ def root_event(self): root_event = self.make_event(data=f"{self.name} ({self.id})", event_type="SCAN", dummy=True) root_event._id = self.id root_event.scope_distance = 0 - root_event.source = root_event + root_event.parent = root_event root_event.module = self._make_dummy_module(name="TARGET", _type="TARGET") return root_event @@ -1121,8 +1130,7 @@ def _handle_exception(self, e, context="scan", finally_callback=None, unhandled_ if callable(context): context = f"{context.__qualname__}()" filename, lineno, funcname = self.helpers.get_traceback_details(e) - exception_chain = self.helpers.get_exception_chain(e) - if any(isinstance(exc, KeyboardInterrupt) for exc in exception_chain): + if self.helpers.in_exception_chain(e, (KeyboardInterrupt,)): log.debug(f"Interrupted") self.stop() elif isinstance(e, BrokenPipeError): @@ -1156,6 +1164,7 @@ def _make_dummy_module_dns(self, name): except KeyError: dummy_module = self._make_dummy_module(name=name, _type="DNS") dummy_module.suppress_dupes = False + dummy_module._priority = 4 self.dummy_modules[name] = dummy_module return dummy_module diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index b19d1b6a6..747ff3081 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -7,11 +7,154 @@ from bbot.errors import * from bbot.modules.base import BaseModule +from bbot.core.helpers.misc import make_ip_type from bbot.core.event import make_event, is_event log = logging.getLogger("bbot.core.target") +class BBOTTarget: + """ + A convenient abstraction of a scan target that includes whitelisting and blacklisting + + Provides high-level functions like in_scope(), which includes both whitelist and blacklist checks. 
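A short usage sketch of the new abstraction, based on the constructor and methods defined below (hosts are made up, and default non-strict scope is assumed, so subdomains of a whitelisted domain count as in scope):

    from bbot.scanner.target import BBOTTarget

    target = BBOTTarget(
        ["evilcorp.com", "1.2.3.0/24"],        # seeds (scan starting points)
        whitelist=["evilcorp.com"],            # defines what is in scope
        blacklist=["test.evilcorp.com"],       # never scanned, even if whitelisted
    )

    target.in_scope("www.evilcorp.com")    # True  - whitelisted and not blacklisted
    target.in_scope("test.evilcorp.com")   # False - the blacklist always wins
    target.in_scope("1.2.3.4")             # False - seeded, but not whitelisted
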
+ """ + + def __init__(self, targets, whitelist=None, blacklist=None, strict_scope=False): + self.strict_scope = strict_scope + self.seeds = Target(*targets, strict_scope=self.strict_scope) + if whitelist is None: + self.whitelist = None + else: + self.whitelist = Target(*whitelist, strict_scope=self.strict_scope) + if blacklist is None: + blacklist = [] + self.blacklist = Target(*blacklist) + + def add(self, *args, **kwargs): + self.seeds.add(*args, **kwargs) + + def merge(self, other): + self.seeds.add(other.seeds) + if other.whitelist is not None: + if self.whitelist is None: + self.whitelist = other.whitelist.copy() + else: + self.whitelist.add(other.whitelist) + self.blacklist.add(other.blacklist) + self.strict_scope = self.strict_scope or other.strict_scope + for t in (self.seeds, self.whitelist): + if t is not None: + t.strict_scope = self.strict_scope + + def get(self, host): + return self.seeds.get(host) + + def get_host(self, host): + return self.seeds.get(host) + + def __iter__(self): + return iter(self.seeds) + + def __len__(self): + return len(self.seeds) + + def __contains__(self, other): + if isinstance(other, self.__class__): + other = other.seeds + return other in self.seeds + + def __bool__(self): + return bool(self.seeds) + + def __eq__(self, other): + return hash(self) == hash(other) + + def __hash__(self): + return hash(self.seeds) + + def copy(self): + self_copy = copy.copy(self) + self_copy.seeds = self.seeds.copy() + if self.whitelist is not None: + self_copy.whitelist = self.whitelist.copy() + self_copy.blacklist = self.blacklist.copy() + return self_copy + + @property + def events(self): + return self.seeds.events + + def in_scope(self, host): + """ + Check whether a hostname, url, IP, etc. is in scope. + Accepts either events or string data. + + Checks whitelist and blacklist. + If `host` is an event and its scope distance is zero, it will automatically be considered in-scope. + + Examples: + Check if a URL is in scope: + >>> preset.in_scope("http://www.evilcorp.com") + True + """ + try: + e = make_event(host, dummy=True) + except ValidationError: + return False + in_scope = e.scope_distance == 0 or self.whitelisted(e) + return in_scope and not self.blacklisted(e) + + def blacklisted(self, host): + """ + Check whether a hostname, url, IP, etc. is blacklisted. + + Note that `host` can be a hostname, IP address, CIDR, email address, or any BBOT `Event` with the `host` attribute. + + Args: + host (str or IPAddress or Event): The host to check against the blacklist + + Examples: + Check if a URL's host is blacklisted: + >>> preset.blacklisted("http://www.evilcorp.com") + True + """ + e = make_event(host, dummy=True) + return e in self.blacklist + + def whitelisted(self, host): + """ + Check whether a hostname, url, IP, etc. is whitelisted. + + Note that `host` can be a hostname, IP address, CIDR, email address, or any BBOT `Event` with the `host` attribute. 
+ + Args: + host (str or IPAddress or Event): The host to check against the whitelist + + Examples: + Check if a URL's host is whitelisted: + >>> preset.whitelisted("http://www.evilcorp.com") + True + """ + e = make_event(host, dummy=True) + whitelist = self.whitelist + if whitelist is None: + whitelist = self.seeds + return e in whitelist + + @property + def radix_only(self): + """ + A slimmer, serializable version of the target designed for simple scope checks + """ + return self.__class__( + targets=[e.host for e in self.seeds if e.host], + whitelist=[e.host for e in self.whitelist if e.host], + blacklist=[e.host for e in self.blacklist if e.host], + strict_scope=self.strict_scope, + ) + + class Target: """ A class representing a target. Can contain an unlimited number of hosts, IP or IP ranges, URLs, etc. @@ -93,11 +236,11 @@ def __init__(self, *targets, strict_scope=False): if len(targets) > 0: log.verbose(f"Creating events from {len(targets):,} targets") for t in targets: - self.add_target(t) + self.add(t) self._hash = None - def add_target(self, t, event_type=None): + def add(self, t, event_type=None): """ Add a target or merge events from another Target object into this Target. @@ -108,7 +251,7 @@ def add_target(self, t, event_type=None): _events (dict): The dictionary is updated to include the new target's events. Examples: - >>> target.add_target('example.com') + >>> target.add('example.com') Notes: - If `t` is of the same class as this Target, all its events are merged. @@ -117,13 +260,14 @@ def add_target(self, t, event_type=None): if not isinstance(t, (list, tuple, set)): t = [t] for single_target in t: - if type(single_target) == self.__class__: + if isinstance(single_target, self.__class__): for event in single_target.events: self._add_event(event) else: if is_event(single_target): event = single_target else: + single_target = str(single_target) for eventtype, regex in self.special_event_types.items(): match = regex.match(single_target) if match: @@ -212,20 +356,27 @@ def get(self, host): """ try: - other = make_event(host, dummy=True) + event = make_event(host, dummy=True) except ValidationError: return - if other.host: - with suppress(KeyError, StopIteration): - result = self._radix.search(other.host) - if result is not None: - for event in result: - # if the result is a dns name and strict scope is enabled - if isinstance(event.host, str) and self.strict_scope: - # if the result doesn't exactly equal the host, abort - if event.host != other.host: - return - return event + if event.host: + return self.get_host(event.host) + + def get_host(self, host): + """ + A more efficient version of .get() that only accepts hostnames and IP addresses + """ + host = make_ip_type(host) + with suppress(KeyError, StopIteration): + result = self._radix.search(host) + if result is not None: + for event in result: + # if the result is a dns name and strict scope is enabled + if isinstance(event.host, str) and self.strict_scope: + # if the result doesn't exactly equal the host, abort + if event.host != host: + return + return event def _add_event(self, event): radix_data = self._radix.search(event.host) @@ -249,7 +400,7 @@ def __iter__(self): def __contains__(self, other): # if "other" is a Target - if type(other) == self.__class__: + if isinstance(other, self.__class__): contained_in_self = [self._contains(e) for e in other.events] return all(contained_in_self) else: diff --git a/bbot/scripts/docs.py b/bbot/scripts/docs.py index f8a5050a3..98bd48a2b 100755 --- a/bbot/scripts/docs.py +++ 
b/bbot/scripts/docs.py @@ -5,7 +5,7 @@ import yaml from pathlib import Path -from bbot.scanner import Preset +from bbot import Preset DEFAULT_PRESET = Preset() diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index 14495f41a..309edcafc 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -127,47 +127,47 @@ def helpers(scan): @pytest.fixture def events(scan): class bbot_events: - localhost = scan.make_event("127.0.0.1", source=scan.root_event) - ipv4 = scan.make_event("8.8.8.8", source=scan.root_event) - netv4 = scan.make_event("8.8.8.8/30", source=scan.root_event) - ipv6 = scan.make_event("2001:4860:4860::8888", source=scan.root_event) - netv6 = scan.make_event("2001:4860:4860::8888/126", source=scan.root_event) - domain = scan.make_event("publicAPIs.org", source=scan.root_event) - subdomain = scan.make_event("api.publicAPIs.org", source=scan.root_event) - email = scan.make_event("bob@evilcorp.co.uk", "EMAIL_ADDRESS", source=scan.root_event) - open_port = scan.make_event("api.publicAPIs.org:443", source=scan.root_event) + localhost = scan.make_event("127.0.0.1", parent=scan.root_event) + ipv4 = scan.make_event("8.8.8.8", parent=scan.root_event) + netv4 = scan.make_event("8.8.8.8/30", parent=scan.root_event) + ipv6 = scan.make_event("2001:4860:4860::8888", parent=scan.root_event) + netv6 = scan.make_event("2001:4860:4860::8888/126", parent=scan.root_event) + domain = scan.make_event("publicAPIs.org", parent=scan.root_event) + subdomain = scan.make_event("api.publicAPIs.org", parent=scan.root_event) + email = scan.make_event("bob@evilcorp.co.uk", "EMAIL_ADDRESS", parent=scan.root_event) + open_port = scan.make_event("api.publicAPIs.org:443", parent=scan.root_event) protocol = scan.make_event( - {"host": "api.publicAPIs.org", "port": 443, "protocol": "HTTP"}, "PROTOCOL", source=scan.root_event + {"host": "api.publicAPIs.org", "port": 443, "protocol": "HTTP"}, "PROTOCOL", parent=scan.root_event ) - ipv4_open_port = scan.make_event("8.8.8.8:443", source=scan.root_event) - ipv6_open_port = scan.make_event("[2001:4860:4860::8888]:443", "OPEN_TCP_PORT", source=scan.root_event) - url_unverified = scan.make_event("https://api.publicAPIs.org:443/hellofriend", source=scan.root_event) - ipv4_url_unverified = scan.make_event("https://8.8.8.8:443/hellofriend", source=scan.root_event) - ipv6_url_unverified = scan.make_event("https://[2001:4860:4860::8888]:443/hellofriend", source=scan.root_event) + ipv4_open_port = scan.make_event("8.8.8.8:443", parent=scan.root_event) + ipv6_open_port = scan.make_event("[2001:4860:4860::8888]:443", "OPEN_TCP_PORT", parent=scan.root_event) + url_unverified = scan.make_event("https://api.publicAPIs.org:443/hellofriend", parent=scan.root_event) + ipv4_url_unverified = scan.make_event("https://8.8.8.8:443/hellofriend", parent=scan.root_event) + ipv6_url_unverified = scan.make_event("https://[2001:4860:4860::8888]:443/hellofriend", parent=scan.root_event) url = scan.make_event( - "https://api.publicAPIs.org:443/hellofriend", "URL", tags=["status-200"], source=scan.root_event + "https://api.publicAPIs.org:443/hellofriend", "URL", tags=["status-200"], parent=scan.root_event ) ipv4_url = scan.make_event( - "https://8.8.8.8:443/hellofriend", "URL", tags=["status-200"], source=scan.root_event + "https://8.8.8.8:443/hellofriend", "URL", tags=["status-200"], parent=scan.root_event ) ipv6_url = scan.make_event( - "https://[2001:4860:4860::8888]:443/hellofriend", "URL", tags=["status-200"], source=scan.root_event + 
"https://[2001:4860:4860::8888]:443/hellofriend", "URL", tags=["status-200"], parent=scan.root_event ) - url_hint = scan.make_event("https://api.publicAPIs.org:443/hello.ash", "URL_HINT", source=url) + url_hint = scan.make_event("https://api.publicAPIs.org:443/hello.ash", "URL_HINT", parent=url) vulnerability = scan.make_event( {"host": "evilcorp.com", "severity": "INFO", "description": "asdf"}, "VULNERABILITY", - source=scan.root_event, + parent=scan.root_event, ) - finding = scan.make_event({"host": "evilcorp.com", "description": "asdf"}, "FINDING", source=scan.root_event) - vhost = scan.make_event({"host": "evilcorp.com", "vhost": "www.evilcorp.com"}, "VHOST", source=scan.root_event) - http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", source=scan.root_event) + finding = scan.make_event({"host": "evilcorp.com", "description": "asdf"}, "FINDING", parent=scan.root_event) + vhost = scan.make_event({"host": "evilcorp.com", "vhost": "www.evilcorp.com"}, "VHOST", parent=scan.root_event) + http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", parent=scan.root_event) storage_bucket = scan.make_event( {"name": "storage", "url": "https://storage.blob.core.windows.net"}, "STORAGE_BUCKET", - source=scan.root_event, + parent=scan.root_event, ) - emoji = scan.make_event("💩", "WHERE_IS_YOUR_GOD_NOW", source=scan.root_event) + emoji = scan.make_event("💩", "WHERE_IS_YOUR_GOD_NOW", parent=scan.root_event) bbot_events.all = [ # noqa: F841 bbot_events.localhost, diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 3dd403106..b9732eb8f 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -5,6 +5,7 @@ import asyncio import logging from pathlib import Path +from contextlib import suppress from omegaconf import OmegaConf from pytest_httpserver import HTTPServer @@ -43,11 +44,6 @@ def pytest_sessionfinish(session, exitstatus): yield -@pytest.fixture -def non_mocked_hosts() -> list: - return ["127.0.0.1", "localhost", "raw.githubusercontent.com"] + interactsh_servers - - @pytest.fixture def assert_all_responses_were_requested() -> bool: return False @@ -94,6 +90,11 @@ def bbot_httpserver_ssl(): server.clear() +@pytest.fixture +def non_mocked_hosts() -> list: + return ["127.0.0.1", "localhost", "raw.githubusercontent.com"] + interactsh_servers + + @pytest.fixture def bbot_httpserver_allinterfaces(): server = HTTPServer(host="0.0.0.0", port=5556) @@ -108,28 +109,32 @@ def bbot_httpserver_allinterfaces(): server.clear() -@pytest.fixture -def interactsh_mock_instance(): - interactsh_mock = Interactsh_mock() - return interactsh_mock - - class Interactsh_mock: - def __init__(self): + def __init__(self, name): + self.name = name + self.log = logging.getLogger(f"bbot.interactsh.{self.name}") self.interactions = [] self.correlation_id = "deadbeef-dead-beef-dead-beefdeadbeef" self.stop = False + self.poll_task = None - def mock_interaction(self, subdomain_tag): + def mock_interaction(self, subdomain_tag, msg=None): + self.log.info(f"Mocking interaction to subdomain tag: {subdomain_tag}") + if msg is not None: + self.log.info(msg) self.interactions.append(subdomain_tag) async def register(self, callback=None): if callable(callback): - asyncio.create_task(self.poll_loop(callback)) + self.poll_task = asyncio.create_task(self.poll_loop(callback)) return "fakedomain.fakeinteractsh.com" async def deregister(self, callback=None): self.stop = True + if self.poll_task is not None: + self.poll_task.cancel() + with suppress(BaseException): + await self.poll_task async def 
poll_loop(self, callback=None): while not self.stop: diff --git a/bbot/test/test_step_1/test__module__tests.py b/bbot/test/test_step_1/test__module__tests.py index 9d88b1bcc..791e58f58 100644 --- a/bbot/test/test_step_1/test__module__tests.py +++ b/bbot/test/test_step_1/test__module__tests.py @@ -2,7 +2,7 @@ import importlib from pathlib import Path -from bbot.scanner import Preset +from bbot import Preset from ..test_step_2.module_tests.base import ModuleTestBase log = logging.getLogger("bbot.test.modules") diff --git a/bbot/test/test_step_1/test_bloom_filter.py b/bbot/test/test_step_1/test_bloom_filter.py new file mode 100644 index 000000000..6d8e6918d --- /dev/null +++ b/bbot/test/test_step_1/test_bloom_filter.py @@ -0,0 +1,65 @@ +import time +import string +import random + + +def test_bloom_filter(): + + def generate_random_strings(n, length=10): + """Generate a list of n random strings.""" + return ["".join(random.choices(string.ascii_letters + string.digits, k=length)) for _ in range(n)] + + from bbot.scanner import Scanner + + scan = Scanner() + + n_items_to_add = 100000 + n_items_to_test = 100000 + bloom_filter_size = 8000000 + + # Initialize the simple bloom filter and the set + bloom_filter = scan.helpers.bloom_filter(size=bloom_filter_size) + + test_set = set() + + # Generate random strings to add + print(f"Generating {n_items_to_add:,} items to add") + items_to_add = set(generate_random_strings(n_items_to_add)) + + # Generate random strings to test + print(f"Generating {n_items_to_test:,} items to test") + items_to_test = generate_random_strings(n_items_to_test) + + print("Adding items") + start = time.time() + for item in items_to_add: + bloom_filter.add(item) + test_set.add(hash(item)) + end = time.time() + elapsed = end - start + print(f"elapsed: {elapsed:.2f} ({int(n_items_to_test/elapsed)}/s)") + # this shouldn't take longer than 5 seconds + assert elapsed < 5 + + # make sure we have 100% accuracy + start = time.time() + for item in items_to_add: + assert item in bloom_filter + end = time.time() + elapsed = end - start + print(f"elapsed: {elapsed:.2f} ({int(n_items_to_test/elapsed)}/s)") + # this shouldn't take longer than 5 seconds + assert elapsed < 5 + + print("Measuring false positives") + # Check for false positives + false_positives = 0 + for item in items_to_test: + if bloom_filter.check(item) and hash(item) not in test_set: + false_positives += 1 + false_positive_percent = false_positives / len(items_to_test) * 100 + + print(f"False positive rate: {false_positive_percent:.2f}% ({false_positives}/{len(items_to_test)})") + + # ensure false positives are less than .02 percent + assert false_positive_percent < 0.02 diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index 5b4452b98..b49365d85 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -1,13 +1,93 @@ from ..bbot_fixtures import * +from bbot import cli + @pytest.mark.asyncio -async def test_cli_scan(monkeypatch): - from bbot import cli +async def test_cli_scope(monkeypatch, capsys): + import json monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + # basic target without whitelist + monkeypatch.setattr( + "sys.argv", + ["bbot", "-t", "one.one.one.one", "-c", "scope_report_distance=10", "dns_resolution=true", "--json"], + ) + result = await cli._main() + out, err = capsys.readouterr() + assert result == True + lines = [json.loads(l) for l in out.splitlines()] + dns_events 
= [l for l in lines if l["type"] == "DNS_NAME" and l["data"] == "one.one.one.one"] + assert dns_events + assert all([l["scope_distance"] == 0 and "in-scope" in l["tags"] for l in dns_events]) + assert 1 == len( + [ + l + for l in dns_events + if l["module"] == "TARGET" + and l["scope_distance"] == 0 + and "in-scope" in l["tags"] + and "target" in l["tags"] + ] + ) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.1.1.1"] + assert ip_events + assert all([l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events]) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.0.0.1"] + assert ip_events + assert all([l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in ip_events]) + + # with whitelist + monkeypatch.setattr( + "sys.argv", + [ + "bbot", + "-t", + "one.one.one.one", + "-w", + "192.168.0.1", + "-c", + "scope_report_distance=10", + "dns_resolution=true", + "scope_dns_search_distance=2", + "--json", + ], + ) + result = await cli._main() + out, err = capsys.readouterr() + assert result == True + lines = [json.loads(l) for l in out.splitlines()] + lines = [l for l in lines if l["type"] != "SCAN"] + assert lines + assert not any([l["scope_distance"] == 0 for l in lines]) + dns_events = [l for l in lines if l["type"] == "DNS_NAME" and l["data"] == "one.one.one.one"] + assert dns_events + assert all([l["scope_distance"] == 1 and "distance-1" in l["tags"] for l in dns_events]) + assert 1 == len( + [ + l + for l in dns_events + if l["module"] == "TARGET" + and l["scope_distance"] == 1 + and "distance-1" in l["tags"] + and "target" in l["tags"] + ] + ) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.1.1.1"] + assert ip_events + assert all([l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events]) + ip_events = [l for l in lines if l["type"] == "IP_ADDRESS" and l["data"] == "1.0.0.1"] + assert ip_events + assert all([l["scope_distance"] == 2 and "distance-2" in l["tags"] for l in ip_events]) + + +@pytest.mark.asyncio +async def test_cli_scan(monkeypatch): + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + scans_home = bbot_test_dir / "scans" # basic scan @@ -32,7 +112,7 @@ async def test_cli_scan(monkeypatch): with open(scan_home / "output.csv") as f: lines = f.readlines() - assert lines[0] == "Event type,Event data,IP Address,Source Module,Scope Distance,Event Tags\n" + assert lines[0] == "Event type,Event data,IP Address,Source Module,Scope Distance,Event Tags,Discovery Path\n" assert len(lines) > 1, "output.csv is not long enough" ip_success = False @@ -50,8 +130,6 @@ async def test_cli_scan(monkeypatch): @pytest.mark.asyncio async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): - from bbot import cli - caplog.set_level(logging.INFO) monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) @@ -99,7 +177,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert "| bool" in out assert "| emit URLs in addition to DNS_NAMEs" in out assert "| False" in out - assert "| modules.massdns.wordlist" in out + assert "| modules.dnsbrute.wordlist" in out assert "| modules.robots.include_allow" in out # list module options by flag @@ -111,17 +189,17 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert "| bool" in out assert "| emit URLs in addition to DNS_NAMEs" in out assert "| False" in out - assert "| 
modules.massdns.wordlist" in out + assert "| modules.dnsbrute.wordlist" in out assert not "| modules.robots.include_allow" in out # list module options by module - monkeypatch.setattr("sys.argv", ["bbot", "-m", "massdns", "-lmo"]) + monkeypatch.setattr("sys.argv", ["bbot", "-m", "dnsbrute", "-lmo"]) result = await cli._main() out, err = capsys.readouterr() assert result == None - assert out.count("modules.") == out.count("modules.massdns.") + assert out.count("modules.") == out.count("modules.dnsbrute.") assert not "| modules.wayback.urls" in out - assert "| modules.massdns.wordlist" in out + assert "| modules.dnsbrute.wordlist" in out assert not "| modules.robots.include_allow" in out # list output module options by module @@ -171,7 +249,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| massdns" in out + assert "| dnsbrute " in out assert "| httpx" in out assert "| robots" in out @@ -180,7 +258,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| massdns" in out + assert "| dnsbrute " in out assert "| httpx" in out assert not "| robots" in out @@ -189,7 +267,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| massdns" in out + assert "| dnsbrute " in out assert not "| httpx" in out # list modules by flag + excluded flag @@ -197,15 +275,15 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| massdns" in out + assert "| dnsbrute " in out assert not "| httpx" in out # list modules by flag + excluded module - monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-em", "massdns", "-l"]) + monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-em", "dnsbrute", "-l"]) result = await cli._main() out, err = capsys.readouterr() assert result == None - assert not "| massdns" in out + assert not "| dnsbrute " in out assert "| httpx" in out # output modules override @@ -309,14 +387,49 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert result == True, "-m nuclei failed to run with --allow-deadly" # install all deps - # monkeypatch.setattr("sys.argv", ["bbot", "--install-all-deps"]) - # success = await cli._main() - # assert success, "--install-all-deps failed for at least one module" + monkeypatch.setattr("sys.argv", ["bbot", "--install-all-deps"]) + success = await cli._main() + assert success == True, "--install-all-deps failed for at least one module" -def test_cli_config_validation(monkeypatch, caplog): - from bbot import cli +@pytest.mark.asyncio +async def test_cli_customheaders(monkeypatch, caplog, capsys): + monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) + monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) + import yaml + + # test custom headers + monkeypatch.setattr( + "sys.argv", ["bbot", "--custom-headers", "foo=bar", "foo2=bar2", "foo3=bar=3", "--current-preset"] + ) + success = await cli._main() + assert success == None, "setting custom headers on command line failed" + captured = capsys.readouterr() + stdout_preset = yaml.safe_load(captured.out) + assert stdout_preset["config"]["http_headers"] == {"foo": "bar", "foo2": "bar2", 
"foo3": "bar=3"} + # test custom headers invalid (no "=") + monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "justastring", "--current-preset"]) + result = await cli._main() + assert result == None + assert "Custom headers not formatted correctly (missing '=')" in caplog.text + caplog.clear() + + # test custom headers invalid (missing key) + monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "=nokey", "--current-preset"]) + result = await cli._main() + assert result == None + assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text + caplog.clear() + + # test custom headers invalid (missing value) + monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "missingvalue=", "--current-preset"]) + result = await cli._main() + assert result == None + assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text + + +def test_cli_config_validation(monkeypatch, caplog): monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) @@ -325,7 +438,7 @@ def test_cli_config_validation(monkeypatch, caplog): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-c", "modules.ipnegibhor.num_bits=4"]) cli.main() - assert 'Could not find module option "modules.ipnegibhor.num_bits"' in caplog.text + assert 'Could not find config option "modules.ipnegibhor.num_bits"' in caplog.text assert 'Did you mean "modules.ipneighbor.num_bits"?' in caplog.text # incorrect global option @@ -333,31 +446,29 @@ def test_cli_config_validation(monkeypatch, caplog): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-c", "web_spier_distance=4"]) cli.main() - assert 'Could not find module option "web_spier_distance"' in caplog.text + assert 'Could not find config option "web_spier_distance"' in caplog.text assert 'Did you mean "web_spider_distance"?' in caplog.text def test_cli_module_validation(monkeypatch, caplog): - from bbot import cli - monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) # incorrect module caplog.clear() assert not caplog.text - monkeypatch.setattr("sys.argv", ["bbot", "-m", "massdnss"]) + monkeypatch.setattr("sys.argv", ["bbot", "-m", "dnsbrutes"]) cli.main() - assert 'Could not find scan module "massdnss"' in caplog.text - assert 'Did you mean "massdns"?' in caplog.text + assert 'Could not find scan module "dnsbrutes"' in caplog.text + assert 'Did you mean "dnsbrute"?' in caplog.text # incorrect excluded module caplog.clear() assert not caplog.text - monkeypatch.setattr("sys.argv", ["bbot", "-em", "massdnss"]) + monkeypatch.setattr("sys.argv", ["bbot", "-em", "dnsbrutes"]) cli.main() - assert 'Could not find module "massdnss"' in caplog.text - assert 'Did you mean "massdns"?' in caplog.text + assert 'Could not find module "dnsbrutes"' in caplog.text + assert 'Did you mean "dnsbrute"?' in caplog.text # incorrect output module caplog.clear() @@ -458,7 +569,6 @@ def test_cli_module_validation(monkeypatch, caplog): def test_cli_presets(monkeypatch, capsys, caplog): import yaml - from bbot import cli monkeypatch.setattr(sys, "exit", lambda *args, **kwargs: True) monkeypatch.setattr(os, "_exit", lambda *args, **kwargs: True) @@ -545,3 +655,6 @@ def test_cli_presets(monkeypatch, capsys, caplog): monkeypatch.setattr("sys.argv", ["bbot", "-p", "asdfasdfasdf", "-y"]) cli.main() assert "file does not exist. 
Use -lp to list available presets" in caplog.text + + preset1_file.unlink() + preset2_file.unlink() diff --git a/bbot/test/test_step_1/test_command.py b/bbot/test/test_step_1/test_command.py index a3772cefe..b0afdcc78 100644 --- a/bbot/test/test_step_1/test_command.py +++ b/bbot/test/test_step_1/test_command.py @@ -1,3 +1,4 @@ +import time from ..bbot_fixtures import * from subprocess import CalledProcessError @@ -6,6 +7,23 @@ async def test_command(bbot_scanner): scan1 = bbot_scanner() + # test timeouts + command = ["sleep", "3"] + start = time.time() + with pytest.raises(asyncio.exceptions.TimeoutError): + await scan1.helpers.run(command, idle_timeout=1) + end = time.time() + elapsed = end - start + assert 0 < elapsed < 2 + + start = time.time() + with pytest.raises(asyncio.exceptions.TimeoutError): + async for line in scan1.helpers.run_live(command, idle_timeout=1): + print(line) + end = time.time() + elapsed = end - start + assert 0 < elapsed < 2 + # run assert "plumbus\n" == (await scan1.helpers.run(["echo", "plumbus"])).stdout assert b"plumbus\n" == (await scan1.helpers.run(["echo", "plumbus"], text=False)).stdout @@ -60,10 +78,10 @@ async def test_command(bbot_scanner): # test check=True with pytest.raises(CalledProcessError) as excinfo: - lines = [l async for line in scan1.helpers.run_live(["ls", "/aslkdjflasdkfsd"], check=True)] + lines = [line async for line in scan1.helpers.run_live(["ls", "/aslkdjflasdkfsd"], check=True)] assert "No such file or directory" in excinfo.value.stderr with pytest.raises(CalledProcessError) as excinfo: - lines = [l async for line in scan1.helpers.run_live(["ls", "/aslkdjflasdkfsd"], check=True, text=False)] + lines = [line async for line in scan1.helpers.run_live(["ls", "/aslkdjflasdkfsd"], check=True, text=False)] assert b"No such file or directory" in excinfo.value.stderr with pytest.raises(CalledProcessError) as excinfo: await scan1.helpers.run(["ls", "/aslkdjflasdkfsd"], check=True) diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index aa2a27907..7684e5dbf 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -4,6 +4,9 @@ @pytest.mark.asyncio async def test_dns_engine(bbot_scanner): scan = bbot_scanner() + await scan.helpers._mock_dns( + {"one.one.one.one": {"A": ["1.1.1.1"]}, "1.1.1.1.in-addr.arpa": {"PTR": ["one.one.one.one"]}} + ) result = await scan.helpers.resolve("one.one.one.one") assert "1.1.1.1" in result assert not "2606:4700:4700::1111" in result @@ -120,8 +123,8 @@ async def test_dns_resolution(bbot_scanner): # Ensure events with hosts have resolved_hosts attribute populated await scan._prep() - resolved_hosts_event1 = scan.make_event("one.one.one.one", "DNS_NAME", source=scan.root_event) - resolved_hosts_event2 = scan.make_event("http://one.one.one.one/", "URL_UNVERIFIED", source=scan.root_event) + resolved_hosts_event1 = scan.make_event("one.one.one.one", "DNS_NAME", parent=scan.root_event) + resolved_hosts_event2 = scan.make_event("http://one.one.one.one/", "URL_UNVERIFIED", parent=scan.root_event) dnsresolve = scan.modules["dns"] assert hash(resolved_hosts_event1.host) not in dnsresolve._event_cache assert hash(resolved_hosts_event2.host) not in dnsresolve._event_cache @@ -206,60 +209,109 @@ async def test_wildcards(bbot_scanner): assert wildcard_event3.data == "github.io" # dns resolve distance - event_distance_0 = scan.make_event("8.8.8.8", module=scan._make_dummy_module_dns("PTR"), source=scan.root_event) + event_distance_0 = scan.make_event("8.8.8.8", 
module=scan._make_dummy_module_dns("PTR"), parent=scan.root_event) assert event_distance_0.dns_resolve_distance == 0 event_distance_1 = scan.make_event( - "evilcorp.com", module=scan._make_dummy_module_dns("A"), source=event_distance_0 + "evilcorp.com", module=scan._make_dummy_module_dns("A"), parent=event_distance_0 ) assert event_distance_1.dns_resolve_distance == 1 - event_distance_2 = scan.make_event("1.2.3.4", module=scan._make_dummy_module_dns("PTR"), source=event_distance_1) + event_distance_2 = scan.make_event("1.2.3.4", module=scan._make_dummy_module_dns("PTR"), parent=event_distance_1) assert event_distance_2.dns_resolve_distance == 1 event_distance_3 = scan.make_event( - "evilcorp.org", module=scan._make_dummy_module_dns("A"), source=event_distance_2 + "evilcorp.org", module=scan._make_dummy_module_dns("A"), parent=event_distance_2 ) assert event_distance_3.dns_resolve_distance == 2 from bbot.scanner import Scanner # test with full scan - scan2 = Scanner("asdfl.gashdgkjsadgsdf.github.io", config={"dns_resolution": True}) + scan2 = Scanner("asdfl.gashdgkjsadgsdf.github.io", whitelist=["github.io"], config={"dns_resolution": True}) + await scan2._prep() + other_event = scan2.make_event( + "lkjg.sdfgsg.jgkhajshdsadf.github.io", module=scan2.modules["dns"], parent=scan2.root_event + ) + await scan2.ingress_module.queue_event(other_event, {}) + events = [e async for e in scan2.async_start()] + assert len(events) == 3 + assert 1 == len([e for e in events if e.type == "SCAN"]) + unmodified_wildcard_events = [ + e for e in events if e.type == "DNS_NAME" and e.data == "asdfl.gashdgkjsadgsdf.github.io" + ] + assert len(unmodified_wildcard_events) == 1 + assert unmodified_wildcard_events[0].tags.issuperset( + { + "a-record", + "target", + "aaaa-wildcard", + "in-scope", + "subdomain", + "aaaa-record", + "wildcard", + "a-wildcard", + } + ) + modified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and e.data == "_wildcard.github.io"] + assert len(modified_wildcard_events) == 1 + assert modified_wildcard_events[0].tags.issuperset( + { + "a-record", + "aaaa-wildcard", + "in-scope", + "subdomain", + "aaaa-record", + "wildcard", + "a-wildcard", + } + ) + assert modified_wildcard_events[0].host_original == "lkjg.sdfgsg.jgkhajshdsadf.github.io" + + # test with full scan (wildcard detection disabled for domain) + scan2 = Scanner( + "asdfl.gashdgkjsadgsdf.github.io", + whitelist=["github.io"], + config={"dns_wildcard_ignore": ["github.io"], "dns_resolution": True}, + exclude_modules=["cloud"], + ) + await scan2._prep() + other_event = scan2.make_event( + "lkjg.sdfgsg.jgkhajshdsadf.github.io", module=scan2.modules["dns"], parent=scan2.root_event + ) + await scan2.ingress_module.queue_event(other_event, {}) events = [e async for e in scan2.async_start()] - assert len(events) == 2 + assert len(events) == 3 assert 1 == len([e for e in events if e.type == "SCAN"]) + unmodified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and "_wildcard" not in e.data] + assert len(unmodified_wildcard_events) == 2 assert 1 == len( [ e - for e in events - if e.type == "DNS_NAME" - and e.data == "_wildcard.github.io" - and all( - t in e.tags - for t in ( - "a-record", + for e in unmodified_wildcard_events + if e.data == "asdfl.gashdgkjsadgsdf.github.io" + and e.tags.issuperset( + { "target", - "aaaa-wildcard", + "a-record", "in-scope", "subdomain", "aaaa-record", - "wildcard", - "a-wildcard", - ) + } ) ] ) - - # test with full scan (wildcard detection disabled for domain) - scan2 = 
Scanner("asdfl.gashdgkjsadgsdf.github.io", config={"dns_wildcard_ignore": ["github.io"]}) - events = [e async for e in scan2.async_start()] - assert len(events) == 2 - assert 1 == len([e for e in events if e.type == "SCAN"]) assert 1 == len( [ e - for e in events - if e.type == "DNS_NAME" - and e.data == "asdfl.gashdgkjsadgsdf.github.io" - and all(t in e.tags for t in ("a-record", "target", "in-scope", "subdomain", "aaaa-record")) - and not any(t in e.tags for t in ("wildcard", "a-wildcard", "aaaa-wildcard")) + for e in unmodified_wildcard_events + if e.data == "lkjg.sdfgsg.jgkhajshdsadf.github.io" + and e.tags.issuperset( + { + "a-record", + "in-scope", + "subdomain", + "aaaa-record", + } + ) ] ) + modified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and e.data == "_wildcard.github.io"] + assert len(modified_wildcard_events) == 0 diff --git a/bbot/test/test_step_1/test_engine.py b/bbot/test/test_step_1/test_engine.py new file mode 100644 index 000000000..a8a4156d1 --- /dev/null +++ b/bbot/test/test_step_1/test_engine.py @@ -0,0 +1,146 @@ +from ..bbot_fixtures import * + + +@pytest.mark.asyncio +async def test_engine(): + from bbot.core.engine import EngineClient, EngineServer + + counter = 0 + yield_cancelled = False + yield_errored = False + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + + class TestEngineServer(EngineServer): + + CMDS = { + 0: "return_thing", + 1: "yield_stuff", + } + + async def return_thing(self, n): + nonlocal return_started + nonlocal return_finished + nonlocal return_cancelled + nonlocal return_errored + try: + return_started = True + await asyncio.sleep(n) + return_finished = True + return f"thing{n}" + except asyncio.CancelledError: + return_cancelled = True + raise + except Exception: + return_errored = True + raise + + async def yield_stuff(self, n): + nonlocal counter + nonlocal yield_cancelled + nonlocal yield_errored + try: + for i in range(n): + yield f"thing{i}" + counter += 1 + await asyncio.sleep(0.1) + except asyncio.CancelledError: + yield_cancelled = True + raise + except Exception: + yield_errored = True + raise + + class TestEngineClient(EngineClient): + + SERVER_CLASS = TestEngineServer + + async def return_thing(self, n): + return await self.run_and_return("return_thing", n) + + async def yield_stuff(self, n): + async for _ in self.run_and_yield("yield_stuff", n): + yield _ + + test_engine = TestEngineClient() + + # test return functionality + return_res = await test_engine.return_thing(1) + assert return_res == "thing1" + + # test async generator + assert counter == 0 + assert yield_cancelled == False + yield_res = [r async for r in test_engine.yield_stuff(13)] + assert yield_res == [f"thing{i}" for i in range(13)] + assert len(yield_res) == 13 + assert counter == 13 + + # test async generator with cancellation + counter = 0 + yield_cancelled = False + yield_errored = False + agen = test_engine.yield_stuff(1000) + async for r in agen: + if counter > 10: + await agen.aclose() + break + await asyncio.sleep(5) + assert yield_cancelled == True + assert yield_errored == False + assert counter < 15 + + # test async generator with error + yield_cancelled = False + yield_errored = False + agen = test_engine.yield_stuff(None) + with pytest.raises(BBOTEngineError): + async for _ in agen: + pass + assert yield_cancelled == False + assert yield_errored == True + + # test return with cancellation + return_started = False + return_finished = False + return_cancelled = False + 
return_errored = False + task = asyncio.create_task(test_engine.return_thing(2)) + await asyncio.sleep(1) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + await asyncio.sleep(0.1) + assert return_started == True + assert return_finished == False + assert return_cancelled == True + assert return_errored == False + + # test return with late cancellation + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + task = asyncio.create_task(test_engine.return_thing(1)) + await asyncio.sleep(2) + task.cancel() + result = await task + assert result == "thing1" + assert return_started == True + assert return_finished == True + assert return_cancelled == False + assert return_errored == False + + # test return with error + return_started = False + return_finished = False + return_cancelled = False + return_errored = False + with pytest.raises(BBOTEngineError): + result = await test_engine.return_thing(None) + assert return_started == True + assert return_finished == False + assert return_cancelled == False + assert return_errored == True diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 3b8ec29bf..988054819 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -3,10 +3,15 @@ import ipaddress from ..bbot_fixtures import * +from bbot.scanner import Scanner @pytest.mark.asyncio -async def test_events(events, scan, helpers): +async def test_events(events, helpers): + + scan = Scanner() + await scan._prep() + assert events.ipv4.type == "IP_ADDRESS" assert events.ipv6.type == "IP_ADDRESS" assert events.netv4.type == "IP_RANGE" @@ -100,7 +105,7 @@ async def test_events(events, scan, helpers): # http response assert events.http_response.host == "example.com" assert events.http_response.port == 80 - assert events.http_response.parsed.scheme == "http" + assert events.http_response.parsed_url.scheme == "http" assert events.http_response.with_port().geturl() == "http://example.com:80/" http_response = scan.make_event( @@ -159,8 +164,9 @@ async def test_events(events, scan, helpers): assert events.ipv6_url_unverified.host == ipaddress.ip_address("2001:4860:4860::8888") assert events.ipv6_url_unverified.port == 443 - javascript_event = scan.make_event("http://evilcorp.com/asdf/a.js?b=c#d", "URL_UNVERIFIED", dummy=True) + javascript_event = scan.make_event("http://evilcorp.com/asdf/a.js?b=c#d", "URL_UNVERIFIED", parent=scan.root_event) assert "extension-js" in javascript_event.tags + await scan.ingress_module.handle_event(javascript_event, {}) assert "httpx-only" in javascript_event.tags # scope distance @@ -168,70 +174,76 @@ async def test_events(events, scan, helpers): assert event1._scope_distance == -1 event1.scope_distance = 0 assert event1._scope_distance == 0 - event2 = scan.make_event("2.3.4.5", source=event1) + event2 = scan.make_event("2.3.4.5", parent=event1) assert event2._scope_distance == 1 - event3 = scan.make_event("3.4.5.6", source=event2) + event3 = scan.make_event("3.4.5.6", parent=event2) assert event3._scope_distance == 2 - event4 = scan.make_event("3.4.5.6", source=event3) + event4 = scan.make_event("3.4.5.6", parent=event3) assert event4._scope_distance == 2 - event5 = scan.make_event("4.5.6.7", source=event4) + event5 = scan.make_event("4.5.6.7", parent=event4) assert event5._scope_distance == 3 - url_1 = scan.make_event("https://127.0.0.1/asdf", "URL_UNVERIFIED", source=scan.root_event) + url_1 = 
scan.make_event("https://127.0.0.1/asdf", "URL_UNVERIFIED", parent=scan.root_event) assert url_1.scope_distance == 1 - url_2 = scan.make_event("https://127.0.0.1/test", "URL_UNVERIFIED", source=url_1) + url_2 = scan.make_event("https://127.0.0.1/test", "URL_UNVERIFIED", parent=url_1) assert url_2.scope_distance == 1 - url_3 = scan.make_event("https://127.0.0.2/asdf", "URL_UNVERIFIED", source=url_1) + url_3 = scan.make_event("https://127.0.0.2/asdf", "URL_UNVERIFIED", parent=url_1) assert url_3.scope_distance == 2 - org_stub_1 = scan.make_event("STUB1", "ORG_STUB", source=scan.root_event) + org_stub_1 = scan.make_event("STUB1", "ORG_STUB", parent=scan.root_event) org_stub_1.scope_distance == 1 - org_stub_2 = scan.make_event("STUB2", "ORG_STUB", source=org_stub_1) + org_stub_2 = scan.make_event("STUB2", "ORG_STUB", parent=org_stub_1) org_stub_2.scope_distance == 2 # internal event tracking root_event = scan.make_event("0.0.0.0", dummy=True) - internal_event1 = scan.make_event("1.2.3.4", source=root_event, internal=True) + internal_event1 = scan.make_event("1.2.3.4", parent=root_event, internal=True) assert internal_event1._internal == True assert "internal" in internal_event1.tags # tag inheritance for tag in ("affiliate", "mutation-1"): - affiliate_event = scan.make_event("1.2.3.4", source=root_event, tags=tag) + affiliate_event = scan.make_event("1.2.3.4", parent=root_event, tags=tag) assert tag in affiliate_event.tags - affiliate_event2 = scan.make_event("1.2.3.4:88", source=affiliate_event) - affiliate_event3 = scan.make_event("4.3.2.1:88", source=affiliate_event) + affiliate_event2 = scan.make_event("1.2.3.4:88", parent=affiliate_event) + affiliate_event3 = scan.make_event("4.3.2.1:88", parent=affiliate_event) assert tag in affiliate_event2.tags assert tag not in affiliate_event3.tags + # discovery context + event = scan.make_event( + "127.0.0.1", parent=scan.root_event, context="something discovered {event.type}: {event.data}" + ) + assert event.discovery_context == "something discovered IP_ADDRESS: 127.0.0.1" + # updating an already-created event with make_event() # updating tags - event1 = scan.make_event("127.0.0.1", source=scan.root_event) + event1 = scan.make_event("127.0.0.1", parent=scan.root_event) updated_event = scan.make_event(event1, tags="asdf") assert "asdf" not in event1.tags assert "asdf" in updated_event.tags - # updating source - event2 = scan.make_event("127.0.0.1", source=scan.root_event) - updated_event = scan.make_event(event2, source=event1) - assert event2.source == scan.root_event - assert updated_event.source == event1 + # updating parent + event2 = scan.make_event("127.0.0.1", parent=scan.root_event) + updated_event = scan.make_event(event2, parent=event1) + assert event2.parent == scan.root_event + assert updated_event.parent == event1 # updating module - event3 = scan.make_event("127.0.0.1", source=scan.root_event) + event3 = scan.make_event("127.0.0.1", parent=scan.root_event) updated_event = scan.make_event(event3, internal=True) assert event3.internal == False assert updated_event.internal == True # event sorting - parent1 = scan.make_event("127.0.0.1", source=scan.root_event) - parent2 = scan.make_event("127.0.0.1", source=scan.root_event) - parent2_child1 = scan.make_event("127.0.0.1", source=parent2) - parent1_child1 = scan.make_event("127.0.0.1", source=parent1) - parent1_child2 = scan.make_event("127.0.0.1", source=parent1) - parent1_child2_child1 = scan.make_event("127.0.0.1", source=parent1_child2) - parent1_child2_child2 = 
scan.make_event("127.0.0.1", source=parent1_child2) - parent1_child1_child1 = scan.make_event("127.0.0.1", source=parent1_child1) - parent2_child2 = scan.make_event("127.0.0.1", source=parent2) - parent1_child2_child1_child1 = scan.make_event("127.0.0.1", source=parent1_child2_child1) + parent1 = scan.make_event("127.0.0.1", parent=scan.root_event) + parent2 = scan.make_event("127.0.0.1", parent=scan.root_event) + parent2_child1 = scan.make_event("127.0.0.1", parent=parent2) + parent1_child1 = scan.make_event("127.0.0.1", parent=parent1) + parent1_child2 = scan.make_event("127.0.0.1", parent=parent1) + parent1_child2_child1 = scan.make_event("127.0.0.1", parent=parent1_child2) + parent1_child2_child2 = scan.make_event("127.0.0.1", parent=parent1_child2) + parent1_child1_child1 = scan.make_event("127.0.0.1", parent=parent1_child1) + parent2_child2 = scan.make_event("127.0.0.1", parent=parent2) + parent1_child2_child1_child1 = scan.make_event("127.0.0.1", parent=parent1_child2_child1) sortable_events = { "parent1": parent1, @@ -392,61 +404,78 @@ async def test_events(events, scan, helpers): # test event serialization from bbot.core.event import event_from_json - db_event = scan.make_event("evilcorp.com", dummy=True) + db_event = scan.make_event("evilcorp.com:80", parent=scan.root_event, context="test context") db_event._resolved_hosts = {"127.0.0.1"} db_event.scope_distance = 1 + assert db_event.discovery_context == "test context" + assert db_event.discovery_path == ["test context"] timestamp = db_event.timestamp.timestamp() json_event = db_event.json() assert json_event["scope_distance"] == 1 - assert json_event["data"] == "evilcorp.com" - assert json_event["type"] == "DNS_NAME" + assert json_event["data"] == "evilcorp.com:80" + assert json_event["type"] == "OPEN_TCP_PORT" + assert json_event["host"] == "evilcorp.com" assert json_event["timestamp"] == timestamp + assert json_event["discovery_context"] == "test context" + assert json_event["discovery_path"] == ["test context"] reconstituted_event = event_from_json(json_event) assert reconstituted_event.scope_distance == 1 assert reconstituted_event.timestamp.timestamp() == timestamp - assert reconstituted_event.data == "evilcorp.com" - assert reconstituted_event.type == "DNS_NAME" + assert reconstituted_event.data == "evilcorp.com:80" + assert reconstituted_event.type == "OPEN_TCP_PORT" + assert reconstituted_event.host == "evilcorp.com" + assert reconstituted_event.discovery_context == "test context" + assert reconstituted_event.discovery_path == ["test context"] assert "127.0.0.1" in reconstituted_event.resolved_hosts + hostless_event = scan.make_event("asdf", "ASDF", dummy=True) + hostless_event_json = hostless_event.json() + assert hostless_event_json["type"] == "ASDF" + assert hostless_event_json["data"] == "asdf" + assert not "host" in hostless_event_json # SIEM-friendly serialize/deserialize json_event_siemfriendly = db_event.json(siem_friendly=True) assert json_event_siemfriendly["scope_distance"] == 1 - assert json_event_siemfriendly["data"] == {"DNS_NAME": "evilcorp.com"} - assert json_event_siemfriendly["type"] == "DNS_NAME" + assert json_event_siemfriendly["data"] == {"OPEN_TCP_PORT": "evilcorp.com:80"} + assert json_event_siemfriendly["type"] == "OPEN_TCP_PORT" + assert json_event_siemfriendly["host"] == "evilcorp.com" assert json_event_siemfriendly["timestamp"] == timestamp reconstituted_event2 = event_from_json(json_event_siemfriendly, siem_friendly=True) assert reconstituted_event2.scope_distance == 1 assert 
reconstituted_event2.timestamp.timestamp() == timestamp - assert reconstituted_event2.data == "evilcorp.com" - assert reconstituted_event2.type == "DNS_NAME" + assert reconstituted_event2.data == "evilcorp.com:80" + assert reconstituted_event2.type == "OPEN_TCP_PORT" + assert reconstituted_event2.host == "evilcorp.com" assert "127.0.0.1" in reconstituted_event2.resolved_hosts - http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", source=scan.root_event) - assert http_response.source_id == scan.root_event.id + http_response = scan.make_event(httpx_response, "HTTP_RESPONSE", parent=scan.root_event) + assert http_response.parent_id == scan.root_event.id assert http_response.data["input"] == "http://example.com:80" json_event = http_response.json(mode="graph") assert isinstance(json_event["data"], str) json_event = http_response.json() assert isinstance(json_event["data"], dict) assert json_event["type"] == "HTTP_RESPONSE" - assert json_event["source"] == scan.root_event.id + assert json_event["host"] == "example.com" + assert json_event["parent"] == scan.root_event.id reconstituted_event = event_from_json(json_event) assert isinstance(reconstituted_event.data, dict) assert reconstituted_event.data["input"] == "http://example.com:80" + assert reconstituted_event.host == "example.com" assert reconstituted_event.type == "HTTP_RESPONSE" - assert reconstituted_event.source_id == scan.root_event.id + assert reconstituted_event.parent_id == scan.root_event.id - event_1 = scan.make_event("127.0.0.1", source=scan.root_event) - event_2 = scan.make_event("127.0.0.2", source=event_1) - event_3 = scan.make_event("127.0.0.3", source=event_2) + event_1 = scan.make_event("127.0.0.1", parent=scan.root_event) + event_2 = scan.make_event("127.0.0.2", parent=event_1) + event_3 = scan.make_event("127.0.0.3", parent=event_2) event_3._omit = True - event_4 = scan.make_event("127.0.0.4", source=event_3) - event_5 = scan.make_event("127.0.0.5", source=event_4) - assert event_5.get_sources() == [event_4, event_3, event_2, event_1, scan.root_event] - assert event_5.get_sources(omit=True) == [event_4, event_2, event_1, scan.root_event] + event_4 = scan.make_event("127.0.0.4", parent=event_3) + event_5 = scan.make_event("127.0.0.5", parent=event_4) + assert event_5.get_parents() == [event_4, event_3, event_2, event_1, scan.root_event] + assert event_5.get_parents(omit=True) == [event_4, event_2, event_1, scan.root_event] # test host backup - host_event = scan.make_event("asdf.evilcorp.com", "DNS_NAME", source=scan.root_event) + host_event = scan.make_event("asdf.evilcorp.com", "DNS_NAME", parent=scan.root_event) assert host_event.host_original == "asdf.evilcorp.com" host_event.host = "_wildcard.evilcorp.com" assert host_event.host == "_wildcard.evilcorp.com" @@ -456,7 +485,179 @@ async def test_events(events, scan, helpers): bucket_event = scan.make_event( {"name": "ASDF.s3.amazonaws.com", "url": "https://ASDF.s3.amazonaws.com"}, "STORAGE_BUCKET", - source=scan.root_event, + parent=scan.root_event, ) assert bucket_event.data["name"] == "asdf.s3.amazonaws.com" assert bucket_event.data["url"] == "https://asdf.s3.amazonaws.com/" + + # test module sequence + module = scan._make_dummy_module("mymodule") + parent_event_1 = scan.make_event("127.0.0.1", module=module, parent=scan.root_event) + assert str(parent_event_1.module) == "mymodule" + assert str(parent_event_1.module_sequence) == "mymodule" + parent_event_2 = scan.make_event("127.0.0.2", module=module, parent=parent_event_1) + assert 
str(parent_event_2.module) == "mymodule" + assert str(parent_event_2.module_sequence) == "mymodule" + parent_event_3 = scan.make_event("127.0.0.3", module=module, parent=parent_event_2) + assert str(parent_event_3.module) == "mymodule" + assert str(parent_event_3.module_sequence) == "mymodule" + + module = scan._make_dummy_module("mymodule") + parent_event_1 = scan.make_event("127.0.0.1", module=module, parent=scan.root_event) + parent_event_1._omit = True + assert str(parent_event_1.module) == "mymodule" + assert str(parent_event_1.module_sequence) == "mymodule" + parent_event_2 = scan.make_event("127.0.0.2", module=module, parent=parent_event_1) + parent_event_2._omit = True + assert str(parent_event_2.module) == "mymodule" + assert str(parent_event_2.module_sequence) == "mymodule->mymodule" + parent_event_3 = scan.make_event("127.0.0.3", module=module, parent=parent_event_2) + assert str(parent_event_3.module) == "mymodule" + assert str(parent_event_3.module_sequence) == "mymodule->mymodule->mymodule" + + +@pytest.mark.asyncio +async def test_event_discovery_context(): + + from bbot.modules.base import BaseModule + + scan = Scanner("evilcorp.com") + await scan.helpers.dns._mock_dns( + { + "evilcorp.com": {"A": ["1.2.3.4"]}, + "one.evilcorp.com": {"A": ["1.2.3.4"]}, + "two.evilcorp.com": {"A": ["1.2.3.4"]}, + "three.evilcorp.com": {"A": ["1.2.3.4"]}, + "four.evilcorp.com": {"A": ["1.2.3.4"]}, + } + ) + await scan._prep() + + dummy_module_1 = scan._make_dummy_module("module_1") + dummy_module_2 = scan._make_dummy_module("module_2") + + class DummyModule(BaseModule): + watched_events = ["DNS_NAME"] + _name = "dummy_module" + + async def handle_event(self, event): + new_event = None + if event.data == "evilcorp.com": + new_event = scan.make_event( + "one.evilcorp.com", + "DNS_NAME", + event, + context="{module} invoked forbidden magick to discover {event.type} {event.data}", + module=dummy_module_1, + ) + elif event.data == "one.evilcorp.com": + new_event = scan.make_event( + "two.evilcorp.com", + "DNS_NAME", + event, + context="{module} pledged its allegiance to cthulu and was awarded {event.type} {event.data}", + module=dummy_module_1, + ) + elif event.data == "two.evilcorp.com": + new_event = scan.make_event( + "three.evilcorp.com", + "DNS_NAME", + event, + context="{module} asked nicely and was given {event.type} {event.data}", + module=dummy_module_2, + ) + elif event.data == "three.evilcorp.com": + new_event = scan.make_event( + "four.evilcorp.com", + "DNS_NAME", + event, + context="{module} used brute force to obtain {event.type} {event.data}", + module=dummy_module_2, + ) + if new_event is not None: + await self.emit_event(new_event) + + dummy_module = DummyModule(scan) + + scan.modules["dummy_module"] = dummy_module + + test_event = dummy_module.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) + assert test_event.discovery_context == "dummy_module discovered DNS_NAME: evilcorp.com" + + events = [e async for e in scan.async_start()] + assert len(events) == 6 + + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "evilcorp.com" + and e.discovery_context == f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com" + and e.discovery_path == [f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com"] + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "one.evilcorp.com" + and e.discovery_context == "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com" + and e.discovery_path + == [ + 
f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + ] + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "two.evilcorp.com" + and e.discovery_context + == "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com" + and e.discovery_path + == [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com", + ] + ] + ) + assert 1 == len( + [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "three.evilcorp.com" + and e.discovery_context == "module_2 asked nicely and was given DNS_NAME three.evilcorp.com" + and e.discovery_path + == [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com", + "module_2 asked nicely and was given DNS_NAME three.evilcorp.com", + ] + ] + ) + final_path = [ + f"Scan {scan.name} seeded with DNS_NAME: evilcorp.com", + "module_1 invoked forbidden magick to discover DNS_NAME one.evilcorp.com", + "module_1 pledged its allegiance to cthulu and was awarded DNS_NAME two.evilcorp.com", + "module_2 asked nicely and was given DNS_NAME three.evilcorp.com", + "module_2 used brute force to obtain DNS_NAME four.evilcorp.com", + ] + final_event = [ + e + for e in events + if e.type == "DNS_NAME" + and e.data == "four.evilcorp.com" + and e.discovery_context == "module_2 used brute force to obtain DNS_NAME four.evilcorp.com" + and e.discovery_path == final_path + ] + assert 1 == len(final_event) + j = final_event[0].json() + assert j["discovery_path"] == final_path diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 4e3f3993e..65af36c45 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -178,7 +178,11 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert helpers.subdomain_depth("a.evilcorp.com") == 1 assert helpers.subdomain_depth("a.s.d.f.evilcorp.notreal") == 4 + assert helpers.split_host_port("http://evilcorp.co.uk") == ("evilcorp.co.uk", 80) assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) + assert helpers.split_host_port("ws://evilcorp.co.uk") == ("evilcorp.co.uk", 80) + assert helpers.split_host_port("wss://evilcorp.co.uk") == ("evilcorp.co.uk", 443) + assert helpers.split_host_port("WSS://evilcorp.co.uk") == ("evilcorp.co.uk", 443) assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) assert helpers.split_host_port("evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) assert helpers.split_host_port("evilcorp.co.uk") == ("evilcorp.co.uk", None) @@ -214,7 +218,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert helpers.get_file_extension("/etc/passwd") == "" assert helpers.tagify("HttP -_Web Title-- ") == "http-web-title" - tagged_event = scan.make_event("127.0.0.1", source=scan.root_event, tags=["HttP web -__- title "]) + tagged_event = scan.make_event("127.0.0.1", parent=scan.root_event, tags=["HttP web -__- title "]) assert "http-web-title" in tagged_event.tags tagged_event.remove_tag("http-web-title") assert "http-web-title" not in tagged_event.tags @@ -402,6 +406,16 
@@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): await helpers.wordlist("/tmp/a9pseoysadf/asdkgjaosidf") test_file.unlink() + # filename truncation + super_long_filename = "/tmp/" + ("a" * 1024) + ".txt" + with pytest.raises(OSError): + with open(super_long_filename, "w") as f: + f.write("wat") + truncated_filename = helpers.truncate_filename(super_long_filename) + with open(truncated_filename, "w") as f: + f.write("wat") + truncated_filename.unlink() + # misc DNS helpers assert helpers.is_ptr("wsc-11-22-33-44-wat.evilcorp.com") == True assert helpers.is_ptr("wsc-11-22-33-wat.evilcorp.com") == False @@ -431,6 +445,23 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert helpers.bytes_to_human(459819198709) == "428.24GB" assert helpers.human_to_bytes("428.24GB") == 459819198709 + # ordinals + assert helpers.integer_to_ordinal(1) == "1st" + assert helpers.integer_to_ordinal(2) == "2nd" + assert helpers.integer_to_ordinal(3) == "3rd" + assert helpers.integer_to_ordinal(4) == "4th" + assert helpers.integer_to_ordinal(11) == "11th" + assert helpers.integer_to_ordinal(12) == "12th" + assert helpers.integer_to_ordinal(13) == "13th" + assert helpers.integer_to_ordinal(21) == "21st" + assert helpers.integer_to_ordinal(22) == "22nd" + assert helpers.integer_to_ordinal(23) == "23rd" + assert helpers.integer_to_ordinal(101) == "101st" + assert helpers.integer_to_ordinal(111) == "111th" + assert helpers.integer_to_ordinal(112) == "112th" + assert helpers.integer_to_ordinal(113) == "113th" + assert helpers.integer_to_ordinal(0) == "0th" + scan1 = bbot_scanner(modules="ipneighbor") await scan1.load_modules() assert int(helpers.get_size(scan1.modules["ipneighbor"])) > 0 @@ -451,6 +482,37 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): < first_frequencies["e"] ) + # error handling helpers + test_ran = False + try: + try: + raise KeyboardInterrupt("asdf") + except KeyboardInterrupt: + raise ValueError("asdf") + except Exception as e: + assert len(helpers.get_exception_chain(e)) == 2 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, KeyboardInterrupt)]) == 1 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) == True + assert helpers.in_exception_chain(e, (TypeError, OSError)) == False + test_ran = True + assert test_ran + test_ran = False + try: + try: + raise AttributeError("asdf") + except AttributeError: + raise ValueError("asdf") + except Exception as e: + assert len(helpers.get_exception_chain(e)) == 2 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, AttributeError)]) == 1 + assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) == False + assert helpers.in_exception_chain(e, (KeyboardInterrupt, AttributeError)) == True + assert helpers.in_exception_chain(e, (AttributeError,)) == True + test_ran = True + assert test_ran + def test_word_cloud(helpers, bbot_scanner): number_mutations = helpers.word_cloud.get_number_mutations("base2_p013", n=5, padding=2) @@ -678,3 +740,133 @@ def test_liststring_invalidfnchars(helpers): with pytest.raises(ValueError) as e: helpers.parse_list_string("hello,world,bbot|test") assert str(e.value) == "Invalid character in string: bbot|test" + + +# test extract_params_html +@pytest.mark.asyncio +async 
def test_extract_params_html(helpers): + + html_tests = """ + + + Get extract + + + + + Universal Valid + + + Mixed Validity + Token Examples + + + Common Web Names + API Style Names + + + Invalid + Invalid + Invalid + ", + "###$$$", + "this_parameter_name_is_seriously_way_too_long_to_be_practical_but_hey_look_its_still_technically_valid_wow", + "parens()", + } + getparam_extracted_params = set(getparam_extract_results) + + # Check that all valid parameters are present + for expected_param in getparam_valid_params: + assert expected_param in getparam_extracted_params, f"Missing expected parameter: {expected_param}" + + # Check that no invalid parameters are present + for bad_param in getparam_invalid_params: + assert bad_param not in getparam_extracted_params, f"Invalid parameter found: {bad_param}" + + header_extract_results = set(await helpers.re.extract_params_html(html_tests, "header")) + header_valid_params = { + "name", + "age", + "valid_name", + "valid-name", + "session_token", + "user-name", + "auth-token", + "access_token", + "abcd", + "jqueryget", + } + header_invalid_params = { + "user.id", + "client.id", + "invalid,name", + " - - - - - - - - - - - - - - diff --git a/bbot/test/test_step_2/template_tests/__init__.py b/bbot/test/test_step_2/template_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py b/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py new file mode 100644 index 000000000..1ef87d3d1 --- /dev/null +++ b/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py @@ -0,0 +1,86 @@ +from ..module_tests.base import ModuleTestBase + + +class TestSubdomainEnum(ModuleTestBase): + targets = ["blacklanternsecurity.com"] + modules_overrides = [] + config_overrides = {"dns_resolution": True, "scope_report_distance": 10} + dedup_strategy = "highest_parent" + + txt = [ + "www.blacklanternsecurity.com", + "asdf.www.blacklanternsecurity.com", + "test.asdf.www.blacklanternsecurity.com", + "api.test.asdf.www.blacklanternsecurity.com", + ] + + async def setup_after_prep(self, module_test): + dns_mock = { + "evilcorp.com": {"A": ["127.0.0.6"]}, + "blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "asdf.www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "test.asdf.www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + "api.test.asdf.www.blacklanternsecurity.com": {"A": ["127.0.0.5"]}, + } + if self.txt: + dns_mock["blacklanternsecurity.com"]["TXT"] = self.txt + await module_test.mock_dns(dns_mock) + + # load subdomain enum template as module + from bbot.modules.templates.subdomain_enum import subdomain_enum + + subdomain_enum_module = subdomain_enum(module_test.scan) + + self.queries = [] + + async def mock_query(query): + self.queries.append(query) + + subdomain_enum_module.query = mock_query + subdomain_enum_module.dedup_strategy = self.dedup_strategy + module_test.scan.modules["subdomain_enum"] = subdomain_enum_module + + def check(self, module_test, events): + in_scope_dns_names = [e for e in events if e.type == "DNS_NAME" and e.scope_distance == 0] + assert len(in_scope_dns_names) == 5 + assert 1 == len([e for e in in_scope_dns_names if e.data == "blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e for e in 
in_scope_dns_names if e.data == "test.asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "api.test.asdf.www.blacklanternsecurity.com"]) + assert len(self.queries) == 1 + assert self.queries[0] == "blacklanternsecurity.com" + + +class TestSubdomainEnumHighestParent(TestSubdomainEnum): + targets = ["api.test.asdf.www.blacklanternsecurity.com", "evilcorp.com"] + whitelist = ["www.blacklanternsecurity.com"] + modules_overrides = ["speculate"] + dedup_strategy = "highest_parent" + txt = None + + def check(self, module_test, events): + in_scope_dns_names = [e for e in events if e.type == "DNS_NAME" and e.scope_distance == 0] + distance_1_dns_names = [e for e in events if e.type == "DNS_NAME" and e.scope_distance == 1] + assert len(in_scope_dns_names) == 4 + assert 1 == len([e for e in in_scope_dns_names if e.data == "www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "test.asdf.www.blacklanternsecurity.com"]) + assert 1 == len([e for e in in_scope_dns_names if e.data == "api.test.asdf.www.blacklanternsecurity.com"]) + assert len(distance_1_dns_names) == 2 + assert 1 == len([e for e in distance_1_dns_names if e.data == "evilcorp.com"]) + assert 1 == len([e for e in distance_1_dns_names if e.data == "blacklanternsecurity.com"]) + assert len(self.queries) == 1 + assert self.queries[0] == "www.blacklanternsecurity.com" + + +class TestSubdomainEnumLowestParent(TestSubdomainEnumHighestParent): + dedup_strategy = "lowest_parent" + + def check(self, module_test, events): + assert set(self.queries) == { + "test.asdf.www.blacklanternsecurity.com", + "asdf.www.blacklanternsecurity.com", + "www.blacklanternsecurity.com", + } diff --git a/docs/comparison.md b/docs/comparison.md index 3226036f1..183e84319 100644 --- a/docs/comparison.md +++ b/docs/comparison.md @@ -2,7 +2,7 @@ BBOT does a lot more than just subdomain enumeration. However, subdomain enumeration is arguably the most important part of OSINT, and since there's so many subdomain enumeration tools out there, they're the easiest class of tool to compare it to. -Thanks to BBOT's recursive nature (and its `massdns` module with its NLP-powered subdomain mutations), it typically finds about 20-25% more than other tools such as `Amass` or `theHarvester`. This holds true even for larger targets like `delta.com` (1000+ subdomains): +Thanks to BBOT's recursive nature (and its `dnsbrute_mutations` module with its NLP-powered subdomain mutations), it typically finds about 20-25% more than other tools such as `Amass` or `theHarvester`. This holds true especially for larger targets like `delta.com` (1000+ subdomains): ### Subdomains Found diff --git a/docs/dev/helpers/command.md b/docs/dev/helpers/command.md index b2b9171f5..3716d2037 100644 --- a/docs/dev/helpers/command.md +++ b/docs/dev/helpers/command.md @@ -1,6 +1,6 @@ # Command Helpers -These are helpers related to executing shell commands. They are used throughout BBOT and its modules for executing various binaries such as `nmap`, `nuclei`, etc. +These are helpers related to executing shell commands. They are used throughout BBOT and its modules for executing various binaries such as `masscan`, `nuclei`, etc. These helpers can be invoked directly from `self.helpers`, but inside a module they should always use `self.run_process()` or `self.run_process_live()`. 
These are light wrappers which ensure the running process is tracked by the module so that it can be easily terminated should the user need to kill the module:
diff --git a/docs/dev/module_howto.md b/docs/dev/module_howto.md
index 94d8ffe60..9f9569150 100644
--- a/docs/dev/module_howto.md
+++ b/docs/dev/module_howto.md
@@ -40,7 +40,7 @@ class whois(BaseModule):
         self.hugeinfo(f"Visiting {url}")
         response = await self.helpers.request(url)
         if response is not None:
-            await self.emit_event(response.json(), "WHOIS", source=event)
+            await self.emit_event(response.json(), "WHOIS", parent=event)
 ```
 
 After saving the module, you can run it with `-m`:
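For module authors, the practical upshot of the `source=` → `parent=` rename above is easiest to see in one place. Below is a minimal sketch of a custom module written against the new keyword, loosely modeled on the `whois` example; the module name, API URL, and option values are illustrative assumptions rather than anything added by this changeset, and only `watched_events`, `self.hugeinfo()`, `self.helpers.request()`, and `await self.emit_event(..., parent=event)` are taken from the surrounding diff.

```python
from bbot.modules.base import BaseModule


class example_whois(BaseModule):
    # Hypothetical module for illustration only: consume DNS_NAMEs, emit WHOIS events
    watched_events = ["DNS_NAME"]
    produced_events = ["WHOIS"]
    flags = ["passive", "safe"]

    async def handle_event(self, event):
        # Assumed placeholder endpoint; not a real service used by BBOT
        url = f"https://api.example.com/whois/{event.data}"
        self.hugeinfo(f"Visiting {url}")
        response = await self.helpers.request(url)
        if response is not None:
            # The originating event is now passed as `parent=` (formerly `source=`)
            await self.emit_event(response.json(), "WHOIS", parent=event)
```

If a module shells out to an external binary instead of making an HTTP request, the command helpers documented above apply: calling `await self.run_process(...)` from inside the module keeps the child process registered with that module, so it can be torn down cleanly if the module is killed.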
diff --git a/docs/how_it_works.md b/docs/how_it_works.md
index d16bb50cf..aa4348fc7 100644
--- a/docs/how_it_works.md
+++ b/docs/how_it_works.md
@@ -1,6 +1,6 @@
 # What is BBOT?
 
-BBOT is a system of individual modules that interchange data **recursively**. Every module (e.g. `nmap`) _consumes_ a type of data (e.g. a `DNS_NAME`) and _emits_ another kind, (an `OPEN_TCP_PORT`). These bits of data, called [events](scanning/events.md), become the output of the tool, but are also redistributed to all the other modules, prompting them to dig deeper, and feeding the recursive cycle of discovery.
+BBOT is a system of individual modules that interchange data **recursively**. Every module (e.g. `portscan`) _consumes_ a type of data (e.g. a `DNS_NAME`) and _emits_ another kind (an `OPEN_TCP_PORT`). These bits of data, called [events](scanning/events.md), become the output of the tool, but are also redistributed to all the other modules, prompting them to dig deeper, and feeding the recursive cycle of discovery.
 
 ![recursion](https://github.com/blacklanternsecurity/bbot/assets/20261699/7b2edfca-2692-463b-939b-ab9d52d2fe00)
 
@@ -24,21 +24,21 @@ Recursion is at the heart of BBOT's design. Each newly-discovered piece of data
 
 ## Module Example
 
-In a simple example, we run a BBOT scan with **three modules**: `nmap`, `sslcert`, and `httpx`. Each of these modules "consume" a certain type of data:
+In a simple example, we run a BBOT scan with **three modules**: `portscan`, `sslcert`, and `httpx`. Each of these modules "consumes" a certain type of data:
 
-- **`nmap`** consumes `DNS_NAME`s, port-scans them, and outputs `OPEN_TCP_PORT`s
+- **`portscan`** consumes `DNS_NAME`s, port-scans them, and outputs `OPEN_TCP_PORT`s
 - **`sslcert`** consumes `OPEN_TCP_PORT`s, grabs certs, and extracts `DNS_NAME`s
 - **`httpx`** consumes `OPEN_TCP_PORT`s and visits any web services, ultimately producing new `DNS_NAME`s
 
 ```mermaid
 graph TD
-    nmap -->|OPEN_TCP_PORT| sslcert;
-    nmap -->|OPEN_TCP_PORT| httpx;
-    sslcert --> |DNS_NAME| nmap;
-    httpx --> |DNS_NAME| nmap;
+    portscan -->|OPEN_TCP_PORT| sslcert;
+    portscan -->|OPEN_TCP_PORT| httpx;
+    sslcert --> |DNS_NAME| portscan;
+    httpx --> |DNS_NAME| portscan;
 ```
 
-This allows for some interesting chains of events. Given a single target such as `evilcorp.com`, `nmap` may start by discovering an `OPEN_TCP_PORT` `evilcorp.com:443`. `sslcert` and `httpx` will then visit that port and extract more hostnames, which are in turn scanned by `nmap` to produce more open ports which are visited by `sslcert` and `httpx`, which discover more hostnames, which are again passed to `nmap`, and so on...
+This allows for some interesting chains of events. Given a single target such as `evilcorp.com`, `portscan` may start by discovering an `OPEN_TCP_PORT` `evilcorp.com:443`. `sslcert` and `httpx` will then visit that port and extract more hostnames, which are in turn scanned by `portscan` to produce more open ports which are visited by `sslcert` and `httpx`, which discover more hostnames, which are again passed to `portscan`, and so on...
 
 This is a simple example with only a few modules, but you can begin to see how if 30 or 40 modules were enabled, they could feed each other exponentially to produce an immense amount of data. This recursion is exactly how BBOT is able to outperform other tools.
diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md
index f149e27b4..0a0cf21d5 100644
--- a/docs/modules/list_of_modules.md
+++ b/docs/modules/list_of_modules.md
@@ -1,126 +1,127 @@
 # List of Modules
 
-| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events |
-|--------|------|---------------|-------------|-------|-----------------|------------------|
-| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY |
-| baddns | scan | No | Check hosts for domain/subdomain takeovers | active, baddns, cloud-enum, safe, subdomain-hijack, web-basic | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING, VULNERABILITY |
-| baddns_zone | scan | No | Check hosts for DNS zone transfers and NSEC walks | active, baddns, cloud-enum, safe, subdomain-enum | DNS_NAME | FINDING, VULNERABILITY |
-| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY |
-| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET |
-| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET |
-| bucket_digitalocean | scan | No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET |
-| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET |
-| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET |
-| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING |
-| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY |
-| dockerhub | scan | No | Search for docker repositories of discovered orgs/usernames | active, code-enum, safe | ORG_STUB, SOCIAL | CODE_REPOSITORY, SOCIAL, URL_UNVERIFIED |
-| dotnetnuke | scan | No | Scan for critical DotNetNuke (DNN) vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE | TECHNOLOGY, VULNERABILITY |
-| ffuf | scan | No | A fast web fuzzer written in Go | active,
aggressive, deadly | URL | URL_UNVERIFIED | -| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | -| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic | HTTP_RESPONSE, URL_UNVERIFIED | | -| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | -| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | -| git | scan | No | Check for exposed .git repositories | active, code-enum, safe, web-basic | URL | FINDING | -| gitlab | scan | No | Detect GitLab instances and query them for repositories | active, code-enum, safe | HTTP_RESPONSE, SOCIAL, TECHNOLOGY | CODE_REPOSITORY, FINDING, SOCIAL, TECHNOLOGY | -| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | -| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | -| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | -| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | -| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic | URL | URL_HINT | -| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | active, safe | HTTP_RESPONSE | FINDING | -| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | -| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic | HTTP_RESPONSE, URL | DNS_NAME, FINDING | -| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, TECHNOLOGY, VULNERABILITY | -| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic | DNS_NAME, URL_UNVERIFIED | DNS_NAME | -| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | -| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic | URL | URL_UNVERIFIED | -| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic | HTTP_RESPONSE | FINDING | -| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | -| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | -| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | -| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | -| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | -| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | -| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic | HTTP_RESPONSE | TECHNOLOGY | -| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | -| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | -| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | -| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | -| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | -| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| docker_pull | scan | No | Download images from a docker repository | passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | -| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| git_clone | scan | No | Clone code github repositories | passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | -| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | code-enum, passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | -| github_org | scan | No | Query Github's API for organization and member repositories | code-enum, passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | -| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | -| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, TECHNOLOGY, VULNERABILITY | -| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. 
| passive, safe | IP_ADDRESS | GEOLOCATION | -| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | -| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | -| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | -| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| postman | scan | No | Query Postman's API for related workspaces, collections, requests | code-enum, passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | -| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | -| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | -| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | -| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| trufflehog | scan | No | TruffleHog is a tool for finding credentials | code-enum, passive, safe | FILESYSTEM | FINDING, VULNERABILITY | -| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | -| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | -| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | -| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | -| csv | output | No | Output to CSV | | * | | -| discord | output | No | Message a Discord channel when certain events are encountered | | * | | -| emails | output | No | Output any email addresses found belonging to the target domain | | EMAIL_ADDRESS | | -| http | output | No | Send every event to a custom URL via a web request | | * | | -| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | -| neo4j | output | No | Output to Neo4j | | * | | -| python | output | No | Output via Python API | | * | | -| slack | output | No | Message a Slack channel when certain events are encountered | | * | | -| splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | -| stdout | output | No | Output to text | | * | | -| subdomains | output | No | Output only resolved, in-scope subdomains | | DNS_NAME, DNS_NAME_UNRESOLVED | | -| teams | output | No | Message a Teams channel when certain events are encountered | | * | | -| txt | output | No | Output to text | | * | | -| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | -| websocket | output | No | Output to websockets | | * | | -| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | -| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | -| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | +| Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | +|----------------------|----------|-----------------|-----------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------| +| ajaxpro | scan | No | Check for potentially vulnerable Ajaxpro instances | active, safe, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| baddns | scan | No | Check hosts for domain/subdomain takeovers | active, baddns, cloud-enum, safe, subdomain-hijack, web-basic | DNS_NAME, DNS_NAME_UNRESOLVED | FINDING, VULNERABILITY | +| baddns_zone | scan | No | Check hosts for DNS zone transfers and NSEC walks | active, baddns, cloud-enum, safe, subdomain-enum | DNS_NAME | FINDING, VULNERABILITY | +| badsecrets | scan | No | Library for detecting known or weak secrets across many web frameworks | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING, TECHNOLOGY, VULNERABILITY | +| bucket_amazon | scan | No | Check for S3 buckets related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_azure | scan | No | Check for Azure storage blobs related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_digitalocean | scan 
| No | Check for DigitalOcean spaces related to target | active, cloud-enum, safe, slow, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_firebase | scan | No | Check for open Firebase databases related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bucket_google | scan | No | Check for Google object storage related to target | active, cloud-enum, safe, web-basic, web-thorough | DNS_NAME, STORAGE_BUCKET | FINDING, STORAGE_BUCKET | +| bypass403 | scan | No | Check 403 pages for common bypasses | active, aggressive, web-thorough | URL | FINDING | +| dastardly | scan | No | Lightweight web application security scanner | active, aggressive, deadly, slow, web-thorough | HTTP_RESPONSE | FINDING, VULNERABILITY | +| dotnetnuke | scan | No | Scan for critical DotNetNuke (DNN) vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE | TECHNOLOGY, VULNERABILITY | +| ffuf | scan | No | A fast web fuzzer written in Go | active, aggressive, deadly | URL | URL_UNVERIFIED | +| ffuf_shortnames | scan | No | Use ffuf in combination IIS shortnames | active, aggressive, iis-shortnames, web-thorough | URL_HINT | URL_UNVERIFIED | +| filedownload | scan | No | Download common filetypes such as PDF, DOCX, PPTX, etc. | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL_UNVERIFIED | | +| fingerprintx | scan | No | Fingerprint exposed services like RDP, SSH, MySQL, etc. | active, safe, service-enum, slow | OPEN_TCP_PORT | PROTOCOL | +| generic_ssrf | scan | No | Check for generic SSRFs | active, aggressive, web-thorough | URL | VULNERABILITY | +| git | scan | No | Check for exposed .git repositories | active, safe, web-basic, web-thorough | URL | FINDING | +| gitlab | scan | No | Detect GitLab instances and query them for repositories | active, safe | HTTP_RESPONSE, SOCIAL, TECHNOLOGY | CODE_REPOSITORY, FINDING, SOCIAL, TECHNOLOGY | +| gowitness | scan | No | Take screenshots of webpages | active, safe, web-screenshots | SOCIAL, URL | TECHNOLOGY, URL, URL_UNVERIFIED, WEBSCREENSHOT | +| host_header | scan | No | Try common HTTP Host header spoofing techniques | active, aggressive, web-thorough | HTTP_RESPONSE | FINDING | +| httpx | scan | No | Visit webpages. Many other modules rely on httpx | active, cloud-enum, safe, social-enum, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT, URL, URL_UNVERIFIED | HTTP_RESPONSE, URL | +| hunt | scan | No | Watch for commonly-exploitable HTTP parameters | active, safe, web-thorough | HTTP_RESPONSE | FINDING | +| iis_shortnames | scan | No | Check for IIS shortname vulnerability | active, iis-shortnames, safe, web-basic, web-thorough | URL | URL_HINT | +| masscan | scan | No | Port scan with masscan. By default, scans top 100 ports. | active, aggressive, portscan | IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| newsletters | scan | No | Searches for Newsletter Submission Entry Fields on Websites | active, safe | HTTP_RESPONSE | FINDING | +| nmap | scan | No | Port scan with nmap. By default, scans top 100 ports. 
| active, aggressive, portscan, web-thorough | DNS_NAME, IP_ADDRESS, IP_RANGE | OPEN_TCP_PORT | +| ntlm | scan | No | Watch for HTTP endpoints that support NTLM authentication | active, safe, web-basic, web-thorough | HTTP_RESPONSE, URL | DNS_NAME, FINDING | +| nuclei | scan | No | Fast and customisable vulnerability scanner | active, aggressive, deadly | URL | FINDING, TECHNOLOGY, VULNERABILITY | +| oauth | scan | No | Enumerate OAUTH and OpenID Connect services | active, affiliates, cloud-enum, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME, URL_UNVERIFIED | DNS_NAME | +| paramminer_cookies | scan | No | Smart brute-force to check for common HTTP cookie parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_getparams | scan | No | Use smart brute-force to check for common HTTP GET parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| paramminer_headers | scan | No | Use smart brute-force to check for common HTTP header parameters | active, aggressive, slow, web-paramminer | HTTP_RESPONSE | FINDING | +| robots | scan | No | Look for and parse robots.txt | active, safe, web-basic, web-thorough | URL | URL_UNVERIFIED | +| secretsdb | scan | No | Detect common secrets with secrets-patterns-db | active, safe, web-basic, web-thorough | HTTP_RESPONSE | FINDING | +| smuggler | scan | No | Check for HTTP smuggling | active, aggressive, slow, web-thorough | URL | FINDING | +| sslcert | scan | No | Visit open ports and retrieve SSL certificates | active, affiliates, email-enum, safe, subdomain-enum, web-basic, web-thorough | OPEN_TCP_PORT | DNS_NAME, EMAIL_ADDRESS | +| telerik | scan | No | Scan for critical Telerik vulnerabilities | active, aggressive, web-thorough | HTTP_RESPONSE, URL | FINDING, VULNERABILITY | +| url_manipulation | scan | No | Attempt to identify URL parsing/routing based vulnerabilities | active, aggressive, web-thorough | URL | FINDING | +| vhost | scan | No | Fuzz for virtual hosts | active, aggressive, deadly, slow | URL | DNS_NAME, VHOST | +| wafw00f | scan | No | Web Application Firewall Fingerprinting Tool | active, aggressive | URL | WAF | +| wappalyzer | scan | No | Extract technologies from web responses | active, safe, web-basic, web-thorough | HTTP_RESPONSE | TECHNOLOGY | +| affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | +| anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS | ASN | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic, web-thorough | DNS_NAME | URL_UNVERIFIED | +| azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| bucket_file_enum | scan | No | Works in conjunction with the filedownload module to download files from open storage buckets. 
Currently supported cloud providers: AWS | cloud-enum, passive, safe | STORAGE_BUCKET | URL_UNVERIFIED | +| builtwith | scan | Yes | Query Builtwith.com for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| code_repository | scan | No | Look for code repository links in webpages | passive, repo-enum, safe | URL_UNVERIFIED | CODE_REPOSITORY | +| columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| credshed | scan | Yes | Send queries to your own credshed server to check for known credentials of your targets | passive, safe | DNS_NAME | EMAIL_ADDRESS, HASHED_PASSWORD, PASSWORD, USERNAME | +| crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dehashed | scan | Yes | Execute queries against dehashed.com for exposed credentials | email-enum, passive, safe | DNS_NAME | HASHED_PASSWORD, PASSWORD, USERNAME | +| digitorus | scan | No | Query certificatedetails.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnscommonsrv | scan | No | Check for common SRV records | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| docker_pull | scan | No | Download images from a docker repository | passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | +| dockerhub | scan | No | Search for docker repositories of discovered orgs/usernames | passive, safe | ORG_STUB, SOCIAL | CODE_REPOSITORY, SOCIAL, URL_UNVERIFIED | +| emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| fullhunt | scan | Yes | Query the fullhunt.io API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| git_clone | scan | No | Clone code github repositories | passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | +| github_codesearch | scan | Yes | Query Github's API for code containing the target domain name | passive, safe, subdomain-enum | DNS_NAME | CODE_REPOSITORY, URL_UNVERIFIED | +| github_org | scan | No | Query Github's API for organization and member repositories | passive, safe, subdomain-enum | ORG_STUB, SOCIAL | CODE_REPOSITORY | +| github_workflows | scan | No | Download a github repositories workflow logs | passive, safe | CODE_REPOSITORY | FILESYSTEM | +| hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | +| internetdb | scan | No | Query Shodan's InternetDB for open ports, hostnames, technologies, and vulnerabilities | passive, portscan, safe, subdomain-enum | DNS_NAME, IP_ADDRESS | DNS_NAME, FINDING, OPEN_TCP_PORT, 
TECHNOLOGY, VULNERABILITY | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | +| ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | +| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION | +| leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, subdomain-enum | DNS_NAME | DNS_NAME | +| myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| otx | scan | No | Query otx.alienvault.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| passivetotal | scan | Yes | Query the PassiveTotal API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| pgp | scan | No | Query common PGP servers for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| postman | scan | No | Query Postman's API for related workspaces, collections, requests | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | +| rapiddns | scan | No | Query rapiddns.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| riddler | scan | No | Query riddler.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| securitytrails | scan | Yes | Query the SecurityTrails API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| shodan_dns | scan | Yes | Query Shodan for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sitedossier | scan | No | Query sitedossier.com for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| skymem | scan | No | Query skymem.info for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | +| social | scan | No | Look for social media links in webpages | passive, safe, social-enum | URL_UNVERIFIED | SOCIAL | +| subdomaincenter | scan | No | Query subdomain.center's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| sublist3r | scan | No | Query sublist3r's API for subdomains | passive, safe | DNS_NAME | DNS_NAME | +| threatminer | scan | No | Query threatminer's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| trufflehog | scan | No | TruffleHog is a tool for finding credentials | passive, safe | FILESYSTEM | FINDING, VULNERABILITY | +| urlscan | scan | No | Query urlscan.io for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| viewdns | scan | No | Query viewdns.info's reverse whois for related domains | affiliates, passive, safe | DNS_NAME | DNS_NAME | +| virustotal | scan | Yes | Query VirusTotal's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| wayback | scan | No | Query archive.org's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | +| zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | +| asset_inventory | output | No | Merge hosts, open ports, technologies, findings, etc. 
into a single asset inventory CSV | | DNS_NAME, FINDING, HTTP_RESPONSE, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY, WAF | IP_ADDRESS, OPEN_TCP_PORT | +| csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | +| emails | output | No | Output any email addresses found belonging to the target domain | email-enum | EMAIL_ADDRESS | | +| http | output | No | Send every event to a custom URL via a web request | | * | | +| human | output | No | Output to text | | * | | +| json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | +| neo4j | output | No | Output to Neo4j | | * | | +| python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | +| splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | +| subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Teams channel when certain events are encountered | | * | | +| web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | +| websocket | output | No | Output to websockets | | * | | +| aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | +| excavate | internal | No | Passively extract juicy tidbits from scan data | passive | HTTP_RESPONSE | URL_UNVERIFIED | +| speculate | internal | No | Derive certain event types from others by common sense | passive | AZURE_TENANT, DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, SOCIAL, STORAGE_BUCKET, URL, URL_UNVERIFIED, USERNAME | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, ORG_STUB | For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). 
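To tie the module table above to the scanning examples later in this changeset, a minimal usage sketch follows; the target names are hypothetical placeholders, while the module names, flags, and CLI switches (`-t`, `-m`, `-f`, `--allow-deadly`) are taken from the table and from the nuclei examples below.

```bash
# Enable individual modules from the table by name
bbot -t evilcorp.com -m httpx sslcert

# Enable every module carrying a given flag, e.g. passive subdomain enumeration
bbot -t evilcorp.com -f subdomain-enum

# Modules flagged "deadly" (dastardly, ffuf, nuclei, vhost) additionally require --allow-deadly
bbot -t evilcorp.com -f web-basic -m nuclei --allow-deadly
```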
diff --git a/docs/modules/nuclei.md b/docs/modules/nuclei.md index 138b2b6ee..f0f3efed6 100644 --- a/docs/modules/nuclei.md +++ b/docs/modules/nuclei.md @@ -103,20 +103,20 @@ The **ratelimit** and **concurrency** settings default to the same defaults that ```bash # Scan a SINGLE target with a basic port scan and web modules -bbot -f web-basic -m nmap nuclei --allow-deadly -t app.evilcorp.com +bbot -f web-basic -m portscan nuclei --allow-deadly -t app.evilcorp.com ``` ```bash # Scanning MULTIPLE targets -bbot -f web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com +bbot -f web-basic -m portscan nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` ```bash # Scanning MULTIPLE targets while performing subdomain enumeration -bbot -f subdomain-enum web-basic -m nmap nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com +bbot -f subdomain-enum web-basic -m portscan nuclei --allow-deadly -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` ```bash # Scanning MULTIPLE targets on a BUDGET -bbot -f subdomain-enum web-basic -m nmap nuclei --allow-deadly -c modules.nuclei.mode=budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com +bbot -f subdomain-enum web-basic -m portscan nuclei --allow-deadly -c modules.nuclei.mode=budget -t app1.evilcorp.com app2.evilcorp.com app3.evilcorp.com ``` diff --git a/docs/release_history.md b/docs/release_history.md index 4b5b15bac..9de5ef6ca 100644 --- a/docs/release_history.md +++ b/docs/release_history.md @@ -1,7 +1,60 @@ +## v1.1.7 +May 15th, 2024 + +### New Modules +- https://github.com/blacklanternsecurity/bbot/pull/1037 +- https://github.com/blacklanternsecurity/bbot/pull/1122 +- https://github.com/blacklanternsecurity/bbot/pull/1176 +- https://github.com/blacklanternsecurity/bbot/pull/1164 +- https://github.com/blacklanternsecurity/bbot/pull/1169 +- https://github.com/blacklanternsecurity/bbot/pull/1175 +- https://github.com/blacklanternsecurity/bbot/pull/1209 +- https://github.com/blacklanternsecurity/bbot/pull/1335 +- https://github.com/blacklanternsecurity/bbot/pull/1380 + +### Improvements +- https://github.com/blacklanternsecurity/bbot/pull/1132 +- https://github.com/blacklanternsecurity/bbot/pull/1156 +- https://github.com/blacklanternsecurity/bbot/pull/1160 +- https://github.com/blacklanternsecurity/bbot/pull/1162 +- https://github.com/blacklanternsecurity/bbot/pull/1165 +- https://github.com/blacklanternsecurity/bbot/pull/1179 +- https://github.com/blacklanternsecurity/bbot/pull/1182 +- https://github.com/blacklanternsecurity/bbot/pull/1185 +- https://github.com/blacklanternsecurity/bbot/pull/1186 +- https://github.com/blacklanternsecurity/bbot/pull/1197 +- https://github.com/blacklanternsecurity/bbot/pull/1198 +- https://github.com/blacklanternsecurity/bbot/pull/1205 +- https://github.com/blacklanternsecurity/bbot/pull/1217 +- https://github.com/blacklanternsecurity/bbot/pull/1233 +- https://github.com/blacklanternsecurity/bbot/pull/1283 +- https://github.com/blacklanternsecurity/bbot/pull/1288 +- https://github.com/blacklanternsecurity/bbot/pull/1296 +- https://github.com/blacklanternsecurity/bbot/pull/1306 +- https://github.com/blacklanternsecurity/bbot/pull/1313 +- https://github.com/blacklanternsecurity/bbot/pull/1349 +- https://github.com/blacklanternsecurity/bbot/pull/1343 + +### Bugfixes +- https://github.com/blacklanternsecurity/bbot/pull/1136 +- https://github.com/blacklanternsecurity/bbot/pull/1140 +- 
https://github.com/blacklanternsecurity/bbot/pull/1152 +- https://github.com/blacklanternsecurity/bbot/pull/1154 +- https://github.com/blacklanternsecurity/bbot/pull/1181 +- https://github.com/blacklanternsecurity/bbot/pull/1247 +- https://github.com/blacklanternsecurity/bbot/pull/1249 +- https://github.com/blacklanternsecurity/bbot/pull/1273 +- https://github.com/blacklanternsecurity/bbot/pull/1277 +- https://github.com/blacklanternsecurity/bbot/pull/1287 +- https://github.com/blacklanternsecurity/bbot/pull/1298 +- https://github.com/blacklanternsecurity/bbot/pull/1308 +- https://github.com/blacklanternsecurity/bbot/pull/1336 +- https://github.com/blacklanternsecurity/bbot/pull/1348 + ## v1.1.6 February 21, 2024 -## Improvements +### Improvements - https://github.com/blacklanternsecurity/bbot/pull/1001 - https://github.com/blacklanternsecurity/bbot/pull/1006 - https://github.com/blacklanternsecurity/bbot/pull/1010 @@ -22,7 +75,7 @@ February 21, 2024 - https://github.com/blacklanternsecurity/bbot/pull/1101 - https://github.com/blacklanternsecurity/bbot/pull/1103 -## Bigfixes +### Bigfixes - https://github.com/blacklanternsecurity/bbot/pull/1005 - https://github.com/blacklanternsecurity/bbot/pull/1022 - https://github.com/blacklanternsecurity/bbot/pull/1030 @@ -37,7 +90,7 @@ February 21, 2024 - https://github.com/blacklanternsecurity/bbot/pull/1094 - https://github.com/blacklanternsecurity/bbot/pull/1098 -## New Modules +### New Modules - https://github.com/blacklanternsecurity/bbot/pull/1072 - https://github.com/blacklanternsecurity/bbot/pull/1091 diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index c416d6350..9e85ac84f 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -10,7 +10,7 @@ Below you can find some advanced uses of BBOT. from bbot.scanner import Scanner # any number of targets can be specified -scan = Scanner("example.com", "scanme.nmap.org", modules=["nmap", "sslcert"]) +scan = Scanner("example.com", "scanme.nmap.org", modules=["portscan", "sslcert"]) for event in scan.start(): print(event.json()) ``` @@ -21,7 +21,7 @@ for event in scan.start(): from bbot.scanner import Scanner async def main(): - scan = Scanner("example.com", "scanme.nmap.org", modules=["nmap", "sslcert"]) + scan = Scanner("example.com", "scanme.nmap.org", modules=["portscan", "sslcert"]) async for event in scan.async_start(): print(event.json()) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index fafedba1e..3f940d576 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -212,7 +212,7 @@ dns_omit_queries: ## Module Config Options -Many modules accept their own configuration options. These options have the ability to change their behavior. For example, the `nmap` module accepts options for `ports`, `timing`, etc. Below is a list of all possible module config options. +Many modules accept their own configuration options. These options have the ability to change their behavior. For example, the `portscan` module accepts options for `ports`, `rate`, etc. Below is a list of all possible module config options. | Config Option | Type | Description | Default | @@ -244,13 +244,14 @@ Many modules accept their own configuration options. 
These options have the abil | modules.filedownload.max_filesize | str | Cancel download if filesize is greater than this size | 10MB | | modules.fingerprintx.version | str | fingerprintx version | 1.1.4 | | modules.gitlab.api_key | str | Gitlab access token | | -| modules.gowitness.output_path | str | where to save screenshots | | -| modules.gowitness.resolution_x | int | screenshot resolution x | 1440 | -| modules.gowitness.resolution_y | int | screenshot resolution y | 900 | +| modules.gowitness.idle_timeout | int | Skip the current gowitness batch if it stalls for longer than this many seconds | 1800 | +| modules.gowitness.output_path | str | Where to save screenshots | | +| modules.gowitness.resolution_x | int | Screenshot resolution x | 1440 | +| modules.gowitness.resolution_y | int | Screenshot resolution y | 900 | | modules.gowitness.social | bool | Whether to screenshot social media webpages | True | -| modules.gowitness.threads | int | threads used to run | 4 | -| modules.gowitness.timeout | int | preflight check timeout | 10 | -| modules.gowitness.version | str | gowitness version | 2.4.2 | +| modules.gowitness.threads | int | How many gowitness threads to spawn (default is number of CPUs x 2) | 0 | +| modules.gowitness.timeout | int | Preflight check timeout | 10 | +| modules.gowitness.version | str | Gowitness version | 2.4.2 | | modules.httpx.in_scope_only | bool | Only visit web resources that are in scope. | True | | modules.httpx.max_response_size | int | Max response size in bytes | 5242880 | | modules.httpx.probe_all_ips | bool | Probe all the ips associated with same host | False | @@ -337,6 +338,8 @@ Many modules accept their own configuration options. These options have the abil | modules.github_org.api_key | str | Github token | | | modules.github_org.include_member_repos | bool | Also enumerate organization members' repositories | False | | modules.github_org.include_members | bool | Enumerate organization members | True | +| modules.github_workflows.api_key | str | Github token | | +| modules.github_workflows.num_logs | int | For each workflow fetch the last N successful runs logs (max 100) | 1 | | modules.hunterio.api_key | str | Hunter.IO API key | | | modules.ip2location.api_key | str | IP2location.io API Key | | | modules.ip2location.lang | str | Translation information(ISO639-1). The translation is only applicable for continent, country, region and city name. | | @@ -354,7 +357,7 @@ Many modules accept their own configuration options. These options have the abil | modules.shodan_dns.api_key | str | Shodan API key | | | modules.trufflehog.concurrency | int | Number of concurrent workers | 8 | | modules.trufflehog.only_verified | bool | Only report credentials that have been verified | True | -| modules.trufflehog.version | str | trufflehog version | 3.69.0 | +| modules.trufflehog.version | str | trufflehog version | 3.75.1 | | modules.urlscan.urls | bool | Emit URLs in addition to DNS_NAMEs | False | | modules.virustotal.api_key | str | VirusTotal API Key | | | modules.wayback.garbage_threshold | int | Dedupe similar urls if they are in a group of this size or higher (lower values == less garbage data) | 10 | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 93d6b6793..241629d78 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -12,7 +12,7 @@ In addition to the obvious data (e.g. 
`www.evilcorp.com`), an event also contain - a `.timestamp` of when the data was discovered - the `.module` that discovered it -- the `.source` event that led to its discovery +- the `.parent` event that led to its discovery - its `.scope_distance` (how many hops it is from the main scope, 0 == in-scope) - a list of `.tags` that describe the data (`mx-record`, `http-title`, etc.) @@ -28,7 +28,7 @@ These attributes allow us to construct a visual graph of events (e.g. in [Neo4j] "scan": "SCAN:4d786912dbc97be199da13074699c318e2067a7f", "timestamp": 1688526222.723366, "resolved_hosts": ["185.199.108.153"], - "source": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7", + "parent": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7", "tags": [ "in-scope", "distance-0", @@ -54,11 +54,11 @@ Below is a full list of event types along with which modules produce/consume the | * | 13 | 0 | affiliates, csv, discord, http, json, neo4j, python, slack, splunk, stdout, teams, txt, websocket | | | ASN | 0 | 1 | | asn | | AZURE_TENANT | 1 | 0 | speculate | | -| CODE_REPOSITORY | 2 | 4 | docker_pull, git_clone | dockerhub, github_codesearch, github_org, gitlab | +| CODE_REPOSITORY | 3 | 5 | docker_pull, git_clone, github_workflows | code_repository, dockerhub, github_codesearch, github_org, gitlab | | DNS_NAME | 57 | 42 | anubisdb, asset_inventory, azure_realm, azure_tenant, baddns, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, massdns, myssl, nmap, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, fullhunt, hackertarget, hunterio, internetdb, leakix, massdns, myssl, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | | DNS_NAME_UNRESOLVED | 3 | 0 | baddns, speculate, subdomains | | | EMAIL_ADDRESS | 1 | 6 | emails | credshed, emailformat, hunterio, pgp, skymem, sslcert | -| FILESYSTEM | 1 | 2 | trufflehog | docker_pull, git_clone | +| FILESYSTEM | 1 | 3 | trufflehog | docker_pull, git_clone, github_workflows | | FINDING | 2 | 28 | asset_inventory, web_report | ajaxpro, baddns, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, gitlab, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, telerik, trufflehog, url_manipulation | | GEOLOCATION | 0 | 2 | | ip2location, ipstack | | HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | @@ -74,7 +74,7 @@ Below is a full list of event types along with which modules produce/consume the | TECHNOLOGY | 3 | 7 | asset_inventory, gitlab, web_report | badsecrets, dotnetnuke, gitlab, gowitness, internetdb, nuclei, wappalyzer | | URL | 19 | 2 | ajaxpro, asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, 
nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | | URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | -| URL_UNVERIFIED | 5 | 14 | filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, postman, robots, urlscan, wayback | +| URL_UNVERIFIED | 6 | 14 | code_repository, filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, postman, robots, urlscan, wayback | | USERNAME | 1 | 2 | speculate | credshed, dehashed | | VHOST | 1 | 1 | web_report | vhost | | VULNERABILITY | 2 | 11 | asset_inventory, web_report | ajaxpro, baddns, baddns_zone, badsecrets, dastardly, dotnetnuke, generic_ssrf, internetdb, nuclei, telerik, trufflehog | diff --git a/docs/scanning/index.md b/docs/scanning/index.md index a7fb35001..21615a6fb 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -107,30 +107,30 @@ A single module can have multiple flags. For example, the `securitytrails` modul ### List of Flags -| Flag | # Modules | Description | Modules | -|------------------|-------------|----------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 80 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, baddns, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, docker_pull, dockerhub, emailformat, filedownload, fingerprintx, fullhunt, git, git_clone, github_codesearch, github_org, gitlab, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, sublist3r, threatminer, trufflehog, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 59 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, docker_pull, emailformat, excavate, fullhunt, git_clone, github_codesearch, github_org, hackertarget, hunterio, internetdb, 
ip2location, ipneighbor, ipstack, leakix, massdns, myssl, otx, passivetotal, pgp, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, trufflehog, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 44 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, baddns_zone, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomaincenter, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 43 | Makes active connections to target systems | ajaxpro, baddns, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dockerhub, dotnetnuke, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gitlab, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, newsletters, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| aggressive | 20 | Generates a large amount of network traffic | bypass403, dastardly, dotnetnuke, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, wappalyzer | -| web-thorough | 13 | More advanced web scanning functionality | ajaxpro, bucket_digitalocean, bypass403, dastardly, dotnetnuke, ffuf_shortnames, generic_ssrf, host_header, hunt, nmap, smuggler, telerik, url_manipulation | -| cloud-enum | 12 | Enumerates cloud resources | azure_realm, azure_tenant, baddns, baddns_zone, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth | -| slow | 10 | May take a long time to complete | bucket_digitalocean, dastardly, docker_pull, fingerprintx, git_clone, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| code-enum | 7 | Find public code repositories and search them for secrets etc. 
| dockerhub, git, github_codesearch, github_org, gitlab, postman, trufflehog | -| email-enum | 6 | Enumerates email addresses | dehashed, emailformat, hunterio, pgp, skymem, sslcert | -| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | -| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| baddns | 2 | Runs all modules from the DNS auditing tool BadDNS | baddns, baddns_zone | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | baddns | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | Modules | +|------------------|-------------|----------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 82 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, asn, azure_realm, azure_tenant, baddns, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, docker_pull, dockerhub, emailformat, filedownload, fingerprintx, fullhunt, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, leakix, myssl, newsletters, ntlm, oauth, otx, passivetotal, pgp, postman, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, sublist3r, threatminer, trufflehog, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 62 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, credshed, crobat, crt, dehashed, digitorus, dnscommonsrv, dnsdumpster, docker_pull, dockerhub, emailformat, excavate, fullhunt, git_clone, github_codesearch, github_org, github_workflows, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, otx, passivetotal, pgp, postman, rapiddns, 
riddler, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, sublist3r, threatminer, trufflehog, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 45 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, baddns_zone, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, massdns, myssl, oauth, otx, passivetotal, postman, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 42 | Makes active connections to target systems | ajaxpro, baddns, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dotnetnuke, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gitlab, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, newsletters, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, sslcert, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 29 | More advanced web scanning functionality | ajaxpro, azure_realm, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dotnetnuke, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, oauth, robots, secretsdb, smuggler, sslcert, telerik, url_manipulation, wappalyzer | +| aggressive | 20 | Generates a large amount of network traffic | bypass403, dastardly, dotnetnuke, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, wappalyzer | +| cloud-enum | 12 | Enumerates cloud resources | azure_realm, azure_tenant, baddns, baddns_zone, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, httpx, oauth | +| slow | 10 | May take a long time to complete | bucket_digitalocean, dastardly, docker_pull, fingerprintx, git_clone, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| email-enum | 7 | Enumerates email addresses | dehashed, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| portscan | 3 | Discovers open ports | internetdb, masscan, nmap | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| baddns | 2 | Runs all modules from the DNS auditing tool BadDNS | baddns, baddns_zone | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| repo-enum | 1 | Enumerates code 
repositories | code_repository | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | baddns | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | ## Dependencies diff --git a/docs/scanning/output.md b/docs/scanning/output.md index c7d69e452..b1b0915c4 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -67,7 +67,7 @@ You will then see [events](events.md) like this: "scan": "SCAN:64c0e076516ae7aa6502fd99489693d0d5ec26cc", "timestamp": 1688518967.740472, "resolved_hosts": ["1.2.3.4"], - "source": "DNS_NAME:2da045542abbf86723f22383d04eb453e573723c", + "parent": "DNS_NAME:2da045542abbf86723f22383d04eb453e573723c", "tags": ["distance-1", "ipv4", "internal"], "module": "A", "module_sequence": "A" diff --git a/docs/scanning/presets.md b/docs/scanning/presets.md index 2ef761405..1da027523 100644 --- a/docs/scanning/presets.md +++ b/docs/scanning/presets.md @@ -87,9 +87,6 @@ config: api_key: 21a270d5f59c9b05813a72bb41707266 virustotal: api_key: 4f41243847da693a4f356c0486114bc6 - # other module config options - massdns: - max_resolvers: 5000 ``` To execute your custom preset, you do: diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 885e461dc..f019f742a 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -30,13 +30,13 @@ To change the number of instances, you can set a module's `max_event_handlers` i bbot -t evilcorp.com -m baddns -c modules.baddns.max_event_handlers=20 ``` -### Boost Massdns Thread Count +### Boost DNS Brute-force Speed If you have a fast internet connection or are running BBOT from a cloud VM, you can speed up subdomain enumeration by cranking the threads for `massdns`. The default is `1000`, which is about 1MB/s of DNS traffic: ```bash # massdns with 5000 resolvers, about 5MB/s -bbot -t evilcorp.com -f subdomain-enum -c modules.massdns.max_resolvers=5000 +bbot -t evilcorp.com -f subdomain-enum -c dns.brute_threads=5000 ``` ### Web Spider diff --git a/poetry.lock b/poetry.lock index be6fea410..29fe7eb59 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,14 +1,14 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] @@ -27,13 +27,13 @@ ansible-core = ">=2.15.7,<2.16.0" [[package]] name = "ansible-core" -version = "2.15.10" +version = "2.15.12" description = "Radically simple IT automation" optional = false python-versions = ">=3.9" files = [ - {file = "ansible-core-2.15.10.tar.gz", hash = "sha256:954dbe8e4e802a4dd5df0366193975b692a05806aa8d7358418a7e617346b20f"}, - {file = "ansible_core-2.15.10-py3-none-any.whl", hash = "sha256:42e49f1a6d8cf6cccde775c06c1394885353b71ad9e5f670c6f32d2890127ce8"}, + {file = "ansible_core-2.15.12-py3-none-any.whl", hash = "sha256:390edd603420122f7cb1c470d8d1f8bdbbd795a1844dd03c1917db21935aecb9"}, + {file = "ansible_core-2.15.12.tar.gz", hash = "sha256:5fde82cd3928d9857ad880782c644f27d3168b0f25321d5a8d6befa524aa1818"}, ] [package.dependencies] @@ -46,13 +46,13 @@ resolvelib = ">=0.5.3,<1.1.0" [[package]] name = "ansible-runner" -version = "2.3.6" +version = "2.4.0" description = "\"Consistent Ansible Python API and CLI with container and process isolation runtime capabilities\"" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "ansible-runner-2.3.6.tar.gz", hash = "sha256:b2174a12dcad2dc2f342ea82876898f568a0b66c53568600bf80577158fcba1c"}, - {file = "ansible_runner-2.3.6-py3-none-any.whl", hash = "sha256:4f153d9c3000a61b82d7253ca292849e3ad2c5d68dfff4377a6b98c4e6ff6c3e"}, + {file = "ansible-runner-2.4.0.tar.gz", hash = "sha256:82d02b2548830f37a53517b65c823c4af371069406c7d213b5c9041d45e0c5b6"}, + {file = "ansible_runner-2.4.0-py3-none-any.whl", hash = "sha256:a3f592ae4cdfa62a72ad15de60da9c8210f376d67f495c4a78d4cf1dc7ccdf89"}, ] [package.dependencies] @@ -61,7 +61,6 @@ packaging = "*" pexpect = ">=4.5" python-daemon = "*" pyyaml = "*" -six = "*" [[package]] name = "antlr4-python3-runtime" @@ -75,13 +74,13 @@ files = [ [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -97,13 +96,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "babel" -version = "2.14.0" +version = "2.15.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = 
"sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, ] [package.extras] @@ -132,33 +131,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.4.0" +version = "24.4.2" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, - {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, - {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, - {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, - {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, - {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, - {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, - {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, - {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, - {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, - {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, - {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, - {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, - {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, - {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, - {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, - {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, - {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, - {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, - {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, - {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, - {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -388,13 +387,13 @@ colorama = 
{version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cloudcheck" -version = "5.0.0.350" +version = "5.0.1.398" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "cloudcheck-5.0.0.350-py3-none-any.whl", hash = "sha256:6f2ed981818bde6d8b6c5a6413a843e11d0aa1a4bf8b36452dcae1030a537dd6"}, - {file = "cloudcheck-5.0.0.350.tar.gz", hash = "sha256:cb59dfef966268ebc176e242634b84a3423a84ffaf4fac40566f37edfaddc106"}, + {file = "cloudcheck-5.0.1.398-py3-none-any.whl", hash = "sha256:de9dbff75f4e2349c035f4e332e3b061f809d932ff726a0993284c66f9f8175d"}, + {file = "cloudcheck-5.0.1.398.tar.gz", hash = "sha256:278365bac10234ae5709a9fdd12ff322bbe423a6d44319841fc15edc2bbcbb3a"}, ] [package.dependencies] @@ -416,63 +415,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.4" +version = "7.5.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = 
"coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = 
"coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] [package.dependencies] @@ -483,43 +482,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.5" +version = "42.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, + {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, + {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, + {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, + {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, + {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, + {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, ] [package.dependencies] @@ -586,24 +585,24 @@ wmi = ["wmi (>=1.5.1)"] [[package]] name = "docutils" -version = "0.21.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" files = [ - {file = "docutils-0.21.1-py3-none-any.whl", hash = "sha256:14c8d34a55b46c88f9f714adb29cefbdd69fb82f3fef825e59c5faab935390d8"}, - {file = "docutils-0.21.1.tar.gz", hash = "sha256:65249d8a5345bc95e0f40f280ba63c98eb24de35c6c8f5b662e3e8948adea83f"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = 
"sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "dunamai" -version = "1.20.0" +version = "1.21.1" description = "Dynamic version generation" optional = false python-versions = ">=3.5" files = [ - {file = "dunamai-1.20.0-py3-none-any.whl", hash = "sha256:a2185c227351a52a013c7d7a695d3f3cb6625c3eed14a5295adbbcc7e2f7f8d4"}, - {file = "dunamai-1.20.0.tar.gz", hash = "sha256:c3f1ee64a1e6cc9ebc98adafa944efaccd0db32482d2177e59c1ff6bdf23cd70"}, + {file = "dunamai-1.21.1-py3-none-any.whl", hash = "sha256:fe303541463648b8197c495decf62cd8f15234fb6d891a5f295015e452f656c8"}, + {file = "dunamai-1.21.1.tar.gz", hash = "sha256:d7fea28ad2faf20a6ca5ec121e5c68e55eec6b8ada23d9c387e4e7a574cc559f"}, ] [package.dependencies] @@ -625,13 +624,13 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.13.4" +version = "3.14.0" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, ] [package.extras] @@ -674,13 +673,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.44.0" +version = "0.45.2" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.44.0-py3-none-any.whl", hash = "sha256:8a4471c469ba980b87c843f1168850ce39d0c1d0c7be140dca2480f76c8e5446"}, - {file = "griffe-0.44.0.tar.gz", hash = "sha256:34aee1571042f9bf00529bc715de4516fb6f482b164e90d030300601009e0223"}, + {file = "griffe-0.45.2-py3-none-any.whl", hash = "sha256:297ec8530d0c68e5b98ff86fb588ebc3aa3559bb5dc21f3caea8d9542a350133"}, + {file = "griffe-0.45.2.tar.gz", hash = "sha256:83ce7dcaafd8cb7f43cbf1a455155015a1eb624b1ffd93249e5e1c4a22b2fdb2"}, ] [package.dependencies] @@ -744,13 +743,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.35" +version = "2.5.36" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, ] [package.extras] @@ -814,13 +813,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -872,165 +871,153 @@ files = [ [[package]] name = "lxml" -version = "5.2.1" +version = "5.2.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, - {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, - {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, - {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, - {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, - {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, - {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, - {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, - {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, - {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, - {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, - {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", 
hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, - {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, - {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, - {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, - {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, - {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, - {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, - {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, - {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, - {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, - {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, - {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, - {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, - {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, - {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, - {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, - {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, - {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, - {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, - {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, - {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, - {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, - {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, - {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, - {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, - {file = 
"lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, - {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, - {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, - {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, - {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, - {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, - {file = 
"lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, - {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, - {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, - {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, - {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = 
"lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = 
"lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, ] [package.extras] @@ -1151,34 +1138,34 @@ files = [ [[package]] name = "mkdocs" -version = "1.5.3" +version = "1.6.0" description = "Project documentation with Markdown." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, - {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, + {file = "mkdocs-1.6.0-py3-none-any.whl", hash = "sha256:1eb5cb7676b7d89323e62b56235010216319217d4af5ddc543a91beb8d125ea7"}, + {file = "mkdocs-1.6.0.tar.gz", hash = "sha256:a73f735824ef83a4f3bcb7a231dcab23f5a838f88b7efc54a0eef5fbdbc3c512"}, ] [package.dependencies] click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" -markdown = ">=3.2.1" +markdown = ">=3.3.6" markupsafe = ">=2.0.1" mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" packaging = ">=20.5" pathspec = ">=0.11.1" -platformdirs = ">=2.2.0" pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] [[package]] name = "mkdocs-autorefs" @@ -1212,15 +1199,32 @@ beautifulsoup4 = ">=4.6.3" libsass = ">=0.15" mkdocs = ">=1.1" +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +mergedeep = ">=1.3.4" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" + [[package]] name = "mkdocs-material" -version = "9.5.18" +version = "9.5.25" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.18-py3-none-any.whl", hash = "sha256:1e0e27fc9fe239f9064318acf548771a4629d5fd5dfd45444fd80a953fe21eb4"}, - {file = "mkdocs_material-9.5.18.tar.gz", hash = "sha256:a43f470947053fa2405c33995f282d24992c752a50114f23f30da9d8d0c57e62"}, + {file = "mkdocs_material-9.5.25-py3-none-any.whl", hash = "sha256:68fdab047a0b9bfbefe79ce267e8a7daaf5128bcf7867065fcd201ee335fece1"}, + {file = "mkdocs_material-9.5.25.tar.gz", hash = "sha256:d0662561efb725b712207e0ee01f035ca15633f29a64628e24f01ec99d7078f4"}, ] [package.dependencies] @@ -1228,7 +1232,7 @@ babel = ">=2.10,<3.0" colorama = ">=0.4,<1.0" jinja2 = ">=3.0,<4.0" markdown = ">=3.2,<4.0" -mkdocs = ">=1.5.3,<1.6.0" +mkdocs 
= ">=1.6,<2.0" mkdocs-material-extensions = ">=1.3,<2.0" paginate = ">=0.5,<1.0" pygments = ">=2.16,<3.0" @@ -1254,13 +1258,13 @@ files = [ [[package]] name = "mkdocstrings" -version = "0.24.3" +version = "0.25.1" description = "Automatic documentation from sources, for MkDocs." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocstrings-0.24.3-py3-none-any.whl", hash = "sha256:5c9cf2a32958cd161d5428699b79c8b0988856b0d4a8c5baf8395fc1bf4087c3"}, - {file = "mkdocstrings-0.24.3.tar.gz", hash = "sha256:f327b234eb8d2551a306735436e157d0a22d45f79963c60a8b585d5f7a94c1d2"}, + {file = "mkdocstrings-0.25.1-py3-none-any.whl", hash = "sha256:da01fcc2670ad61888e8fe5b60afe9fee5781017d67431996832d63e887c2e51"}, + {file = "mkdocstrings-0.25.1.tar.gz", hash = "sha256:c3a2515f31577f311a9ee58d089e4c51fc6046dbd9e9b4c3de4c3194667fe9bf"}, ] [package.dependencies] @@ -1282,18 +1286,110 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "1.10.0" +version = "1.10.3" description = "A Python handler for mkdocstrings." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocstrings_python-1.10.0-py3-none-any.whl", hash = "sha256:ba833fbd9d178a4b9d5cb2553a4df06e51dc1f51e41559a4d2398c16a6f69ecc"}, - {file = "mkdocstrings_python-1.10.0.tar.gz", hash = "sha256:71678fac657d4d2bb301eed4e4d2d91499c095fd1f8a90fa76422a87a5693828"}, + {file = "mkdocstrings_python-1.10.3-py3-none-any.whl", hash = "sha256:11ff6d21d3818fb03af82c3ea6225b1534837e17f790aa5f09626524171f949b"}, + {file = "mkdocstrings_python-1.10.3.tar.gz", hash = "sha256:321cf9c732907ab2b1fedaafa28765eaa089d89320f35f7206d00ea266889d03"}, ] [package.dependencies] griffe = ">=0.44" -mkdocstrings = ">=0.24.2" +mkdocstrings = ">=0.25" + +[[package]] +name = "mmh3" +version = "4.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
+optional = false +python-versions = "*" +files = [ + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, + {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, + {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, + {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, + {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, + {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, + {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, + {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = 
"sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, + {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, + {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, + {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, + {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, + {file = 
"mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, + {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, + {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, + {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, + {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, +] + +[package.extras] +test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] [[package]] name = "mypy-extensions" @@ -1308,18 +1404,15 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.0" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "omegaconf" version = "2.3.0" @@ -1397,28 +1490,29 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1446,13 +1540,13 @@ plugin = ["poetry (>=1.2.0,<2.0.0)"] [[package]] name = "pre-commit" -version = "3.7.0" +version = "3.7.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, - {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, ] [package.dependencies] @@ -1566,18 +1660,18 @@ files = [ [[package]] name = "pydantic" -version = "2.7.0" +version = "2.7.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, - {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, + {file = "pydantic-2.7.2-py3-none-any.whl", hash = "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7"}, + {file = "pydantic-2.7.2.tar.gz", hash = "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.1" +pydantic-core = "2.18.3" typing-extensions = ">=4.6.1" [package.extras] @@ -1585,90 +1679,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.1" +version = "2.18.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, - {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, - {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, - {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, - {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, - {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, - {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, - {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, - {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, - {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, - {file = 
"pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, - {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, - {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, - {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, - {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, - {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, - {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, - {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, - {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, - {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, - {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, - {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, - {file = 
"pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, - {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, - {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, - {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, - {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, - {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, - {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, - {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, - {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, - {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, - {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, - {file = 
"pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, - {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812"}, + {file = "pydantic_core-2.18.3-cp310-none-win32.whl", hash = "sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779"}, + {file = "pydantic_core-2.18.3-cp310-none-win_amd64.whl", hash = "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d"}, + {file = "pydantic_core-2.18.3-cp311-none-win32.whl", hash = "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_amd64.whl", hash = "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_arm64.whl", hash = "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be"}, + {file = "pydantic_core-2.18.3-cp312-none-win32.whl", hash = "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5"}, + {file = "pydantic_core-2.18.3-cp312-none-win_amd64.whl", hash = "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6"}, + {file = "pydantic_core-2.18.3-cp312-none-win_arm64.whl", hash = "sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0"}, + {file = "pydantic_core-2.18.3-cp38-none-win32.whl", hash = "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558"}, + {file = "pydantic_core-2.18.3-cp38-none-win_amd64.whl", hash = "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1"}, + 
{file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af"}, + {file = "pydantic_core-2.18.3-cp39-none-win32.whl", hash = "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78"}, + {file = "pydantic_core-2.18.3-cp39-none-win_amd64.whl", hash = "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a"}, + {file = "pydantic_core-2.18.3.tar.gz", hash = "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39"}, ] [package.dependencies] @@ -1687,17 +1781,16 @@ files = [ [[package]] name = "pygments" -version = "2.17.2" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] @@ -1719,13 +1812,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymdown-extensions" -version = "10.8" +version = "10.8.1" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.8-py3-none-any.whl", hash = "sha256:3539003ff0d5e219ba979d2dc961d18fcad5ac259e66c764482e8347b4c0503c"}, - {file = "pymdown_extensions-10.8.tar.gz", hash = "sha256:91ca336caf414e1e5e0626feca86e145de9f85a3921a7bcbd32890b51738c428"}, + {file = "pymdown_extensions-10.8.1-py3-none-any.whl", hash = "sha256:f938326115884f48c6059c67377c46cf631c733ef3629b6eed1349989d1b30cb"}, + {file = "pymdown_extensions-10.8.1.tar.gz", hash = "sha256:3ab1db5c9e21728dabf75192d71471f8e50f216627e9a1fa9535ecb0231b9940"}, ] [package.dependencies] @@ -1737,13 +1830,13 @@ extra = ["pygments (>=2.12)"] [[package]] name = "pytest" -version = "8.1.1" +version = "8.2.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, + {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, ] [package.dependencies] @@ -1751,21 +1844,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" +pluggy = ">=1.5,<2.0" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments 
(>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.23.6" +version = "0.23.7" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, - {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, + {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, + {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, ] [package.dependencies] @@ -2087,126 +2180,112 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "radixtarget" -version = "1.0.0.15" +version = "1.1.0.18" description = "Check whether an IP address belongs to a cloud provider" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "radixtarget-1.0.0.15-py3-none-any.whl", hash = "sha256:4e3f0620bfbc0ef2ff3d71270dd281c0e8428906d260f737f82b573a7b636dd8"}, - {file = "radixtarget-1.0.0.15.tar.gz", hash = "sha256:c8294ebbb76e6d2826deaa8fe18d568308eddfd25f20644e166c492d2626a70c"}, + {file = "radixtarget-1.1.0.18-py3-none-any.whl", hash = "sha256:05e95de6afb0ee4dfa31c53bd25a34a193ae5bb46dc7624e0424bbcfed2c4cea"}, + {file = "radixtarget-1.1.0.18.tar.gz", hash = "sha256:1a3306891a22f7ff2c71d6cd42202af8852cdb4fb68e9a1e9a76a3f60aa98ab6"}, ] [[package]] name = "regex" -version = "2024.4.16" +version = "2024.5.15" description = "Alternative regular expression module, to replace re." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb83cc090eac63c006871fd24db5e30a1f282faa46328572661c0a24a2323a08"}, - {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c91e1763696c0eb66340c4df98623c2d4e77d0746b8f8f2bee2c6883fd1fe18"}, - {file = "regex-2024.4.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10188fe732dec829c7acca7422cdd1bf57d853c7199d5a9e96bb4d40db239c73"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:956b58d692f235cfbf5b4f3abd6d99bf102f161ccfe20d2fd0904f51c72c4c66"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a70b51f55fd954d1f194271695821dd62054d949efd6368d8be64edd37f55c86"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c02fcd2bf45162280613d2e4a1ca3ac558ff921ae4e308ecb307650d3a6ee51"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ed75ea6892a56896d78f11006161eea52c45a14994794bcfa1654430984b22"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd727ad276bb91928879f3aa6396c9a1d34e5e180dce40578421a691eeb77f47"}, - {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7cbc5d9e8a1781e7be17da67b92580d6ce4dcef5819c1b1b89f49d9678cc278c"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78fddb22b9ef810b63ef341c9fcf6455232d97cfe03938cbc29e2672c436670e"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:445ca8d3c5a01309633a0c9db57150312a181146315693273e35d936472df912"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:95399831a206211d6bc40224af1c635cb8790ddd5c7493e0bd03b85711076a53"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7731728b6568fc286d86745f27f07266de49603a6fdc4d19c87e8c247be452af"}, - {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4facc913e10bdba42ec0aee76d029aedda628161a7ce4116b16680a0413f658a"}, - {file = "regex-2024.4.16-cp310-cp310-win32.whl", hash = "sha256:911742856ce98d879acbea33fcc03c1d8dc1106234c5e7d068932c945db209c0"}, - {file = "regex-2024.4.16-cp310-cp310-win_amd64.whl", hash = "sha256:e0a2df336d1135a0b3a67f3bbf78a75f69562c1199ed9935372b82215cddd6e2"}, - {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1210365faba7c2150451eb78ec5687871c796b0f1fa701bfd2a4a25420482d26"}, - {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ab40412f8cd6f615bfedea40c8bf0407d41bf83b96f6fc9ff34976d6b7037fd"}, - {file = "regex-2024.4.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fd80d1280d473500d8086d104962a82d77bfbf2b118053824b7be28cd5a79ea5"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb966fdd9217e53abf824f437a5a2d643a38d4fd5fd0ca711b9da683d452969"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20b7a68444f536365af42a75ccecb7ab41a896a04acf58432db9e206f4e525d6"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b74586dd0b039c62416034f811d7ee62810174bb70dffcca6439f5236249eb09"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8290b44d8b0af4e77048646c10c6e3aa583c1ca67f3b5ffb6e06cf0c6f0f89"}, - {file = "regex-2024.4.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2d80a6749724b37853ece57988b39c4e79d2b5fe2869a86e8aeae3bbeef9eb0"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3a1018e97aeb24e4f939afcd88211ace472ba566efc5bdf53fd8fd7f41fa7170"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8d015604ee6204e76569d2f44e5a210728fa917115bef0d102f4107e622b08d5"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3d5ac5234fb5053850d79dd8eb1015cb0d7d9ed951fa37aa9e6249a19aa4f336"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:0a38d151e2cdd66d16dab550c22f9521ba79761423b87c01dae0a6e9add79c0d"}, - {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159dc4e59a159cb8e4e8f8961eb1fa5d58f93cb1acd1701d8aff38d45e1a84a6"}, - {file = "regex-2024.4.16-cp311-cp311-win32.whl", hash = "sha256:ba2336d6548dee3117520545cfe44dc28a250aa091f8281d28804aa8d707d93d"}, - {file = "regex-2024.4.16-cp311-cp311-win_amd64.whl", hash = "sha256:8f83b6fd3dc3ba94d2b22717f9c8b8512354fd95221ac661784df2769ea9bba9"}, - {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80b696e8972b81edf0af2a259e1b2a4a661f818fae22e5fa4fa1a995fb4a40fd"}, - {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d61ae114d2a2311f61d90c2ef1358518e8f05eafda76eaf9c772a077e0b465ec"}, - {file = "regex-2024.4.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ba6745440b9a27336443b0c285d705ce73adb9ec90e2f2004c64d95ab5a7598"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295004b2dd37b0835ea5c14a33e00e8cfa3c4add4d587b77287825f3418d310"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aba818dcc7263852aabb172ec27b71d2abca02a593b95fa79351b2774eb1d2b"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0800631e565c47520aaa04ae38b96abc5196fe8b4aa9bd864445bd2b5848a7a"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08dea89f859c3df48a440dbdcd7b7155bc675f2fa2ec8c521d02dc69e877db70"}, - {file = "regex-2024.4.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eeaa0b5328b785abc344acc6241cffde50dc394a0644a968add75fcefe15b9d4"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4e819a806420bc010489f4e741b3036071aba209f2e0989d4750b08b12a9343f"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c2d0e7cbb6341e830adcbfa2479fdeebbfbb328f11edd6b5675674e7a1e37730"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:91797b98f5e34b6a49f54be33f72e2fb658018ae532be2f79f7c63b4ae225145"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:d2da13568eff02b30fd54fccd1e042a70fe920d816616fda4bf54ec705668d81"}, - {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:370c68dc5570b394cbaadff50e64d705f64debed30573e5c313c360689b6aadc"}, - {file = 
"regex-2024.4.16-cp312-cp312-win32.whl", hash = "sha256:904c883cf10a975b02ab3478bce652f0f5346a2c28d0a8521d97bb23c323cc8b"}, - {file = "regex-2024.4.16-cp312-cp312-win_amd64.whl", hash = "sha256:785c071c982dce54d44ea0b79cd6dfafddeccdd98cfa5f7b86ef69b381b457d9"}, - {file = "regex-2024.4.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2f142b45c6fed48166faeb4303b4b58c9fcd827da63f4cf0a123c3480ae11fb"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87ab229332ceb127a165612d839ab87795972102cb9830e5f12b8c9a5c1b508"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81500ed5af2090b4a9157a59dbc89873a25c33db1bb9a8cf123837dcc9765047"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b340cccad138ecb363324aa26893963dcabb02bb25e440ebdf42e30963f1a4e0"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c72608e70f053643437bd2be0608f7f1c46d4022e4104d76826f0839199347a"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01fe2305e6232ef3e8f40bfc0f0f3a04def9aab514910fa4203bafbc0bb4682"}, - {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:03576e3a423d19dda13e55598f0fd507b5d660d42c51b02df4e0d97824fdcae3"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:549c3584993772e25f02d0656ac48abdda73169fe347263948cf2b1cead622f3"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:34422d5a69a60b7e9a07a690094e824b66f5ddc662a5fc600d65b7c174a05f04"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5f580c651a72b75c39e311343fe6875d6f58cf51c471a97f15a938d9fe4e0d37"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3399dd8a7495bbb2bacd59b84840eef9057826c664472e86c91d675d007137f5"}, - {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d1f86f3f4e2388aa3310b50694ac44daefbd1681def26b4519bd050a398dc5a"}, - {file = "regex-2024.4.16-cp37-cp37m-win32.whl", hash = "sha256:dd5acc0a7d38fdc7a3a6fd3ad14c880819008ecb3379626e56b163165162cc46"}, - {file = "regex-2024.4.16-cp37-cp37m-win_amd64.whl", hash = "sha256:ba8122e3bb94ecda29a8de4cf889f600171424ea586847aa92c334772d200331"}, - {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:743deffdf3b3481da32e8a96887e2aa945ec6685af1cfe2bcc292638c9ba2f48"}, - {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7571f19f4a3fd00af9341c7801d1ad1967fc9c3f5e62402683047e7166b9f2b4"}, - {file = "regex-2024.4.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df79012ebf6f4efb8d307b1328226aef24ca446b3ff8d0e30202d7ebcb977a8c"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e757d475953269fbf4b441207bb7dbdd1c43180711b6208e129b637792ac0b93"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4313ab9bf6a81206c8ac28fdfcddc0435299dc88cad12cc6305fd0e78b81f9e4"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d83c2bc678453646f1a18f8db1e927a2d3f4935031b9ad8a76e56760461105dd"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9df1bfef97db938469ef0a7354b2d591a2d438bc497b2c489471bec0e6baf7c4"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62120ed0de69b3649cc68e2965376048793f466c5a6c4370fb27c16c1beac22d"}, - {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c2ef6f7990b6e8758fe48ad08f7e2f66c8f11dc66e24093304b87cae9037bb4a"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8fc6976a3395fe4d1fbeb984adaa8ec652a1e12f36b56ec8c236e5117b585427"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:03e68f44340528111067cecf12721c3df4811c67268b897fbe695c95f860ac42"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ec7e0043b91115f427998febaa2beb82c82df708168b35ece3accb610b91fac1"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c21fc21a4c7480479d12fd8e679b699f744f76bb05f53a1d14182b31f55aac76"}, - {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12f6a3f2f58bb7344751919a1876ee1b976fe08b9ffccb4bbea66f26af6017b9"}, - {file = "regex-2024.4.16-cp38-cp38-win32.whl", hash = "sha256:479595a4fbe9ed8f8f72c59717e8cf222da2e4c07b6ae5b65411e6302af9708e"}, - {file = "regex-2024.4.16-cp38-cp38-win_amd64.whl", hash = "sha256:0534b034fba6101611968fae8e856c1698da97ce2efb5c2b895fc8b9e23a5834"}, - {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7ccdd1c4a3472a7533b0a7aa9ee34c9a2bef859ba86deec07aff2ad7e0c3b94"}, - {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f2f017c5be19984fbbf55f8af6caba25e62c71293213f044da3ada7091a4455"}, - {file = "regex-2024.4.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:803b8905b52de78b173d3c1e83df0efb929621e7b7c5766c0843704d5332682f"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:684008ec44ad275832a5a152f6e764bbe1914bea10968017b6feaecdad5736e0"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65436dce9fdc0aeeb0a0effe0839cb3d6a05f45aa45a4d9f9c60989beca78b9c"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea355eb43b11764cf799dda62c658c4d2fdb16af41f59bb1ccfec517b60bcb07"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c1165f3809ce7774f05cb74e5408cd3aa93ee8573ae959a97a53db3ca3180d"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cccc79a9be9b64c881f18305a7c715ba199e471a3973faeb7ba84172abb3f317"}, - {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00169caa125f35d1bca6045d65a662af0202704489fada95346cfa092ec23f39"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6cc38067209354e16c5609b66285af17a2863a47585bcf75285cab33d4c3b8df"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:23cff1b267038501b179ccbbd74a821ac4a7192a1852d1d558e562b507d46013"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d320b3bf82a39f248769fc7f188e00f93526cc0fe739cfa197868633d44701"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:89ec7f2c08937421bbbb8b48c54096fa4f88347946d4747021ad85f1b3021b3c"}, - {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4918fd5f8b43aa7ec031e0fef1ee02deb80b6afd49c85f0790be1dc4ce34cb50"}, - {file = "regex-2024.4.16-cp39-cp39-win32.whl", hash = "sha256:684e52023aec43bdf0250e843e1fdd6febbe831bd9d52da72333fa201aaa2335"}, - {file = "regex-2024.4.16-cp39-cp39-win_amd64.whl", hash = "sha256:e697e1c0238133589e00c244a8b676bc2cfc3ab4961318d902040d099fec7483"}, - {file = "regex-2024.4.16.tar.gz", hash = "sha256:fa454d26f2e87ad661c4f0c5a5fe4cf6aab1e307d1b94f16ffdfcb089ba685c0"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, + {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, + {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, + {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, + {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, + {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, + {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, + {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, + {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, + {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, + {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, + {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, ] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.2" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, + {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, ] [package.dependencies] @@ -2221,12 +2300,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "2.0.0" +version = "2.1.0" description = "File transport adapter for Requests" optional = false python-versions = "*" files = [ - {file = "requests-file-2.0.0.tar.gz", hash = "sha256:20c5931629c558fda566cacc10cfe2cd502433e628f568c34c80d96a0cc95972"}, + {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, + {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, ] [package.dependencies] @@ -2249,21 +2329,120 @@ lint = ["black", "flake8", "isort", "mypy", "types-requests"] release = ["build", "towncrier", "twine"] test = ["commentjson", "packaging", "pytest"] +[[package]] +name = "setproctitle" +version = "1.3.3" +description = "A Python module to customize the process title" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"}, + {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f"}, + {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5"}, + {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d"}, + {file = "setproctitle-1.3.3-cp310-cp310-win32.whl", hash = "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb"}, + {file = "setproctitle-1.3.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8"}, + {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5"}, + {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0"}, + {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18"}, + {file = "setproctitle-1.3.3-cp311-cp311-win32.whl", hash = "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476"}, + {file = "setproctitle-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc"}, + {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120"}, + {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8"}, + {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3"}, + {file = "setproctitle-1.3.3-cp312-cp312-win32.whl", hash = "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4"}, + {file = "setproctitle-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7"}, + {file = "setproctitle-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:816330675e3504ae4d9a2185c46b573105d2310c20b19ea2b4596a9460a4f674"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68f960bc22d8d8e4ac886d1e2e21ccbd283adcf3c43136161c1ba0fa509088e0"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e6e7adff74796ef12753ff399491b8827f84f6c77659d71bd0b35870a17d8f"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53bc0d2358507596c22b02db079618451f3bd720755d88e3cccd840bafb4c41c"}, + {file = "setproctitle-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6d20f9541f5f6ac63df553b6d7a04f313947f550eab6a61aa758b45f0d5657"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c1c84beab776b0becaa368254801e57692ed749d935469ac10e2b9b825dbdd8e"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:507e8dc2891021350eaea40a44ddd887c9f006e6b599af8d64a505c0f718f170"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b1067647ac7aba0b44b591936118a22847bda3c507b0a42d74272256a7a798e9"}, + {file = "setproctitle-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2e71f6365744bf53714e8bd2522b3c9c1d83f52ffa6324bd7cbb4da707312cd8"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:7f1d36a1e15a46e8ede4e953abb104fdbc0845a266ec0e99cc0492a4364f8c44"}, + {file = "setproctitle-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9a402881ec269d0cc9c354b149fc29f9ec1a1939a777f1c858cdb09c7a261df"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ff814dea1e5c492a4980e3e7d094286077054e7ea116cbeda138819db194b2cd"}, + {file = "setproctitle-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:accb66d7b3ccb00d5cd11d8c6e07055a4568a24c95cf86109894dcc0c134cc89"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554eae5a5b28f02705b83a230e9d163d645c9a08914c0ad921df363a07cf39b1"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a911b26264dbe9e8066c7531c0591cfab27b464459c74385b276fe487ca91c12"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2982efe7640c4835f7355fdb4da313ad37fb3b40f5c69069912f8048f77b28c8"}, + {file = "setproctitle-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3f4274b80709d8bcab2f9a862973d453b308b97a0b423a501bcd93582852e3"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:af2c67ae4c795d1674a8d3ac1988676fa306bcfa1e23fddb5e0bd5f5635309ca"}, + {file = 
"setproctitle-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af4061f67fd7ec01624c5e3c21f6b7af2ef0e6bab7fbb43f209e6506c9ce0092"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37a62cbe16d4c6294e84670b59cf7adcc73faafe6af07f8cb9adaf1f0e775b19"}, + {file = "setproctitle-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a83ca086fbb017f0d87f240a8f9bbcf0809f3b754ee01cec928fff926542c450"}, + {file = "setproctitle-1.3.3-cp38-cp38-win32.whl", hash = "sha256:059f4ce86f8cc92e5860abfc43a1dceb21137b26a02373618d88f6b4b86ba9b2"}, + {file = "setproctitle-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ab92e51cd4a218208efee4c6d37db7368fdf182f6e7ff148fb295ecddf264287"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c7951820b77abe03d88b114b998867c0f99da03859e5ab2623d94690848d3e45"}, + {file = "setproctitle-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc94cf128676e8fac6503b37763adb378e2b6be1249d207630f83fc325d9b11"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d9027eeda64d353cf21a3ceb74bb1760bd534526c9214e19f052424b37e42"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e4a8104db15d3462e29d9946f26bed817a5b1d7a47eabca2d9dc2b995991503"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c32c41ace41f344d317399efff4cffb133e709cec2ef09c99e7a13e9f3b9483c"}, + {file = "setproctitle-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf16381c7bf7f963b58fb4daaa65684e10966ee14d26f5cc90f07049bfd8c1e"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e18b7bd0898398cc97ce2dfc83bb192a13a087ef6b2d5a8a36460311cb09e775"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69d565d20efe527bd8a9b92e7f299ae5e73b6c0470f3719bd66f3cd821e0d5bd"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ddedd300cd690a3b06e7eac90ed4452348b1348635777ce23d460d913b5b63c3"}, + {file = "setproctitle-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:415bfcfd01d1fbf5cbd75004599ef167a533395955305f42220a585f64036081"}, + {file = "setproctitle-1.3.3-cp39-cp39-win32.whl", hash = "sha256:21112fcd2195d48f25760f0eafa7a76510871bbb3b750219310cf88b04456ae3"}, + {file = "setproctitle-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:5a740f05d0968a5a17da3d676ce6afefebeeeb5ce137510901bf6306ba8ee002"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5"}, + {file = "setproctitle-1.3.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f"}, + {file = 
"setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a"}, + {file = "setproctitle-1.3.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6"}, + {file = "setproctitle-1.3.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9"}, + {file = "setproctitle-1.3.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d"}, + {file = "setproctitle-1.3.3.tar.gz", hash = "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae"}, +] + +[package.extras] +test = ["pytest"] + [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home 
(>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2357,13 +2536,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.4" +version = "0.12.5" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, ] [[package]] @@ -2388,13 +2567,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, + {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, ] [[package]] @@ -2427,13 +2606,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.25.3" +version = "20.26.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.3-py3-none-any.whl", hash = "sha256:8aac4332f2ea6ef519c648d0bc48a5b1d324994753519919bddbb1aff25a104e"}, - {file = "virtualenv-20.25.3.tar.gz", hash = "sha256:7bb554bbdfeaacc3349fa614ea5bff6ac300fc7c335e9facf3a3bcfc703f45be"}, + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, ] 
[package.dependencies] @@ -2447,40 +2626,43 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "4.0.0" +version = "4.0.1" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = 
"watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, ] [package.extras] @@ -2569,13 +2751,13 @@ files = [ [[package]] name = "werkzeug" -version = "3.0.2" +version = "3.0.3" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, - {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, ] [package.dependencies] @@ -2621,20 +2803,20 @@ xmltodict = ">=0.12.0,<0.13.0" [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"}, + {file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "ed8bb07e4ff5a5f665402db33f9016409547bef1ccb6a8c2c626c44fde075abb" +content-hash = "77fc88821048a39978de1e7a9d4a2ff2e824be4eab4c78730de89f8950509085" diff --git a/pyproject.toml b/pyproject.toml index 627f323d8..bc02ded72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,8 @@ regex = "^2024.4.16" unidecode = "^1.3.8" radixtarget = "^1.0.0.15" cloudcheck = "^5.0.0.350" +mmh3 = "^4.1.0" +setproctitle = "^1.3.3" [tool.poetry.group.dev.dependencies] flake8 = ">=6,<8" @@ -74,7 +76,7 @@ mkdocs = "^1.5.2" mkdocs-extra-sass-plugin = "^0.1.0" mkdocs-material = "^9.2.5" mkdocs-material-extensions = "^1.1.1" -mkdocstrings = ">=0.22,<0.25" +mkdocstrings = ">=0.22,<0.26" mkdocstrings-python = "^1.6.0" livereload = "^2.6.3"
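
Note on the two runtime dependencies added to pyproject.toml above: setproctitle and mmh3 are general-purpose libraries, and the diff itself does not show how they are wired in. As a minimal, hedged sketch of their typical use (illustrative names only, not taken from this changeset): setproctitle renames the current OS process so it appears descriptively in ps/top, and mmh3 provides fast, non-cryptographic MurmurHash3 digests.

    import mmh3
    import setproctitle

    # Give the current process a descriptive title visible in ps/top.
    # "bbot-worker" is a hypothetical example name, not taken from this diff.
    setproctitle.setproctitle("bbot-worker")
    print(setproctitle.getproctitle())  # prints the title that was just set

    # Fast non-cryptographic hashing; deterministic for a given input and seed.
    h32 = mmh3.hash("evilcorp.com")      # signed 32-bit integer
    h128 = mmh3.hash128("evilcorp.com")  # unsigned 128-bit integer
    print(h32, h128)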