diff --git a/bbot/cli.py b/bbot/cli.py index 406986715..96e734974 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -298,7 +298,7 @@ async def _main(): scanner.helpers.word_cloud.load() - await scanner.prep() + await scanner._prep() if not options.dry_run: if not options.agent_mode and not options.yes and sys.stdin.isatty(): diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 22326fb49..3453622ef 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -33,6 +33,60 @@ class BaseEvent: + """ + Represents a piece of data discovered during a BBOT scan. + + An Event contains various attributes that provide metadata about the discovered data. + The attributes assist in understanding the context of the Event and facilitate further + filtering and querying. Events are integral in the construction of visual graphs and + are the cornerstone of data exchange between BBOT modules. + + You can inherit from this class when creating a new event type. However, it's not always + necessary. You only need to subclass if you want to layer additional functionality on + top of the base class. + + Attributes: + type (str): Specifies the type of the event, e.g., `IP_ADDRESS`, `DNS_NAME`. + id (str): A unique identifier for the event. + data (str or dict): The main data for the event, e.g., a URL or IP address. + data_graph (str): Representation of `self.data` for Neo4j graph nodes. + data_human (str): Representation of `self.data` for human output. + data_id (str): Representation of `self.data` used to calculate the event's ID (and ultimately its hash, which is used for deduplication). + data_json (str): Representation of `self.data` to be used in JSON serialization. + host (str, IPvXAddress, or IPvXNetwork): The associated IP address or hostname for the event. + host_stem (str): An abbreviated representation of the hostname that removes the TLD, e.g. "www.evilcorp". Used by the word cloud. + port (int or None): The port associated with the event, if applicable, else None. + words (set): A set of relevant keywords extracted from the event. Used by the word cloud. + scope_distance (int): Indicates how many hops the event is from the main scope; 0 means in-scope. + web_spider_distance (int): The spider distance from the web root, specific to web crawling. + scan (Scanner): The scan object that generated the event. + timestamp (datetime.datetime): The time at which the data was discovered. + resolved_hosts (list of str): List of hosts to which the event data resolves, applicable for URLs and DNS names. + source (BaseEvent): The source event that led to the discovery of this event. + source_id (str): The `id` attribute of the source event. + tags (set of str): Descriptive tags for the event, e.g., `mx-record`, `in-scope`. + module (BaseModule): The module that discovered the event. + module_sequence (str): The sequence of modules that participated in the discovery.
+ + Examples: + ```json + { + "type": "URL", + "id": "URL:017ec8e5dc158c0fd46f07169f8577fb4b45e89a", + "data": "http://www.blacklanternsecurity.com/", + "web_spider_distance": 0, + "scope_distance": 0, + "scan": "SCAN:4d786912dbc97be199da13074699c318e2067a7f", + "timestamp": 1688526222.723366, + "resolved_hosts": ["185.199.108.153"], + "source": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7", + "tags": ["in-scope", "distance-0", "dir", "ip-185-199-108-153", "status-301", "http-title-301-moved-permanently"], + "module": "httpx", + "module_sequence": "httpx" + } + ``` + """ + # Always emit this event type even if it's not in scope _always_emit = False # Always emit events with these tags even if they're not in scope @@ -47,7 +101,7 @@ class BaseEvent: def __init__( self, data, - event_type=None, + event_type, source=None, module=None, scan=None, @@ -58,6 +112,29 @@ def __init__( _dummy=False, _internal=None, ): + """ + Initializes an Event object with the given parameters. + + In most cases, you should use `make_event()` instead of instantiating this class directly. + `make_event()` is much friendlier, and can auto-detect the event type for you. + + Parameters: + data (str, dict): The primary data for the event. + event_type (str): Type of the event, e.g., 'IP_ADDRESS'. + source (BaseEvent, optional): Source event that led to this event's discovery. Defaults to None. + module (str, optional): Module that discovered the event. Defaults to None. + scan (Scan, optional): BBOT Scan object. Required unless _dummy is True. Defaults to None. + scans (list of Scan, optional): BBOT Scan objects, used primarily when unserializing an Event from the database. Defaults to None. + tags (list of str, optional): Descriptive tags for the event. Defaults to None. + confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5. + timestamp (datetime, optional): Time of event discovery. Defaults to current UTC time. + _dummy (bool, optional): If True, disables certain data validations. Defaults to False. + _internal (Any, optional): If specified, makes the event internal. Defaults to None. + + Raises: + ValidationError: If either `scan` or `source` is not specified and `_dummy` is False. + """ + self._id = None self._hash = None self.__host = None @@ -230,6 +307,19 @@ def scope_distance(self): @scope_distance.setter def scope_distance(self, scope_distance): + """ + Setter for the scope_distance attribute, ensuring it only decreases. + + The scope_distance attribute is designed to never increase; it can only be set to smaller values than + the current one. If a larger value is provided, it is ignored. The setter also updates the event's + tags to reflect the new scope distance. + + Parameters: + scope_distance (int): The new scope distance to set; must be a non-negative integer. + + Note: + The method will automatically update the relevant 'distance-' tags associated with the event. + """ if scope_distance >= 0: new_scope_distance = None # ensure scope distance does not increase (only allow setting to smaller values) @@ -250,6 +340,19 @@ def source(self): @source.setter def source(self, source): + """ + Setter for the source attribute, ensuring it's a valid event and updating scope distance. + + Sets the source of the event and automatically adjusts the scope distance based on the source event's + scope distance. The scope distance is incremented by 1 if the host of the source event is different + from the current event's host.
+ + Parameters: + source (BaseEvent): The new source event to set. Must be a valid event object. + + Note: + If an invalid source is provided and the event is not a dummy, a warning will be logged. + """ if is_event(source): self._source = source if source.scope_distance >= 0: @@ -291,12 +394,47 @@ def get_sources(self, omit=False): return sources def make_internal(self): + """ + Marks the event as internal, excluding it from output but allowing normal exchange between scan modules. + + Internal events are typically speculative and may not be interesting by themselves but can lead to + the discovery of interesting events. This method sets the `_internal` attribute to True, adds the + "internal" tag, and ensures the event is marked as made internal (useful for later reversion). + + Examples of internal events include `OPEN_TCP_PORT`s from the `speculate` module, + `IP_ADDRESS`es from the `ipneighbor` module, or out-of-scope `DNS_NAME`s that originate + from DNS resolutions. + + Once an event is marked as internal, all of its future children become internal as well. + If `ScanManager._emit_event()` determines the event is interesting, it may be reverted back to its + original state and forcefully re-emitted along with the whole chain of internal events. + + The purpose of internal events is to enable speculative/explorative discovery without cluttering + the console with irrelevant or uninteresting events. + """ if not self._made_internal: self._internal = True self.add_tag("internal") self._made_internal = True def unmake_internal(self, set_scope_distance=None, force_output=False): + """ + Reverts the event from being internal, optionally forcing it to be included in output and setting its scope distance. + + Removes the 'internal' tag, resets the `_internal` attribute, and adjusts scope distance if specified. + Optionally, forces the event to be included in the output. Also, if any source events are internal, they + are also reverted recursively. + + This typically happens in `ScanManager._emit_event()` if the event is determined to be interesting. + + Parameters: + set_scope_distance (int, optional): If specified, sets the scope distance to this value. + force_output (bool or str, optional): If True, forces the event to be included in output. + If set to "trail_only", only its source events are modified. + + Returns: + list: A list of source events that were also reverted from being internal. + """ source_trail = [] self.remove_tag("internal") if self._made_internal: @@ -323,7 +461,17 @@ def unmake_internal(self, set_scope_distance=None, force_output=False): def set_scope_distance(self, d=0): """ - Set the scope of an event and its parents + Sets the scope distance for the event and its parent events, while considering module-specific scoping rules. + + Unmakes the event internal if needed and adjusts its scope distance. If the distance is set to 0, + adds the 'in-scope' tag to the event. Takes into account module-specific scoping preferences unless + the event type is "DNS_NAME". + + Parameters: + d (int): The scope distance to set for this event. + + Returns: + list: A list of parent events whose scope distance was also set. """ source_trail = [] # keep the event internal if the module requests so, unless it's a DNS_NAME @@ -338,6 +486,19 @@ def _host(self): return "" def _sanitize_data(self, data): + """ + Validates and sanitizes the event's data during instantiation. 
+ + By default, uses the '_data_load' method to pre-process the data and then applies the '_data_validator' + to validate and create a sanitized dictionary. Raises a ValidationError if any of the validations fail. + Subclasses can override this method to provide custom validation logic. + + Returns: + Any: The sanitized data. + + Raises: + ValidationError: If the data fails to validate. + """ data = self._data_load(data) if self._data_validator is not None: if not isinstance(data, dict): @@ -378,7 +539,15 @@ def _data_id(self): @property def pretty_string(self): """ - Graph representation of event.data + A human-friendly representation of the event's data. Used for graph representation. + + If the event's data is a dictionary, the function will try to return a JSON-formatted string. + Otherwise, it will use smart_decode to convert the data into a string representation. + + Override if necessary. + + Returns: + str: The graphical representation of the event's data. """ return self._pretty_string() @@ -425,6 +594,18 @@ def __contains__(self, other): return False def json(self, mode="json"): + """ + Serializes the event object to a JSON-compatible dictionary. + + By default, it includes attributes such as 'type', 'id', 'data', 'scope_distance', and others that are present. + Additional specific attributes can be serialized based on the mode specified. + + Parameters: + mode (str): Specifies the data serialization mode. Default is "json". Other options include "graph", "human", and "id". + + Returns: + dict: JSON-serializable dictionary representation of the event object. + """ j = dict() for i in ("type", "id"): v = getattr(self, i, "") @@ -467,14 +648,28 @@ def json(self, mode="json"): @staticmethod def from_json(j): + """ + Convenience shortcut to create an Event object from a JSON-compatible dictionary. + + Calls the `event_from_json()` function to deserialize the event. + + Parameters: + j (dict): The JSON-compatible dictionary containing event data. + + Returns: + Event: The deserialized Event object. + """ return event_from_json(j) @property def module_sequence(self): """ - A human-friendly representation of the module name that includes modules from omitted source events + Get a human-friendly string that represents the sequence of modules responsible for generating this event. + + Includes the names of omitted source events to provide a complete view of the module sequence leading to this event. - Helpful in identifying where a URL came from + Returns: + str: The module sequence in human-friendly format. """ module_name = getattr(self.module, "name", "") if getattr(self.source, "_omit", False): @@ -975,7 +1170,47 @@ def make_event( internal=None, ): """ - If data is already an event, simply return it + Creates and returns a new event object or modifies an existing one. + + This function serves as a factory for creating new event objects, either by generating a new `Event` + object or by updating an existing event with additional metadata. If `data` is already an event, + it updates the event based on the additional parameters provided. + + Parameters: + data (Union[str, dict, BaseEvent]): The primary data for the event or an existing event object. + event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. Auto-detected if not provided. + source (BaseEvent, optional): Source event leading to this event's discovery. + module (str, optional): Module that discovered the event. + scan (Scan, optional): BBOT Scan object associated with the event. 
+ scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization. + tags (Union[str, List[str]], optional): Descriptive tags for the event, as a list or a single string. + confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5. + dummy (bool, optional): Disables data validations if set to True. Defaults to False. + internal (Any, optional): Makes the event internal if set to True. Defaults to None. + + Returns: + BaseEvent: A new or updated event object. + + Raises: + ValidationError: Raised when there's an error in event data or type sanitization. + + Examples: + If inside a module, e.g. from within its `handle_event()`: + >>> self.make_event("1.2.3.4", source=event) + IP_ADDRESS("1.2.3.4", module=nmap, tags={'ipv4', 'distance-1'}) + + If you're outside a module but you have a scan object: + >>> scan.make_event("1.2.3.4", source=scan.root_event) + IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'}) + + If you're outside a scan and just messing around: + >>> from bbot.core.event.base import make_event + >>> make_event("1.2.3.4", dummy=True) + IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4'}) + + Note: + When working within a module's `handle_event()`, use the instance method + `self.make_event()` instead of calling this function directly. """ # allow tags to be either a string or an array @@ -1040,6 +1275,27 @@ def make_event( def event_from_json(j): + """ + Creates an event object from a JSON dictionary. + + This function deserializes a JSON dictionary to create a new event object, using the `make_event` function + for the actual object creation. It sets additional attributes such as the timestamp and scope distance + based on the input JSON. + + Parameters: + j (Dict): JSON dictionary containing the event attributes. + Must include keys "data" and "type". + + Returns: + BaseEvent: A new event object initialized with attributes from the JSON dictionary. + + Raises: + ValidationError: Raised when the JSON dictionary is missing required fields. + + Note: + The function assumes that the input JSON dictionary is valid and may raise exceptions + if required keys are missing. Make sure to validate the JSON input beforehand. + """ try: kwargs = { "data": j["data"], diff --git a/bbot/core/event/helpers.py b/bbot/core/event/helpers.py index 6df0fe2ee..68a4f3cb8 100644 --- a/bbot/core/event/helpers.py +++ b/bbot/core/event/helpers.py @@ -12,7 +12,21 @@ def get_event_type(data): """ - Attempt to divine event type from data + Determines the type of event based on the given data. + + Args: + data (str): The data to be used for determining the event type. + + Returns: + str: The type of event such as "IP_ADDRESS", "IP_RANGE", or "URL_UNVERIFIED". + + Raises: + ValidationError: If the event type could not be determined. + + Notes: + - Utilizes `smart_decode_punycode` and `smart_decode` to preprocess the data. + - Makes use of `ipaddress` standard library to check for IP and network types. + - Checks against a set of predefined regular expressions stored in `event_type_regexes`. 
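+ + Examples: + A few illustrative inputs and the event types they should map to (outputs are representative of the documented return values, not captured doctest output): + + >>> get_event_type("1.2.3.4") + "IP_ADDRESS" + + >>> get_event_type("1.2.3.0/24") + "IP_RANGE" + + >>> get_event_type("https://www.evilcorp.com") + "URL_UNVERIFIED"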
""" data = smart_decode_punycode(smart_decode(data).strip()) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index 0d03ccac1..bc28cbc82 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -10,10 +10,28 @@ async def run(self, *command, check=False, text=True, **kwargs): - """ - Simple helper for running a command, and getting its output as a string - process = await run(["ls", "/tmp"]) - process.stdout --> "file1.txt\nfile2.txt" + """Runs a command asynchronously and gets its output as a string. + + This method is a simple helper for executing a command and capturing its output. + If an error occurs during execution, it can optionally raise an error or just log the stderr. + + Args: + *command (str): The command to run as separate arguments. + check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. + Defaults to False. + text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + **kwargs (dict): Additional keyword arguments for the subprocess. + + Returns: + CompletedProcess: A completed process object with attributes for the command, return code, stdout, and stderr. + + Raises: + CalledProcessError: If the subprocess exits with a non-zero status and `check=True`. + + Examples: + >>> process = await run(["ls", "/tmp"]) + >>> process.stdout + "file1.txt\nfile2.txt" """ proc, _input, command = await self._spawn_proc(*command, **kwargs) if proc is not None: @@ -41,10 +59,28 @@ async def run(self, *command, check=False, text=True, **kwargs): async def run_live(self, *command, check=False, text=True, **kwargs): - """ - Simple helper for running a command and iterating through its output line by line in realtime - async for line in run_live(["ls", "/tmp"]): - log.info(line) + """Runs a command asynchronously and iterates through its output line by line in realtime. + + This method is useful for executing a command and capturing its output on-the-fly, as it is generated. + If an error occurs during execution, it can optionally raise an error or just log the stderr. + + Args: + *command (str): The command to run as separate arguments. + check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. + Defaults to False. + text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + **kwargs (dict): Additional keyword arguments for the subprocess. + + Yields: + str or bytes: The output lines of the command, either as a decoded string (if `text=True`) + or as bytes (if `text=False`). + + Raises: + CalledProcessError: If the subprocess exits with a non-zero status and `check=True`. + + Examples: + >>> async for line in run_live(["tail", "-f", "/var/log/auth.log"]): + ... log.info(line) """ proc, _input, command = await self._spawn_proc(*command, **kwargs) if proc is not None: @@ -92,6 +128,27 @@ async def run_live(self, *command, check=False, text=True, **kwargs): async def _spawn_proc(self, *command, **kwargs): + """Spawns an asynchronous subprocess. + + Prepares the command and associated keyword arguments. If the `input` argument is provided, + it checks to ensure that the `stdin` argument is not also provided. Once prepared, it creates + and returns the subprocess. If the command executable is not found, it logs a warning and traceback. + + Args: + *command (str): The command to run as separate arguments. + **kwargs (dict): Additional keyword arguments for the subprocess. 
+ + Raises: + ValueError: If both stdin and input arguments are provided. + + Returns: + tuple: A tuple containing the created process (or None if creation failed), the input (or None if not provided), + and the prepared command (or None if subprocess creation failed). + + Examples: + >>> await _spawn_proc("ls", "-l", input="data") + (<Process 123456>, "data", ["ls", "-l"]) + """ command, kwargs = self._prepare_command_kwargs(command, kwargs) _input = kwargs.pop("input", None) if _input is not None: @@ -110,6 +167,17 @@ async def _spawn_proc(self, *command, **kwargs): async def _write_stdin(proc, _input): + """ + Asynchronously writes input to an active subprocess's stdin. + + This function takes an `_input` parameter, which can be of type str, bytes, + list, tuple, or an asynchronous generator. The input is then written line by + line to the stdin of the given `proc`. + + Args: + proc (subprocess.Popen): An active subprocess object. + _input (str, bytes, list, tuple, async generator): The data to write to stdin. + """ if _input is not None: if isinstance(_input, (str, bytes)): _input = [_input] @@ -124,6 +192,28 @@ async def _write_stdin(proc, _input): def _prepare_command_kwargs(self, command, kwargs): + """ + Prepare arguments for passing into `asyncio.create_subprocess_exec()`. + + This method modifies the `kwargs` dictionary in place to prepare it for + use in the `asyncio.create_subprocess_exec()` method. It sets the default + values for keys like 'limit', 'stdout', and 'stderr' if they are not + already present. It also handles the case when 'sudo' needs to be run. + + Args: + command (list): The command to be run in the subprocess. + kwargs (dict): The keyword arguments to be passed to `asyncio.create_subprocess_exec()`. + + Returns: + tuple: A tuple containing the modified `command` and `kwargs`. + + Examples: + >>> _prepare_command_kwargs(['ls', '-l'], {}) + (['ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1}) + + >>> _prepare_command_kwargs(['ls', '-l'], {'sudo': True}) + (['sudo', '-E', '-A', 'LD_LIBRARY_PATH=...', 'PATH=...', 'ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1, 'env': environ(...)}) + """ # limit = 100MB (this is needed for cases like httpx that are sending large JSON blobs over stdout) if not "limit" in kwargs: kwargs["limit"] = 1024 * 1024 * 100 diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 1d5fb7bef..860853661 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -18,6 +18,21 @@ class BBOTAsyncResolver(dns.asyncresolver.Resolver): + """Custom asynchronous resolver for BBOT with rate limiting. + + This class extends dnspython's async resolver and provides additional support for rate-limiting DNS queries. + The maximum number of queries allowed per second can be customized via BBOT's config. + + Attributes: + _parent_helper: A reference to the instantiated `ConfigAwareHelper` (typically `scan.helpers`). + _dns_rate_limiter (RateLimiter): An instance of the RateLimiter class for DNS query rate-limiting. + + Args: + *args: Positional arguments passed to the base resolver. + **kwargs: Keyword arguments. '_parent_helper' is expected among these to provide configuration data for + rate-limiting. All other keyword arguments are passed to the base resolver.
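+ + Examples: + Illustrative only; in practice this resolver is constructed for you by `DNSHelper`: + + >>> resolver = BBOTAsyncResolver(_parent_helper=scan.helpers) + >>> answer = await resolver.resolve("www.evilcorp.com", "A")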
+ """ + def __init__(self, *args, **kwargs): self._parent_helper = kwargs.pop("_parent_helper") dns_queries_per_second = self._parent_helper.config.get("dns_queries_per_second", 100) @@ -30,8 +45,37 @@ async def resolve(self, *args, **kwargs): class DNSHelper: - """ - For host resolution, automatic wildcard detection, etc. + """Helper class for DNS-related operations within BBOT. + + This class provides mechanisms for host resolution, wildcard domain detection, event tagging, and more. + It centralizes all DNS-related activities in BBOT, offering both synchronous and asynchronous methods + for DNS resolution, as well as various utilities for batch resolution and DNS query filtering. + + Attributes: + parent_helper: A reference to the instantiated `ConfigAwareHelper` (typically `scan.helpers`). + resolver (BBOTAsyncResolver): An asynchronous DNS resolver tailored for BBOT with rate-limiting capabilities. + timeout (int): The timeout value for DNS queries. Defaults to 5 seconds. + retries (int): The number of retries for failed DNS queries. Defaults to 1. + abort_threshold (int): The threshold for aborting after consecutive failed queries. Defaults to 50. + max_dns_resolve_distance (int): Maximum allowed distance for DNS resolution. Defaults to 4. + all_rdtypes (list): A list of DNS record types to be considered during operations. + wildcard_ignore (tuple): Domains to be ignored during wildcard detection. + wildcard_tests (int): Number of tests to be run for wildcard detection. Defaults to 5. + _wildcard_cache (dict): Cache for wildcard detection results. + _dns_cache (CacheDict): Cache for DNS resolution results, limited in size. + _event_cache (CacheDict): Cache for event resolution results, tags. Limited in size. + resolver_file (Path): File containing system's current resolver nameservers. + filter_bad_ptrs (bool): Whether to filter out DNS names that appear to be auto-generated PTR records. Defaults to True. + + Args: + parent_helper: The parent helper object with configuration details and utilities. + + Raises: + DNSError: If an issue arises when creating the BBOTAsyncResolver instance. + + Examples: + >>> dns_helper = DNSHelper(parent_config) + >>> resolved_host = dns_helper.resolver.resolve("example.com") """ all_rdtypes = ["A", "AAAA", "SRV", "MX", "NS", "SOA", "CNAME", "TXT"] @@ -44,7 +88,7 @@ def __init__(self, parent_helper): raise DNSError(f"Failed to create BBOT DNS resolver: {e}") self.timeout = self.parent_helper.config.get("dns_timeout", 5) self.retries = self.parent_helper.config.get("dns_retries", 1) - self.abort_threshold = self.parent_helper.config.get("dns_abort_threshold", 5) + self.abort_threshold = self.parent_helper.config.get("dns_abort_threshold", 50) self.max_dns_resolve_distance = self.parent_helper.config.get("max_dns_resolve_distance", 4) self.resolver.timeout = self.timeout self.resolver.lifetime = self.timeout @@ -95,14 +139,25 @@ def __init__(self, parent_helper): self.filter_bad_ptrs = self.parent_helper.config.get("dns_filter_ptrs", True) async def resolve(self, query, **kwargs): - """ - "1.2.3.4" --> { - "evilcorp.com", - } - "evilcorp.com" --> { - "1.2.3.4", - "dead::beef" - } + """Resolve DNS names and IP addresses to their corresponding results. + + This is a high-level function that can translate a given domain name to its associated IP addresses + or an IP address to its corresponding domain names. It's structured for ease of use within modules + and will abstract away most of the complexity of DNS resolution, returning a simple set of results. 
+ + Args: + query (str): The domain name or IP address to resolve. + **kwargs: Additional arguments to be passed to the resolution process. + + Returns: + set: A set containing resolved domain names or IP addresses. + + Examples: + >>> results = await resolve("1.2.3.4") + {"evilcorp.com"} + + >>> results = await resolve("evilcorp.com") + {"1.2.3.4", "dead::beef"} """ results = set() try: @@ -122,6 +177,32 @@ async def resolve(self, query, **kwargs): return results async def resolve_raw(self, query, **kwargs): + """Resolves the given query to its associated DNS records. + + This function is a foundational method for DNS resolution in this class. It understands both IP addresses and + hostnames and returns their associated records in a raw format provided by the dnspython library. + + Args: + query (str): The IP address or hostname to resolve. + type (str or list[str], optional): Specifies the DNS record type(s) to fetch. Can be a single type like 'A' + or a list like ['A', 'AAAA']. If set to 'any', 'all', or '*', it fetches all supported types. If not + specified, the function defaults to fetching 'A' and 'AAAA' records. + **kwargs: Additional arguments that might be passed to the resolver. + + Returns: + tuple: A tuple containing two lists: + - list: A list of tuples where each tuple consists of a record type string (like 'A') and the associated + raw dnspython answer. + - list: A list of tuples where each tuple consists of a record type string and the associated error if + there was an issue fetching the record. + + Examples: + >>> await resolve_raw("8.8.8.8") + ([('PTR', <dns.resolver.Answer object>)], []) + + >>> await resolve_raw("dns.google") + ([('A', <dns.resolver.Answer object>), ('AAAA', <dns.resolver.Answer object>)], []) + """ # DNS over TCP is more reliable # But setting this breaks DNS resolution on Ubuntu because systemd-resolve doesn't support TCP # kwargs["tcp"] = True @@ -160,6 +241,29 @@ async def resolve_raw(self, query, **kwargs): return (results, errors) async def _resolve_hostname(self, query, **kwargs): + """Translate a hostname into its corresponding IP addresses. + + This is the foundational function for converting a domain name into its associated IP addresses. It's designed + for internal use within the class and handles retries, caching, and a variety of error/timeout scenarios. + It also respects certain configurations that might ask to skip certain types of queries. Results are returned + in the default dnspython answer object format. + + Args: + query (str): The hostname to resolve. + rdtype (str, optional): The type of DNS record to query (e.g., 'A', 'AAAA'). Defaults to 'A'. + retries (int, optional): The number of times to retry on failure. Defaults to class-wide `retries`. + use_cache (bool, optional): Whether to check the cache before trying a fresh resolution. Defaults to True. + **kwargs: Additional arguments that might be passed to the resolver. + + Returns: + tuple: A tuple containing: + - list: A list of resolved IP addresses. + - list: A list of errors encountered during the resolution process. + + Examples: + >>> results, errors = await _resolve_hostname("google.com") + (<dns.resolver.Answer object>, []) + """ self.debug(f"Resolving {query} with kwargs={kwargs}") results = [] errors = [] @@ -232,6 +336,27 @@ async def _resolve_hostname(self, query, **kwargs): return results, errors async def _resolve_ip(self, query, **kwargs): + """Translate an IP address into a corresponding DNS name. + + This is the most basic function that will convert an IP address into its associated domain name.
It handles + retries, caching, and multiple types of timeout/error scenarios internally. The function is intended for + internal use and should not be directly called by modules without understanding its intricacies. + + Args: + query (str): The IP address to be reverse-resolved. + retries (int, optional): The number of times to retry on failure. Defaults to 0. + use_cache (bool, optional): Whether to check the cache for the result before attempting resolution. Defaults to True. + **kwargs: Additional arguments to be passed to the resolution process. + + Returns: + tuple: A tuple containing: + - list: A list of resolved domain names (in default dnspython answer format). + - list: A list of errors encountered during resolution. + + Examples: + >>> results, errors = await _resolve_ip("8.8.8.8") + (<dns.resolver.Answer object>, []) + """ self.debug(f"Reverse-resolving {query} with kwargs={kwargs}") retries = kwargs.pop("retries", 0) use_cache = kwargs.pop("use_cache", True) @@ -271,6 +396,25 @@ async def _resolve_ip(self, query, **kwargs): return results, errors async def handle_wildcard_event(self, event, children): + """ + Used within BBOT's scan manager to detect and tag DNS wildcard events. + + Wildcards are detected for every major record type. If a wildcard is detected, its data + is overwritten, for example: `_wildcard.evilcorp.com`. + + Args: + event (object): The event to check for wildcards. + children (list): A list of the event's resulting DNS children after resolution. + + Returns: + None: This method modifies the `event` in place and does not return a value. + + Examples: + >>> await handle_wildcard_event(event, children) + # The `event` might now have tags like ["wildcard", "a-wildcard", "aaaa-wildcard"] and + # its `data` attribute might be modified to "_wildcard.evilcorp.com" if it was detected + # as a wildcard. + """ log.debug(f"Entering handle_wildcard_event({event}, children={children})") try: event_host = str(event.host) @@ -324,8 +468,29 @@ async def handle_wildcard_event(self, event, children): async def resolve_event(self, event, minimal=False): """ - Tag event with appropriate dns record types - Optionally create child events from dns resolutions + Tag the given event with the appropriate DNS record types and optionally create child + events based on DNS resolutions. + + Args: + event (object): The event to be resolved and tagged. + minimal (bool, optional): If set to True, the function will perform minimal DNS + resolution. Defaults to False. + + Returns: + tuple: A 4-tuple containing the following items: + - event_tags (set): Set of tags for the event. + - event_whitelisted (bool): Whether the event is whitelisted. + - event_blacklisted (bool): Whether the event is blacklisted. + - dns_children (dict): Dictionary containing child events from DNS resolutions. + + Examples: + >>> event = make_event("evilcorp.com") + >>> await resolve_event(event) + ({'resolved', 'ns-record', 'a-record'}, False, False, {'A': {IPv4Address('1.2.3.4'), IPv4Address('1.2.3.5')}, 'NS': {'ns1.evilcorp.com'}}) + + Note: + This method does not modify the passed-in `event`. Instead, it returns data + that can be used to modify or act upon the `event`. + """ log.debug(f"Resolving {event}") event_host = str(event.host) @@ -430,8 +595,33 @@ async def resolve_event(self, event, minimal=False): log.debug(f"Finished resolving {event}") def event_cache_get(self, host): + """ + Retrieves cached event data based on the given host. + + Args: + host (str): The host for which the event data is to be retrieved.
+ + Returns: + tuple: A 4-tuple containing the following items: + - event_tags (set): Set of tags for the event. + - event_whitelisted (bool or None): Whether the event is whitelisted. Returns None if not found. + - event_blacklisted (bool or None): Whether the event is blacklisted. Returns None if not found. + - dns_children (set): Set containing child events from DNS resolutions. + + Examples: + Assuming an event with host "www.evilcorp.com" has been cached: + + >>> event_cache_get("www.evilcorp.com") + ({"resolved", "a-record"}, False, False, {'1.2.3.4'}) + + Assuming no event with host "www.notincache.com" has been cached: + + >>> event_cache_get("www.notincache.com") + (set(), None, None, set()) + """ try: - return self._event_cache[host] + event_tags, event_whitelisted, event_blacklisted, dns_children = self._event_cache[host] + return (event_tags, event_whitelisted, event_blacklisted, dns_children) except KeyError: return set(), None, None, set() @@ -444,10 +634,27 @@ async def _resolve_batch_coro_wrapper(self, q, **kwargs): async def resolve_batch(self, queries, **kwargs): """ - await resolve_batch(["www.evilcorp.com", "evilcorp.com"]) --> [ - ("www.evilcorp.com", {"1.1.1.1"}), - ("evilcorp.com", {"2.2.2.2"}) - ] + Asynchronously resolves a batch of queries in parallel and yields the results as they are completed. + + This method wraps around `_resolve_batch_coro_wrapper` to resolve a list of queries in parallel. + It batches the queries to a manageable size and executes them asynchronously, respecting + global rate limits. + + Args: + queries (list): List of queries to resolve. + **kwargs: Additional keyword arguments to pass to `_resolve_batch_coro_wrapper`. + + Yields: + tuple: A tuple containing the original query and its resolved value. + + Examples: + >>> async for result in resolve_batch(['www.evilcorp.com', 'evilcorp.com']): + ...     print(result) + ('www.evilcorp.com', {'1.1.1.1'}) + ('evilcorp.com', {'2.2.2.2'}) + """ queries = list(queries) batch_size = 250 @@ -459,7 +666,28 @@ async def resolve_batch(self, queries, **kwargs): def extract_targets(self, record): """ - Extract whatever hostnames/IPs a DNS records points to + Extracts hostnames or IP addresses from a given DNS record. + + This method reads the DNS record's type and, based on that, extracts the target + hostnames or IP addresses it points to. The type of DNS record + (e.g., "A", "MX", "CNAME", etc.) determines which fields are used for extraction. + + Args: + record (dns.rdata.Rdata): The DNS record to extract information from. + + Returns: + set: A set of tuples, each containing the DNS record type and the extracted value. + + Examples: + >>> from dns.rrset import from_text + >>> record = from_text('www.example.com', 3600, 'IN', 'A', '192.0.2.1') + >>> extract_targets(record[0]) + {('A', '192.0.2.1')} + + >>> record = from_text('example.com', 3600, 'IN', 'MX', '10 mail.example.com.') + >>> extract_targets(record[0]) + {('MX', 'mail.example.com')} + """ results = set() rdtype = str(record.rdtype.name).upper() @@ -486,11 +714,50 @@ def extract_targets(self, record): @staticmethod def _clean_dns_record(record): + """ + Cleans and formats a given DNS record for further processing. + + This static method converts the DNS record to text format if it's not already a string. + It also removes any trailing dots and converts the record to lowercase. + + Args: + record (str or dns.rdata.Rdata): The DNS record to clean.
+ + Returns: + str: The cleaned and formatted DNS record. + + Examples: + >>> _clean_dns_record('www.evilcorp.com.') + 'www.evilcorp.com' + + >>> from dns.rrset import from_text + >>> record = from_text('www.evilcorp.com', 3600, 'IN', 'A', '1.2.3.4')[0] + >>> _clean_dns_record(record) + '1.2.3.4' + """ if not isinstance(record, str): record = str(record.to_text()) return str(record).rstrip(".").lower() async def _catch(self, callback, *args, **kwargs): + """ + Asynchronously catches exceptions thrown during DNS resolution and logs them. + + This method wraps around a given asynchronous callback function to handle different + types of DNS exceptions and general exceptions. It logs the exceptions for debugging + and, in some cases, re-raises them. + + Args: + callback (callable): The asynchronous function to be executed. + *args: Positional arguments to pass to the callback. + **kwargs: Keyword arguments to pass to the callback. + + Returns: + Any: The return value of the callback function, or an empty list if an exception is caught. + + Raises: + dns.resolver.NoNameservers: When no nameservers could be reached. + """ try: return await callback(*args, **kwargs) except dns.resolver.NoNameservers: @@ -509,16 +776,33 @@ async def is_wildcard(self, query, ips=None, rdtype=None): """ Use this method to check whether a *host* is a wildcard entry - This can reliably tell the difference between a valid DNS record and a wildcard inside a wildcard domain. + This can reliably tell the difference between a valid DNS record and a wildcard within a wildcard domain. + + If you want to know whether a domain is using wildcard DNS, use `is_wildcard_domain()` instead. + + Args: + query (str): The hostname to check for a wildcard entry. + ips (list, optional): List of IPs to compare against, typically obtained from a previous DNS resolution of the query. + rdtype (str, optional): The DNS record type (e.g., "A", "AAAA") to consider during the check. + + Returns: + dict: A dictionary indicating if the query is a wildcard for each checked DNS record type. + Keys are DNS record types like "A", "AAAA", etc. + Values are tuples where the first element is a boolean indicating if the query is a wildcard, + and the second element is the wildcard parent if it's a wildcard. - If you want to know whether a domain is using wildcard DNS, use is_wildcard_domain() instead. + Raises: + ValueError: If only one of `ips` or `rdtype` is specified or if no valid IPs are specified. - Returns a dictionary in the following format: - {rdtype: (is_wildcard, wildcard_parent)} + Examples: + >>> await is_wildcard("www.github.io") + {"A": (True, "github.io"), "AAAA": (True, "github.io")} - is_wildcard("www.github.io") --> {"A": (True, "github.io"), "AAAA": (True, "github.io")} + >>> await is_wildcard("www.evilcorp.com", ips=["93.184.216.34"], rdtype="A") + {"A": (False, "evilcorp.com")} - Note that is_wildcard can be True, False, or None (indicating that wildcard detection was inconclusive) + Note: + The boolean in each tuple can be True, False, or None (indicating that wildcard detection was inconclusive) + """ result = {} @@ -618,12 +902,25 @@ async def is_wildcard(self, query, ips=None, rdtype=None): async def is_wildcard_domain(self, domain, log_info=False): """ - Check whether a domain is using wildcard DNS + Check whether a given host or its children make use of wildcard DNS entries. Wildcard DNS can have + various implications, particularly in subdomain enumeration and subdomain takeovers.
- Returns a dictionary containing any DNS record types that are wildcards, and their associated IPs - is_wildcard_domain("github.io") --> {"A": {"1.2.3.4",}, "AAAA": {"dead::beef",}} - """ + Args: + domain (str): The domain to check for wildcard DNS entries. + log_info (bool, optional): Whether to log the result of the check. Defaults to False. + + Returns: + dict: A dictionary where the keys are the parent domains that have wildcard DNS entries, + and the values are another dictionary of DNS record types ("A", "AAAA", etc.) mapped to + sets of their resolved IP addresses. + + Examples: + >>> await is_wildcard_domain("github.io") + {"github.io": {"A": {"1.2.3.4"}, "AAAA": {"dead::beef"}}} + + >>> await is_wildcard_domain("example.com") + {} + """ wildcard_domain_results = {} domain = self._clean_dns_record(domain) @@ -690,7 +987,18 @@ async def is_wildcard_domain(self, domain, log_info=False): async def _connectivity_check(self, interval=5): """ - Used to periodically check whether the scan has an internet connection + Periodically checks for an active internet connection by attempting DNS resolution. + + Args: + interval (int, optional): The time interval, in seconds, at which to perform the check. + Defaults to 5 seconds. + + Returns: + bool: True if there is an active internet connection, False otherwise. + + Examples: + >>> await _connectivity_check() + True + """ if self._last_dns_success is not None: if time.time() - self._last_dns_success < interval: diff --git a/bbot/core/helpers/files.py b/bbot/core/helpers/files.py index 27ed71948..438f74112 100644 --- a/bbot/core/helpers/files.py +++ b/bbot/core/helpers/files.py @@ -12,11 +12,23 @@ def tempfile(self, content, pipe=True): """ - tempfile(["temp", "file", "content"]) --> Path("/home/user/.bbot/temp/pgxml13bov87oqrvjz7a") + Creates a temporary file or named pipe and populates it with content. - if "pipe" is True (the default), a named pipe is used instead of - a true file, which allows python data to be piped directly into the - process without taking up disk space + Args: + content (list, set, tuple, str): The content to populate the temporary file with. + pipe (bool, optional): If True, a named pipe is used instead of a true file. + This allows Python data to be piped directly into the process without taking up disk space. + Defaults to True. + + Returns: + Path: The path of the created temporary file or named pipe. + + Examples: + >>> tempfile(["This", "is", "temp", "content"]) + Path("/home/user/.bbot/temp/pgxml13bov87oqrvjz7a") + + >>> tempfile(["Another", "temp", "file"], pipe=False) + Path("/home/user/.bbot/temp/someotherfile") + """ filename = self.temp_filename() rm_at_exit(filename) @@ -39,6 +51,19 @@ def tempfile(self, content, pipe=True): def _feed_pipe(self, pipe, content, text=True): + """ + Feeds content into a named pipe or file-like object. + + Args: + pipe (str or file-like object): The named pipe or file-like object to feed the content into. + content (iterable): The content to be written into the pipe or file. + text (bool, optional): If True, the content is decoded using smart_decode function. + If False, smart_encode function is used. Defaults to True. + + Notes: + The method tries to determine if 'pipe' is a file-like object that has a 'write' method. + If so, it writes directly to that object. Otherwise, it opens 'pipe' as a file for writing.
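+ + Examples: + Illustrative only; this is normally invoked for you by `feed_pipe()`, which runs it in a separate thread: + + >>> self._feed_pipe("/home/user/.bbot/temp/mypipe", ["line1", "line2"])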
+ """ try: if text: decode_fn = self.smart_decode @@ -70,13 +95,33 @@ def _feed_pipe(self, pipe, content, text=True): def feed_pipe(self, pipe, content, text=True): + """ + Starts a new thread to feed content into a named pipe or file-like object using _feed_pipe(). + + Args: + pipe (str or file-like object): The named pipe or file-like object to feed the content into. + content (iterable): The content to be written into the pipe or file. + text (bool, optional): If True, the content is decoded using smart_decode function. + If False, smart_encode function is used. Defaults to True. + """ t = threading.Thread(target=self._feed_pipe, args=(pipe, content), kwargs={"text": text}, daemon=True) t.start() def tempfile_tail(self, callback): """ - Create a named pipe and execute a callback on each line + Create a named pipe and execute a callback function on each line that is written to the pipe. + + Useful for ingesting output from a program (e.g. nuclei) directly from a file in real-time as + each line is written. The idea is you create the file with this function and then tell the CLI + program to output to it as a normal output file. We are then able to scoop up the output line + by line as it's written to our "file" (which is actually a named pipe, shhh! ;) + + Args: + callback (Callable): A function that will be invoked with each line written to the pipe as its argument. + + Returns: + str: The filename of the created named pipe. """ filename = self.temp_filename() rm_at_exit(filename) @@ -92,6 +137,18 @@ def tempfile_tail(self, callback): def tail(filename, callback): + """ + Continuously read lines from a file and execute a callback function on each line. + + Args: + filename (str): The path of the file to tail. + callback (Callable): A function to call on each line read from the file. + + Examples: + >>> def print_callback(line): + ... print(f"Received: {line}") + >>> tail("/path/to/file", print_callback) + """ try: with open(filename, errors="ignore") as f: for line in f: diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index b052a63fb..8b19bc9f2 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -18,6 +18,32 @@ class ConfigAwareHelper: + """ + Centralized helper class that provides unified access to various helper functions. + + This class serves as a convenient interface for accessing helper methods across different files. + It is designed to be configuration-aware, allowing helper functions to utilize scan-specific + configurations like rate-limits. The class leverages Python's `__getattribute__` magic method + to provide seamless access to helper functions across various namespaces. + + Attributes: + config (dict): Configuration settings for the BBOT scan instance. + _scan (Scan): A BBOT scan instance. + bbot_home (Path): Home directory for BBOT. + cache_dir (Path): Directory for storing cache files. + temp_dir (Path): Directory for storing temporary files. + tools_dir (Path): Directory for storing tools, e.g. compiled binaries. + lib_dir (Path): Directory for storing libraries. + scans_dir (Path): Directory for storing scan results. + wordlist_dir (Path): Directory for storing wordlists. + current_dir (Path): The current working directory. + keep_old_scans (int): The number of old scans to keep. + + Examples: + >>> helper = ConfigAwareHelper(config) + >>> ips = helper.dns.resolve("www.evilcorp.com") + """ + from . import ntlm from . import regexes from . 
import validators @@ -98,7 +124,22 @@ def _make_dummy_module(self, name, _type="scan"): def __getattribute__(self, attr): """ - Allow static functions from sub-helpers to be accessed from the main class + Overrides Python's built-in __getattribute__ to provide convenient access to helper methods. + + This method first attempts to find an attribute within this class itself. If unsuccessful, + it then looks in the 'misc', 'dns', and 'web' helper modules, in that order. If the attribute + is still not found, an AttributeError is raised. + + Args: + attr (str): The attribute name to look for. + + Returns: + Any: The attribute value, if found. + + Raises: + AttributeError: If the attribute is not found in any of the specified places. """ try: # first try self diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 205fc2bb3..695fd6260 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -19,6 +19,65 @@ class Interactsh: + """ + A pure-Python implementation of ProjectDiscovery's interact.sh. + + *"Interactsh is an open-source tool for detecting out-of-band interactions. It is a tool designed to detect vulnerabilities that cause external interactions."* + + - https://app.interactsh.com + - https://github.com/projectdiscovery/interactsh + + This class facilitates interactions with the interact.sh service for + out-of-band data exfiltration and vulnerability confirmation. It allows + for customization by accepting server and token parameters from the + configuration provided by `parent_helper`. + + Attributes: + parent_helper (ConfigAwareHelper): An instance of a helper class containing configuration data. + server (str): The server to be used. If None (the default), a random server will be chosen from a predetermined list. + correlation_id (str): An identifier to correlate requests and responses. Default is None. + custom_server (str): Optional. A custom interact.sh server. Loaded from configuration. + token (str): Optional. A token for the interact.sh API. Loaded from configuration. + _poll_task (AsyncTask): The task responsible for polling the interact.sh server.
+ + Examples: + ```python + # instantiate interact.sh client (no requests are sent yet) + >>> interactsh_client = self.helpers.interactsh() + # register with an interact.sh server + >>> interactsh_domain = await interactsh_client.register() + [INFO] Registering with interact.sh server: oast.me + [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me] + # simulate an out-of-band interaction + >>> await self.helpers.request(f"https://{interactsh_domain}/test") + # wait for out-of-band interaction to be registered + >>> await asyncio.sleep(10) + >>> data_list = await interactsh_client.poll() + >>> print(data_list) + [ + { + "protocol": "dns", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "full-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "q-type": "A", + "raw-request": "...", + "remote-address": "1.2.3.4", + "timestamp": "2023-09-15T21:09:23.187226851Z" + }, + { + "protocol": "http", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "full-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "raw-request": "GET /test HTTP/1.1 ...", + "remote-address": "1.2.3.4", + "timestamp": "2023-09-15T21:09:24.155677967Z" + } + ] + # finally, shut down the client + >>> await interactsh_client.deregister() + ``` + """ + def __init__(self, parent_helper): self.parent_helper = parent_helper self.server = None @@ -28,6 +87,28 @@ def __init__(self, parent_helper): self._poll_task = None async def register(self, callback=None): + """ + Registers the instance with an interact.sh server and sets up polling. + + Generates RSA keys for secure communication, builds a correlation ID, + and sends a POST request to an interact.sh server to register. Optionally, + starts an asynchronous polling task to listen for interactions. + + Args: + callback (callable, optional): A function to be called each time new interactions are received. + + Returns: + str: The registered domain for out-of-band interactions. + + Raises: + InteractshError: If registration with an interact.sh server fails. + + Examples: + >>> interactsh_client = self.helpers.interactsh() + >>> registered_domain = await interactsh_client.register() + [INFO] Registering with interact.sh server: oast.me + [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me] + """ rsa = RSA.generate(1024) self.public_key = rsa.publickey().exportKey() @@ -84,6 +165,19 @@ async def register(self, callback=None): return self.domain async def deregister(self): + """ + Deregisters the instance from the interact.sh server and cancels the polling task. + + Sends a POST request to the server to deregister, using the correlation ID + and secret key generated during registration. Optionally, if a polling + task was started, it is cancelled. + + Raises: + InteractshError: If required information is missing or if deregistration fails. + + Examples: + >>> await interactsh_client.deregister() + """ if not self.server or not self.correlation_id or not self.secret: raise InteractshError(f"Missing required information to deregister") @@ -104,6 +198,31 @@ async def deregister(self): raise InteractshError(f"Failed to de-register with interactsh server {self.server}") async def poll(self): + """ + Polls the interact.sh server for interactions tied to the current instance. + + Sends a GET request to the server to fetch interactions associated with the + current correlation_id and secret key. 
Returned interactions are decrypted + using an AES key provided by the server response. + + Raises: + InteractshError: If required information for polling is missing. + + Returns: + list: A list of decrypted interaction data dictionaries. + + Examples: + >>> data_list = await interactsh_client.poll() + >>> print(data_list) + [ + { + "protocol": "dns", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + ... + }, + ... + ] + """ if not self.server or not self.correlation_id or not self.secret: raise InteractshError(f"Missing required information to poll") @@ -121,12 +240,28 @@ async def poll(self): aes_key = r.json()["aes_key"] for data in data_list: - decrypted_data = self.decrypt(aes_key, data) + decrypted_data = self._decrypt(aes_key, data) ret.append(decrypted_data) return ret async def poll_loop(self, callback): - async with self.parent_helper.scan.acatch(context=self._poll_loop): + """ + Starts a polling loop to continuously check for interactions with the interact.sh server. + + Continuously polls the interact.sh server for interactions tied to the current instance, + using the `poll` method. When interactions are received, it executes the given callback + function with each interaction data. + + Parameters: + callback (callable): The function to be called for every interaction received from the server. + + Returns: + awaitable: An awaitable object that executes the internal `_poll_loop` method. + + Examples: + >>> await interactsh_client.poll_loop(my_callback) + """ + async with self.parent_helper.scan._acatch(context=self._poll_loop): return await self._poll_loop(callback) async def _poll_loop(self, callback): @@ -147,7 +282,23 @@ async def _poll_loop(self, callback): if data: await self.parent_helper.execute_sync_or_async(callback, data) - def decrypt(self, aes_key, data): + def _decrypt(self, aes_key, data): + """ + Decrypts and returns the data received from the interact.sh server. + + Uses RSA and AES for decrypting the data. RSA with PKCS1_OAEP and SHA256 is used to decrypt the AES key, + and then AES (CFB mode) is used to decrypt the actual data payload. + + Parameters: + aes_key (str): The AES key for decryption, encrypted with RSA and base64 encoded. + data (str): The data payload to decrypt, which is base64 encoded and AES encrypted. + + Returns: + dict: The decrypted data, loaded as a JSON object. + + Examples: + >>> decrypted_data = self._decrypt(aes_key, data) + """ private_key = RSA.importKey(self.private_key) cipher = PKCS1_OAEP.new(private_key, hashAlgo=SHA256) aes_plain_key = cipher.decrypt(base64.b64decode(aes_key)) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 3f9e86425..942eaabdd 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -44,8 +44,26 @@ def is_domain(d): """ - "evilcorp.co.uk" --> True - "www.evilcorp.co.uk" --> False + Check if the given input represents a domain without subdomains. + + This function takes an input string `d` and returns True if it represents a domain without any subdomains. + Otherwise, it returns False. + + Args: + d (str): The input string containing the domain. + + Returns: + bool: True if the input is a domain without subdomains, False otherwise. + + Examples: + >>> is_domain("evilcorp.co.uk") + True + + >>> is_domain("www.evilcorp.co.uk") + False + + Notes: + - Port, if present in input, is ignored. 
""" d, _ = split_host_port(d) extracted = tldextract(d) @@ -56,8 +74,26 @@ def is_domain(d): def is_subdomain(d): """ - "www.evilcorp.co.uk" --> True - "evilcorp.co.uk" --> False + Check if the given input represents a subdomain. + + This function takes an input string `d` and returns True if it represents a subdomain. + Otherwise, it returns False. + + Args: + d (str): The input string containing the domain or subdomain. + + Returns: + bool: True if the input is a subdomain, False otherwise. + + Examples: + >>> is_subdomain("www.evilcorp.co.uk") + True + + >>> is_subdomain("evilcorp.co.uk") + False + + Notes: + - Port, if present in input, is ignored. """ d, _ = split_host_port(d) extracted = tldextract(d) @@ -68,13 +104,47 @@ def is_subdomain(d): def is_ptr(d): """ - "wsc-11-22-33-44.evilcorp.com" --> True - "www2.evilcorp.com" --> False + Check if the given input represents a PTR record domain. + + This function takes an input string `d` and returns True if it matches the PTR record format. + Otherwise, it returns False. + + Args: + d (str): The input string potentially representing a PTR record domain. + + Returns: + bool: True if the input matches PTR record format, False otherwise. + + Examples: + >>> is_ptr("wsc-11-22-33-44.evilcorp.com") + True + + >>> is_ptr("www2.evilcorp.com") + False """ return bool(bbot_regexes.ptr_regex.search(str(d))) def is_url(u): + """ + Check if the given input represents a valid URL. + + This function takes an input string `u` and returns True if it matches any of the predefined URL formats. + Otherwise, it returns False. + + Args: + u (str): The input string potentially representing a URL. + + Returns: + bool: True if the input matches a valid URL format, False otherwise. + + Examples: + >>> is_url("https://evilcorp.com") + True + + >>> is_url("not-a-url") + False + """ u = str(u) for r in bbot_regexes.event_type_regexes["URL"]: if r.match(u): @@ -87,10 +157,30 @@ def is_url(u): def is_uri(u, return_scheme=False): """ - is_uri("http://evilcorp.com") --> True - is_uri("ftp://evilcorp.com") --> True - is_uri("evilcorp.com") --> False - is_uri("ftp://evilcorp.com", return_scheme=True) --> "ftp" + Check if the given input represents a URI and optionally return its scheme. + + This function takes an input string `u` and returns True if it matches a URI format. + When `return_scheme` is True, it returns the URI scheme instead of a boolean. + + Args: + u (str): The input string potentially representing a URI. + return_scheme (bool, optional): Whether to return the URI scheme. Defaults to False. + + Returns: + Union[bool, str]: True if the input matches a URI format; the URI scheme if `return_scheme` is True. + + Examples: + >>> is_uri("http://evilcorp.com") + True + + >>> is_uri("ftp://evilcorp.com") + True + + >>> is_uri("evilcorp.com") + False + + >>> is_uri("ftp://evilcorp.com", return_scheme=True) + "ftp" """ match = uri_regex.match(u) if return_scheme: @@ -102,9 +192,32 @@ def is_uri(u, return_scheme=False): def split_host_port(d): """ - "evilcorp.com:443" --> ("evilcorp.com", 443) - "192.168.1.1:443" --> (IPv4Address('192.168.1.1'), 443) - "[dead::beef]:443" --> (IPv6Address('dead::beef'), 443) + Parse a string containing a host and port into a tuple. + + This function takes an input string `d` and returns a tuple containing the host and port. + The host is converted to its appropriate IP address type if possible. The port is inferred + based on the scheme if not provided. + + Args: + d (str): The input string containing the host and possibly the port. 
+
+    Returns:
+        Tuple[Union[IPv4Address, IPv6Address, str], Optional[int]]: Tuple containing the host and port.
+
+    Examples:
+        >>> split_host_port("evilcorp.com:443")
+        ("evilcorp.com", 443)
+
+        >>> split_host_port("192.168.1.1:443")
+        (IPv4Address('192.168.1.1'), 443)
+
+        >>> split_host_port("[dead::beef]:443")
+        (IPv6Address('dead::beef'), 443)
+
+    Notes:
+        - If port is not provided, it is inferred based on the scheme:
+            - For "https" and "wss", port 443 is used.
+            - For "http" and "ws", port 80 is used.
     """
     if not "://" in d:
         d = f"d://{d}"
@@ -126,10 +239,32 @@ def split_host_port(d):

 def parent_domain(d):
     """
-    "www.internal.evilcorp.co.uk" --> "internal.evilcorp.co.uk"
-    "www.internal.evilcorp.co.uk:8080" --> "internal.evilcorp.co.uk:8080"
-    "www.evilcorp.co.uk" --> "evilcorp.co.uk"
-    "evilcorp.co.uk" --> "evilcorp.co.uk"
+    Retrieve the parent domain of a given subdomain string.
+
+    This function takes an input string `d` representing a subdomain and returns its parent domain.
+    If the input does not represent a subdomain, it returns the input as is.
+
+    Args:
+        d (str): The input string representing a subdomain or domain.
+
+    Returns:
+        str: The parent domain of the subdomain, or the original input if it is not a subdomain.
+
+    Examples:
+        >>> parent_domain("www.internal.evilcorp.co.uk")
+        "internal.evilcorp.co.uk"
+
+        >>> parent_domain("www.internal.evilcorp.co.uk:8080")
+        "internal.evilcorp.co.uk:8080"
+
+        >>> parent_domain("www.evilcorp.co.uk")
+        "evilcorp.co.uk"
+
+        >>> parent_domain("evilcorp.co.uk")
+        "evilcorp.co.uk"
+
+    Notes:
+        - Port, if present in input, is preserved in the output.
     """
     host, port = split_host_port(d)
     if is_subdomain(d):
@@ -139,8 +274,26 @@ def parent_domain(d):

 def domain_parents(d, include_self=False):
     """
-    "test.www.evilcorp.co.uk" --> ["www.evilcorp.co.uk", "evilcorp.co.uk"]
+    Generate a list of parent domains for a given domain string.
+
+    This function takes an input string `d` and generates a list of parent domains in decreasing order of specificity.
+    If `include_self` is set to True, the input domain itself is also yielded, provided it is not already a registered domain (e.g. "evilcorp.co.uk").
+
+    Args:
+        d (str): The input string representing a domain or subdomain.
+        include_self (bool, optional): Whether to include the input domain itself. Defaults to False.
+
+    Yields:
+        str: Parent domains of the input string in decreasing order of specificity.
+
+    Examples:
+        >>> list(domain_parents("test.www.evilcorp.co.uk"))
+        ["www.evilcorp.co.uk", "evilcorp.co.uk"]
+
+    Notes:
+        - Port, if present in input, is preserved in the output.
     """
+
     parent = str(d)
     if include_self and not is_domain(parent):
         yield parent
@@ -155,6 +308,29 @@ def domain_parents(d, include_self=False):

 def parent_url(u):
+    """
+    Retrieve the parent URL of a given URL.
+
+    This function takes an input string `u` representing a URL and returns its parent URL.
+    If the input URL does not have a parent (i.e., it's already at the top level), it returns None.
+
+    Args:
+        u (str): The input string representing a URL.
+
+    Returns:
+        Union[str, None]: The parent URL of the input URL, or None if it has no parent.
+
+    Examples:
+        >>> parent_url("https://evilcorp.com/sub/path/")
+        "https://evilcorp.com/sub/"
+
+        >>> parent_url("https://evilcorp.com/")
+        None
+
+    Notes:
+        - Only the path component of the URL is modified.
+        - All other components like scheme, netloc, query, and fragment are preserved.
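+        - Parsing is handled by `urllib.parse.urlparse`, and the path is manipulated with `pathlib.Path`.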
+ """ parsed = urlparse(u) path = Path(parsed.path) if path.parent == path: @@ -165,30 +341,74 @@ def parent_url(u): def url_parents(u): """ - "http://www.evilcorp.co.uk/admin/tools/cmd.php" --> ["http://www.evilcorp.co.uk/admin/tools/","http://www.evilcorp.co.uk/admin/", "http://www.evilcorp.co.uk/"] - """ + Generate a list of parent URLs for a given URL string. + + This function takes an input string `u` representing a URL and generates a list of its parent URLs in decreasing order of specificity. + + Args: + u (str): The input string representing a URL. + + Returns: + List[str]: A list of parent URLs of the input URL in decreasing order of specificity. + + Examples: + >>> url_parents("http://www.evilcorp.co.uk/admin/tools/cmd.php") + ["http://www.evilcorp.co.uk/admin/tools/", "http://www.evilcorp.co.uk/admin/", "http://www.evilcorp.co.uk/"] - parent_list = set() + Notes: + - The list is generated by continuously calling `parent_url` until it returns None. + - All components of the URL except for the path are preserved. + """ + parent_list = [] while 1: parent = parent_url(u) if parent == None: - return list(parent_list) - else: - parent_list.add(parent) + return parent_list + elif parent not in parent_list: + parent_list.append(parent) u = parent def tldextract(data): """ - "www.evilcorp.co.uk" --> ExtractResult(subdomain='www', domain='evilcorp', suffix='co.uk') + Extracts the subdomain, domain, and suffix from a URL string. + + Args: + data (str): The URL string to be processed. + + Returns: + ExtractResult: A named tuple containing the subdomain, domain, and suffix. + + Examples: + >>> tldextract("www.evilcorp.co.uk") + ExtractResult(subdomain='www', domain='evilcorp', suffix='co.uk') + + Notes: + - Utilizes `smart_decode` to preprocess the data. + - Makes use of the `tldextract` library for extraction. """ return _tldextract.extract(smart_decode(data)) def split_domain(hostname): """ - "www.internal.evilcorp.co.uk" --> ("www.internal", "evilcorp.co.uk") + Splits the hostname into its subdomain and registered domain components. + + Args: + hostname (str): The full hostname to be split. + + Returns: + tuple: A tuple containing the subdomain and registered domain. + + Examples: + >>> split_domain("www.internal.evilcorp.co.uk") + ("www.internal", "evilcorp.co.uk") + + Notes: + - Utilizes the `tldextract` function to first break down the hostname. """ + if is_ip(hostname): + return ("", hostname) parsed = tldextract(hostname) subdomain = parsed.subdomain domain = parsed.registered_domain @@ -201,8 +421,20 @@ def split_domain(hostname): def domain_stem(domain): """ - An abbreviated representation of hostname that removes the TLD - www.evilcorp.com --> www.evilcorp + Returns an abbreviated representation of the hostname by removing the TLD (Top-Level Domain). + + Args: + domain (str): The full domain name to be abbreviated. + + Returns: + str: An abbreviated domain string without the TLD. + + Examples: + >>> domain_stem("www.evilcorp.com") + "www.evilcorp" + + Notes: + - Utilizes the `tldextract` function for domain parsing. """ parsed = tldextract(str(domain)) return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".") @@ -210,7 +442,21 @@ def domain_stem(domain): def ip_network_parents(i, include_self=False): """ - "192.168.1.1" --> [192.168.1.0/31, 192.168.1.0/30 ... 128.0.0.0/1, 0.0.0.0/0] + Generates all parent IP networks for a given IP address or network, optionally including the network itself. 
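+
+    Works with both IPv4 and IPv6; the input is first normalized via `ipaddress.ip_network(i, strict=False)`.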
+ + Args: + i (str or ipaddress.IPv4Network/ipaddress.IPv6Network): The IP address or network to find parents for. + include_self (bool, optional): Whether to include the network itself in the result. Default is False. + + Yields: + ipaddress.IPv4Network or ipaddress.IPv6Network: Parent IP networks in descending order of prefix length. + + Examples: + >>> list(ip_network_parents("192.168.1.1")) + [ipaddress.IPv4Network('192.168.1.0/31'), ipaddress.IPv4Network('192.168.1.0/30'), ... , ipaddress.IPv4Network('0.0.0.0/0')] + + Notes: + - Utilizes Python's built-in `ipaddress` module for network operations. """ net = ipaddress.ip_network(i, strict=False) for i in range(net.prefixlen - (0 if include_self else 1), -1, -1): @@ -218,11 +464,44 @@ def ip_network_parents(i, include_self=False): def is_port(p): + """ + Checks if the given string represents a valid port number. + + Args: + p (str or int): The port number to check. + + Returns: + bool: True if the port number is valid, False otherwise. + + Examples: + >>> is_port('80') + True + >>> is_port('70000') + False + """ + p = str(p) return p and p.isdigit() and 0 <= int(p) <= 65535 def is_dns_name(d): + """ + Determines if the given string is a valid DNS name. + + Args: + d (str): The string to be checked. + + Returns: + bool: True if the string is a valid DNS name, False otherwise. + + Examples: + >>> is_dns_name('www.example.com') + True + >>> is_dns_name('localhost') + True + >>> is_dns_name('192.168.1.1') + False + """ if is_ip(d): return False d = smart_decode(d) @@ -235,9 +514,24 @@ def is_dns_name(d): def is_ip(d, version=None): """ - "192.168.1.1" --> True - "bad::c0de" --> True - "evilcorp.com" --> False + Checks if the given string or object represents a valid IP address. + + Args: + d (str or ipaddress.IPvXAddress): The IP address to check. + version (int, optional): The IP version to validate (4 or 6). Default is None. + + Returns: + bool: True if the string or object is a valid IP address, False otherwise. + + Examples: + >>> is_ip('192.168.1.1') + True + >>> is_ip('bad::c0de', version=6) + True + >>> is_ip('bad::c0de', version=4) + False + >>> is_ip('evilcorp.com') + False """ if isinstance(d, (ipaddress.IPv4Address, ipaddress.IPv6Address)): if version is None or version == d.version: @@ -253,18 +547,47 @@ def is_ip(d, version=None): def is_ip_type(i): """ - IPv6Address('dead::beef') --> True - IPv4Network('192.168.1.0/24') --> True - "192.168.1.0/24" --> False + Checks if the given object is an instance of an IPv4 or IPv6 type from the ipaddress module. + + Args: + i (ipaddress._BaseV4 or ipaddress._BaseV6): The IP object to check. + + Returns: + bool: True if the object is an instance of ipaddress._BaseV4 or ipaddress._BaseV6, False otherwise. + + Examples: + >>> is_ip_type(ipaddress.IPv6Address('dead::beef')) + True + >>> is_ip_type(ipaddress.IPv4Network('192.168.1.0/24')) + True + >>> is_ip_type("192.168.1.0/24") + False """ - return hasattr(i, "is_multicast") + return isinstance(i, ipaddress._BaseV4) or isinstance(i, ipaddress._BaseV6) def make_ip_type(s): """ - "dead::beef" --> IPv6Address('dead::beef') - "192.168.1.0/24" --> IPv4Network('192.168.1.0/24') - "evilcorp.com" --> "evilcorp.com" + Convert a string to its corresponding IP address or network type. + + This function attempts to convert the input string `s` into either an IPv4 or IPv6 address object, + or an IPv4 or IPv6 network object. If none of these conversions are possible, the original string is returned. + + Args: + s (str): The input string to be converted. 
+
+    Returns:
+        Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network, str]: The converted object or original string.
+
+    Examples:
+        >>> make_ip_type("dead::beef")
+        IPv6Address('dead::beef')
+
+        >>> make_ip_type("192.168.1.0/24")
+        IPv4Network('192.168.1.0/24')
+
+        >>> make_ip_type("evilcorp.com")
+        'evilcorp.com'
     """
     # IP address
     with suppress(Exception):
@@ -276,12 +599,43 @@ def make_ip_type(s):

 def host_in_host(host1, host2):
+    """
+    Checks if host1 is included within host2, either as a subdomain, IP, or IP network.
+    Very important! Used throughout BBOT for scope calculations/decisions.
+
+    Args:
+        host1 (str or ipaddress.IPv4Address or ipaddress.IPv6Address or ipaddress.IPv4Network or ipaddress.IPv6Network):
+            The host to check for inclusion within host2.
+        host2 (str or ipaddress.IPv4Address or ipaddress.IPv6Address or ipaddress.IPv4Network or ipaddress.IPv6Network):
+            The host within which to check for the inclusion of host1.
+
+    Returns:
+        bool: True if host1 is included in host2, otherwise False.
+
+    Examples:
+        >>> host_in_host("www.evilcorp.com", "evilcorp.com")
+        True
+        >>> host_in_host("evilcorp.com", "www.evilcorp.com")
+        False
+        >>> host_in_host(ipaddress.IPv6Address('dead::beef'), ipaddress.IPv6Network('dead::/64'))
+        True
+        >>> host_in_host(ipaddress.IPv4Address('192.168.1.1'), ipaddress.IPv4Network('10.0.0.0/8'))
+        False
+
+    Notes:
+        - Works with hostnames, IPs, and IP networks.
+        - If checking an IP address/network, you MUST FIRST convert your IP into an ipaddress object (e.g. via `make_ip_type()`) before passing it to this function.
+    """
-    """
-    Is host1 included in host2?
-    "www.evilcorp.com" in "evilcorp.com"? --> True
-    "evilcorp.com" in "www.evilcorp.com"? --> False
-    IPv6Address('dead::beef') in IPv6Network('dead::/64')? --> True
-    IPv4Address('192.168.1.1') in IPv4Network('10.0.0.0/8')? --> False
-    """

     if not host1 or not host2:
@@ -309,7 +663,17 @@ def host_in_host(host1, host2):

 def sha1(data):
     """
-    sha1("asdf").hexdigest() --> "3da541559918a808c2402bba5012f6c60b27661c"
+    Computes the SHA-1 hash of the given data.
+
+    Args:
+        data (str or dict): The data to hash. If a dictionary, it is first converted to a JSON string with sorted keys.
+
+    Returns:
+        hashlib.Hash: A SHA-1 hash object of the input data.
+
+    Examples:
+        >>> sha1("asdf").hexdigest()
+        '3da541559918a808c2402bba5012f6c60b27661c'
     """
     if isinstance(data, dict):
         data = json.dumps(data, sort_keys=True)
@@ -318,9 +682,19 @@ def sha1(data):

 def smart_decode(data):
     """
-    Turn data into a string without complaining about it
-    b"asdf" --> "asdf"
-    "asdf" --> "asdf"
+    Decodes the input data to a UTF-8 string, silently ignoring errors.
+
+    Args:
+        data (str or bytes): The data to decode.
+
+    Returns:
+        str: The decoded string.
+
+    Examples:
+        >>> smart_decode(b"asdf")
+        "asdf"
+        >>> smart_decode("asdf")
+        "asdf"
     """
     if isinstance(data, bytes):
         return data.decode("utf-8", errors="ignore")
@@ -330,8 +704,19 @@ def smart_decode(data):

 def smart_encode(data):
     """
-    Turn data into bytes without complaining about it
-    "asdf" --> b"asdf"
+    Encodes the input data to bytes using UTF-8 encoding, silently ignoring errors.
+
+    Args:
+        data (str or bytes): The data to encode.
+
+    Returns:
+        bytes: The encoded bytes.
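+
+    Note:
+        Characters that cannot be encoded are silently dropped rather than raising an error.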
+
+    Examples:
+        >>> smart_encode("asdf")
+        b"asdf"
+        >>> smart_encode(b"asdf")
+        b"asdf"
     """
     if isinstance(data, bytes):
         return data
@@ -344,7 +729,24 @@ def smart_encode(data):

 def recursive_decode(data, max_depth=5):
     """
-    Encode double or triple-encoded strings
+    Recursively decodes doubly or triply-encoded strings to their original form.
+
+    Supports both URL-encoding and backslash-escapes (including unicode)
+
+    Args:
+        data (str): The data to decode.
+        max_depth (int, optional): Maximum recursion depth for decoding. Defaults to 5.
+
+    Returns:
+        str: The decoded string.
+
+    Examples:
+        >>> recursive_decode("Hello%20world%21")
+        "Hello world!"
+        >>> recursive_decode("Hello%20%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442")
+        "Hello Привет"
+        >>> recursive_decode("%5Cu0020%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442%5Cu0021")
+        " Привет!"
     """
     # Decode newline and tab escapes
     data = backslash_regex.sub(
@@ -372,9 +774,22 @@ def recursive_decode(data, max_depth=5):

 def rand_string(length=10, digits=True):
     """
-    rand_string() --> "c4hp4i9jzx"
-    rand_string(20) --> "ap4rsdtg5iw7ey7y3oa5"
-    rand_string(30) --> "xdmyxtglqf0z3q8t46n430kesq68yu"
+    Generates a random string of specified length.
+
+    Args:
+        length (int, optional): The length of the random string. Defaults to 10.
+        digits (bool, optional): Whether to include digits in the string. Defaults to True.
+
+    Returns:
+        str: A random string of the specified length.
+
+    Examples:
+        >>> rand_string()
+        'c4hp4i9jzx'
+        >>> rand_string(20)
+        'ap4rsdtg5iw7ey7y3oa5'
+        >>> rand_string(30, digits=False)
+        'xdmyxtglqfzqktngkesyulwbfrihva'
     """
     pool = rand_pool
     if digits:
@@ -383,6 +798,22 @@ def rand_string(length=10, digits=True):

 def extract_params_json(json_data):
+    """
+    Extracts keys from a JSON object and returns them as a set. Used by the `paramminer_headers` module.
+
+    Args:
+        json_data (str): JSON-formatted string containing key-value pairs.
+
+    Returns:
+        set: A set containing the keys present in the JSON object.
+
+    Note:
+        If the input cannot be parsed as JSON, the error is logged rather than raised.
+
+    Examples:
+        >>> extract_params_json('{"a": 1, "b": {"c": 2}}')
+        {'a', 'b', 'c'}
+    """
     try:
         data = json.loads(json_data)
     except json.JSONDecodeError:
@@ -408,6 +839,22 @@ def extract_params_json(json_data):

 def extract_params_xml(xml_data):
+    """
+    Extracts tags from an XML object and returns them as a set.
+
+    Args:
+        xml_data (str): XML-formatted string containing elements.
+
+    Returns:
+        set: A set containing the tags present in the XML object.
+
+    Note:
+        If the input cannot be parsed as XML, the error is logged rather than raised.
+
+    Examples:
+        >>> extract_params_xml('<root><child1></child1><child2></child2></root>')
+        {'child1', 'child2', 'root'}
+    """
     try:
         root = ET.fromstring(xml_data)
     except ET.ParseError:
@@ -426,6 +873,31 @@ def extract_params_xml(xml_data):

 def extract_params_html(html_data):
+    """
+    Extracts parameters from an HTML object, yielding them one at a time.
+
+    Args:
+        html_data (str): HTML-formatted string.
+
+    Yields:
+        str: A parameter name found in the HTML object.
+
+    Examples:
+        >>> html_data = '''
+        ... <html>
+        ... <body>
+        ...     <input type="text" name="user">
+        ...     <input type="hidden" name="param2">
+        ...     <a href="/test?param3=value">Click Me</a>
+        ... </body>
+        ... </html>
+        ... '''
+        >>> list(extract_params_html(html_data))
+        ['user', 'param2', 'param3']
+    """
     input_tag = bbot_regexes.input_tag_regex.findall(html_data)

     for i in input_tag:
@@ -455,10 +927,27 @@ def extract_params_html(html_data):

 def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=100, word_regexes=None):
+    """Intelligently extracts words from given data.
+
+    This function uses regular expressions and optionally wordninja to extract words
+    from a given text string. Thanks to wordninja it can handle concatenated words intelligently.
+
+    Args:
+        data (str): The data from which words are to be extracted.
+        acronyms (bool, optional): Whether to include acronyms. Defaults to True.
+        wordninja (bool, optional): Whether to use the wordninja library to split concatenated words. Defaults to True.
+        model (object, optional): A custom wordninja model for special types of data such as DNS names.
+        max_length (int, optional): Maximum length for a word to be included. Defaults to 100.
+        word_regexes (list, optional): A list of compiled regular expression objects for word extraction. Defaults to None.
+
+    Returns:
+        set: A set of extracted words.
+
+    Examples:
+        >>> extract_words('blacklanternsecurity')
+        {'black', 'lantern', 'security', 'bls', 'blacklanternsecurity'}
     """
-    Intelligently extract words from given data
-    Returns set() of extracted words
-    """
+
     if word_regexes is None:
         word_regexes = bbot_regexes.word_regexes
     words = set()
@@ -478,6 +967,8 @@ def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=10
                 subwords = model.split(word)
                 for subword in subwords:
                     words.add(subword)
+    # this section generates compound words
+    # it is interesting, but it's currently disabled because the quality of its output doesn't quite justify its quantity
    # blacklanternsecurity --> ['black', 'lantern', 'security', 'blacklantern', 'lanternsecurity']
    # for s, e in combinations(range(len(subwords) + 1), 2):
    #     if e - s <= max_slice_length:
@@ -492,11 +983,25 @@ def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=10

 def closest_match(s, choices, n=1, cutoff=0.0):
-    """
-    Given a string and a list of choices, returns the best match
+    """Finds the closest matching strings from a list of choices based on a given string.
+
+    This function uses the difflib library to find the closest matches to a given string `s` from a list of `choices`.
+    It can return either the single best match or a list of the top `n` best matches.

-    closest_match("asdf", ["asd", "fds"]) --> "asd"
-    closest_match("asdf", ["asd", "fds", "asdff"], n=3) --> ["asd", "asdff", "fds"]
+    Args:
+        s (str): The string for which to find the closest match.
+        choices (list): A list of strings to compare against.
+        n (int, optional): The number of best matches to return. Defaults to 1.
+        cutoff (float, optional): A float value that defines the similarity threshold. Strings with similarity below this value are not considered. Defaults to 0.0.
+
+    Returns:
+        str or list: Either the closest matching string or a list of the `n` closest matching strings.
+
+    Examples:
+        >>> closest_match("asdf", ["asd", "fds"])
+        'asd'
+        >>> closest_match("asdf", ["asd", "fds", "asdff"], n=3)
+        ['asdff', 'asd', 'fds']
     """
     matches = difflib.get_close_matches(s, choices, n=n, cutoff=cutoff)
     if not choices or not matches:
@@ -507,8 +1012,21 @@ def closest_match(s, choices, n=1, cutoff=0.0):

 def match_and_exit(s, choices, msg=None, loglevel="HUGEWARNING", exitcode=2):
-    """
-    Return the closest match, warn, and exit
+    """Finds the closest match from a list of choices for a given string, logs a warning, and exits the program.
+
+    This function is particularly useful for CLI applications where you want to validate flags or modules.
+
+    Args:
+        s (str): The string for which to find the closest match.
+        choices (list): A list of strings to compare against.
+ msg (str, optional): Additional message to prepend in the warning message. Defaults to None. + loglevel (str, optional): The log level to use for the warning message. Defaults to "HUGEWARNING". + exitcode (int, optional): The exit code to use when exiting the program. Defaults to 2. + + Examples: + >>> match_and_exit("some_module", ["some_mod", "some_other_mod"], msg="module") + # Output: Could not find module "some_module". Did you mean "some_mod"? + # Exits with code 2 """ if msg is None: msg = "" @@ -541,9 +1059,22 @@ def kill_children(parent_pid=None, sig=signal.SIGTERM): def str_or_file(s): - """ - "file.txt" --> ["file_line1", "file_line2", "file_line3"] - "not_a_file" --> ["not_a_file"] + """Reads a string or file and yields its content line-by-line. + + This function tries to open the given string `s` as a file and yields its lines. + If it fails to open `s` as a file, it treats `s` as a regular string and yields it as is. + + Args: + s (str): The string or file path to read. + + Yields: + str: Either lines from the file or the original string. + + Examples: + >>> list(str_or_file("file.txt")) + ['file_line1', 'file_line2', 'file_line3'] + >>> list(str_or_file("not_a_file")) + ['not_a_file'] """ try: with open(s, errors="ignore") as f: @@ -554,13 +1085,26 @@ def str_or_file(s): def chain_lists(l, try_files=False, msg=None, remove_blank=True): - """ - Chain together list, splitting entries on comma - - Optionally try to open entries as files and add their contents to the list - - Used for parsing a list of arguments that may include space and/or comma-separated values - - ["a", "b,c,d"] --> ["a", "b", "c", "d"] - - try_files=True: - - ["a,file.txt", "c,d"] --> ["a", "f_line1", "f_line2", "f_line3", "c", "d"] + """Chains together list elements, allowing for entries separated by commas. + + This function takes a list `l` and flattens it by splitting its entries on commas. + It also allows you to optionally open entries as files and add their contents to the list. + + Args: + l (list): The list of strings to chain together. + try_files (bool, optional): Whether to try to open entries as files. Defaults to False. + msg (str, optional): An optional message to log when reading from a file. Defaults to None. + remove_blank (bool, optional): Whether to remove blank entries from the list. Defaults to True. + + Returns: + list: The list of chained elements. + + Examples: + >>> chain_lists(["a", "b,c,d"]) + ['a', 'b', 'c', 'd'] + + >>> chain_lists(["a,file.txt", "c,d"], try_files=True) + ['a', 'f_line1', 'f_line2', 'f_line3', 'c', 'd'] """ final_list = dict() for entry in l: @@ -583,8 +1127,21 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True): def list_files(directory, filter=lambda x: True): - """ - "/tmp/test" --> ["file1.txt", "file2.txt"] + """Lists files in a given directory that meet a specified filter condition. + + Args: + directory (str): The directory where to list files. + filter (callable, optional): A function to filter the files. Defaults to a lambda function that returns True for all files. + + Yields: + Path: A Path object for each file that meets the filter condition. 
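+
+    Note:
+        If `directory` does not exist or is not a directory, nothing is yielded.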
+
+    Examples:
+        >>> list(list_files("/tmp/test"))
+        [Path('/tmp/test/file1.py'), Path('/tmp/test/file2.txt')]
+
+        >>> list(list_files("/tmp/test", filter=lambda f: f.suffix == ".py"))
+        [Path('/tmp/test/file1.py')]
     """
     directory = Path(directory).resolve()
     if directory.is_dir():
@@ -594,20 +1151,48 @@ def list_files(directory, filter=lambda x: True):

 def rm_at_exit(path):
+    """Registers a file to be automatically deleted when the program exits.
+
+    Args:
+        path (str or Path): The path to the file to be deleted upon program exit.
+
+    Examples:
+        >>> rm_at_exit("/tmp/test/file1.txt")
     """
-    Removes a file automatically when BBOT exits
-    """
-    atexit.register(_rm_at_exit, path)
+    atexit.register(delete_file, path)
+
+
+def delete_file(path):
+    """Deletes a file at the given path.
+
+    Args:
+        path (str or Path): The path to the file to be deleted.
+
+    Note:
+        This function suppresses all exceptions to ensure that the program continues running even if the file could not be deleted.
+
+    Examples:
+        >>> delete_file("/tmp/test/file1.txt")
+    """
-def _rm_at_exit(path):
     with suppress(Exception):
         Path(path).unlink(missing_ok=True)


 def read_file(filename):
-    """
-    "/tmp/file.txt" --> ["file_line1", "file_line2", "file_line3"]
+    """Reads a file line by line and yields each line without line breaks.
+
+    Args:
+        filename (str or Path): The path to the file to read.
+
+    Yields:
+        str: A line from the file without the trailing line break.
+
+    Examples:
+        >>> for line in read_file("/tmp/file.txt"):
+        ...     print(line)
+        file_line1
+        file_line2
+        file_line3
     """
     with open(filename, errors="ignore") as f:
         for line in f:
@@ -615,10 +1200,24 @@ def read_file(filename):

 def gen_numbers(n, padding=2):
-    """
-    n=5 --> ['0', '00', '01', '02', '03', '04', '1', '2', '3', '4']
-    n=3, padding=3 --> ['0', '00', '000', '001', '002', '01', '02', '1', '2']
-    n=5, padding=1 --> ['0', '1', '2', '3', '4']
+    """Generates numbers with variable padding and returns them as a set of strings.
+
+    Args:
+        n (int): The upper limit of numbers to generate, exclusive.
+        padding (int, optional): The maximum number of digits to pad the numbers with. Defaults to 2.
+
+    Returns:
+        set: A set of string representations of numbers with varying degrees of padding.
+
+    Examples:
+        >>> gen_numbers(5)
+        {'0', '00', '01', '02', '03', '04', '1', '2', '3', '4'}
+
+        >>> gen_numbers(3, padding=3)
+        {'0', '00', '000', '001', '002', '01', '02', '1', '2'}
+
+        >>> gen_numbers(5, padding=1)
+        {'0', '1', '2', '3', '4'}
     """
     results = set()
     for i in range(n):
@@ -628,11 +1227,27 @@ def gen_numbers(n, padding=2):

 def make_netloc(host, port):
-    """
-    ("192.168.1.1", None) --> "192.168.1.1"
-    ("192.168.1.1", 443) --> "192.168.1.1:443"
-    ("evilcorp.com", 80) --> "evilcorp.com:80"
-    ("dead::beef", 443) --> "[dead::beef]:443"
+    """Constructs a network location string from a given host and port.
+
+    Args:
+        host (str): The hostname or IP address.
+        port (int, optional): The port number. If None, the port is omitted.
+
+    Returns:
+        str: A network location string in the form 'host' or 'host:port'.
+
+    Examples:
+        >>> make_netloc("192.168.1.1", None)
+        "192.168.1.1"
+
+        >>> make_netloc("192.168.1.1", 443)
+        "192.168.1.1:443"
+
+        >>> make_netloc("evilcorp.com", 80)
+        "evilcorp.com:80"
+
+        >>> make_netloc("dead::beef", 443)
+        "[dead::beef]:443"
     """
     if port is None:
         return host
@@ -642,8 +1257,17 @@ def make_netloc(host, port):

 def which(*executables):
-    """
-    "python" --> "/usr/bin/python"
+    """Finds the full path of the first available executable from a list of executables.
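+
+    This is a thin wrapper around `shutil.which()`, checking each candidate in order.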
+ + Args: + *executables (str): One or more executable names to search for. + + Returns: + str: The full path of the first available executable, or None if none are found. + + Examples: + >>> which("python", "python3") + "/usr/bin/python" """ for e in executables: location = shutil.which(e) @@ -652,9 +1276,19 @@ def which(*executables): def search_dict_by_key(key, d): - """ - Search a dictionary by key name - Generator, yields all values with matching keys + """Search a nested dictionary or list of dictionaries by a key and yield all matching values. + + Args: + key (str): The key to search for. + d (Union[dict, list]): The dictionary or list of dictionaries to search. + + Yields: + Any: Yields all values that match the provided key. + + Examples: + >>> d = {'a': 1, 'b': {'c': 2, 'a': 3}, 'd': [{'a': 4}, {'e': 5}]} + >>> list(search_dict_by_key('a', d)) + [1, 3, 4] """ if isinstance(d, dict): if key in d: @@ -667,10 +1301,18 @@ def search_dict_by_key(key, d): def search_format_dict(d, **kwargs): - """ - Recursively .format() string values in dictionary values - search_format_dict({"test": "#{name} is awesome"}, name="keanu") - --> {"test": "keanu is awesome"} + """Recursively format string values in a dictionary or list using the provided keyword arguments. + + Args: + d (Union[dict, list, str]): The dictionary, list, or string to format. + **kwargs: Arbitrary keyword arguments used for string formatting. + + Returns: + Union[dict, list, str]: The formatted dictionary, list, or string. + + Examples: + >>> search_format_dict({"test": "#{name} is awesome"}, name="keanu") + {"test": "keanu is awesome"} """ if isinstance(d, dict): return {k: search_format_dict(v, **kwargs) for k, v in d.items()} @@ -684,21 +1326,30 @@ def search_format_dict(d, **kwargs): def search_dict_values(d, *regexes): + """Recursively search a dictionary's values based on provided regex patterns. + + Args: + d (Union[dict, list, str]): The dictionary, list, or string to search. + *regexes: Arbitrary number of compiled regex patterns. + + Returns: + Generator: Yields matching values based on the provided regex patterns. + + Examples: + >>> dict_to_search = { + ... "key1": { + ... "key2": [ + ... { + ... "key3": "A URL: https://www.evilcorp.com" + ... } + ... ] + ... } + ... } + >>> url_regexes = re.compile(r'https?://[^\s<>"]+|www\.[^\s<>"]+') + >>> list(search_dict_values(dict_to_search, url_regexes)) + ["https://www.evilcorp.com"] """ - Recursively search a dictionary's values based on regexes - - dict_to_search = { - "key1": { - "key2": [ - { - "key3": "A URL: https://www.evilcorp.com" - } - ] - } - }) - search_dict_values(dict_to_search, url_regexes) --> "https://www.evilcorp.com" - """ results = set() if isinstance(d, str): for r in regexes: @@ -716,11 +1367,25 @@ def search_dict_values(d, *regexes): yield from search_dict_values(v, *regexes) -def filter_dict(d, *key_names, fuzzy=False, invert=False, exclude_keys=None, prev_key=None): +def filter_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): """ - Recursively filter a dictionary based on key names - filter_dict({"key1": "test", "key2": "asdf"}, "key2") - --> {"key2": "asdf"} + Recursively filter a dictionary based on key names. + + Args: + d (dict): The input dictionary. + *key_names: Names of keys to filter for. + fuzzy (bool): Whether to perform fuzzy matching on keys. + exclude_keys (list, None): List of keys to be excluded from the final dict. + _prev_key (str, None): For internal recursive use; the previous key in the hierarchy. 
+ + Returns: + dict: A dictionary containing only the keys specified in key_names. + + Examples: + >>> filter_dict({"key1": "test", "key2": "asdf"}, "key2") + {"key2": "asdf"} + >>> filter_dict({"key1": "test", "key2": {"key3": "asdf"}}, "key1", "key3", exclude_keys="key2") + {'key1': 'test'} """ if exclude_keys is None: exclude_keys = [] @@ -730,16 +1395,31 @@ def filter_dict(d, *key_names, fuzzy=False, invert=False, exclude_keys=None, pre if isinstance(d, dict): for key in d: if key in key_names or (fuzzy and any(k in key for k in key_names)): - if not prev_key in exclude_keys: + if not any(k in exclude_keys for k in [key, _prev_key]): ret[key] = copy.deepcopy(d[key]) elif isinstance(d[key], list) or isinstance(d[key], dict): - child = filter_dict(d[key], *key_names, fuzzy=fuzzy, prev_key=key, exclude_keys=exclude_keys) + child = filter_dict(d[key], *key_names, fuzzy=fuzzy, _prev_key=key, exclude_keys=exclude_keys) if child: ret[key] = child return ret -def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, prev_key=None): +def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): + """ + Recursively clean unwanted keys from a dictionary. + Useful for removing secrets from a config. + + Args: + d (dict): The input dictionary. + *key_names: Names of keys to remove. + fuzzy (bool): Whether to perform fuzzy matching on keys. + exclude_keys (list, None): List of keys to be excluded from removal. + _prev_key (str, None): For internal recursive use; the previous key in the hierarchy. + + Returns: + dict: A dictionary cleaned of the keys specified in key_names. + + """ if exclude_keys is None: exclude_keys = [] if isinstance(exclude_keys, str): @@ -748,26 +1428,47 @@ def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, prev_key=None): if isinstance(d, dict): for key, val in list(d.items()): if key in key_names or (fuzzy and any(k in key for k in key_names)): - if prev_key not in exclude_keys: + if _prev_key not in exclude_keys: d.pop(key) else: - d[key] = clean_dict(val, *key_names, fuzzy=fuzzy, prev_key=key, exclude_keys=exclude_keys) + d[key] = clean_dict(val, *key_names, fuzzy=fuzzy, _prev_key=key, exclude_keys=exclude_keys) return d def grouper(iterable, n): """ - >>> list(grouper('ABCDEFG', 3)) - [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] + Grouper groups an iterable into chunks of a given size. + + Args: + iterable (iterable): The iterable to be chunked. + n (int): The size of each chunk. + + Returns: + iterator: An iterator that produces lists of elements from the original iterable, each of length `n` or less. + + Examples: + >>> list(grouper('ABCDEFG', 3)) + [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] """ + iterable = iter(iterable) return iter(lambda: list(islice(iterable, n)), []) def split_list(alist, wanted_parts=2): """ - >>> split_list([1,2,3,4,5]) - [[1, 2], [3, 4, 5]] + Splits a list into a specified number of approximately equal parts. + + Args: + alist (list): The list to be split. + wanted_parts (int): The number of parts to split the list into. + + Returns: + list: A list of lists, each containing a portion of the original list. 
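+
+    Note:
+        Split points are computed with integer arithmetic, so when the list does not divide evenly,
+        earlier parts may be one element shorter than later ones.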
+ + Examples: + >>> split_list([1, 2, 3, 4, 5]) + [[1, 2], [3, 4, 5]] """ length = len(alist) return [alist[i * length // wanted_parts : (i + 1) * length // wanted_parts] for i in range(wanted_parts)] @@ -775,7 +1476,24 @@ def split_list(alist, wanted_parts=2): def mkdir(path, check_writable=True, raise_error=True): """ - Create a directory and ensure that it's writable + Creates a directory and optionally checks if it's writable. + + Args: + path (str or Path): The directory to create. + check_writable (bool, optional): Whether to check if the directory is writable. Default is True. + raise_error (bool, optional): Whether to raise an error if the directory creation fails. Default is True. + + Returns: + bool: True if the directory is successfully created (and writable, if check_writable=True); otherwise False. + + Raises: + DirectoryCreationError: Raised if the directory cannot be created and `raise_error=True`. + + Examples: + >>> mkdir("/tmp/new_dir") + True + >>> mkdir("/restricted_dir", check_writable=False, raise_error=False) + False """ path = Path(path).resolve() touchfile = path / f".{rand_string()}" @@ -794,8 +1512,20 @@ def mkdir(path, check_writable=True, raise_error=True): def make_date(d=None, microseconds=False): """ - make_date() --> "20220707_1325_50" - make_date(microseconds=True) --> "20220707_1330_35167617" + Generates a string representation of the current date and time, with optional microsecond precision. + + Args: + d (datetime, optional): A datetime object to convert. Defaults to the current date and time. + microseconds (bool, optional): Whether to include microseconds. Defaults to False. + + Returns: + str: A string representation of the date and time, formatted as YYYYMMDD_HHMM_SS or YYYYMMDD_HHMM_SSFFFFFF if microseconds are included. + + Examples: + >>> make_date() + "20220707_1325_50" + >>> make_date(microseconds=True) + "20220707_1330_35167617" """ f = "%Y%m%d_%H%M_%S" if microseconds: @@ -812,9 +1542,21 @@ def error_and_exit(msg): def get_file_extension(s): """ - https://evilcorp.com/api/test.php --> "php" - /etc/test.conf --> "conf" - /etc/passwd --> "" + Extracts the file extension from a given string representing a URL or file path. + + Args: + s (str): The string from which to extract the file extension. + + Returns: + str: The file extension, or an empty string if no extension is found. + + Examples: + >>> get_file_extension("https://evilcorp.com/api/test.php") + "php" + >>> get_file_extension("/etc/test.conf") + "conf" + >>> get_file_extension("/etc/passwd") + "" """ s = str(s).lower().strip() rightmost_section = s.rsplit("/", 1)[-1] @@ -826,13 +1568,23 @@ def get_file_extension(s): def backup_file(filename, max_backups=10): """ - rename a file as a backup + Renames a file by appending an iteration number as a backup. Recursively renames + files up to a specified maximum number of backups. - recursively renames files up to max_backups + Args: + filename (str or pathlib.Path): The file to backup. + max_backups (int, optional): The maximum number of backups to keep. Defaults to 10. - backup_file("/tmp/test.txt") --> "/tmp/test.0.txt" - backup_file("/tmp/test.0.txt") --> "/tmp/test.1.txt" - backup_file("/tmp/test.1.txt") --> "/tmp/test.2.txt" + Returns: + pathlib.Path: The new backup filepath. 
+ + Examples: + >>> backup_file("/tmp/test.txt") + PosixPath("/tmp/test.0.txt") + >>> backup_file("/tmp/test.0.txt") + PosixPath("/tmp/test.1.txt") + >>> backup_file("/tmp/test.1.txt") + PosixPath("/tmp/test.2.txt") """ filename = Path(filename).resolve() suffixes = [s.strip(".") for s in filename.suffixes] @@ -850,11 +1602,21 @@ def backup_file(filename, max_backups=10): def latest_mtime(d): - """ - Given a directory, return the latest modified time of any contained file or directory (recursive) - Useful for sorting directories by modified time for the purpose of cleanup, etc. + """Get the latest modified time of any file or sub-directory in a given directory. + + This function takes a directory path as an argument and returns the latest modified time + of any contained file or directory, recursively. It's useful for sorting directories by + modified time for cleanup or other purposes. - latest_mtime("~/.bbot/scans/mushy_susan") --> 1659016928.2848816 + Args: + d (str or Path): The directory path to search for the latest modified time. + + Returns: + float: The latest modified time in Unix timestamp format. + + Examples: + >>> latest_mtime("~/.bbot/scans/mushy_susan") + 1659016928.2848816 """ d = Path(d).resolve() mtimes = [d.lstat().st_mtime] @@ -868,6 +1630,21 @@ def latest_mtime(d): def filesize(f): + """Get the file size of a given file. + + This function takes a file path as an argument and returns its size in bytes. If the path + does not point to a file, the function returns 0. + + Args: + f (str or Path): The file path for which to get the size. + + Returns: + int: The size of the file in bytes, or 0 if the path does not point to a file. + + Examples: + >>> filesize("/path/to/file.txt") + 1024 + """ f = Path(f) if f.is_file(): return f.stat().st_size @@ -875,11 +1652,23 @@ def filesize(f): def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, raise_error=False): - """ - Given a directory "d", measure the number of subdirectories and files (matching "filter") - And remove (rm -r) the oldest ones past the threshold of "keep" + """Clean up old files and directories within a given directory based on various filtering and sorting options. + + This function removes the oldest files and directories in the provided directory 'd' that exceed a specified + threshold ('keep'). The items to be deleted can be filtered using a lambda function 'filter', and they are + sorted by a key function, defaulting to latest modification time. - clean_old_dirs("~/.bbot/scans", filter=lambda x: x.is_dir() and scan_name_regex.match(x.name)) + Args: + d (str or Path): The directory path to clean up. + keep (int): The number of items to keep. Ones beyond this count will be removed. + filter (Callable): A lambda function for filtering which files or directories to consider. + Defaults to a lambda function that returns True for all. + key (Callable): A function to sort the files and directories. Defaults to latest modification time. + reverse (bool): Whether to reverse the order of sorted items before removing. Defaults to True. + raise_error (bool): Whether to raise an error if directory deletion fails. Defaults to False. + + Examples: + >>> clean_old("~/.bbot/scans", filter=lambda x: x.is_dir() and scan_name_regex.match(x.name)) """ d = Path(d) if not d.is_dir(): @@ -898,13 +1687,38 @@ def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, def extract_emails(s): + """Extract email addresses from a given string. 
+ + This function takes in a string and yields all email addresses found in it. + The emails are converted to lower case before yielding. It utilizes + regular expressions for email pattern matching. + + Args: + s (str): The input string from which to extract email addresses. + + Yields: + str: Yields email addresses found in the input string, in lower case. + + Examples: + >>> list(extract_emails("Contact us at info@evilcorp.com and support@evilcorp.com")) + ['info@evilcorp.com', 'support@evilcorp.com'] + """ for email in bbot_regexes.email_regex.findall(smart_decode(s)): yield email.lower() def can_sudo_without_password(): - """ - Return True if the current user can sudo without a password + """Check if the current user has passwordless sudo access. + + This function checks whether the current user can use sudo without entering a password. + It runs a command with sudo and checks the return code to determine this. + + Returns: + bool: True if the current user can use sudo without a password, False otherwise. + + Examples: + >>> can_sudo_without_password() + True """ if os.geteuid() != 0: env = dict(os.environ) @@ -918,8 +1732,20 @@ def can_sudo_without_password(): def verify_sudo_password(sudo_pass): - """ - Return True if the sudo password is correct + """Verify if the given sudo password is correct. + + This function checks whether the sudo password provided is valid for the current user. + It runs a command with sudo, feeding in the password via stdin, and checks the return code. + + Args: + sudo_pass (str): The sudo password to verify. + + Returns: + bool: True if the sudo password is correct, False otherwise. + + Examples: + >>> verify_sudo_password("mysecretpassword") + True """ try: sp.run( @@ -935,16 +1761,30 @@ def verify_sudo_password(sudo_pass): def make_table(*args, **kwargs): - """ - make_table([["row1", "row1"], ["row2", "row2"]], ["header1", "header2"]) --> - - +-----------+-----------+ - | header1 | header2 | - +===========+===========+ - | row1 | row1 | - +-----------+-----------+ - | row2 | row2 | - +-----------+-----------+ + """Generate a formatted table from the given rows and headers. + + This function uses the `tabulate` package to generate a table with formatting options. + It can accept various input formats and table styles, which can be customized using optional arguments. + + Args: + *args: Positional arguments to be passed to `tabulate.tabulate`. + **kwargs: Keyword arguments to customize table formatting. + - tablefmt (str, optional): Table format. Default is 'grid'. + - disable_numparse (bool, optional): Disable automatic number parsing. Default is True. + - maxcolwidths (int, optional): Maximum column width. Default is 40. + + Returns: + str: A string representing the formatted table. + + Examples: + >>> print(make_table([["row1", "row1"], ["row2", "row2"]], ["header1", "header2"])) + +-----------+-----------+ + | header1 | header2 | + +===========+===========+ + | row1 | row1 | + +-----------+-----------+ + | row2 | row2 | + +-----------+-----------+ """ # fix IndexError: list index out of range if args and not args[0]: @@ -965,8 +1805,25 @@ def make_table(*args, **kwargs): def human_timedelta(d): - """ - Format a TimeDelta object in human-readable form + """Convert a TimeDelta object into a human-readable string. + + This function takes a datetime.timedelta object and converts it into a string format that + is easier to read and understand. + + Args: + d (datetime.timedelta): The TimeDelta object to convert. 
+
+    Returns:
+        str: A string representation of the TimeDelta object in human-readable form.
+
+    Examples:
+        >>> from datetime import datetime
+        >>>
+        >>> start_time = datetime.now()
+        >>> # ... 2.5 hours of work happens here ...
+        >>> end_time = datetime.now()
+        >>> elapsed_time = end_time - start_time
+        >>> human_timedelta(elapsed_time)
+        '2 hours, 30 minutes, 15 seconds'
     """
     hours, remainder = divmod(d.seconds, 3600)
     minutes, seconds = divmod(remainder, 60)
@@ -984,9 +1841,21 @@ def human_timedelta(d):

 def bytes_to_human(_bytes):
-    """
-    Converts bytes to human-readable filesize
-    bytes_to_human(1234129384) --> "1.15GB"
+    """Convert a bytes size to a human-readable string.
+
+    This function converts a numeric bytes value into a human-readable string format, complete
+    with the appropriate unit symbol (B, KB, MB, GB, etc.).
+
+    Args:
+        _bytes (int): The number of bytes to convert.
+
+    Returns:
+        str: A string representing the number of bytes in a more readable format, rounded to two
+        decimal places.
+
+    Examples:
+        >>> bytes_to_human(1234129384)
+        '1.15GB'
     """
     sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]
     units = {}
@@ -1007,9 +1876,23 @@ def bytes_to_human(_bytes):

 def human_to_bytes(filesize):
-    """
-    Converts human-readable filesize to bytes
-    human_to_bytes("23.23gb") --> 24943022571
+    """Convert a human-readable file size string to its bytes equivalent.
+
+    This function takes a human-readable file size string, such as "2.5GB", and converts it
+    to its equivalent number of bytes.
+
+    Args:
+        filesize (str or int): The human-readable file size string or integer bytes value to convert.
+
+    Returns:
+        int: The number of bytes equivalent to the input human-readable file size.
+
+    Raises:
+        ValueError: If the input string cannot be converted to bytes.
+
+    Examples:
+        >>> human_to_bytes("23.23gb")
+        24943022571
     """
     if isinstance(filesize, int):
         return filesize
@@ -1033,8 +1916,17 @@ def human_to_bytes(filesize):

 def cpu_architecture():
-    """
-    Returns the CPU architecture, e.g. "amd64, "armv7", "arm64", etc.
+    """Return the CPU architecture of the current system.
+
+    This function fetches and returns the architecture type of the CPU where the code is being executed.
+    It maps common identifiers like "x86_64" to more general types like "amd64".
+
+    Returns:
+        str: A string representing the CPU architecture, such as "amd64", "armv7", or "arm64".
+
+    Examples:
+        >>> cpu_architecture()
+        'amd64'
     """
     uname = platform.uname()
     arch = uname.machine.lower()
@@ -1046,15 +1938,33 @@ def cpu_architecture():

 def os_platform():
-    """
-    Returns the OS platform, e.g. "linux", "darwin", "windows", etc.
+    """Return the OS platform of the current system.
+
+    This function fetches and returns the OS type where the code is being executed.
+    It converts the platform identifier to lowercase.
+
+    Returns:
+        str: A string representing the OS platform, such as "linux", "darwin", or "windows".
+
+    Examples:
+        >>> os_platform()
+        'linux'
     """
     return platform.system().lower()


 def os_platform_friendly():
-    """
-    Returns the OS platform in a more human-friendly format, because apple is indecisive
+    """Return a human-friendly OS platform string, suitable for golang release binaries.
+
+    This function fetches the OS platform and modifies it to a more human-readable format if necessary.
+    Specifically, it changes "darwin" to "macOS".
+
+    Returns:
+        str: A string representing the human-friendly OS platform, such as "macOS", "linux", or "windows".
+ + Examples: + >>> os_platform_friendly() + 'macOS' """ p = os_platform() if p == "darwin": @@ -1066,44 +1976,91 @@ def os_platform_friendly(): def tagify(s, maxlen=None): - """ - Sanitize a string into a tag-friendly format + """Sanitize a string into a tag-friendly format. + + Converts a given string to lowercase and replaces all characters not matching + [a-z0-9] with hyphens. Optionally truncates the result to 'maxlen' characters. - tagify("HTTP Web Title") --> "http-web-title" + Args: + s (str): The input string to sanitize. + maxlen (int, optional): The maximum length for the tag. Defaults to None. + + Returns: + str: A sanitized, tag-friendly string. + + Examples: + >>> tagify("HTTP Web Title") + 'http-web-title' + >>> tagify("HTTP Web Title", maxlen=8) + 'http-web' """ ret = str(s).lower() return tag_filter_regex.sub("-", ret)[:maxlen].strip("-") def memory_status(): - """ - Return statistics on system memory consumption + """Return statistics on system memory consumption. - Example: to get available memory (not including swap): - memory_status().available + The function returns a `psutil` named tuple that contains statistics on + system virtual memory usage, such as total memory, used memory, available + memory, and more. - Example: to get percent memory used: - memory_status().percent + Returns: + psutil._pslinux.svmem: A named tuple representing various statistics + about system virtual memory usage. + + Examples: + >>> mem = memory_status() + >>> mem.available + 13195399168 + + >>> mem = memory_status() + >>> mem.percent + 79.0 """ return psutil.virtual_memory() def swap_status(): - """ - Return statistics on swap memory consumption + """Return statistics on swap memory consumption. - Example: to get total swap: - swap_status().total + The function returns a `psutil` named tuple that contains statistics on + system swap memory usage, such as total swap, used swap, free swap, and more. - Example: to get in-use swap: - swap_status().used + Returns: + psutil._common.sswap: A named tuple representing various statistics + about system swap memory usage. + + Examples: + >>> swap = swap_status() + >>> swap.total + 4294967296 + + >>> swap = swap_status() + >>> swap.used + 2097152 """ return psutil.swap_memory() def get_size(obj, max_depth=5, seen=None): """ - Rough recursive measurement of a python object's memory footprint + Roughly estimate the memory footprint of a Python object using recursion. + + Parameters: + obj (any): The object whose size is to be determined. + max_depth (int, optional): Maximum depth to which nested objects will be inspected. Defaults to 5. + seen (set, optional): Objects that have already been accounted for, to avoid loops. + + Returns: + int: Approximate memory footprint of the object in bytes. + + Examples: + >>> get_size(my_list) + 4200 + + >>> get_size(my_dict, max_depth=3) + 8400 """ # If seen is not provided, initialize an empty set if seen is None: @@ -1145,6 +2102,22 @@ def get_size(obj, max_depth=5, seen=None): def is_file(f): + """ + Check if a path points to a file. + + Parameters: + f (str): Path to the file. + + Returns: + bool: True if the path is a file, False otherwise. 
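+
+    Note:
+        Any exception raised while checking the path (e.g. a permissions error) is suppressed, and False is returned.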
+ + Examples: + >>> is_file("/etc/passwd") + True + + >>> is_file("/nonexistent") + False + """ with suppress(Exception): return Path(f).is_file() return False @@ -1155,12 +2128,17 @@ def is_file(f): def cloudcheck(ip): """ - Check whether an IP address belongs to a cloud provider + Check whether an IP address belongs to a cloud provider and returns the provider name, type, and subnet. - provider, provider_type, subnet = cloudcheck("168.62.20.37") - print(provider) # "Azure" - print(provider_type) # "cloud" - print(subnet) # IPv4Network('168.62.0.0/19') + Args: + ip (str): The IP address to check. + + Returns: + tuple: A tuple containing provider name (str), provider type (str), and subnet (IPv4Network). + + Examples: + >>> cloudcheck("168.62.20.37") + ('Azure', 'cloud', IPv4Network('168.62.0.0/19')) """ provider, provider_type, subnet = _cloudcheck.check(ip) if provider: @@ -1170,10 +2148,48 @@ def cloudcheck(ip): def is_async_function(f): + """ + Check if a given function is an asynchronous function. + + Args: + f (function): The function to check. + + Returns: + bool: True if the function is asynchronous, False otherwise. + + Examples: + >>> async def foo(): + ... pass + >>> is_async_function(foo) + True + """ return inspect.iscoroutinefunction(f) async def execute_sync_or_async(callback, *args, **kwargs): + """ + Execute a function or coroutine, handling either synchronous or asynchronous invocation. + + Args: + callback (Union[Callable, Coroutine]): The function or coroutine to execute. + *args: Variable-length argument list to pass to the callback. + **kwargs: Arbitrary keyword arguments to pass to the callback. + + Returns: + Any: The return value from the executed function or coroutine. + + Examples: + >>> async def foo_async(x): + ... return x + 1 + >>> def foo_sync(x): + ... return x + 1 + + >>> asyncio.run(execute_sync_or_async(foo_async, 1)) + 2 + + >>> asyncio.run(execute_sync_or_async(foo_sync, 1)) + 2 + """ if is_async_function(callback): return await callback(*args, **kwargs) else: @@ -1182,7 +2198,22 @@ async def execute_sync_or_async(callback, *args, **kwargs): def get_exception_chain(e): """ - Get the full chain of exceptions that led to the current one + Retrieves the full chain of exceptions leading to the given exception. + + Args: + e (BaseException): The exception for which to get the chain. + + Returns: + list[BaseException]: List of exceptions in the chain, from the given exception back to the root cause. + + Examples: + >>> try: + ... raise ValueError("This is a value error") + ... except ValueError as e: + ... exc_chain = get_exception_chain(e) + ... for exc in exc_chain: + ... print(exc) + This is a value error """ exception_chain = [] current_exception = e @@ -1193,6 +2224,23 @@ def get_exception_chain(e): def get_traceback_details(e): + """ + Retrieves detailed information from the traceback of an exception. + + Args: + e (BaseException): The exception for which to get traceback details. + + Returns: + tuple: A tuple containing filename (str), line number (int), and function name (str) where the exception was raised. + + Examples: + >>> try: + ... raise ValueError("This is a value error") + ... except ValueError as e: + ... filename, lineno, funcname = get_traceback_details(e) + ... 
print(f"File: {filename}, Line: {lineno}, Function: {funcname}") + File: , Line: 2, Function: + """ tb = traceback.extract_tb(e.__traceback__) last_frame = tb[-1] # Get the last frame in the traceback (the one where the exception was raised) filename = last_frame.filename @@ -1202,6 +2250,24 @@ def get_traceback_details(e): async def cancel_tasks(tasks, ignore_errors=True): + """ + Asynchronously cancels a list of asyncio tasks. + + Args: + tasks (list[Task]): A list of asyncio Task objects to cancel. + ignore_errors (bool, optional): Whether to ignore errors other than asyncio.CancelledError. Defaults to True. + + Examples: + >>> async def main(): + ... task1 = asyncio.create_task(async_function1()) + ... task2 = asyncio.create_task(async_function2()) + ... await cancel_tasks([task1, task2]) + ... + >>> asyncio.run(main()) + + Note: + This function will not cancel the current task that it is called from. + """ current_task = asyncio.current_task() tasks = [t for t in tasks if t != current_task] for task in tasks: @@ -1217,6 +2283,21 @@ async def cancel_tasks(tasks, ignore_errors=True): def cancel_tasks_sync(tasks): + """ + Synchronously cancels a list of asyncio tasks. + + Args: + tasks (list[Task]): A list of asyncio Task objects to cancel. + + Examples: + >>> loop = asyncio.get_event_loop() + >>> task1 = loop.create_task(some_async_function1()) + >>> task2 = loop.create_task(some_async_function2()) + >>> cancel_tasks_sync([task1, task2]) + + Note: + This function will not cancel the current task from which it is called. + """ current_task = asyncio.current_task() for task in tasks: if task != current_task: @@ -1225,6 +2306,31 @@ def cancel_tasks_sync(tasks): def weighted_shuffle(items, weights): + """ + Shuffles a list of items based on their corresponding weights. + + Args: + items (list): The list of items to shuffle. + weights (list): The list of weights corresponding to each item. + + Returns: + list: A new list containing the shuffled items. + + Examples: + >>> items = ['apple', 'banana', 'cherry'] + >>> weights = [0.4, 0.5, 0.1] + >>> weighted_shuffle(items, weights) + ['banana', 'apple', 'cherry'] + >>> weighted_shuffle(items, weights) + ['apple', 'banana', 'cherry'] + >>> weighted_shuffle(items, weights) + ['apple', 'banana', 'cherry'] + >>> weighted_shuffle(items, weights) + ['banana', 'apple', 'cherry'] + + Note: + The sum of all weights does not have to be 1. They will be normalized internally. + """ # Create a list of tuples where each tuple is (item, weight) pool = list(zip(items, weights)) @@ -1247,6 +2353,28 @@ def weighted_shuffle(items, weights): def parse_port_string(port_string): + """ + Parses a string containing ports and port ranges into a list of individual ports. + + Args: + port_string (str): The string containing individual ports and port ranges separated by commas. + + Returns: + list: A list of individual ports parsed from the input string. + + Raises: + ValueError: If the input string contains invalid ports or port ranges. + + Examples: + >>> parse_port_string("22,80,1000-1002") + [22, 80, 1000, 1001, 1002] + + >>> parse_port_string("1-2,3-5") + [1, 2, 3, 4, 5] + + >>> parse_port_string("invalid") + ValueError: Invalid port or port range: invalid + """ elements = port_string.split(",") ports = [] @@ -1272,6 +2400,28 @@ def parse_port_string(port_string): def parse_list_string(list_string): + """ + Parses a comma-separated string into a list, removing invalid characters. + + Args: + list_string (str): The string containing elements separated by commas. 
+ + Returns: + list: A list of individual elements parsed from the input string. + + Raises: + ValueError: If the input string contains invalid characters. + + Examples: + >>> parse_list_string("html,js,css") + ['html', 'js', 'css'] + + >>> parse_list_string("png,jpg,gif") + ['png', 'jpg', 'gif'] + + >>> parse_list_string("invalid<>char") + ValueError: Invalid character in string: invalid<>char + """ elements = list_string.split(",") result = [] @@ -1283,6 +2433,23 @@ def parse_list_string(list_string): async def as_completed(coros): + """ + Async generator that yields completed Tasks as they are completed. + + Args: + coros (iterable): An iterable of coroutine objects or asyncio Tasks. + + Yields: + asyncio.Task: A Task object that has completed its execution. + + Examples: + >>> async def main(): + ... async for task in as_completed([coro1(), coro2(), coro3()]): + ... result = task.result() + ... print(f'Task completed with result: {result}') + + >>> asyncio.run(main()) + """ tasks = {coro if isinstance(coro, asyncio.Task) else asyncio.create_task(coro): coro for coro in coros} while tasks: done, _ = await asyncio.wait(tasks.keys(), return_when=asyncio.FIRST_COMPLETED) diff --git a/bbot/core/helpers/modules.py b/bbot/core/helpers/modules.py index fe449a3ee..c6cc52f42 100644 --- a/bbot/core/helpers/modules.py +++ b/bbot/core/helpers/modules.py @@ -11,6 +11,14 @@ class ModuleLoader: + """ + Main class responsible for loading BBOT modules. + + This class is in charge of preloading modules to determine their dependencies. + Once dependencies are identified, they are installed before the actual module is imported. + This ensures that all requisite libraries and components are available for the module to function correctly. + """ + def __init__(self): self._preloaded = {} self._preloaded_orig = None @@ -18,11 +26,31 @@ def __init__(self): self._configs = {} def file_filter(self, file): + file = file.resolve() + if "templates" in file.parts: + return False return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] def preload(self, module_dir): - """ - Preload modules from a specified directory + """Preloads all modules within a directory. + + This function recursively iterates through each file in the specified directory + and preloads the BBOT module to gather its meta-information and dependencies. + + Args: + module_dir (str or Path): Directory containing BBOT modules to be preloaded. + + Returns: + dict: A dictionary where keys are the names of the preloaded modules and + values are their respective preloaded data. + + Examples: + >>> preload("/path/to/bbot_modules/") + { + "module1": {...}, + "module2": {...}, + ... + } """ module_dir = Path(module_dir) for module_file in list_files(module_dir, filter=self.file_filter): @@ -47,7 +75,7 @@ def preload(self, module_dir): print(f"[CRIT] Error in {module_file.name}") sys.exit(1) - return self.preloaded + return self._preloaded def preloaded(self, type=None): preloaded = {} @@ -74,6 +102,51 @@ def check_type(self, module, type): return self._preloaded[module]["type"] == type def preload_module(self, module_file): + """ + Preloads a BBOT module to gather its meta-information and dependencies. + + This function reads a BBOT module file, extracts its attributes such as + events watched and produced, flags, meta-information, and dependencies. + + Args: + module_file (str): Path to the BBOT module file. + + Returns: + dict: A dictionary containing meta-information and dependencies for the module. 
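+
+        Note:
+            The `deps` section of the returned dictionary is what drives dependency
+            installation before the module is imported.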
+ + Examples: + >>> preload_module("bbot/modules/wappalyzer.py") + { + "watched_events": [ + "HTTP_RESPONSE" + ], + "produced_events": [ + "TECHNOLOGY" + ], + "flags": [ + "active", + "safe", + "web-basic", + "web-thorough" + ], + "meta": { + "description": "Extract technologies from web responses" + }, + "config": {}, + "options_desc": {}, + "hash": "d5a88dd3866c876b81939c920bf4959716e2a374", + "deps": { + "pip": [ + "python-Wappalyzer~=0.3.1" + ], + "pip_constraints": [], + "shell": [], + "apt": [], + "ansible": [] + }, + "sudo": false + } + """ watched_events = [] produced_events = [] flags = [] @@ -182,6 +255,22 @@ def load_modules(self, module_names): return modules def load_module(self, module_name): + """Loads a BBOT module by its name. + + Imports the module from its namespace, locates its class, and returns it. + Identifies modules based on the presence of `watched_events` and `produced_events` attributes. + + Args: + module_name (str): The name of the module to load. + + Returns: + object: The loaded module class object. + + Examples: + >>> module = load_module("example_module") + >>> isinstance(module, object) + True + """ namespace = self._preloaded[module_name]["namespace"] import_path = f"{namespace}.{module_name}" module_variables = importlib.import_module(import_path, "bbot") @@ -205,6 +294,8 @@ def load_module(self, module_name): def recommend_dependencies(self, modules): """ Returns a dictionary containing missing dependencies and their suggested resolutions + + Needs work. For this we should probably be building a dependency graph """ resolve_choices = {} # step 1: build a dictionary containing event types and their associated modules @@ -269,6 +360,27 @@ def add_or_create(d, k, *items): d[k] = set(items) def modules_table(self, modules=None, mod_type=None): + """Generates a table of module information. + + Constructs a table to display information such as module name, type, and event details. + + Args: + modules (list, optional): List of module names to include in the table. + mod_type (str, optional): Type of modules to include ('scan', 'output', 'internal'). + + Returns: + str: A formatted table string. + + Examples: + >>> print(modules_table(["nmap"])) + +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ + | Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | + +==========+========+=================+==============================+===============================+======================+===================+ + | nmap | scan | No | Execute port scans with nmap | active, aggressive, portscan, | DNS_NAME, IP_ADDRESS | OPEN_TCP_PORT | + | | | | | web-thorough | | | + +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ + """ + table = [] header = ["Module", "Type", "Needs API Key", "Description", "Flags", "Consumed Events", "Produced Events"] maxcolwidths = [20, 10, 5, 30, 30, 20, 20] diff --git a/bbot/core/helpers/punycode.py b/bbot/core/helpers/punycode.py index d7055f6db..d9b1e4a6a 100644 --- a/bbot/core/helpers/punycode.py +++ b/bbot/core/helpers/punycode.py @@ -17,7 +17,17 @@ def split_text(text): def smart_encode_punycode(text: str) -> str: """ - ドメイン.テスト --> xn--eckwd4c7c.xn--zckzah + Encodes a given string using Punycode, while leaving non-alphanumeric segments untouched. + + Args: + text (str): The string to be encoded. 
+ + Returns: + str: The Punycode encoded string. + + Examples: + >>> smart_encode_punycode("ドメイン.テスト") + "xn--eckwd4c7c.xn--zckzah" """ segments = split_text(text) result_segments = [] @@ -36,7 +46,17 @@ def smart_encode_punycode(text: str) -> str: def smart_decode_punycode(text: str) -> str: """ - xn--eckwd4c7c.xn--zckzah --> ドメイン.テスト + Decodes a given Punycode encoded string, while leaving non-alphanumeric segments untouched. + + Args: + text (str): The Punycode encoded string to be decoded. + + Returns: + str: The decoded string. + + Examples: + >>> smart_decode_punycode("xn--eckwd4c7c.xn--zckzah") + "ドメイン.テスト" """ segments = split_text(text) result_segments = [] diff --git a/bbot/core/helpers/ratelimiter.py b/bbot/core/helpers/ratelimiter.py index a7b37a42e..482be66e8 100644 --- a/bbot/core/helpers/ratelimiter.py +++ b/bbot/core/helpers/ratelimiter.py @@ -6,6 +6,20 @@ class RateLimiter: + """ + An asynchronous rate limiter class designed to be used as a context manager. + + Args: + rate (int): The number of allowed requests per second. + name (str): The name of the rate limiter, used for logging. + + Examples: + >>> rate_limiter = RateLimiter(100, "web") + >>> async def rate_limited_request(url): + ... async with rate_limiter: + ... return await request(url) + """ + def __init__(self, rate, name): self.rate = rate / 10 self.name = name diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 5ed169345..d620bf008 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -19,14 +19,20 @@ word_regex = re.compile(r"[^\d\W_]+") word_num_regex = re.compile(r"[^\W_]+") num_regex = re.compile(r"\d+") + _ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*" ipv6_regex = re.compile(_ipv6_regex, re.I) + # dns names with periods _dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+[^\W_]{1,63}\.?" +dns_name_regex = re.compile(_dns_name_regex, re.I) # dns names without periods _hostname_regex = r"(?!\w*\.\w+)\w(?:[\w-]{0,100}\w)?" +hostname_regex = re.compile(r"^" + _hostname_regex + r"$", re.I) + _email_regex = r"(?:[^\W_][\w\-\.\+]{,100})@" + _dns_name_regex email_regex = re.compile(_email_regex, re.I) + _ptr_regex = r"(?:[0-9]{1,3}[-_\.]){3}[0-9]{1,3}" ptr_regex = re.compile(_ptr_regex) # uuid regex @@ -50,6 +56,7 @@ _double_slash_regex = r"/{2,}" double_slash_regex = re.compile(_double_slash_regex) +# event type regexes, used throughout BBOT for autodetection of event types, validation, and excavation. event_type_regexes = OrderedDict( ( (k, tuple(re.compile(r, re.I) for r in regexes)) @@ -78,9 +85,8 @@ ) event_id_regex = re.compile(r"[0-9a-f]{40}:[A-Z0-9_]+") -dns_name_regex = re.compile(_dns_name_regex, re.I) scan_name_regex = re.compile(r"[a-z]{3,20}_[a-z]{3,20}") -hostname_regex = re.compile(r"^" + _hostname_regex + r"$", re.I) + # For use with extract_params_html helper input_tag_regex = re.compile(r"]+?name=[\"\'](\w+)[\"\']") diff --git a/bbot/core/helpers/url.py b/bbot/core/helpers/url.py index e595a99bb..5482e54c5 100644 --- a/bbot/core/helpers/url.py +++ b/bbot/core/helpers/url.py @@ -10,12 +10,49 @@ def parse_url(url): - if type(url) == ParseResult: + """ + Parse the given URL string or ParseResult object and return a ParseResult. + + This function checks if the input is already a ParseResult object. If it is, + it returns the object as-is. Otherwise, it parses the given URL string using + `urlparse`. + + Args: + url (Union[str, ParseResult]): The URL string or ParseResult object to be parsed. 
+ + Returns: + ParseResult: A named 6-tuple that contains the components of a URL. + + Examples: + >>> parse_url('https://www.evilcorp.com') + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='', fragment='') + """ + if isinstance(url, ParseResult): return url return urlparse(url) def add_get_params(url, params): + """ + Add or update query parameters to the given URL. + + This function takes an existing URL and a dictionary of query parameters, + updates or adds these parameters to the URL, and returns a new URL. + + Args: + url (Union[str, ParseResult]): The original URL. + params (Dict[str, Any]): A dictionary containing the query parameters to be added or updated. + + Returns: + ParseResult: A named 6-tuple containing the components of the modified URL. + + Examples: + >>> add_get_params('https://www.evilcorp.com?foo=1', {'bar': 2}) + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=1&bar=2', fragment='') + + >>> add_get_params('https://www.evilcorp.com?foo=1', {'foo': 2}) + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=2', fragment='') + """ parsed = parse_url(url) old_params = dict(parse_qs(parsed.query)) old_params.update(params) @@ -23,6 +60,22 @@ def add_get_params(url, params): def get_get_params(url): + """ + Extract the query parameters from the given URL as a dictionary. + + Args: + url (Union[str, ParseResult]): The URL from which to extract query parameters. + + Returns: + Dict[str, List[str]]: A dictionary containing the query parameters and their values. + + Examples: + >>> get_get_params('https://www.evilcorp.com?foo=1&bar=2') + {'foo': ['1'], 'bar': ['2']} + + >>> get_get_params('https://www.evilcorp.com?foo=1&foo=2') + {'foo': ['1', '2']} + """ parsed = parse_url(url) return dict(parse_qs(parsed.query)) @@ -34,6 +87,32 @@ def get_get_params(url): def charset(p): + """ + Determine the character set of the given string based on the types of characters it contains. + + Args: + p (str): The string whose character set is to be determined. + + Returns: + int: A bitmask representing the types of characters present in the string. + - CHAR_LOWER = 1: Lowercase alphabets + - CHAR_UPPER = 2: Uppercase alphabets + - CHAR_DIGIT = 4: Digits + - CHAR_SYMBOL = 8: Symbols/Special characters + + Examples: + >>> charset('abc') + 1 + + >>> charset('abcABC') + 3 + + >>> charset('abc123') + 5 + + >>> charset('!abc123') + 13 + """ ret = 0 for c in p: if c.islower(): @@ -48,6 +127,28 @@ def charset(p): def param_type(p): + """ + Evaluates the type of the given parameter. + + Args: + p (str): The parameter whose type is to be evaluated. + + Returns: + int: An integer representing the type of parameter. + - 1: Integer + - 2: UUID + - 3: Other + + Examples: + >>> param_type('123') + 1 + + >>> param_type('550e8400-e29b-41d4-a716-446655440000') + 2 + + >>> param_type('abc') + 3 + """ try: int(p) return 1 @@ -59,6 +160,25 @@ def param_type(p): def hash_url(url): + """ + Hashes a URL for the purpose of cleaning or collapsing similar URLs. + + Args: + url (str): The URL to be hashed. + + Returns: + int: The hash value of the cleaned URL. 
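+
+    Note:
+        The query string and fragment are stripped before hashing, and path segments
+        that differ only by digits collapse to the same hash, as the `/page/1` and
+        `/page/2` examples below demonstrate.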
+ + Examples: + >>> hash_url('https://www.evilcorp.com') + -7448777882396416944 + + >>> hash_url('https://www.evilcorp.com/page/1') + -8101275613229735915 + + >>> hash_url('https://www.evilcorp.com/page/2') + -8101275613229735915 + """ parsed = parse_url(url) parsed = parsed._replace(fragment="", query="") to_hash = [parsed.netloc] @@ -76,6 +196,22 @@ def hash_url(url): def url_depth(url): + """ + Calculate the depth of the given URL based on its path components. + + Args: + url (Union[str, ParseResult]): The URL whose depth is to be calculated. + + Returns: + int: The depth of the URL, based on its path components. + + Examples: + >>> url_depth('https://www.evilcorp.com/foo/bar/') + 2 + + >>> url_depth('https://www.evilcorp.com/foo//bar/baz/') + 3 + """ parsed = parse_url(url) parsed = parsed._replace(path=double_slash_regex.sub("/", parsed.path)) split_path = str(parsed.path).strip("/").split("/") diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index 3fa759b95..591ba0092 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -3,6 +3,7 @@ from contextlib import suppress from bbot.core.helpers import regexes +from bbot.core.errors import ValidationError from bbot.core.helpers.url import parse_url, hash_url from bbot.core.helpers.punycode import smart_decode_punycode from bbot.core.helpers.misc import split_host_port, make_netloc, is_ip @@ -12,7 +13,18 @@ def validator(func): """ - Decorator for squashing all errors into ValueError + Decorator that squashes all errors raised by the wrapped function into a ValueError. + + Args: + func (Callable): The function to be decorated. + + Returns: + Callable: The wrapped function. + + Examples: + >>> @validator + ... def validate_port(port): + ... return max(1, min(65535, int(str(port)))) """ def validate_wrapper(*args, **kwargs): @@ -26,6 +38,28 @@ def validate_wrapper(*args, **kwargs): @validator def validate_port(port): + """ + Validates and sanitizes a port number by ensuring it falls within the allowed range (1-65535). + + Args: + port (int or str): The port number to validate. + + Returns: + int: The sanitized port number. + + Raises: + ValueError: If the port number cannot be converted to an integer or is out of range. + + Examples: + >>> validate_port(22) + 22 + + >>> validate_port(70000) + 65535 + + >>> validate_port(-123) + 1 + """ return max(1, min(65535, int(str(port)))) @@ -40,6 +74,33 @@ def validate_open_port(open_port): @validator def validate_host(host): + """ + Validates and sanitizes a host string. This function handles IPv4, IPv6, and domain names. + + It automatically strips ports, trailing periods, and clinging asterisks and dashes. + + Args: + host (str): The host string to validate. + + Returns: + str: The sanitized host string. + + Raises: + ValidationError: If the host is invalid or does not conform to IPv4, IPv6, or DNS_NAME formats. 
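+
+    Note:
+        Hosts are lowercased and matched against BBOT's IP and DNS_NAME regexes;
+        anything that matches neither raises a ValidationError.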
+ + Examples: + >>> validate_host("2001:db8::ff00:42:8329") + '2001:db8::ff00:42:8329' + + >>> validate_host("192.168.0.1:443") + '192.168.0.1' + + >>> validate_host(".*.eViLCoRP.com.") + 'evilcorp.com' + + >>> validate_host("Invalid<>Host") + ValueError: Validation failed for ('Invalid<>Host',), {}: Invalid hostname: "invalid<>host" + """ # stringify, strip and lowercase host = str(host).strip().lower() # handle IPv6 netlocs @@ -63,7 +124,7 @@ def validate_host(host): for r in regexes.event_type_regexes["DNS_NAME"]: if r.match(host): return host - assert False, f'Invalid hostname: "{host}"' + raise ValidationError(f'Invalid hostname: "{host}"') @validator @@ -75,7 +136,7 @@ def validate_url(url): def validate_url_parsed(url): url = str(url).strip() if not any(r.match(url) for r in regexes.event_type_regexes["URL"]): - assert False, f'Invalid URL: "{url}"' + raise ValidationError(f'Invalid URL: "{url}"') return clean_url(url) @@ -92,16 +153,29 @@ def validate_email(email): email = smart_decode_punycode(str(email).strip().lower()) if any(r.match(email) for r in regexes.event_type_regexes["EMAIL_ADDRESS"]): return email - assert False, f'Invalid email: "{email}"' + raise ValidationError(f'Invalid email: "{email}"') def clean_url(url): """ - Remove query string and fragment, lowercase netloc, remove redundant port + Cleans and normalizes a URL. This function removes the query string and fragment, + lowercases the netloc, and removes redundant port numbers. + + Args: + url (str): The URL string to clean. + + Returns: + ParseResult: A ParseResult object containing the cleaned URL. + + Examples: + >>> clean_url("http://evilcorp.com:80") + ParseResult(scheme='http', netloc='evilcorp.com', path='/', params='', query='', fragment='') - http://evilcorp.com:80 --> http://evilcorp.com/ - http://eViLcORp.com/ --> http://evilcorp.com/ - http://evilcorp.com/api?user=bob#place --> http://evilcorp.com/api + >>> clean_url("http://eViLcORp.com/") + ParseResult(scheme='http', netloc='evilcorp.com', path='/', params='', query='', fragment='') + + >>> clean_url("http://evilcorp.com/api?user=bob#place") + ParseResult(scheme='http', netloc='evilcorp.com', path='/api', params='', query='', fragment='') """ parsed = parse_url(url) parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="", query="") @@ -133,14 +207,20 @@ def clean_url(url): def collapse_urls(urls, threshold=10): """ - Smartly dedupe suspiciously-similar URLs like these: - - http://evilcorp.com/user/11111/info - - http://evilcorp.com/user/2222/info - - http://evilcorp.com/user/333/info - - http://evilcorp.com/user/44/info - - http://evilcorp.com/user/5/info - - Useful for cleaning large lists of garbage-riddled URLs from sources like wayback + Collapses a list of URLs by deduping similar URLs based on a hashing mechanism. + Useful for cleaning large lists of noisy URLs, such as those retrieved from wayback. + + Args: + urls (list): The list of URL strings to collapse. + threshold (int): The number of allowed duplicate URLs before collapsing. + + Yields: + str: A deduped URL from the input list. + + Example: + >>> list(collapse_urls(["http://evilcorp.com/user/11111/info", "http://evilcorp.com/user/2222/info"], threshold=1)) + ["http://evilcorp.com/user/11111/info"] + """ url_hashes = {} for url in urls: @@ -164,10 +244,26 @@ def collapse_urls(urls, threshold=10): def soft_validate(s, t): """ - Friendly validation wrapper that returns True/False instead of raising an error + Softly validates a given string against a specified type. 
This function returns a boolean + instead of raising an error. + + Args: + s (str): The string to validate. + t (str): The type to validate against, e.g., "url" or "host". + + Returns: + bool: True if the string is valid, False otherwise. + + Raises: + ValueError: If no validator for the specified type is found. - is_valid_url = soft_validate("http://evilcorp.com", "url") - is_valid_host = soft_validate("http://evilcorp.com", "host") + Examples: + >>> soft_validate("http://evilcorp.com", "url") + True + >>> soft_validate("evilcorp.com", "url") + False + >>> soft_validate("http://evilcorp", "wrong_type") + ValueError: No validator for type "wrong_type" """ try: validator_fn = globals()[f"validate_{t.strip().lower()}"] diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index 7d801ca5a..441cece9c 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -7,6 +7,7 @@ import traceback from pathlib import Path from bs4 import BeautifulSoup +from contextlib import asynccontextmanager from httpx._models import Cookies @@ -28,6 +29,24 @@ def extract_cookies(self, *args, **kwargs): class BBOTAsyncClient(httpx.AsyncClient): + """ + A subclass of httpx.AsyncClient tailored with BBOT-specific configurations and functionalities. + This class provides rate limiting, logging, configurable timeouts, user-agent customization, custom + headers, and proxy settings. Additionally, it allows the disabling of cookies, making it suitable + for use across an entire scan. + + Attributes: + _bbot_scan (object): BBOT scan object containing configuration details. + _rate_limiter (RateLimiter): A rate limiter object to limit web requests. + _persist_cookies (bool): Flag to determine whether cookies should be persisted across requests. + + Examples: + >>> async with BBOTAsyncClient(_bbot_scan=bbot_scan_object) as client: + >>> response = await client.request("GET", "https://example.com") + >>> print(response.status_code) + 200 + """ + def __init__(self, *args, **kwargs): self._bbot_scan = kwargs.pop("_bbot_scan") web_requests_per_second = self._bbot_scan.config.get("web_requests_per_second", 100) @@ -83,7 +102,26 @@ def _merge_cookies(self, cookies): class WebHelper: """ - For making HTTP requests + Main utility class for managing HTTP operations in BBOT. It serves as a wrapper around the BBOTAsyncClient, + which itself is a subclass of httpx.AsyncClient. The class provides functionalities to make HTTP requests, + download files, and handle cached wordlists. + + Attributes: + parent_helper (object): The parent helper object containing scan configurations. + http_debug (bool): Flag to indicate whether HTTP debugging is enabled. + ssl_verify (bool): Flag to indicate whether SSL verification is enabled. + web_client (BBOTAsyncClient): An instance of BBOTAsyncClient for making HTTP requests. + client_only_options (tuple): A tuple of options only applicable to the web client. 
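+
+    Note:
+        When `ssl_verify` is disabled, requests are made with a fully permissive SSL
+        context (see `ssl_context_noverify()`), so certificate errors are ignored.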
+ + Examples: + Basic web request: + >>> response = await self.helpers.request("https://www.evilcorp.com") + + Download file: + >>> filename = await self.helpers.download("https://www.evilcorp.com/passwords.docx") + + Download wordlist (cached for 30 days by default): + >>> filename = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt") """ client_only_options = ( @@ -100,11 +138,59 @@ def __init__(self, parent_helper): def AsyncClient(self, *args, **kwargs): kwargs["_bbot_scan"] = self.parent_helper.scan retries = kwargs.pop("retries", self.parent_helper.config.get("http_retries", 1)) - kwargs["transport"] = httpx.AsyncHTTPTransport(retries=retries, verify=self.ssl_verify) + transport = httpx.AsyncHTTPTransport(retries=retries, verify=self.ssl_verify) + if not self.ssl_verify: + # if we don't want to verify cert validity, we REALLY don't want to verify. + transport._pool._ssl_context = self.ssl_context_noverify() + kwargs["transport"] = transport kwargs["verify"] = self.ssl_verify return BBOTAsyncClient(*args, **kwargs) async def request(self, *args, **kwargs): + """ + Asynchronous function for making HTTP requests, intended to be the most basic web request function + used widely across BBOT and within this helper class. Handles various exceptions and timeouts + that might occur during the request. + + This function automatically respects the scan's global timeout, proxy, headers, etc. + Headers you specify will be merged with the scan's. Your arguments take ultimate precedence, + meaning you can override the scan's values if you want. + + Args: + url (str): The URL to send the request to. + method (str, optional): The HTTP method to use for the request. Defaults to 'GET'. + headers (dict, optional): Dictionary of HTTP headers to send with the request. + params (dict, optional): Dictionary, list of tuples, or bytes to send in the query string. + cookies (dict, optional): Dictionary or CookieJar object containing cookies. + json (Any, optional): A JSON serializable Python object to send in the body. + data (dict, optional): Dictionary, list of tuples, or bytes to send in the body. + files (dict, optional): Dictionary of 'name': file-like-objects for multipart encoding upload. + auth (tuple, optional): Auth tuple to enable Basic/Digest/Custom HTTP auth. + timeout (float, optional): The maximum time to wait for the request to complete. + proxies (dict, optional): Dictionary mapping protocol schemes to proxy URLs. + allow_redirects (bool, optional): Enables or disables redirection. Defaults to None. + stream (bool, optional): Enables or disables response streaming. + raise_error (bool, optional): Whether to raise exceptions for HTTP connect, timeout errors. Defaults to False. + client (httpx.AsyncClient, optional): A specific httpx.AsyncClient to use for the request. Defaults to self.web_client. + cache_for (int, optional): Time in seconds to cache the request. Not used currently. Defaults to None. + + Raises: + httpx.TimeoutException: If the request times out. + httpx.ConnectError: If the connection fails. + httpx.RequestError: For other request-related errors. + + Returns: + httpx.Response or None: The HTTP response object returned by the httpx library. + + Examples: + >>> response = await self.helpers.request("https://www.evilcorp.com") + + >>> response = await self.helpers.request("https://api.evilcorp.com/", method="POST", data="stuff") + + Note: + If the web request fails, it will return None unless `raise_error` is `True`. 
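+
+            A sketch of error handling when `raise_error` is enabled (the URL is illustrative):
+
+            >>> try:
+            ...     response = await self.helpers.request("https://www.evilcorp.com", raise_error=True)
+            ... except httpx.RequestError as e:
+            ...     self.verbose(f"Request failed: {e}")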
+ """ + raise_error = kwargs.pop("raise_error", False) # TODO: use this cache_for = kwargs.pop("cache_for", None) # noqa @@ -135,7 +221,7 @@ async def request(self, *args, **kwargs): if client_kwargs: client = self.AsyncClient(**client_kwargs) - try: + async with self._acatch(url, raise_error): if self.http_debug: logstr = f"Web request: {str(args)}, {str(kwargs)}" log.debug(logstr) @@ -145,52 +231,41 @@ async def request(self, *args, **kwargs): f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}" ) return response - except httpx.PoolTimeout: - # this block exists because of this: - # https://github.com/encode/httpcore/discussions/783 - log.verbose(f"PoolTimeout to URL: {url}") - self.web_client = self.AsyncClient(persist_cookies=False) - return await self.request(*args, **kwargs) - except httpx.TimeoutException: - log.verbose(f"HTTP timeout to URL: {url}") - if raise_error: - raise - except httpx.ConnectError: - log.verbose(f"HTTP connect failed to URL: {url}") - if raise_error: - raise - except httpx.RequestError as e: - log.trace(f"Error with request to URL: {url}: {e}") - log.trace(traceback.format_exc()) - if raise_error: - raise - except ssl.SSLError as e: - msg = f"SSL error with request to URL: {url}: {e}" - log.trace(msg) - log.trace(traceback.format_exc()) - if raise_error: - raise httpx.RequestError(msg) - except anyio.EndOfStream as e: - msg = f"AnyIO error with request to URL: {url}: {e}" - log.trace(msg) - log.trace(traceback.format_exc()) - if raise_error: - raise httpx.RequestError(msg) - except BaseException as e: - log.trace(f"Unhandled exception with request to URL: {url}: {e}") - log.trace(traceback.format_exc()) - raise async def download(self, url, **kwargs): """ - Downloads file, returns full path of filename - If download failed, returns None + Asynchronous function for downloading files from a given URL. Supports caching with an optional + time period in hours via the "cache_hrs" keyword argument. In case of successful download, + returns the full path of the saved filename. If the download fails, returns None. - Caching supported via "cache_hrs" + Args: + url (str): The URL of the file to download. + filename (str, optional): The filename to save the downloaded file as. + If not provided, will generate based on URL. + max_size (str or int): Maximum filesize as a string ("5MB") or integer in bytes. + cache_hrs (float, optional): The number of hours to cache the downloaded file. + A negative value disables caching. Defaults to -1. + method (str, optional): The HTTP method to use for the request, defaults to 'GET'. + raise_error (bool, optional): Whether to raise exceptions for HTTP connect, timeout errors. Defaults to False. + **kwargs: Additional keyword arguments to pass to the httpx request. + + Returns: + Path or None: The full path of the downloaded file as a Path object if successful, otherwise None. 
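+
+        Note:
+            Failed downloads are logged as warnings by default; pass `warn=False` to
+            downgrade the failure message to a verbose log instead.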
+
+        Examples:
+            >>> filepath = await self.helpers.download("https://www.evilcorp.com/passwords.docx", cache_hrs=24)
+        """
         success = False
         filename = kwargs.pop("filename", self.parent_helper.cache_filename(url))
+        follow_redirects = kwargs.pop("follow_redirects", True)
+        max_size = kwargs.pop("max_size", None)
+        warn = kwargs.pop("warn", True)
+        raise_error = kwargs.pop("raise_error", False)
+        if max_size is not None:
+            max_size = self.parent_helper.human_to_bytes(max_size)
         cache_hrs = float(kwargs.pop("cache_hrs", -1))
+        total_size = 0
+        chunk_size = 8192
         log.debug(f"Downloading file from {url} with cache_hrs={cache_hrs}")
         if cache_hrs > 0 and self.parent_helper.is_cached(url):
             log.debug(f"{url} is cached at {self.parent_helper.cache_filename(url)}")
@@ -198,26 +273,64 @@ async def download(self, url, **kwargs):
         else:
             # kwargs["raise_error"] = True
             # kwargs["stream"] = True
+            kwargs["follow_redirects"] = follow_redirects
             if not "method" in kwargs:
                 kwargs["method"] = "GET"
             try:
-                async with self.AsyncClient().stream(url=url, **kwargs) as response:
+                async with self._acatch(url, raise_error), self.AsyncClient().stream(url=url, **kwargs) as response:
                     status_code = getattr(response, "status_code", 0)
                     log.debug(f"Download result: HTTP {status_code}")
                     if status_code != 0:
                         response.raise_for_status()
                     with open(filename, "wb") as f:
-                        async for chunk in response.aiter_bytes(chunk_size=8192):
+                        agen = response.aiter_bytes(chunk_size=chunk_size)
+                        async for chunk in agen:
+                            if max_size is not None and total_size + chunk_size > max_size:
+                                log.verbose(
+                                    f"Filesize of {url} exceeds {self.parent_helper.bytes_to_human(max_size)}, file will be truncated"
+                                )
+                                await agen.aclose()
+                                break
+                            total_size += chunk_size
                             f.write(chunk)
                     success = True
             except httpx.HTTPError as e:
-                log.warning(f"Failed to download {url}: {e}")
+                log_fn = log.verbose
+                if warn:
+                    log_fn = log.warning
+                log_fn(f"Failed to download {url}: {e}")
                 return
         if success:
             return filename.resolve()
 
     async def wordlist(self, path, lines=None, **kwargs):
+        """
+        Asynchronous function for retrieving wordlists, either from a local path or a URL.
+        Allows for optional line-based truncation and caching. Returns the full path of the wordlist
+        file or a truncated version of it.
+
+        Args:
+            path (str): The local or remote path of the wordlist.
+            lines (int, optional): Number of lines to read from the wordlist.
+                If specified, will return a truncated wordlist with this many lines.
+            cache_hrs (float, optional): Number of hours to cache the downloaded wordlist.
+                Defaults to 720 hours (30 days) for remote wordlists.
+            **kwargs: Additional keyword arguments to pass to the 'download' function for remote wordlists.
+
+        Returns:
+            Path: The full path of the wordlist (or its truncated version) as a Path object.
+
+        Raises:
+            WordlistError: If the path is invalid or the wordlist could not be retrieved or found.
+
+        Examples:
+            Fetching full wordlist
+            >>> wordlist_path = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt")
+
+            Fetching and truncating to the first 100 lines
+            >>> wordlist_path = await self.helpers.wordlist("/root/rockyou.txt", lines=100)
+        """
         if not path:
             raise WordlistError(f"Invalid wordlist: {path}")
         if not "cache_hrs" in kwargs:
@@ -246,38 +359,35 @@ async def wordlist(self, path, lines=None, **kwargs):
 
     async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **requests_kwargs):
         """
-        An async generator to fetch and loop through API pages.
+ An asynchronous generator function for iterating through paginated API data. - This function keeps calling the API with the provided URL, increasing the page number each time, and spits out - the results one page at a time. It's perfect for APIs that split their data across multiple pages. + This function continuously makes requests to a specified API URL, incrementing the page number + or applying a custom pagination function, and yields the received data one page at a time. + It is well-suited for APIs that provide paginated results. Args: - url (str): The API endpoint. May contain placeholders for 'page' and 'page_size'. - page_size (int, optional): How many items you want per page. Defaults to 100. - json (bool, optional): If True, we'll try to convert the response to JSON. Defaults to True. - next_key (callable, optional): If your API has a weird way to get to the next page, give us a function - that takes the response and spits out the new URL. Defaults to None. - **requests_kwargs: Any other stuff you want to pass to the request. + url (str): The initial API URL. Can contain placeholders for 'page', 'page_size', and 'offset'. + page_size (int, optional): The number of items per page. Defaults to 100. + json (bool, optional): If True, attempts to deserialize the response content to a JSON object. Defaults to True. + next_key (callable, optional): A function that takes the last page's data and returns the URL for the next page. Defaults to None. + **requests_kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function. Yields: - If 'json' is True, you'll get a dict with the API's response, else you'll get the raw response. + dict or httpx.Response: If 'json' is True, yields a dictionary containing the parsed JSON data. Otherwise, yields the raw HTTP response. Note: - You MUST break out of the loop when you stop getting useful results! Otherwise it will loop forever. - - Example: - Here's a quick example of how to use this: - ``` - agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}') - try: - async for page in agen: - subdomains = json["subdomains"] - self.hugesuccess(subdomains) - if not subdomains: - break - finally: - agen.aclose() - ``` + The loop will continue indefinitely unless manually stopped. Make sure to break out of the loop once the last page has been received. + + Examples: + >>> agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}') + >>> try: + >>> async for page in agen: + >>> subdomains = page["subdomains"] + >>> self.hugesuccess(subdomains) + >>> if not subdomains: + >>> break + >>> finally: + >>> agen.aclose() """ page = 1 offset = 0 @@ -305,6 +415,36 @@ async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **re page += 1 async def curl(self, *args, **kwargs): + """ + An asynchronous function that runs a cURL command with specified arguments and options. + + This function constructs and executes a cURL command based on the provided parameters. + It offers support for various cURL options such as headers, post data, and cookies. + + Args: + *args: Variable length argument list for positional arguments. Unused in this function. + url (str): The URL for the cURL request. Mandatory. + raw_path (bool, optional): If True, activates '--path-as-is' in cURL. Defaults to False. + headers (dict, optional): A dictionary of HTTP headers to include in the request. + ignore_bbot_global_settings (bool, optional): If True, ignores the global settings of BBOT. 
Defaults to False. + post_data (dict, optional): A dictionary containing data to be sent in the request body. + method (str, optional): The HTTP method to use for the request (e.g., 'GET', 'POST'). + cookies (dict, optional): A dictionary of cookies to include in the request. + path_override (str, optional): Overrides the request-target to use in the HTTP request line. + head_mode (bool, optional): If True, includes '-I' to fetch headers only. Defaults to None. + raw_body (str, optional): Raw string to be sent in the body of the request. + **kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function. + + Returns: + str: The output of the cURL command. + + Raises: + CurlError: If 'url' is not supplied. + + Examples: + >>> output = await curl(url="https://example.com", headers={"X-Header": "Wat"}) + >>> print(output) + """ url = kwargs.get("url", "") if not url: @@ -393,12 +533,118 @@ async def curl(self, *args, **kwargs): output = (await self.parent_helper.run(curl_command)).stdout return output + def is_spider_danger(self, source_event, url): + """ + Determines whether visiting a URL could potentially trigger a web-spider-like happening. + + This function assesses the depth and distance of a URL in relation to the parent helper's + configuration settings for web spidering. If the URL exceeds the specified depth or distance, + the function returns True, indicating a possible web-spider risk. + + Args: + source_event: The source event object that discovered the URL. + url (str): The URL to evaluate for web-spider risk. + + Returns: + bool: True if visiting the URL might trigger a web-spider-like event, False otherwise. + + Todo: + - Write tests for this function + + Examples: + >>> is_spider_danger(source_event_obj, "https://example.com/subpage") + True + + >>> is_spider_danger(source_event_obj, "https://example.com/") + False + """ + url_depth = self.parent_helper.url_depth(url) + web_spider_depth = self.parent_helper.scan.config.get("web_spider_depth", 1) + spider_distance = getattr(source_event, "web_spider_distance", 0) + 1 + web_spider_distance = self.parent_helper.scan.config.get("web_spider_distance", 0) + if (url_depth > web_spider_depth) or (spider_distance > web_spider_distance): + return True + return False + + def ssl_context_noverify(self): + ssl_context = ssl.create_default_context() + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + ssl_context.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 + ssl_context.set_ciphers("ALL:@SECLEVEL=0") + ssl_context.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option + return ssl_context + + @asynccontextmanager + async def _acatch(self, url, raise_error): + """ + Asynchronous context manager to handle various httpx errors during a request. + + Yields: + None + + Note: + This function is internal and should generally not be used directly. + `url`, `args`, `kwargs`, and `raise_error` should be in the same context as this function. 
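+
+        Examples:
+            Typical internal usage, where `client` is an httpx.AsyncClient:
+
+            >>> async with self._acatch(url, raise_error=False):
+            ...     response = await client.request("GET", url)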
+ """ + try: + yield + except httpx.TimeoutException: + log.verbose(f"HTTP timeout to URL: {url}") + if raise_error: + raise + except httpx.ConnectError: + log.verbose(f"HTTP connect failed to URL: {url}") + if raise_error: + raise + except httpx.RequestError as e: + log.trace(f"Error with request to URL: {url}: {e}") + log.trace(traceback.format_exc()) + if raise_error: + raise + except ssl.SSLError as e: + msg = f"SSL error with request to URL: {url}: {e}" + log.trace(msg) + log.trace(traceback.format_exc()) + if raise_error: + raise httpx.RequestError(msg) + except anyio.EndOfStream as e: + msg = f"AnyIO error with request to URL: {url}: {e}" + log.trace(msg) + log.trace(traceback.format_exc()) + if raise_error: + raise httpx.RequestError(msg) + except BaseException as e: + log.trace(f"Unhandled exception with request to URL: {url}: {e}") + log.trace(traceback.format_exc()) + raise + user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]] pass_keywords = [re.compile(r, re.I) for r in ["pass"]] def is_login_page(html): + """ + Determines if the provided HTML content contains a login page. + + This function parses the HTML to search for forms with input fields typically used for + authentication. If it identifies password fields or a combination of username and password + fields, it returns True. + + Args: + html (str): The HTML content to analyze. + + Returns: + bool: True if the HTML contains a login page, otherwise False. + + Examples: + >>> is_login_page('
<form><input name="user"><input type="password" name="pass"></form>')
+        True
+
+        >>> is_login_page('<form><input name="search"></form>
') + False + """ try: soup = BeautifulSoup(html, "html.parser") except Exception as e: diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index 1d387df81..7531a93e2 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -13,9 +13,71 @@ class WordCloud(dict): + """ + WordCloud is a specialized dictionary-like class for storing and aggregating + words extracted from various data sources such as DNS names and URLs. The class + is intended to facilitate the generation of target-specific wordlists and mutations. + + The WordCloud class can be accessed and manipulated like a standard Python dictionary. + It also offers additional methods for generating mutations based on the words it contains. + + Attributes: + parent_helper: The parent helper object that provides necessary utilities. + devops_mutations: A set containing common devops-related mutations, loaded from a file. + dns_mutator: An instance of the DNSMutator class for generating DNS-based mutations. + + Examples: + >>> s = Scanner("www1.evilcorp.com", "www-test.evilcorp.com") + >>> s.start_without_generator() + >>> print(s.helpers.word_cloud) + { + "evilcorp": 2, + "ec": 2, + "www1": 1, + "evil": 2, + "www": 2, + "w1": 1, + "corp": 2, + "1": 1, + "wt": 1, + "test": 1, + "www-test": 1 + } + + >>> s.helpers.word_cloud.mutations(["word"], cloud=True, numbers=0, devops=False, letters=False) + [ + [ + "1", + "word" + ], + [ + "corp", + "word" + ], + [ + "ec", + "word" + ], + [ + "evil", + "word" + ], + ... + ] + + >>> s.helpers.word_cloud.dns_mutator.mutations("word") + [ + "word", + "word-test", + "word1", + "wordtest", + "www-word", + "wwwword" + ] + """ + def __init__(self, parent_helper, *args, **kwargs): self.parent_helper = parent_helper - self.max_backups = 20 devops_filename = self.parent_helper.wordlist_dir / "devops_mutations.txt" self.devops_mutations = set(self.parent_helper.read_file(devops_filename)) @@ -27,6 +89,23 @@ def __init__(self, parent_helper, *args, **kwargs): def mutations( self, words, devops=True, cloud=True, letters=True, numbers=5, number_padding=2, substitute_numbers=True ): + """ + Generate various mutations for the given list of words based on different criteria. + + Yields tuples of strings which can be joined on the desired delimiter, e.g. "-" or "_". + + Args: + words (Union[str, Iterable[str]]): A single word or list of words to mutate. + devops (bool): Whether to include devops-related mutations. + cloud (bool): Whether to include mutations from the word cloud. + letters (bool): Whether to include letter-based mutations. + numbers (int): The maximum numeric mutations to include. + number_padding (int): Padding for numeric mutations. + substitute_numbers (bool): Whether to substitute numbers in mutations. + + Yields: + tuple: A tuple containing each of the mutation segments. + """ if isinstance(words, str): words = (words,) results = set() @@ -68,6 +147,15 @@ def modifiers(self, devops=True, cloud=True, letters=True, numbers=5, number_pad return modifiers def absorb_event(self, event): + """ + Absorbs an event from a BBOT scan into the word cloud. + + This method updates the word cloud by extracting words from the given event. It aims to avoid including PTR + (Pointer) records, as they tend to produce unhelpful mutations in the word cloud. + + Args: + event (Event): The event object containing the words to be absorbed into the word cloud. 
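+
+        Examples:
+            >>> self.helpers.word_cloud.absorb_event(event)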
+ """ for word in event.words: self.add_word(word) if event.scope_distance == 0 and event.type.startswith("DNS_NAME"): @@ -78,13 +166,45 @@ def absorb_event(self, event): def absorb_word(self, word, ninja=True): """ - Use word ninja to smartly split the word, - e.g. "blacklantern" --> "black", "lantern" + Absorbs a word into the word cloud after splitting it using a word extraction algorithm. + + This method splits the input word into smaller meaningful words using word extraction, and then adds each + of them to the word cloud. The splitting is done using a predefined algorithm in the parent helper. + + Args: + word (str): The word to be split and absorbed into the word cloud. + ninja (bool, optional): If True, word extraction is enabled. Defaults to True. + + Examples: + >>> self.helpers.word_cloud.absorb_word("blacklantern") + >>> print(self.helpers.word_cloud) + { + "blacklantern": 1, + "black": 1, + "bl": 1, + "lantern": 1 + } """ for w in self.parent_helper.extract_words(word): self.add_word(w) def add_word(self, word, lowercase=True): + """ + Adds a word to the word cloud. + + This method updates the word cloud by adding a given word. If the word already exists in the cloud, + its frequency count is incremented by 1. Optionally, the word can be converted to lowercase before adding. + + Args: + word (str): The word to be added to the word cloud. + lowercase (bool, optional): If True, the word will be converted to lowercase before adding. Defaults to True. + + Examples: + >>> self.helpers.word_cloud.add_word("Example") + >>> self.helpers.word_cloud.add_word("example") + >>> print(self.helpers.word_cloud) + {'example': 2} + """ if lowercase: word = word.lower() try: @@ -93,6 +213,34 @@ def add_word(self, word, lowercase=True): self[word] = 1 def get_number_mutations(self, base, n=5, padding=2): + """ + Generates mutations of a base string by modifying the numerical parts or appending numbers. + + This method detects existing numbers in the base string and tries incrementing and decrementing them within a + specified range. It also appends numbers at the end or after each word to generate more mutations. + + Args: + base (str): The base string to generate mutations from. + n (int, optional): The range of numbers to use for incrementing/decrementing. Defaults to 5. + padding (int, optional): Zero-pad numbers up to this length. Defaults to 2. + + Returns: + set: A set of mutated strings based on the base input. + + Examples: + >>> self.helpers.word_cloud.get_number_mutations("www2-test", n=2) + { + "www0-test", + "www1-test", + "www2-test", + "www2-test0", + "www2-test00", + "www2-test01", + "www2-test1", + "www3-test", + "www4-test" + } + """ results = set() # detects numbers and increments/decrements them @@ -136,11 +284,37 @@ def get_number_mutations(self, base, n=5, padding=2): return results def truncate(self, limit): + """ + Truncates the word cloud dictionary to retain only the top `limit` entries based on their occurrence frequencies. + + Args: + limit (int): The maximum number of entries to retain in the word cloud. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.truncate(2) + >>> self.helpers.word_cloud + {'cherry': 8, 'apple': 5} + """ new_self = dict(self.json(limit=limit)) self.clear() self.update(new_self) def json(self, limit=None): + """ + Returns the word cloud as a sorted OrderedDict, optionally truncated to the top `limit` entries. 
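+        Entries are sorted by occurrence count in descending order before any truncation is applied.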
+ + Args: + limit (int, optional): The maximum number of entries to include in the returned OrderedDict. If None, all entries are included. + + Returns: + OrderedDict: A dictionary sorted by word frequencies, potentially truncated to the top `limit` entries. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.json(limit=2) + OrderedDict([('cherry', 8), ('apple', 5)]) + """ cloud_sorted = sorted(self.items(), key=lambda x: x[-1], reverse=True) if limit is not None: cloud_sorted = cloud_sorted[:limit] @@ -151,6 +325,21 @@ def default_filename(self): return self.parent_helper.scan.home / f"wordcloud.tsv" def save(self, filename=None, limit=None): + """ + Saves the word cloud to a file. The cloud can optionally be truncated to the top `limit` entries. + + Args: + filename (str, optional): The path to the file where the word cloud will be saved. If None, uses a default filename. + limit (int, optional): The maximum number of entries to save to the file. If None, all entries are saved. + + Returns: + tuple: A tuple containing a boolean indicating success or failure, and the resolved filename. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.save(filename="word_cloud.txt", limit=2) + (True, Path('word_cloud.txt')) + """ if filename is None: filename = self.default_filename else: @@ -177,6 +366,13 @@ def save(self, filename=None, limit=None): return False, filename def load(self, filename=None): + """ + Loads a word cloud from a file. The file can be either a standard wordlist with one entry per line + or a .tsv (tab-separated) file where the first row is the count and the second row is the associated entry. + + Args: + filename (str, optional): The path to the file from which to load the word cloud. If None, uses a default filename. + """ if filename is None: wordcloud_path = self.default_filename else: @@ -207,6 +403,11 @@ def load(self, filename=None): class Mutator(dict): + """ + Base class for generating mutations from a list of words. + It accumulates words and produces mutations from them. + """ + def mutations(self, words, max_mutations=None): mutations = self.top_mutations(max_mutations) ret = set() @@ -249,6 +450,27 @@ def add_word(self, word): class DNSMutator(Mutator): + """ + DNS-specific mutator used by the `massdns` module to generate target-specific subdomain mutations. + + This class extends the Mutator base class to add DNS-specific logic for generating + subdomain mutations based on input words. It utilizes custom word extraction patterns + and a wordninja model trained on DNS-specific data. 
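+
+    Note:
+        Like its parent `Mutator` class, DNSMutator accumulates words over the course
+        of a scan and generates its mutations from that accumulated corpus.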
+ + Examples: + >>> s = Scanner("www1.evilcorp.com", "www-test.evilcorp.com") + >>> s.start_without_generator() + >>> s.helpers.word_cloud.dns_mutator.mutations("word") + [ + "word", + "word-test", + "word1", + "wordtest", + "www-word", + "wwwword" + ] + """ + extract_word_regexes = [ re.compile(r, re.I) for r in [ diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index c580c9e9d..7b0cda171 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class anubisdb(crobat): +class anubisdb(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index 8ba59dcfc..4fcf9d7d9 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -1,10 +1,10 @@ import re from contextlib import suppress -from .viewdns import viewdns +from bbot.modules.base import BaseModule -class azure_tenant(viewdns): +class azure_tenant(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "passive", "safe"] @@ -12,6 +12,7 @@ class azure_tenant(viewdns): base_url = "https://autodiscover-s.outlook.com" in_scope_only = True + per_domain_only = True async def setup(self): self.processed = set() diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 2057212b6..2470f060b 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -10,79 +10,117 @@ class BaseModule: - # Event types to watch + """The base class for all BBOT modules. + + Attributes: + watched_events (List): Event types to watch. + + produced_events (List): Event types to produce. + + meta (Dict): Metadata about the module, such as whether authentication is required and a description. + + flags (List): Flags indicating the type of module (must have at least "safe" or "aggressive" and "passive" or "active"). + + deps_pip (List): Python dependencies to install via pip. Empty list by default. + + deps_apt (List): APT package dependencies to install. Empty list by default. + + deps_shell (List): Other dependencies installed via shell commands. Uses [ansible.builtin.shell](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html). Empty list by default. + + deps_ansible (List): Additional Ansible tasks for complex dependencies. Empty list by default. + + accept_dupes (bool): Whether to accept incoming duplicate events. Default is False. + + suppress_dupes (bool): Whether to suppress outgoing duplicate events. Default is True. + + per_host_only (bool): Limit the module to only scanning once per host:port. Default is False. + + per_domain_only (bool): Limit the module to only scanning once per domain. Default is False. + + scope_distance_modifier (int, None): Modifies scope distance acceptance for events. Default is 0. + ``` + None == accept all events + 2 == accept events up to and including the scan's configured search distance plus two + 1 == accept events up to and including the scan's configured search distance plus one + 0 == (DEFAULT) accept events up to and including the scan's configured search distance + ``` + + target_only (bool): Accept only the initial target event(s). Default is False. + + in_scope_only (bool): Accept only explicitly in-scope events. Default is False. + + options (Dict): Customizable options for the module, e.g., {"api_key": ""}. 
Empty dict by default. + + options_desc (Dict): Descriptions for options, e.g., {"api_key": "API Key"}. Empty dict by default. + + max_event_handlers (int): Maximum concurrent instances of handle_event() or handle_batch(). Default is 1. + + batch_size (int): Size of batches processed by handle_batch(). Default is 1. + + batch_wait (int): Seconds to wait before force-submitting a batch. Default is 10. + + failed_request_abort_threshold (int): Threshold for setting error state after failed HTTP requests (only takes effect when `request_with_fail_count()` is used. Default is 5. + + _scope_shepherding (bool): When set to false, prevents events generated by this module from being automatically marked as in-scope. Default is True. Useful for low-confidence modules like speculate and ipneighbor. + + _stats_exclude (bool): Whether to exclude this module from scan statistics. Default is False. + + _qsize (int): Outgoing queue size (0 for infinite). Default is 0. + + _priority (int): Priority level of events raised by this module, 1-5. Default is 3. + + _name (str): Module name, overridden automatically. Default is 'base'. + + _type (str): Module type, for differentiating between normal and output modules. Default is 'scan'. + """ + watched_events = [] - # Event types to produce produced_events = [] - # Module description, etc. meta = {"auth_required": False, "description": "Base module"} - # Flags, must include either "passive" or "active" flags = [] + options = {} + options_desc = {} - # python dependencies (pip install ____) deps_pip = [] - # apt dependencies (apt install ____) deps_apt = [] - # other dependences as shell commands - # uses ansible.builtin.shell (https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html) deps_shell = [] - # list of ansible tasks for when other dependency installation methods aren't enough deps_ansible = [] - # Whether to accept incoming duplicate events + accept_dupes = False - # Whether to block outgoing duplicate events suppress_dupes = True - # Limit the module to only scanning once per host. By default, defined by event.host, but can be customized by overriding per_host_only = False - - # Scope distance modifier - accept/deny events based on scope distance - # None == accept all events - # 2 == accept events up to and including the scan's configured search distance plus two - # 1 == accept events up to and including the scan's configured search distance plus one - # 0 == (DEFAULT) accept events up to and including the scan's configured search distance - # -1 == accept events up to and including the scan's configured search distance minus one - # -2 == accept events up to and including the scan's configured search distance minus two + per_domain_only = False scope_distance_modifier = 0 - # Only accept the initial target event(s) target_only = False - # Only accept explicitly in-scope events (scope distance == 0) - # Use this options if your module is aggressive or if you don't want it to scale with - # the scan's search distance in_scope_only = False - # Options, e.g. {"api_key": ""} - options = {} - # Options description, e.g. 
{"api_key": "API Key"} - options_desc = {} - # Maximum concurrent instances of handle_event() or handle_batch() max_event_handlers = 1 - # Batch size - # If batch size > 1, override handle_batch() instead of handle_event() batch_size = 1 - # Seconds to wait before force-submitting batch batch_wait = 10 - # Use in conjunction with .request_with_fail_count() to set_error_state() after this many failed HTTP requests failed_request_abort_threshold = 5 - # When set to false, prevents events generated by this module from being automatically marked as in-scope - # Useful for low-confidence modules like speculate and ipneighbor + _scope_shepherding = True - # Exclude from scan statistics _stats_exclude = False - # outgoing queue size (0 == infinite) _qsize = 0 - # Priority of events raised by this module, 1-5, lower numbers == higher priority _priority = 3 - # Name, overridden automatically _name = "base" - # Type, for differentiating between normal modules and output modules, etc. _type = "scan" def __init__(self, scan): + """Initializes a module instance. + + Args: + scan: The BBOT scan object associated with this module instance. + + Attributes: + scan: The scan object associated with this module. + + errored (bool): Whether the module has errored out. Default is False. + """ self.scan = scan self.errored = False self._log = None self._incoming_event_queue = None - # seconds since we've submitted a batch self._outgoing_event_queue = None # seconds since we've submitted a batch self._last_submitted_batch = None @@ -107,66 +145,117 @@ def __init__(self, scan): self._per_host_tracker = set() async def setup(self): - """ - Perform setup functions at the beginning of the scan. - Optionally override this method. + """Asynchronously sets up the module at the beginning of the scan. + + This method can be overridden to perform any necessary setup logic. - Must return True or False based on whether the setup was successful + Returns: + bool or None: True if setup was successful. None for a soft-fail, which will produce a warning but not abort the scan. False for a hard-fail, which will abort the scan. """ return True async def handle_event(self, event): - """ - Override this method if batch_size == 1. + """Asynchronously handles incoming events that the module is configured to watch. + + This method is automatically invoked when an event that matches any in `watched_events` is encountered during a scan. Override this method to implement custom event-handling logic for your module. + + Args: + event (Event): The event object containing details about the incoming event. + + Note: + This method should be overridden if the `batch_size` attribute of the module is set to 1. + + Returns: + None """ pass def handle_batch(self, *events): - """ - Override this method if batch_size > 1. + """Handles incoming events in batches for optimized processing. + + This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module. + + Args: + *events (Event): A variable number of Event objects to be processed in a batch. + + Note: + This method should be overridden if the `batch_size` attribute of the module is set to a value greater than 1. + + Returns: + None """ pass async def filter_event(self, event): - """ - Accept/reject events based on custom criteria + """Asynchronously filters incoming events based on custom criteria. 
+ + Override this method for more granular control over which events are accepted by your module. This method is called automatically before `handle_event()` for each incoming event that matches any in `watched_events`. + + Args: + event (Event): The incoming Event object to be filtered. + + Returns: + tuple: A 2-tuple where the first value is a bool indicating whether the event should be accepted, and the second value is a string explaining the reason for its acceptance or rejection. By default, returns `(True, None)` to indicate acceptance without reason. - Override this method if you need more granular control - over which events are distributed to your module + Note: + This method should be overridden if the module requires custom logic for event filtering. """ return True async def finish(self): - """ - Perform final functions when scan is nearing completion + """Asynchronously performs final tasks as the scan nears completion. + + This method can be overridden to execute any necessary finalization logic. For example, if the module relies on a word cloud, you might wait for the scan to finish to ensure the word cloud is most complete before running an operation. - For example, if your module relies on the word cloud, you may choose to wait until - the scan is finished (and the word cloud is most complete) before running an operation. + Returns: + None - Note that this method may be called multiple times, because it may raise events. - Optionally override this method. + Warnings: + This method may be called multiple times since it can raise events, which may re-trigger the "finish" phase of the scan. Optional to override. """ return async def report(self): - """ - Perform a final task when the scan is finished, but before cleanup happens + """Asynchronously executes a final task after the scan is complete but before cleanup. - This is useful for modules that aggregate data and raise summary events at the end of a scan + This method can be overridden to aggregate data and raise summary events at the end of the scan. + + Returns: + None + + Note: + This method is called only once per scan. """ return async def cleanup(self): - """ - Perform final cleanup after the scan has finished - This method is called only once, and may not raise events. - Optionally override this method. + """Asynchronously performs final cleanup operations after the scan is complete. + + This method can be overridden to implement custom cleanup logic. It is called only once per scan and may not raise events. + + Returns: + None + + Note: + This method is called only once per scan and may not raise events. """ return async def require_api_key(self): """ - Use in setup() to ensure the module is configured with an API key + Asynchronously checks if an API key is required and valid. + + Args: + None + + Returns: + bool or tuple: Returns True if API key is valid and ready. + Returns a tuple (None, "error message") otherwise. + + Notes: + - Fetches the API key from the configuration. + - Calls the 'ping()' method to test API accessibility. + - Sets the API key readiness status accordingly. """ self.api_key = self.config.get("api_key", "") if self.auth_secret: @@ -180,55 +269,104 @@ async def require_api_key(self): return None, "No API key set" async def ping(self): - """ - Used in conjuction with require_api_key to ensure an API is up and responding + """Asynchronously checks the health of the configured API. - Requires the use of an assert statement. 
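# Illustrative sketch (hypothetical, not from the BBOT source): as documented
# in require_api_key() above, an API-backed module would typically call it
# from its setup() and return the result directly, since require_api_key()
# yields either True or a (None, "error message") tuple.
async def setup(self):
    return await self.require_api_key()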
+ This method is used in conjunction with require_api_key() to verify that the API is not just configured, but also responsive. This method should include an assert statement to validate the API's health, typically by making a test request to a known endpoint. - E.g. if your API has a "/ping" endpoint, you can use it like this: - def ping(self): - r = self.request_with_fail_count(f"{self.base_url}/ping") + Example Usage: + In your implementation, if the API has a "/ping" endpoint: + async def ping(self): + r = await self.request_with_fail_count(f"{self.base_url}/ping") resp_content = getattr(r, "text", "") assert getattr(r, "status_code", 0) == 200, resp_content + + Returns: + None + + Raises: + AssertionError: If the API does not respond as expected. """ return @property def auth_secret(self): - """ - Use this to indicate whether the module has everything it needs for authentication + """Indicates if the module is properly configured for authentication. + + This read-only property should be used to check whether all necessary attributes (e.g., API keys, tokens, etc.) are configured to perform authenticated requests in the module. Commonly used in setup or initialization steps. + + Returns: + bool: True if the module is properly configured for authentication, otherwise False. """ return getattr(self, "api_key", "") def get_watched_events(self): - """ - Override if you need your watched_events to be dynamic + """Retrieve the set of events that the module is interested in observing. + + Override this method if the set of events the module should watch needs to be determined dynamically, e.g., based on configuration options or other runtime conditions. + + Returns: + set: The set of event types that this module will handle. """ if self._watched_events is None: self._watched_events = set(self.watched_events) return self._watched_events async def _handle_batch(self): + """ + Asynchronously handles a batch of events in the module. + + Args: + None + + Returns: + bool: True if events were submitted for processing, False otherwise. + + Notes: + - The method is wrapped in a task counter to monitor asynchronous operations. + - Checks if there are any events in the incoming queue and module is not in an error state. + - Invokes '_events_waiting()' to fetch a batch of events. + - Calls the module's 'handle_batch()' method to process these events. + - If a "FINISHED" event is found, invokes 'finish()' method of the module. + """ finish = False async with self._task_counter.count(f"{self.name}.handle_batch()"): submitted = False if self.batch_size <= 1: return if self.num_incoming_events > 0: - events, finish = await self.events_waiting() + events, finish = await self._events_waiting() if events and not self.errored: self.debug(f"Handling batch of {len(events):,} events") submitted = True - async with self.scan.acatch(f"{self.name}.handle_batch()"): + async with self.scan._acatch(f"{self.name}.handle_batch()"): await self.handle_batch(*events) self.debug(f"Finished handling batch of {len(events):,} events") if finish: context = f"{self.name}.finish()" - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.finish() return submitted def make_event(self, *args, **kwargs): + """Create an event for the scan. + + Raises a validation error if the event could not be created, unless raise_error is set to False. + + Args: + *args: Positional arguments to be passed to the scan's make_event method. 
+ **kwargs: Keyword arguments to be passed to the scan's make_event method. + raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False. + + Examples: + >>> new_event = self.make_event("1.2.3.4", source=event) + >>> self.emit_event(new_event) + + Returns: + Event or None: The created event, or None if a validation error occurred and raise_error was False. + + Raises: + ValidationError: If the event could not be validated and raise_error is True. + """ raise_error = kwargs.pop("raise_error", False) try: event = self.scan.make_event(*args, **kwargs) @@ -242,6 +380,34 @@ def make_event(self, *args, **kwargs): return event def emit_event(self, *args, **kwargs): + """Emit an event to the event queue and distribute it to interested modules. + + This is how modules "return" data. + + The method first creates an event object by calling `self.make_event()` with the provided arguments. + Then, the event is queued for outgoing distribution using `self.queue_outgoing_event()`. + + Args: + *args: Positional arguments to be passed to `self.make_event()` for event creation. + **kwargs: Keyword arguments to be passed for event creation or configuration of the emit action. + ```markdown + - on_success_callback: Optional callback function to execute upon successful event emission. + - abort_if: Optional condition under which the event emission should be aborted. + - quick: Optional flag to indicate whether the event should be processed quickly. + ``` + + Examples: + >>> self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + + >>> new_event = self.make_event("1.2.3.4", source=event) + >>> self.emit_event(new_event) + + Returns: + None + + Raises: + ValidationError: If the event cannot be validated (handled in `self.make_event()`). + """ event_kwargs = dict(kwargs) emit_kwargs = {} for o in ("on_success_callback", "abort_if", "quick"): @@ -253,16 +419,42 @@ def emit_event(self, *args, **kwargs): self.queue_outgoing_event(event, **emit_kwargs) async def emit_event_wait(self, *args, **kwargs): - """ - Same as emit_event except we wait on the outgoing queue + """Emit an event to the event queue and await until there is space in the outgoing queue. + + This method is similar to `emit_event`, but it waits until there's sufficient space in the outgoing + event queue before emitting the event. It utilizes the queue size threshold defined in `self._qsize`. + + Args: + *args: Positional arguments to be passed to `emit_event()` for event creation. + **kwargs: Keyword arguments to be passed to `emit_event()` for event creation or configuration. + + Returns: + None + + See Also: + emit_event: For emitting an event without waiting on the queue size. """ while self.outgoing_event_queue.qsize() > self._qsize: await self.helpers.sleep(0.2) return self.emit_event(*args, **kwargs) - async def events_waiting(self): + async def _events_waiting(self): """ - yields all events in queue, up to maximum batch size + Asynchronously fetches events from the incoming_event_queue, up to a specified batch size. + + Args: + None + + Returns: + tuple: A tuple containing two elements: + - events (list): A list of acceptable events from the queue. + - finish (bool): A flag indicating if a "FINISHED" event is encountered. + + Notes: + - The method pulls events from incoming_event_queue using 'get_nowait()'. + - Events go through '_event_postcheck()' for validation. + - "FINISHED" events are handled differently and the finish flag is set to True. 
+ - If the queue is empty or the batch size is reached, the loop breaks. """ events = [] finish = False @@ -300,6 +492,27 @@ def start(self): self._tasks = [asyncio.create_task(self._worker()) for _ in range(self._max_event_handlers)] async def _setup(self): + """ + Asynchronously sets up the module by invoking its 'setup()' method. + + This method catches exceptions during setup, sets the module's error state if necessary, and determines the + status code based on the result of the setup process. + + Args: + None + + Returns: + tuple: A tuple containing the module's name, status (True for success, False for hard-fail, None for soft-fail), + and an optional status message. + + Raises: + Exception: Captured exceptions from the 'setup()' method are logged, but not propagated. + + Notes: + - The 'setup()' method can return either a simple boolean status or a tuple of status and message. + - A WordlistError exception triggers a soft-fail status. + - The debug log will contain setup status information for the module. + """ status_codes = {False: "hard-fail", None: "soft-fail", True: "success"} status = False @@ -322,7 +535,30 @@ async def _setup(self): return self.name, status, str(msg) async def _worker(self): - async with self.scan.acatch(context=self._worker): + """ + The core worker loop for the module, responsible for handling events from the incoming event queue. + + This method is a coroutine and is run asynchronously. Multiple instances can run simultaneously based on + the 'max_event_handlers' configuration. The worker dequeues events from 'incoming_event_queue', performs + necessary prechecks, and passes the event to the appropriate handler function. + + Args: + None + + Returns: + None + + Raises: + asyncio.CancelledError: If the worker is cancelled during its operation. + + Notes: + - The worker is sensitive to the 'stopping' flag of the scan. It will terminate if this flag is set. + - The worker handles backpressure by pausing when the outgoing event queue is full. + - Batch processing is supported and is activated when 'batch_size' > 1. + - Each event is subject to a post-check via '_event_postcheck()' to decide whether it should be handled. + - Special 'FINISHED' events trigger the 'finish()' method of the module. + """ + async with self.scan._acatch(context=self._worker): try: while not self.scan.stopping: # hold the reigns if our outgoing queue is full @@ -351,13 +587,13 @@ async def _worker(self): if acceptable: if event.type == "FINISHED": context = f"{self.name}.finish()" - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.finish() else: context = f"{self.name}.handle_event({event})" self.scan.stats.event_consumed(event, self) self.debug(f"Handling {event}") - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.handle_event(event) self.debug(f"Finished handling {event}") else: @@ -375,9 +611,33 @@ def max_scope_distance(self): def _event_precheck(self, event): """ - Check if an event should be accepted by the module - Used when putting an event INTO the modules' queue + Pre-checks an event to determine if it should be accepted by the module for queuing. + + This method is called when an event is about to be enqueued into the module's incoming event queue. 
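# Illustrative sketch (hypothetical, not from the BBOT source): per the
# _setup() notes above, setup() may return either a bare status or a
# (status, message) tuple; returning None produces a soft-fail warning
# without aborting the scan.
async def setup(self):
    if not self.config.get("api_key", ""):
        return None, "no API key configured"  # soft-fail: warn, but keep scanning
    return True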
+ It applies various filters such as special signal event types, module error state, watched event types, and more + to decide whether or not the event should be enqueued. + + Args: + event (Event): The event object to check. + + Returns: + tuple: A tuple (bool, str) where the bool indicates if the event should be accepted, and the str gives the reason. + + Examples: + >>> result, reason = self._event_precheck(event) + >>> if result: + ... self.incoming_event_queue.put_nowait(event) + ... else: + ... self.debug(f"Not accepting {event} because {reason}") + + Notes: + - The method considers special signal event types like "FINISHED". + - Checks whether the module is in an error state. + - Checks if the event type matches the types this module is interested in (`watched_events`). + - Checks for events tagged as 'target' if the module has `target_only` flag set. + - Applies specific filtering based on event type and module name. """ + # special signal event types if event.type in ("FINISHED",): return True, "its type is FINISHED" @@ -409,8 +669,29 @@ def _event_precheck(self, event): async def _event_postcheck(self, event): """ - Check if an event should be accepted by the module - Used when taking an event FROM the module's queue (immediately before it's handled) + Post-checks an event to determine if it should be accepted by the module for handling. + + This method is called when an event is dequeued from the module's incoming event queue, right before it is actually processed. + It applies various filters such as scope, custom filtering logic, and per-host tracking to decide the event's fate. + + Args: + event (Event): The event object to check. + + Returns: + tuple: A tuple (bool, str) where the bool indicates if the event should be accepted, and the str gives the reason. + + Examples: + >>> async def custom_filter(event): + ... if event.data not in ["evilcorp.com"]: + ... return False, "it's not on the cool list" + ... + >>> self.filter_event = custom_filter + >>> result, reason = await self._event_postcheck(event) + + Notes: + - Override the `filter_event` method for custom filtering logic. + - This method also maintains host-based tracking when the `per_host_only` flag is set. + - The method will also update event production stats for output modules. 
""" # special exception for "FINISHED" event if event.type in ("FINISHED",): @@ -427,7 +708,7 @@ async def _event_postcheck(self, event): return filter_result, reason # custom filtering - async with self.scan.acatch(context=self.filter_event): + async with self.scan._acatch(context=self.filter_event): filter_result = await self.filter_event(event) msg = str(self._custom_filter_criteria_msg) with suppress(ValueError, TypeError): @@ -437,10 +718,18 @@ async def _event_postcheck(self, event): return False, msg if self.per_host_only: - if self.get_per_host_hash(event) in self._per_host_tracker: + _hash = self.get_per_host_hash(event) + if _hash in self._per_host_tracker: return False, "per_host_only enabled and already seen host" else: - self._per_host_tracker.add(self.get_per_host_hash(event)) + self._per_host_tracker.add(_hash) + + if self.per_domain_only: + _hash = self.get_per_domain_hash(event) + if _hash in self._per_host_tracker: + return False, "per_domain_only enabled and already seen domain" + else: + self._per_host_tracker.add(_hash) if self._type == "output" and not event._stats_recorded: event._stats_recorded = True @@ -469,12 +758,27 @@ async def _cleanup(self): for callback in [self.cleanup] + self.cleanup_callbacks: context = f"{self.name}.cleanup()" if callable(callback): - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.helpers.execute_sync_or_async(callback) async def queue_event(self, event): """ - Queue (incoming) event with module + Asynchronously queues an incoming event to the module's event queue for further processing. + + The function performs an initial check to see if the event is acceptable for queuing. + If the event passes the check, it is put into the `incoming_event_queue`. + + Args: + event: The event object to be queued. + + Returns: + None: The function doesn't return anything but modifies the state of the `incoming_event_queue`. + + Examples: + >>> await self.queue_event(some_event) + + Raises: + AttributeError: If the module is not in an acceptable state to queue incoming events. """ async with self._task_counter.count("queue_event()", _log=False): if self.incoming_event_queue is False: @@ -498,7 +802,23 @@ async def queue_event(self, event): def queue_outgoing_event(self, event, **kwargs): """ - Queue (outgoing) event with module + Queues an outgoing event to the module's outgoing event queue for further processing. + + The function attempts to put the event into the `outgoing_event_queue` immediately. + If it's not possible due to the current state of the module, an AttributeError is raised, and a debug log is generated. + + Args: + event: The event object to be queued. + **kwargs: Additional keyword arguments to be associated with the event. + + Returns: + None: The function doesn't return anything but modifies the state of the `outgoing_event_queue`. + + Examples: + >>> self.queue_outgoing_event(some_outgoing_event, abort_if=lambda e: "unresolved" in e.tags) + + Raises: + AttributeError: If the module is not in an acceptable state to queue outgoing events. """ try: self.outgoing_event_queue.put_nowait((event, kwargs)) @@ -506,6 +826,26 @@ def queue_outgoing_event(self, event, **kwargs): self.debug(f"Not in an acceptable state to queue outgoing event") def set_error_state(self, message=None): + """ + Puts the module into an errored state where it cannot accept new events. Optionally logs a warning message. 
+ + The function sets the module's `errored` attribute to True and logs a warning with the optional message. + It also clears the incoming event queue to prevent further processing and updates its status to False. + + Args: + message (str, optional): Additional message to be logged along with the warning. + + Returns: + None: The function doesn't return anything but updates the `errored` state and clears the incoming event queue. + + Examples: + >>> self.set_error_state() + >>> self.set_error_state("Failed to connect to the server") + + Notes: + - The function sets `self._incoming_event_queue` to False to prevent its further use. + - If the module was already in an errored state, the function will not reset the error state or the queue. + """ if not self.errored: log_msg = f"Setting error state for module {self.name}" if message is not None: @@ -522,8 +862,27 @@ def set_error_state(self, message=None): # if there are leftover objects in the queue, the scan will hang. self._incoming_event_queue = False - # override in the module to define different values to comprise the hash def get_per_host_hash(self, event): + """ + Computes a per-host hash value for a given event. This method may be optionally overridden in subclasses. + + The function uses the event's `host` and `port` or the parsed URL to create a string to be hashed. + The hash value is used for distinguishing events related to the same host. + + Args: + event (Event): The event object containing host, port, or parsed URL information. + + Returns: + int: The hash value computed for the host. + + Examples: + >>> event = self.make_event("https://example.com:8443") + >>> self.get_per_host_hash(event) + + Notes: + - To change the behavior, override this method in your custom module. + - The hash value is dependent on the `host` and `port` or the `parsed` attribute in the event object. + """ parsed = getattr(event, "parsed", None) if parsed is None: to_hash = self.helpers.make_netloc(event.host, event.port) @@ -531,6 +890,25 @@ def get_per_host_hash(self, event): to_hash = f"{parsed.scheme}://{parsed.netloc}/" return hash(to_hash) + def get_per_domain_hash(self, event): + """ + Computes a per-domain hash value for a given event. This method may be optionally overridden in subclasses. + + Events with the same root domain will receive the same hash value. + + Args: + event (Event): The event object containing host, port, or parsed URL information. + + Returns: + int: The hash value computed for the domain. + + Examples: + >>> event = self.make_event("https://www.example.com:8443") + >>> self.get_per_domain_hash(event) + """ + _, domain = self.helpers.split_domain(event.host) + return hash(domain) + @property def name(self): return str(self._name) @@ -541,6 +919,22 @@ def helpers(self): @property def status(self): + """ + Provides the current status of the module as a dictionary. + + The dictionary contains the following keys: + - 'events': A sub-dictionary with 'incoming' and 'outgoing' keys, representing the number of events in the respective queues. + - 'tasks': The current value of the task counter. + - 'errored': A boolean value indicating if the module is in an error state. + - 'running': A boolean value indicating if the module is currently processing data. + + Returns: + dict: A dictionary containing the current status of the module. 
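# Illustrative sketch (hypothetical, not from the BBOT source): overriding
# get_per_host_hash() as described above so that per_host_only deduplicates
# on hostname alone, ignoring the port.
def get_per_host_hash(self, event):
    return hash(str(event.host))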
+ + Examples: + >>> self.status + {'events': {'incoming': 5, 'outgoing': 2}, 'tasks': 3, 'errored': False, 'running': True} + """ status = { "events": {"incoming": self.num_incoming_events, "outgoing": self.outgoing_event_queue.qsize()}, "tasks": self._task_counter.value, @@ -551,19 +945,47 @@ def status(self): @property def running(self): - """ - Indicates whether the module is currently processing data. + """Property indicating whether the module is currently processing data. + + This property checks if the task counter (`self._task_counter.value`) is greater than zero, + indicating that there are ongoing tasks in the module. + + Returns: + bool: True if the module is currently processing data, False otherwise. """ return self._task_counter.value > 0 @property def finished(self): - """ - Indicates whether the module is finished (not running and nothing in queues) + """Property indicating whether the module has finished processing. + + This property checks three conditions to determine if the module is finished: + 1. The module is not currently running (`self.running` is False). + 2. The number of incoming events in the queue is zero or less (`self.num_incoming_events <= 0`). + 3. The number of outgoing events in the queue is zero or less (`self.outgoing_event_queue.qsize() <= 0`). + + Returns: + bool: True if the module has finished processing, False otherwise. """ return not self.running and self.num_incoming_events <= 0 and self.outgoing_event_queue.qsize() <= 0 async def request_with_fail_count(self, *args, **kwargs): + """Asynchronously perform an HTTP request while keeping track of consecutive failures. + + This function wraps the `self.helpers.request` method, incrementing a failure counter if + the request returns None. When the failure counter exceeds `self.failed_request_abort_threshold`, + the module is set to an error state. + + Args: + *args: Positional arguments to pass to `self.helpers.request`. + **kwargs: Keyword arguments to pass to `self.helpers.request`. + + Returns: + Any: The response object or None if the request failed. + + Raises: + None: Sets the module to an error state when the failure threshold is reached. + """ r = await self.helpers.request(*args, **kwargs) if r is None: self._request_failures += 1 @@ -573,17 +995,16 @@ async def request_with_fail_count(self, *args, **kwargs): self.set_error_state(f"Setting error state due to {self._request_failures:,} failed HTTP requests") return r - def is_spider_danger(self, source_event, url): - url_depth = self.helpers.url_depth(url) - web_spider_depth = self.scan.config.get("web_spider_depth", 1) - spider_distance = getattr(source_event, "web_spider_distance", 0) + 1 - web_spider_distance = self.scan.config.get("web_spider_distance", 0) - if (url_depth > web_spider_depth) or (spider_distance > web_spider_distance): - return True - return False - @property def config(self): + """Property that provides easy access to the module's configuration in the scan's config. + + This property serves as a shortcut to retrieve the module-specific configuration from + `self.scan.config`. If no configuration is found for this module, an empty dictionary is returned. + + Returns: + dict: The configuration dictionary specific to this module. + """ config = self.scan.config.get("modules", {}).get(self.name, {}) if config is None: config = {} @@ -603,12 +1024,32 @@ def outgoing_event_queue(self): @property def priority(self): + """ + Gets the priority level of the module as an integer. 
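# Illustrative sketch (hypothetical, not from the BBOT source): the
# request_with_fail_count() pattern documented above; the URL path is an
# assumed example. A None response counts toward
# failed_request_abort_threshold before the module errors out.
async def handle_event(self, event):
    r = await self.request_with_fail_count(f"{self.base_url}/api/{event.data}")
    if r is None:
        return  # failure already tallied; module errors out past the threshold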
+ + The priority level is constrained to be between 1 and 5, inclusive. + A lower value indicates a higher priority. + + Returns: + int: The priority level of the module, constrained between 1 and 5. + + Examples: + >>> self.priority + 3 + """ return int(max(1, min(5, self._priority))) @property def auth_required(self): return self.meta.get("auth_required", False) + @property + def http_timeout(self): + """ + Convenience shortcut to `http_timeout` in the config + """ + return self.scan.config.get("http_timeout", 10) + @property def log(self): if getattr(self, "_log", None) is None: @@ -617,8 +1058,15 @@ def log(self): @property def memory_usage(self): - """ - Return how much memory the module is currently using in bytes + """Property that calculates the current memory usage of the module in bytes. + + This property uses the `get_size` function to estimate the memory consumption + of the module object. The depth of the object graph traversal is limited to 3 levels + to avoid performance issues. Commonly shared objects like `self.scan`, `self.helpers`, + are excluded from the calculation to prevent double-counting. + + Returns: + int: The estimated memory usage of the module in bytes. """ seen = {self.scan, self.helpers, self.log} # noqa return get_size(self, max_depth=3, seen=seen) @@ -627,6 +1075,21 @@ def __str__(self): return self.name def log_table(self, *args, **kwargs): + """Logs a table to the console and optionally writes it to a file. + + This function generates a table using `self.helpers.make_table`, then logs each line + of the table as an info-level log. If a table_name is provided, it also writes the table to a file. + + Args: + *args: Variable length argument list to be passed to `self.helpers.make_table`. + **kwargs: Arbitrary keyword arguments. If 'table_name' is specified, the table will be written to a file. + + Returns: + str: The generated table as a string. + + Examples: + >>> self.log_table(['Header1', 'Header2'], [['row1col1', 'row1col2'], ['row2col1', 'row2col2']], table_name="my_table") + """ table_name = kwargs.pop("table_name", None) table = self.helpers.make_table(*args, **kwargs) for line in table.splitlines(): @@ -640,64 +1103,208 @@ def log_table(self, *args, **kwargs): return table def stdout(self, *args, **kwargs): + """Writes log messages directly to standard output. + + This is typically reserved for output modules only, e.g. `human` or `json`. + + Args: + *args: Variable length argument list to be passed to `self.log.stdout`. + **kwargs: Arbitrary keyword arguments to be passed to `self.log.stdout`. + + Examples: + >>> self.stdout("This will be printed to stdout") + """ self.log.stdout(*args, extra={"scan_id": self.scan.id}, **kwargs) def debug(self, *args, trace=False, **kwargs): + """Logs debug messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.debug("This is a debug message") + >>> self.debug("This is a debug message with a trace", trace=True) + """ self.log.debug(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def verbose(self, *args, trace=False, **kwargs): + """Logs messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. 
+ trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.verbose("This is a verbose message") + >>> self.verbose("This is a verbose message with a trace", trace=True) + """ self.log.verbose(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugeverbose(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened white text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.hugeverbose("This is a huge verbose message") + >>> self.hugeverbose("This is a huge verbose message with a trace", trace=True) + """ self.log.hugeverbose(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def info(self, *args, trace=False, **kwargs): + """Logs informational messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.info("This is an informational message") + >>> self.info("This is an informational message with a trace", trace=True) + """ self.log.info(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugeinfo(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened blue text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.hugeinfo("This is a huge informational message") + >>> self.hugeinfo("This is a huge informational message with a trace", trace=True) + """ self.log.hugeinfo(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def success(self, *args, trace=False, **kwargs): + """Logs a success message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.success("Operation completed successfully") + >>> self.success("Operation completed with a trace", trace=True) + """ self.log.success(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugesuccess(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened green text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. 
+ + Examples: + >>> self.hugesuccess("This is a huge success message") + >>> self.hugesuccess("This is a huge success message with a trace", trace=True) + """ self.log.hugesuccess(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def warning(self, *args, trace=True, **kwargs): + """Logs a warning message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.warning("This is a warning message") + >>> self.warning("This is a warning message with a trace", trace=False) + """ self.log.warning(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugewarning(self, *args, trace=True, **kwargs): + """Logs a whole message in emboldened orange text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.hugewarning("This is a huge warning message") + >>> self.hugewarning("This is a huge warning message with a trace", trace=False) + """ self.log.hugewarning(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def error(self, *args, trace=True, **kwargs): + """Logs an error message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.error("This is an error message") + >>> self.error("This is an error message with a trace", trace=False) + """ self.log.error(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def trace(self): + """Logs the stack trace of the most recently caught exception. + + This method captures the type, value, and traceback of the most recent exception and logs it using the trace level. It is typically used for debugging purposes. + + Anything logged using this method will always be written to the scan's `debug.log`, even if debugging is not enabled. + + Examples: + >>> try: + >>> 1 / 0 + >>> except ZeroDivisionError: + >>> self.trace() + """ e_type, e_val, e_traceback = exc_info() if e_type is not None: self.log.trace(traceback.format_exc()) def critical(self, *args, trace=True, **kwargs): + """Logs a whole message in emboldened red text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. 
+ + Examples: + >>> self.critical("This is a critical message") + >>> self.critical("This is a critical message with a trace", trace=False) + """ self.log.critical(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index 87d81d838..0b54d40f4 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class bevigil(shodan_dns): +class bevigil(subdomain_enum_apikey): """ Retrieve OSINT data from mobile applications using BeVigil """ diff --git a/bbot/modules/binaryedge.py b/bbot/modules/binaryedge.py index 637585f9f..64970c861 100644 --- a/bbot/modules/binaryedge.py +++ b/bbot/modules/binaryedge.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class binaryedge(shodan_dns): +class binaryedge(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py index 4adcd2cb0..25a46ddf5 100644 --- a/bbot/modules/builtwith.py +++ b/bbot/modules/builtwith.py @@ -10,10 +10,10 @@ # # ############################################################ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class builtwith(shodan_dns): +class builtwith(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/c99.py b/bbot/modules/c99.py index 5b0179def..8e05a1c4b 100644 --- a/bbot/modules/c99.py +++ b/bbot/modules/c99.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class c99(shodan_dns): +class c99(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/censys.py b/bbot/modules/censys.py index ed7a62f3b..339f10bf7 100644 --- a/bbot/modules/censys.py +++ b/bbot/modules/censys.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class censys(shodan_dns): +class censys(subdomain_enum_apikey): """ thanks to https://github.com/owasp-amass/amass/blob/master/resources/scripts/cert/censys.ads """ diff --git a/bbot/modules/certspotter.py b/bbot/modules/certspotter.py index d943bf8eb..4441b9d98 100644 --- a/bbot/modules/certspotter.py +++ b/bbot/modules/certspotter.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class certspotter(crobat): +class certspotter(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/chaos.py b/bbot/modules/chaos.py index f247ce18d..3eb763573 100644 --- a/bbot/modules/chaos.py +++ b/bbot/modules/chaos.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class chaos(shodan_dns): +class chaos(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/columbus.py 
b/bbot/modules/columbus.py index 057a35c90..2e8901359 100644 --- a/bbot/modules/columbus.py +++ b/bbot/modules/columbus.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class columbus(crobat): +class columbus(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/crobat.py b/bbot/modules/crobat.py index 5aecf3f6e..7ece44fdb 100644 --- a/bbot/modules/crobat.py +++ b/bbot/modules/crobat.py @@ -1,145 +1,10 @@ -from bbot.modules.base import BaseModule +from bbot.modules.templates.subdomain_enum import subdomain_enum -class crobat(BaseModule): - """ - A typical free API-based subdomain enumeration module - Inherited by several other modules including sublist3r, dnsdumpster, etc. - """ - +class crobat(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] # tag "subdomain-enum" removed 2023-02-24 because API is offline flags = ["passive", "safe"] meta = {"description": "Query Project Crobat for subdomains"} - base_url = "https://sonar.omnisint.io" - # set module error state after this many failed requests in a row - abort_after_failures = 5 - # whether to reject wildcard DNS_NAMEs - reject_wildcards = "strict" - # this helps combat rate limiting by ensuring that a query doesn't execute - # until the queue is ready to receive its results - _qsize = 1 - - async def setup(self): - self.processed = set() - self.http_timeout = self.scan.config.get("http_timeout", 10) - self._failures = 0 - return True - - async def _is_wildcard(self, query): - if self.helpers.is_dns_name(query): - for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query)).items(): - if any(t in wildcard_rdtypes for t in ("A", "AAAA", "CNAME")): - return True - return False - - async def filter_event(self, event): - """ - This filter_event is used across many modules - """ - query = self.make_query(event) - # reject if already processed - if self.already_processed(query): - return False, "Event was already processed" - eligible, reason = await self.eligible_for_enumeration(event) - if eligible: - self.processed.add(hash(query)) - return True, reason - return False, reason - - async def eligible_for_enumeration(self, event): - query = self.make_query(event) - # check if wildcard - is_wildcard = await self._is_wildcard(query) - # check if cloud - is_cloud = False - if any(t.startswith("cloud-") for t in event.tags): - is_cloud = True - # reject if it's a cloud resource and not in our target - if is_cloud and event not in self.scan.target: - return False, "Event is a cloud resource and not a direct target" - # optionally reject events with wildcards / errors - if self.reject_wildcards: - if any(t in event.tags for t in ("a-error", "aaaa-error")): - return False, "Event has a DNS resolution error" - if self.reject_wildcards == "strict": - if is_wildcard: - return False, "Event is a wildcard domain" - elif self.reject_wildcards == "cloud_only": - if is_wildcard and is_cloud: - return False, "Event is both a cloud resource and a wildcard domain" - return True, "" - - def already_processed(self, hostname): - for parent in self.helpers.domain_parents(hostname, include_self=True): - if hash(parent) in self.processed: - return True - return False - - async def abort_if(self, event): - # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains - if "in-scope" not in event.tags: - return True - if await 
self._is_wildcard(event.data): - return True - return False - - async def handle_event(self, event): - query = self.make_query(event) - results = await self.query(query) - if results: - for hostname in set(results): - if hostname: - try: - hostname = self.helpers.validators.validate_host(hostname) - except ValueError as e: - self.verbose(e) - continue - if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) - - async def request_url(self, query): - url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" - return await self.request_with_fail_count(url) - - def make_query(self, event): - if "target" in event.tags: - query = str(event.data) - else: - query = self.helpers.parent_domain(event.data).lower() - return ".".join([s for s in query.split(".") if s != "_wildcard"]) - - def parse_results(self, r, query=None): - json = r.json() - if json: - for hostname in json: - yield hostname - - async def query(self, query, parse_fn=None, request_fn=None): - if parse_fn is None: - parse_fn = self.parse_results - if request_fn is None: - request_fn = self.request_url - try: - response = await request_fn(query) - if response is None: - self.info(f'Query "{query}" failed (no response)') - return [] - try: - results = list(parse_fn(response, query)) - except Exception as e: - if response: - self.info( - f'Error parsing results for query "{query}" (status code {response.status_code})', trace=True - ) - self.log.trace(response.text) - else: - self.info(f'Error parsing results for "{query}": {e}', trace=True) - return - if results: - return results - self.debug(f'No results for "{query}"') - except Exception as e: - self.info(f"Error retrieving results for {query}: {e}", trace=True) diff --git a/bbot/modules/crt.py b/bbot/modules/crt.py index b2b5837c3..9773f72d4 100644 --- a/bbot/modules/crt.py +++ b/bbot/modules/crt.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class crt(crobat): +class crt(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/digitorus.py b/bbot/modules/digitorus.py index 3047652b2..0da487744 100644 --- a/bbot/modules/digitorus.py +++ b/bbot/modules/digitorus.py @@ -1,9 +1,9 @@ import re -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class digitorus(crobat): +class digitorus(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index 00a557a3a..8bb1fa1ed 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -1,10 +1,10 @@ import re from bs4 import BeautifulSoup -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class dnsdumpster(crobat): +class dnsdumpster(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 82b579744..3fd47ee2d 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -1,12 +1,13 @@ -from .viewdns import viewdns +from bbot.modules.base import BaseModule -class emailformat(viewdns): +class emailformat(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = 
["passive", "email-enum", "safe"] meta = {"description": "Query email-format.com for email addresses"} in_scope_only = False + per_domain_only = True base_url = "https://www.email-format.com" diff --git a/bbot/modules/filedownload.py b/bbot/modules/filedownload.py new file mode 100644 index 000000000..8b61dbdd8 --- /dev/null +++ b/bbot/modules/filedownload.py @@ -0,0 +1,165 @@ +import json +from pathlib import Path + +from bbot.modules.base import BaseModule + + +class filedownload(BaseModule): + """ + Watch for common filetypes and download them. + + Capable of identifying interesting files even if the extension is not in the URL. + E.g. if a PDF is being served at https://evilcorp.com/mypdf, it will still be downloaded and given the proper extension. + """ + + watched_events = ["URL_UNVERIFIED", "HTTP_RESPONSE"] + produced_events = [] + flags = ["active", "safe"] + meta = {"description": "Download common filetypes such as PDF, DOCX, PPTX, etc."} + options = { + "extensions": [ + "bak", # Backup File + "bash", # Bash Script or Configuration + "bashrc", # Bash Script or Configuration + "conf", # Configuration File + "cfg", # Configuration File + "crt", # Certificate File + "csv", # Comma Separated Values File + "db", # SQLite Database File + "sqlite", # SQLite Database File + "doc", # Microsoft Word Document (Old Format) + "docx", # Microsoft Word Document + "exe", # Windows PE executable + "ica", # Citrix Independent Computing Architecture File + "indd", # Adobe InDesign Document + "ini", # Initialization File + "jar", # Java Archive + "key", # Private Key File + "pub", # Public Key File + "log", # Log File + "markdown", # Markdown File + "md", # Markdown File + "msi", # Windows setup file + "odg", # OpenDocument Graphics (LibreOffice, OpenOffice) + "odp", # OpenDocument Presentation (LibreOffice, OpenOffice) + "ods", # OpenDocument Spreadsheet (LibreOffice, OpenOffice) + "odt", # OpenDocument Text (LibreOffice, OpenOffice) + "pdf", # Adobe Portable Document Format + "pem", # Privacy Enhanced Mail (SSL certificate) + "png", # Portable Network Graphics Image + "pps", # Microsoft PowerPoint Slideshow (Old Format) + "ppsx", # Microsoft PowerPoint Slideshow + "ppt", # Microsoft PowerPoint Presentation (Old Format) + "pptx", # Microsoft PowerPoint Presentation + "ps1", # PowerShell Script + "raw", # Raw Image File Format + "rdp", # Remote Desktop Protocol File + "sh", # Shell Script + "sql", # SQL Database Dump + "swp", # Swap File (temporary file, often Vim) + "sxw", # OpenOffice.org Writer document + "tar", # Tar Archive + "tar.gz", # Gzip-Compressed Tar Archive + "zip", # Zip Archive + "txt", # Plain Text Document + "vbs", # Visual Basic Script + "wpd", # WordPerfect Document + "xls", # Microsoft Excel Spreadsheet (Old Format) + "xlsx", # Microsoft Excel Spreadsheet + "xml", # eXtensible Markup Language File + "yml", # YAML Ain't Markup Language + "yaml", # YAML Ain't Markup Language + ], + "max_filesize": "10MB", + } + options_desc = { + "extensions": "File extensions to download", + "max_filesize": "Cancel download if filesize is greater than this size", + } + + scope_distance_modifier = 1 + + async def setup(self): + self.extensions = list(set([e.lower().strip(".") for e in self.options.get("extensions", [])])) + self.max_filesize = self.options.get("max_filesize", "10MB") + self.download_dir = self.scan.home / "filedownload" + self.helpers.mkdir(self.download_dir) + self.files_downloaded = set() + self.mime_db_file = await self.helpers.wordlist( + 
"https://raw.githubusercontent.com/jshttp/mime-db/master/db.json" + ) + self.mime_db = {} + with open(self.mime_db_file) as f: + mime_db = json.load(f) + for content_type, attrs in mime_db.items(): + if "extensions" in attrs and attrs["extensions"]: + self.mime_db[content_type] = attrs["extensions"][0].lower() + return True + + async def filter_event(self, event): + # accept file download requests from other modules + if "filedownload" in event.tags: + return True + h = self.hash_event(event) + if h in self.files_downloaded: + return False, f"Already processed {event}" + return True + + def hash_event(self, event): + if event.type == "HTTP_RESPONSE": + return hash(event.data["url"]) + return hash(event.data) + + async def handle_event(self, event): + if event.type == "URL_UNVERIFIED": + url_lower = event.data.lower() + if any(url_lower.endswith(f".{e}") for e in self.extensions): + await self.download_file(event.data) + elif event.type == "HTTP_RESPONSE": + content_type = event.data["header"].get("content_type", "") + if content_type: + url = event.data["url"] + await self.download_file(url, content_type=content_type) + + async def download_file(self, url, content_type=None): + orig_filename, file_destination, base_url = self.make_filename(url, content_type=content_type) + if orig_filename is None: + return + result = await self.helpers.download(url, warn=False, filename=file_destination, max_size=self.max_filesize) + if result: + self.info(f'Found "{orig_filename}" at "{base_url}", downloaded to {file_destination}') + self.files_downloaded.add(hash(url)) + + def make_filename(self, url, content_type=None): + # first, try to determine original filename + parsed_url = self.helpers.urlparse(url) + base_url = f"{parsed_url.scheme}://{parsed_url.netloc}" + url_path = parsed_url.path.strip("/") + # try to get extension from URL path + extension = Path(url_path).suffix.strip(".").lower() + if extension: + url_stem = url.rsplit(".", 1)[0] + else: + url_stem = str(url) + filename = f"{self.helpers.make_date()}_{self.helpers.tagify(url_stem)}" + if not url_path: + url_path = "unknown" + filename = f"{filename}-{url_path}" + # if that fails, try to get it from content type + if not extension: + if content_type and content_type in self.mime_db: + extension = self.mime_db[content_type] + + if (not extension) or (extension not in self.extensions): + self.debug(f'Extension "{extension}" at url "{url}" not in list of watched extensions.') + return None, None, None + + orig_filename = Path(url_path).stem + if extension: + filename = f"{filename}.{extension}" + orig_filename = f"{orig_filename}.{extension}" + return orig_filename, self.download_dir / filename, base_url + + async def report(self): + if self.files_downloaded: + self.success(f"Downloaded {len(self.files_downloaded):,} file(s) to {self.download_dir}") diff --git a/bbot/modules/fullhunt.py b/bbot/modules/fullhunt.py index 8bc5d2326..1485dc6b5 100644 --- a/bbot/modules/fullhunt.py +++ b/bbot/modules/fullhunt.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class fullhunt(shodan_dns): +class fullhunt(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/github.py b/bbot/modules/github.py index f6a933ea5..25ef862ef 100644 --- a/bbot/modules/github.py +++ b/bbot/modules/github.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from 
bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class github(shodan_dns): +class github(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] flags = ["passive", "subdomain-enum", "safe"] diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 2ae676134..f19c5ed49 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -148,7 +148,7 @@ async def handle_batch(self, *events): _id = row["url_id"] source_url = self.screenshots_taken[_id] source_event = events[source_url] - if self.is_spider_danger(source_event, url): + if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) diff --git a/bbot/modules/hackertarget.py b/bbot/modules/hackertarget.py index d6c3b4e3b..d23f5c6cf 100644 --- a/bbot/modules/hackertarget.py +++ b/bbot/modules/hackertarget.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class hackertarget(crobat): +class hackertarget(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 8bb9f7474..1e65c6e4c 100644 --- a/bbot/modules/hunterio.py +++ b/bbot/modules/hunterio.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class hunterio(shodan_dns): +class hunterio(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS", "DNS_NAME", "URL_UNVERIFIED"] flags = ["passive", "email-enum", "subdomain-enum", "safe"] diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 720b9b96b..51b8a4dc2 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -78,7 +78,7 @@ async def search(self, content, event, **kwargs): url_event = self.report(result, name, event, **kwargs) if url_event is not None: url_in_scope = self.excavate.scan.in_scope(url_event) - is_spider_danger = self.excavate.is_spider_danger(event, result) + is_spider_danger = self.excavate.helpers.is_spider_danger(event, result) if ( ( urls_found >= self.web_spider_links_per_page and url_in_scope diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index 823ede94d..4a203e55b 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.base import BaseModule -class IP2Location(shodan_dns): +class IP2Location(BaseModule): """ IP2Location.io Geolocation API. """ @@ -21,12 +21,10 @@ class IP2Location(shodan_dns): base_url = "http://api.ip2location.io" - async def filter_event(self, event): - return True - async def setup(self): + await self.require_api_key() self.lang = self.config.get("lang", "") - return await super().setup() + return True async def ping(self): url = self.build_url("8.8.8.8") diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index 22dce58be..031ac272c 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.base import BaseModule -class Ipstack(shodan_dns): +class Ipstack(BaseModule): """ Ipstack GeoIP Leverages the ipstack.com API to geolocate a host by IP address. 
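Both geolocation modules (ip2location, ipstack) now inherit directly from BaseModule and validate their API key during setup(). A minimal sketch of that pattern follows; the module name and metadata are illustrative, not part of the diff:

```python
from bbot.modules.base import BaseModule


class mygeoip(BaseModule):
    """Hypothetical geolocation module illustrating the new setup pattern."""

    watched_events = ["IP_ADDRESS"]
    produced_events = ["GEOLOCATION"]
    flags = ["passive", "safe"]
    meta = {"description": "Example GeoIP module", "auth_required": True}
    options = {"api_key": ""}
    options_desc = {"api_key": "API key"}

    async def setup(self):
        # require_api_key() pulls `api_key` from the module config and
        # fails setup with a helpful message if it's missing or invalid
        await self.require_api_key()
        return True
```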
@@ -10,7 +10,7 @@ class Ipstack(shodan_dns): watched_events = ["IP_ADDRESS"] produced_events = ["GEOLOCATION"] flags = ["passive", "safe"] - meta = {"description": "Query IPStack's API for GeoIP ", "auth_required": True} + meta = {"description": "Query IPStack's GeoIP API", "auth_required": True} options = {"api_key": ""} options_desc = {"api_key": "IPStack GeoIP API Key"} scope_distance_modifier = 1 @@ -19,7 +19,8 @@ class Ipstack(shodan_dns): base_url = "http://api.ipstack.com" - async def filter_event(self, event): + async def setup(self): + await self.require_api_key() return True async def ping(self): diff --git a/bbot/modules/leakix.py b/bbot/modules/leakix.py index 4ebf89570..45053755a 100644 --- a/bbot/modules/leakix.py +++ b/bbot/modules/leakix.py @@ -1,8 +1,7 @@ -from .crobat import crobat -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class leakix(shodan_dns): +class leakix(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] @@ -14,7 +13,7 @@ class leakix(shodan_dns): base_url = "https://leakix.net" async def setup(self): - ret = await crobat.setup(self) + ret = await super(subdomain_enum_apikey, self).setup() self.headers = {"Accept": "application/json"} self.api_key = self.config.get("api_key", "") if self.api_key: diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 2ec1031af..be1a19d3b 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -3,10 +3,17 @@ import random import subprocess -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class massdns(crobat): +class massdns(subdomain_enum): + """ + This is BBOT's flagship subdomain enumeration module. + + It uses massdns to brute-force subdomains. + At the end of a scan, it will leverage BBOT's word cloud to recursively discover target-specific subdomain mutations. 
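+
+    Example (illustrative command line; the target is a placeholder):
+
+        bbot -t evilcorp.com -m massdns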
+ """ + flags = ["subdomain-enum", "passive", "slow", "aggressive"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/myssl.py b/bbot/modules/myssl.py index 7edd3fbc0..a08c885ed 100644 --- a/bbot/modules/myssl.py +++ b/bbot/modules/myssl.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class myssl(crobat): +class myssl(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/otx.py b/bbot/modules/otx.py index abe856e47..72f2e1d5b 100644 --- a/bbot/modules/otx.py +++ b/bbot/modules/otx.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class otx(crobat): +class otx(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index ce7098e27..c1e0773c3 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class pgp(crobat): +class pgp(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] diff --git a/bbot/modules/rapiddns.py b/bbot/modules/rapiddns.py index 0af7e3930..088288ddb 100644 --- a/bbot/modules/rapiddns.py +++ b/bbot/modules/rapiddns.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class rapiddns(crobat): +class rapiddns(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/riddler.py b/bbot/modules/riddler.py index c6f865ee1..d525acbad 100644 --- a/bbot/modules/riddler.py +++ b/bbot/modules/riddler.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class riddler(crobat): +class riddler(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index 48ce96709..98b114b75 100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -46,6 +46,6 @@ async def handle_event(self, event): continue tags = [] - if self.is_spider_danger(event, unverified_url): + if self.helpers.is_spider_danger(event, unverified_url): tags.append("spider-danger") self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) diff --git a/bbot/modules/shodan_dns.py b/bbot/modules/shodan_dns.py index c94d0ac20..7780120b6 100644 --- a/bbot/modules/shodan_dns.py +++ b/bbot/modules/shodan_dns.py @@ -1,12 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class shodan_dns(crobat): - """ - A typical module for authenticated, API-based subdomain enumeration - Inherited by several other modules including securitytrails, c99.nl, etc. 
- """ - +class shodan_dns(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] @@ -16,10 +11,6 @@ class shodan_dns(crobat): base_url = "https://api.shodan.io" - async def setup(self): - await super().setup() - return await self.require_api_key() - async def ping(self): url = f"{self.base_url}/api-info?key={self.api_key}" r = await self.request_with_fail_count(url) diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index f9180f1db..87358a955 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class sitedossier(crobat): +class sitedossier(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/sslcert.py b/bbot/modules/sslcert.py index a9f269c55..de598dd73 100644 --- a/bbot/modules/sslcert.py +++ b/bbot/modules/sslcert.py @@ -1,4 +1,3 @@ -import ssl import asyncio from OpenSSL import crypto from contextlib import suppress @@ -109,12 +108,7 @@ async def visit_host(self, host, port): # Create an SSL context try: - ssl_context = ssl.create_default_context() - ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE - ssl_context.options &= ~ssl.OP_NO_SSLv2 & ~ssl.OP_NO_SSLv3 - ssl_context.set_ciphers("ALL:@SECLEVEL=0") - ssl_context.options |= 0x4 # Add the OP_LEGACY_SERVER_CONNECT option + ssl_context = self.helpers.ssl_context_noverify() except Exception as e: self.warning(f"Error creating SSL context: {e}") return [], [], (host, port) diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py index 218c49d30..6d1825b8b 100644 --- a/bbot/modules/subdomaincenter.py +++ b/bbot/modules/subdomaincenter.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class subdomaincenter(crobat): +class subdomaincenter(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/sublist3r.py b/bbot/modules/sublist3r.py index ee15a145b..3c13cf308 100644 --- a/bbot/modules/sublist3r.py +++ b/bbot/modules/sublist3r.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class sublist3r(crobat): +class sublist3r(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] # tag "subdomain-enum" removed 2023-02-24 because API is offline diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py new file mode 100644 index 000000000..61c2de6a4 --- /dev/null +++ b/bbot/modules/templates/subdomain_enum.py @@ -0,0 +1,161 @@ +from bbot.modules.base import BaseModule + + +class subdomain_enum(BaseModule): + """ + A typical free API-based subdomain enumeration module + Inherited by many other modules including sublist3r, dnsdumpster, etc. 
+ """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = {"description": "Query an API for subdomains"} + + base_url = "https://api.example.com" + + # set module error state after this many failed requests in a row + abort_after_failures = 5 + # whether to reject wildcard DNS_NAMEs + reject_wildcards = "strict" + # this helps combat rate limiting by ensuring that a query doesn't execute + # until the queue is ready to receive its results + _qsize = 1 + + async def setup(self): + self.processed = set() + return True + + async def handle_event(self, event): + query = self.make_query(event) + results = await self.query(query) + if results: + for hostname in set(results): + if hostname: + try: + hostname = self.helpers.validators.validate_host(hostname) + except ValueError as e: + self.verbose(e) + continue + if hostname and hostname.endswith(f".{query}") and not hostname == event.data: + self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + + async def request_url(self, query): + url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" + return await self.request_with_fail_count(url) + + def make_query(self, event): + if "target" in event.tags: + query = str(event.data) + else: + query = self.helpers.parent_domain(event.data).lower() + return ".".join([s for s in query.split(".") if s != "_wildcard"]) + + def parse_results(self, r, query=None): + json = r.json() + if json: + for hostname in json: + yield hostname + + async def query(self, query, parse_fn=None, request_fn=None): + if parse_fn is None: + parse_fn = self.parse_results + if request_fn is None: + request_fn = self.request_url + try: + response = await request_fn(query) + if response is None: + self.info(f'Query "{query}" failed (no response)') + return [] + try: + results = list(parse_fn(response, query)) + except Exception as e: + if response: + self.info( + f'Error parsing results for query "{query}" (status code {response.status_code})', trace=True + ) + self.log.trace(response.text) + else: + self.info(f'Error parsing results for "{query}": {e}', trace=True) + return + if results: + return results + self.debug(f'No results for "{query}"') + except Exception as e: + self.info(f"Error retrieving results for {query}: {e}", trace=True) + + async def _is_wildcard(self, query): + if self.helpers.is_dns_name(query): + for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query)).items(): + if any(t in wildcard_rdtypes for t in ("A", "AAAA", "CNAME")): + return True + return False + + async def filter_event(self, event): + """ + This filter_event is used across many modules + """ + query = self.make_query(event) + # reject if already processed + if self.already_processed(query): + return False, "Event was already processed" + eligible, reason = await self.eligible_for_enumeration(event) + if eligible: + self.processed.add(hash(query)) + return True, reason + return False, reason + + async def eligible_for_enumeration(self, event): + query = self.make_query(event) + # check if wildcard + is_wildcard = await self._is_wildcard(query) + # check if cloud + is_cloud = False + if any(t.startswith("cloud-") for t in event.tags): + is_cloud = True + # reject if it's a cloud resource and not in our target + if is_cloud and event not in self.scan.target: + return False, "Event is a cloud resource and not a direct target" + # optionally reject events with wildcards / errors + if self.reject_wildcards: + if any(t in event.tags for t in 
("a-error", "aaaa-error")): + return False, "Event has a DNS resolution error" + if self.reject_wildcards == "strict": + if is_wildcard: + return False, "Event is a wildcard domain" + elif self.reject_wildcards == "cloud_only": + if is_wildcard and is_cloud: + return False, "Event is both a cloud resource and a wildcard domain" + return True, "" + + def already_processed(self, hostname): + for parent in self.helpers.domain_parents(hostname, include_self=True): + if hash(parent) in self.processed: + return True + return False + + async def abort_if(self, event): + # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains + if "in-scope" not in event.tags: + return True + if await self._is_wildcard(event.data): + return True + return False + + +class subdomain_enum_apikey(subdomain_enum): + """ + A typical module for authenticated, API-based subdomain enumeration + Inherited by several other modules including securitytrails, c99.nl, etc. + """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = {"description": "Query API for subdomains", "auth_required": True} + options = {"api_key": ""} + options_desc = {"api_key": "API key"} + + async def setup(self): + await super().setup() + return await self.require_api_key() diff --git a/bbot/modules/threatminer.py b/bbot/modules/threatminer.py index 0613c5902..bbc1e23c3 100644 --- a/bbot/modules/threatminer.py +++ b/bbot/modules/threatminer.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class threatminer(crobat): +class threatminer(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py index 320c32761..f1efe08e5 100644 --- a/bbot/modules/urlscan.py +++ b/bbot/modules/urlscan.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class urlscan(crobat): +class urlscan(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME", "URL_UNVERIFIED"] diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index 4fbfb08f1..c2a5e4431 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -5,7 +5,7 @@ class viewdns(BaseModule): """ - Used as a base for modules that only act on root domains and not individual hostnames + Todo: Also retrieve registrar? """ watched_events = ["DNS_NAME"] @@ -16,25 +16,17 @@ class viewdns(BaseModule): } base_url = "https://viewdns.info" in_scope_only = True + per_domain_only = True _qsize = 1 async def setup(self): - self.processed = set() self.date_regex = re.compile(r"\d{4}-\d{2}-\d{2}") return True - async def filter_event(self, event): - _, domain = self.helpers.split_domain(event.data) - if hash(domain) in self.processed: - return False - self.processed.add(hash(domain)) - return True - async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) for domain, _ in await self.query(query): self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) - # todo: registrar? 
async def query(self, query): results = set() diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index 08e32926a..d5b8f320f 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class wayback(crobat): +class wayback(subdomain_enum): flags = ["passive", "subdomain-enum", "safe"] watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED", "DNS_NAME"] diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index d085a60b0..f33d9cf3e 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -11,10 +11,32 @@ class ScanManager: """ - Manages modules and events during a scan + Manages the modules, event queues, and overall event flow during a scan. + + Simultaneously serves as a shepherd, policeman, judge, jury, and executioner for events. + It is responsible for managing the incoming event queue and distributing events to modules. + + Attributes: + scan (Scan): Reference to the Scan object that instantiated the ScanManager. + incoming_event_queue (asyncio.PriorityQueue): Queue storing incoming events for processing. + events_distributed (set): Set tracking globally unique events. + events_accepted (set): Set tracking events accepted by individual modules. + dns_resolution (bool): Flag to enable or disable DNS resolution. + _task_counter (TaskCounter): Counter for ongoing tasks. + _new_activity (bool): Flag indicating new activity. + _modules_by_priority (dict): Modules sorted by their priorities. + _incoming_queues (list): List of incoming event queues from each module. + _module_priority_weights (list): Weight values for each module based on priority. """ def __init__(self, scan): + """ + Initializes the ScanManager object, setting up essential attributes for scan management. + + Args: + scan (Scan): Reference to the Scan object that instantiated the ScanManager. + """ + self.scan = scan self.incoming_event_queue = asyncio.PriorityQueue() @@ -32,10 +54,15 @@ def __init__(self, scan): async def init_events(self): """ - seed scanner with target events + Initializes events by seeding the scanner with target events and distributing them for further processing. + + Notes: + - This method populates the event queue with initial target events. + - It also marks the Scan object as finished with initialization by setting `_finished_init` to True. 
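+            - Target events are sorted by the length of their data (shortest first) before being distributed, so shorter (parent) hostnames are processed first.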
""" + context = f"manager.init_events()" - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.distribute_event(self.scan.root_event) sorted_events = sorted(self.scan.target.events, key=lambda e: len(e.data)) for event in sorted_events: @@ -66,10 +93,10 @@ async def emit_event(self, event, *args, **kwargs): event._resolved.set() for kwarg in ["abort_if", "on_success_callback"]: kwargs.pop(kwarg, None) - async with self.scan.acatch(context=self.distribute_event): + async with self.scan._acatch(context=self.distribute_event): await self.distribute_event(event, *args, **kwargs) else: - async with self.scan.acatch(context=self._emit_event, finally_callback=event._resolved.set): + async with self.scan._acatch(context=self._emit_event, finally_callback=event._resolved.set): await self._emit_event(event, *args, **kwargs) def _event_precheck(self, event, exclude=("DNS_NAME",)): @@ -87,7 +114,41 @@ def _event_precheck(self, event, exclude=("DNS_NAME",)): return False return True - async def _emit_event(self, event, *args, **kwargs): + async def _emit_event(self, event, **kwargs): + """ + Handles the emission, tagging, and distribution of a events during a scan. + + A lot of really important stuff happens here. Actually this is probably the most + important method in all of BBOT. It is basically the central intersection that + every event passes through. + + Probably it is also needless to say that it exists in a delicate balance. + Close to half of my debugging time has been spent in this function. + I have slain many dragons here and there may still be more yet to slay. + + Tread carefully, friend. -TheTechromancer + + Notes: + - Central function for decision-making in BBOT. + - Conducts DNS resolution, tagging, and scope calculations. + - Checks against whitelists and blacklists. + - Calls custom callbacks. + - Handles DNS wildcard events. + - Decides on event acceptance and distribution. + + Parameters: + event (Event): The event object to be emitted. + **kwargs: Arbitrary keyword arguments (e.g., `on_success_callback`, `abort_if`). + + Side Effects: + - Event tagging. + - Populating DNS data. + - Emitting new events. + - Queueing events for further processing. + - Adjusting event scopes. + - Running callbacks. + - Updating scan statistics. + """ log.debug(f"Emitting {event}") distribute_event = True event_distributed = False @@ -194,7 +255,7 @@ async def _emit_event(self, event, *args, **kwargs): # now that the event is properly tagged, we can finally make decisions about it abort_result = False if callable(abort_if): - async with self.scan.acatch(context=abort_if): + async with self.scan._acatch(context=abort_if): abort_result = await self.scan.helpers.execute_sync_or_async(abort_if, event) msg = f"{event.module}: not raising event {event} due to custom criteria in abort_if()" with suppress(ValueError, TypeError): @@ -210,7 +271,7 @@ async def _emit_event(self, event, *args, **kwargs): # run success callback before distributing event (so it can add tags, etc.) 
if distribute_event: if callable(on_success_callback): - async with self.scan.acatch(context=on_success_callback): + async with self.scan._acatch(context=on_success_callback): await self.scan.helpers.execute_sync_or_async(on_success_callback, event) if not event.host or (event.always_emit and not event_is_duplicate): @@ -244,7 +305,7 @@ async def _emit_event(self, event, *args, **kwargs): ### Emit DNS children ### if self.dns_resolution: - emit_children = -1 < event.scope_distance < self.scan.dns_search_distance + emit_children = -1 < event.scope_distance < self.scan.scope_dns_search_distance if emit_children: # only emit DNS children once for each unique host host_hash = hash(str(event.host)) @@ -272,7 +333,7 @@ async def _emit_event(self, event, *args, **kwargs): self.queue_event(child_event) except ValidationError as e: - log.warning(f"Event validation failed with args={args}, kwargs={kwargs}: {e}") + log.warning(f"Event validation failed with kwargs={kwargs}: {e}") log.trace(traceback.format_exc()) finally: @@ -317,7 +378,7 @@ async def distribute_event(self, *args, **kwargs): """ Queue event with modules """ - async with self.scan.acatch(context=self.distribute_event): + async with self.scan._acatch(context=self.distribute_event): event = self.scan.make_event(*args, **kwargs) event_hash = hash(event) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 9d71706a4..ecc4d31e4 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -14,8 +14,8 @@ from bbot import config as bbot_config +from .target import Target from .stats import ScanStats -from .target import ScanTarget from .manager import ScanManager from .dispatcher import Dispatcher from bbot.modules import module_loader @@ -41,6 +41,65 @@ class Scanner: + """A class representing a single BBOT scan + + Examples: + Create scan with multiple targets: + >>> my_scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["nmap", "sslcert", "httpx"]) + + Create scan with custom config: + >>> config = {"http_proxy": "http://127.0.0.1:8080", "modules": {"nmap": {"top_ports": 2000}}} + >>> my_scan = Scanner("www.evilcorp.com", modules=["nmap", "httpx"], config=config) + + Start the scan, iterating over events as they're discovered (synchronous): + >>> for event in my_scan.start(): + >>> print(event) + + Start the scan, iterating over events as they're discovered (asynchronous): + >>> async for event in my_scan.async_start(): + >>> print(event) + + Start the scan without consuming events (synchronous): + >>> my_scan.start_without_generator() + + Start the scan without consuming events (asynchronous): + >>> await my_scan.async_start_without_generator() + + Attributes: + status (str): Status of scan, representing its current state. It can take on the following string values, each of which is mapped to an integer code in `_status_codes`: + ```markdown + - "NOT_STARTED" (0): Initial status before the scan starts. + - "STARTING" (1): Status when the scan is initializing. + - "RUNNING" (2): Status when the scan is in progress. + - "FINISHING" (3): Status when the scan is in the process of finalizing. + - "CLEANING_UP" (4): Status when the scan is cleaning up resources. + - "ABORTING" (5): Status when the scan is in the process of being aborted. + - "ABORTED" (6): Status when the scan has been aborted. + - "FAILED" (7): Status when the scan has encountered a failure. + - "FINISHED" (8): Status when the scan has successfully completed. 
+ ``` + _status_code (int): The numerical representation of the current scan status, stored for internal use. It is mapped according to the values in `_status_codes`. + target (Target): Target of scan + config (omegaconf.dictconfig.DictConfig): BBOT config + whitelist (Target): Scan whitelist (by default this is the same as `target`) + blacklist (Target): Scan blacklist (this takes ultimate precedence) + helpers (ConfigAwareHelper): Helper containing various reusable functions, regexes, etc. + manager (ScanManager): Coordinates and monitors the flow of events between modules during a scan + dispatcher (Dispatcher): Triggers certain events when the scan `status` changes + modules (dict): Holds all loaded modules in this format: `{"module_name": Module()}` + stats (ScanStats): Holds high-level scan statistics such as how many events have been produced and consumed by each module + home (pathlib.Path): Base output directory of the scan (default: `~/.bbot/scans/`) + running (bool): Whether the scan is currently running. + stopping (bool): Whether the scan is currently stopping. + stopped (bool): Whether the scan is currently stopped. + aborting (bool): Whether the scan is aborted or currently aborting. + + Notes: + - The status is read-only once set to "ABORTING" until it transitions to "ABORTED." + - Invalid statuses are logged but not applied. + - Setting a status will trigger the `on_status` event in the dispatcher. + """ + _status_codes = { "NOT_STARTED": 0, "STARTING": 1, @@ -68,6 +127,23 @@ def __init__( strict_scope=False, force_start=False, ): + """ + Initializes the Scanner class. + + Args: + *targets (str): Target(s) to scan. + whitelist (list, optional): Whitelisted target(s) to scan. Defaults to the same as `targets`. + blacklist (list, optional): Blacklisted target(s). Takes ultimate precedence. Defaults to empty. + scan_id (str, optional): Unique identifier for the scan. Auto-generates if None. + name (str, optional): Human-readable name of the scan. Auto-generates if None. + modules (list[str], optional): List of module names to use during the scan. Defaults to empty list. + output_modules (list[str], optional): List of output modules to use. Defaults to ['python']. + output_dir (str or Path, optional): Directory to store scan output. Defaults to BBOT home directory (`~/.bbot`). + config (dict, optional): Configuration settings. Merged with BBOT config. + dispatcher (Dispatcher, optional): Dispatcher object to use. Defaults to new Dispatcher. + strict_scope (bool, optional): If True, only targets explicitly in whitelist are scanned. Defaults to False. + force_start (bool, optional): If True, allows the scan to start even when module setups hard-fail. Defaults to False. 
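+
+        Examples:
+            An illustrative invocation (targets and modules are placeholders):
+            >>> scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["sslcert"], output_dir="/tmp/bbot")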
+ """ if modules is None: modules = [] if output_modules is None: @@ -102,19 +178,15 @@ def __init__( if name is None: tries = 0 - while 1: if tries > 5: self.name = f"{self.helpers.rand_string(4)}_{self.helpers.rand_string(4)}" break - self.name = random_name() - if output_dir is not None: home_path = Path(output_dir).resolve() / self.name else: home_path = self.helpers.bbot_home / "scans" / self.name - if not home_path.exists(): break tries += 1 @@ -126,7 +198,7 @@ def __init__( else: self.home = self.helpers.bbot_home / "scans" / self.name - self.target = ScanTarget(self, *targets, strict_scope=strict_scope) + self.target = Target(self, *targets, strict_scope=strict_scope, make_in_scope=True) self.modules = OrderedDict({}) self._scan_modules = modules @@ -137,10 +209,10 @@ def __init__( if not whitelist: self.whitelist = self.target.copy() else: - self.whitelist = ScanTarget(self, *whitelist, strict_scope=strict_scope) + self.whitelist = Target(self, *whitelist, strict_scope=strict_scope) if not blacklist: blacklist = [] - self.blacklist = ScanTarget(self, *blacklist) + self.blacklist = Target(self, *blacklist) if dispatcher is None: self.dispatcher = Dispatcher() @@ -153,7 +225,7 @@ def __init__( # scope distance self.scope_search_distance = max(0, int(self.config.get("scope_search_distance", 0))) - self.dns_search_distance = max( + self.scope_dns_search_distance = max( self.scope_search_distance, int(self.config.get("scope_dns_search_distance", 2)) ) self.scope_report_distance = int(self.config.get("scope_report_distance", 1)) @@ -173,7 +245,7 @@ def __init__( self._cleanedup = False self.__loop = None - self.manager_worker_loop_tasks = [] + self._manager_worker_loop_tasks = [] self.init_events_task = None self.ticker_task = None self.dispatcher_tasks = [] @@ -188,15 +260,16 @@ def __init__( self._stopping = False self._dns_regexes = None - self._log_handlers = None + self.__log_handlers = None self._log_handler_backup = [] def _on_keyboard_interrupt(self, loop, event): self.stop() - async def prep(self): - # event = asyncio.Event() - # self._loop.add_signal_handler(signal.SIGINT, self._on_keyboard_interrupt, loop, event) + async def _prep(self): + """ + Calls .load_modules() and .setup_modules() in preparation for a scan + """ self.helpers.mkdir(self.home) if not self._prepped: @@ -231,12 +304,13 @@ async def async_start_without_generator(self): pass async def async_start(self): + """ """ failed = True scan_start_time = datetime.now() try: - await self.prep() + await self._prep() - self.start_log_handlers() + self._start_log_handlers() if not self.target: self.warning(f"No scan targets specified") @@ -256,7 +330,7 @@ async def async_start(self): await self.dispatcher.on_start(self) # start manager worker loops - self.manager_worker_loop_tasks = [ + self._manager_worker_loop_tasks = [ asyncio.create_task(self.manager._worker_loop()) for _ in range(self.max_workers) ] @@ -264,18 +338,18 @@ async def async_start(self): self.init_events_task = asyncio.create_task(self.manager.init_events()) self.status = "RUNNING" - self.start_modules() + self._start_modules() self.verbose(f"{len(self.modules):,} modules started") # main scan loop while 1: # abort if we're aborting if self.aborting: - self.drain_queues() + self._drain_queues() break if "python" in self.modules: - events, finish = await self.modules["python"].events_waiting() + events, finish = await self.modules["python"]._events_waiting() for e in events: yield e @@ -307,9 +381,9 @@ async def async_start(self): 
self.critical(f"Unexpected error during scan:\n{traceback.format_exc()}") finally: - self.cancel_tasks() - await self.report() - await self.cleanup() + self._cancel_tasks() + await self._report() + await self._cleanup() log_fn = self.hugesuccess if self.status == "ABORTING": @@ -327,16 +401,35 @@ async def async_start(self): await self.dispatcher.on_finish(self) - self.stop_log_handlers() + self._stop_log_handlers() - def start_modules(self): + def _start_modules(self): self.verbose(f"Starting module worker loops") for module_name, module in self.modules.items(): module.start() async def setup_modules(self, remove_failed=True): + """Asynchronously initializes all loaded modules by invoking their `setup()` methods. + + Args: + remove_failed (bool): Flag indicating whether to remove modules that fail setup. + + Returns: + dict: Dictionary containing lists of module names categorized by their setup status. + 'succeeded' - List of modules that successfully set up. + 'hard_failed' - List of modules that encountered a hard failure during setup. + 'soft_failed' - List of modules that encountered a soft failure during setup. + + Raises: + ScanError: If no output modules could be loaded. + + Notes: + Hard-failed modules are set to an error state and removed if `remove_failed` is True. + Soft-failed modules are not set to an error state but are also removed if `remove_failed` is True. + """ await self.load_modules() self.verbose(f"Setting up modules") + succeeded = [] hard_failed = [] soft_failed = [] @@ -344,6 +437,7 @@ async def setup_modules(self, remove_failed=True): module_name, status, msg = await task if status == True: self.debug(f"Setup succeeded for {module_name} ({msg})") + succeeded.append(module_name) elif status == False: self.error(f"Setup hard-failed for {module_name}: {msg}") self.modules[module_name].set_error_state() @@ -360,23 +454,133 @@ async def setup_modules(self, remove_failed=True): total_failed = len(hard_failed + soft_failed) if hard_failed: msg = f"Setup hard-failed for {len(hard_failed):,} modules ({','.join(hard_failed)})" - self.fail_setup(msg) + self._fail_setup(msg) elif total_failed > 0: self.warning(f"Setup failed for {total_failed:,} modules") + return { + "succeeded": succeeded, + "hard_failed": hard_failed, + "soft_failed": soft_failed, + } + + async def load_modules(self): + """Asynchronously import and instantiate all scan modules, including internal and output modules. + + This method is automatically invoked by `setup_modules()`. It performs several key tasks in the following sequence: + + 1. Install dependencies for each module via `self.helpers.depsinstaller.install()`. + 2. Load scan modules and updates the `modules` dictionary. + 3. Load internal modules and updates the `modules` dictionary. + 4. Load output modules and updates the `modules` dictionary. + 5. Sorts modules based on their `_priority` attribute. + + If any modules fail to load or their dependencies fail to install, a ScanError will be raised (unless `self.force_start` is set to True). + + Attributes: + succeeded, failed (tuple): A tuple containing lists of modules that succeeded or failed during the dependency installation. + loaded_modules, loaded_internal_modules, loaded_output_modules (dict): Dictionaries of successfully loaded modules. + failed, failed_internal, failed_output (list): Lists of module names that failed to load. + + Raises: + ScanError: If any module dependencies fail to install or modules fail to load, and if self.force_start is False. 
+ + Returns: + None + + Note: + After all modules are loaded, they are sorted by `_priority` and stored in the `modules` dictionary. + """ + if not self._modules_loaded: + all_modules = list(set(self._scan_modules + self._output_modules + self._internal_modules)) + if not all_modules: + self.warning(f"No modules to load") + return + + if not self._scan_modules: + self.warning(f"No scan modules to load") + + # install module dependencies + succeeded, failed = await self.helpers.depsinstaller.install( + *self._scan_modules, *self._output_modules, *self._internal_modules + ) + if failed: + msg = f"Failed to install dependencies for {len(failed):,} modules: {','.join(failed)}" + self._fail_setup(msg) + modules = sorted([m for m in self._scan_modules if m in succeeded]) + output_modules = sorted([m for m in self._output_modules if m in succeeded]) + internal_modules = sorted([m for m in self._internal_modules if m in succeeded]) + + # Load scan modules + self.verbose(f"Loading {len(modules):,} scan modules: {','.join(modules)}") + loaded_modules, failed = self._load_modules(modules) + self.modules.update(loaded_modules) + if len(failed) > 0: + msg = f"Failed to load {len(failed):,} scan modules: {','.join(failed)}" + self._fail_setup(msg) + if loaded_modules: + self.info( + f"Loaded {len(loaded_modules):,}/{len(self._scan_modules):,} scan modules ({','.join(loaded_modules)})" + ) + + # Load internal modules + self.verbose(f"Loading {len(internal_modules):,} internal modules: {','.join(internal_modules)}") + loaded_internal_modules, failed_internal = self._load_modules(internal_modules) + self.modules.update(loaded_internal_modules) + if len(failed_internal) > 0: + msg = f"Failed to load {len(failed_internal):,} internal modules: {','.join(failed_internal)}" + self._fail_setup(msg) + if loaded_internal_modules: + self.info( + f"Loaded {len(loaded_internal_modules):,}/{len(self._internal_modules):,} internal modules ({','.join(loaded_internal_modules)})" + ) + + # Load output modules + self.verbose(f"Loading {len(output_modules):,} output modules: {','.join(output_modules)}") + loaded_output_modules, failed_output = self._load_modules(output_modules) + self.modules.update(loaded_output_modules) + if len(failed_output) > 0: + msg = f"Failed to load {len(failed_output):,} output modules: {','.join(failed_output)}" + self._fail_setup(msg) + if loaded_output_modules: + self.info( + f"Loaded {len(loaded_output_modules):,}/{len(self._output_modules):,} output modules ({','.join(loaded_output_modules)})" + ) + + self.modules = OrderedDict(sorted(self.modules.items(), key=lambda x: getattr(x[-1], "_priority", 0))) + self._modules_loaded = True + def stop(self): + """Stops the in-progress scan and performs necessary cleanup. + + This method sets the scan's status to "ABORTING," cancels any pending tasks, and drains event queues. It also kills child processes spawned during the scan. + + Returns: + None + """ if not self._stopping: self._stopping = True self.status = "ABORTING" self.hugewarning(f"Aborting scan") self.trace() - self.cancel_tasks() - self.drain_queues() + self._cancel_tasks() + self._drain_queues() self.helpers.kill_children() - self.drain_queues() + self._drain_queues() self.helpers.kill_children() async def finish(self): + """Finalizes the scan by invoking the `finished()` method on all active modules if new activity is detected. + + The method is idempotent and will return False if no new activity has been recorded since the last invocation.
+ + Returns: + bool: True if new activity has been detected and the `finished()` method is invoked on all modules. + False if no new activity has been detected since the last invocation. + + Notes: + This method alters the scan's status to "FINISHING" if new activity is detected. + """ # if new events were generated since last time we were here if self.manager._new_activity: self.manager._new_activity = False @@ -392,8 +596,14 @@ async def finish(self): self.verbose("Completed final finish()") return False - def drain_queues(self): - # Empty event queues + def _drain_queues(self): + """Empties all the event queues for each loaded module and the manager's incoming event queue. + + This method iteratively empties both the incoming and outgoing event queues of each module, as well as the incoming event queue of the scan manager. + + Returns: + None + """ self.debug("Draining queues") for module in self.modules.values(): with contextlib.suppress(asyncio.queues.QueueEmpty): @@ -409,7 +619,17 @@ def drain_queues(self): self.manager.incoming_event_queue.get_nowait() self.debug("Finished draining queues") - def cancel_tasks(self): + def _cancel_tasks(self): + """Cancels all asynchronous tasks and shuts down the process pool. + + This method collects all pending tasks from each module, the dispatcher, + and the scan manager. After collecting these tasks, it cancels them synchronously + using a helper function. Finally, it shuts down the process pool, canceling any + pending futures. + + Returns: + None + """ tasks = [] # module workers for m in self.modules.values(): @@ -423,19 +643,41 @@ def cancel_tasks(self): # dispatcher tasks += self.dispatcher_tasks # manager worker loops - tasks += self.manager_worker_loop_tasks + tasks += self._manager_worker_loop_tasks self.helpers.cancel_tasks_sync(tasks) # process pool self.process_pool.shutdown(cancel_futures=True) - async def report(self): + async def _report(self): + """Asynchronously executes the `report()` method for each module in the scan. + + This method is called once at the end of each scan and is responsible for + triggering the `report()` function for each module. It executes irrespective + of whether the scan was aborted or completed successfully. The method makes + use of an asynchronous context manager (`_acatch`) to handle exceptions and + a task counter to keep track of the task's context. + + Returns: + None + """ for mod in self.modules.values(): context = f"{mod.name}.report()" - async with self.acatch(context), mod._task_counter.count(context): + async with self._acatch(context), mod._task_counter.count(context): await mod.report() - async def cleanup(self): - # clean up modules + async def _cleanup(self): + """Asynchronously executes the `cleanup()` method for each module in the scan. + + This method is called once at the end of the scan to perform resource cleanup + tasks. It is executed regardless of whether the scan was aborted or completed + successfully. The scan status is set to "CLEANING_UP" during the execution. + After calling the `cleanup()` method for each module, it performs additional + cleanup tasks such as removing the scan's home directory if empty and cleaning + old scans. + + Returns: + None + """ self.status = "CLEANING_UP" for mod in self.modules.values(): await mod._cleanup() @@ -447,7 +689,16 @@ async def cleanup(self): def in_scope(self, e): """ - Checks whitelist and blacklist, also taking scope_distance into account + Check whether a hostname, url, IP, etc. is in scope. 
+ Accepts either events or string data. + + Checks whitelist and blacklist. + If `e` is an event and its scope distance is zero, it will be considered in-scope. + + Examples: + Check if a URL is in scope: + >>> scan.in_scope("http://www.evilcorp.com") + True """ try: e = make_event(e, dummy=True) @@ -457,10 +708,16 @@ def in_scope(self, e): return in_scope and not self.blacklisted(e) def blacklisted(self, e): + """ + Check whether a hostname, url, IP, etc. is blacklisted. + """ e = make_event(e, dummy=True) return e in self.blacklist def whitelisted(self, e): + """ + Check whether a hostname, url, IP, etc. is whitelisted. + """ e = make_event(e, dummy=True) return e in self.whitelist @@ -522,6 +779,25 @@ def log(self): @property def root_event(self): + """ + The root scan event, e.g.: + ```json + { + "type": "SCAN", + "id": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "data": "pixilated_kathryn (SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54)", + "scope_distance": 0, + "scan": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "timestamp": 1694548779.616255, + "source": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "tags": [ + "distance-0" + ], + "module": "TARGET", + "module_sequence": "TARGET" + } + ``` + """ root_event = self.make_event(data=f"{self.name} ({self.id})", event_type="SCAN", dummy=True) root_event._id = self.id root_event.scope_distance = 0 @@ -530,12 +806,68 @@ def root_event(self): root_event.module = self.helpers._make_dummy_module(name="TARGET", _type="TARGET") return root_event + def run_in_executor(self, callback, *args, **kwargs): + """ + Run a synchronous task in the event loop's default thread pool executor + + Examples: + Execute callback: + >>> result = await self.scan.run_in_executor(callback_fn, arg1, arg2) + """ + callback = partial(callback, **kwargs) + return self._loop.run_in_executor(None, callback, *args) + + def run_in_executor_mp(self, callback, *args, **kwargs): + """ + Same as run_in_executor() except with a process pool executor + Use only in cases where callback is CPU-bound + + Examples: + Execute callback: + >>> result = await self.scan.run_in_executor_mp(callback_fn, arg1, arg2) + """ + callback = partial(callback, **kwargs) + return self._loop.run_in_executor(self.process_pool, callback, *args) + + @property + def dns_regexes(self): + """ + A list of DNS hostname regexes generated from the scan target + For the purpose of extracting hostnames + + Examples: + Extract hostnames from text: + >>> for regex in scan.dns_regexes: + ... for match in regex.finditer(response.text): + ... 
hostname = match.group().lower() + """ + if self._dns_regexes is None: + dns_targets = set(t.host for t in self.target if t.host and isinstance(t.host, str)) + dns_whitelist = set(t.host for t in self.whitelist if t.host and isinstance(t.host, str)) + dns_targets.update(dns_whitelist) + dns_targets = sorted(dns_targets, key=len) + dns_targets_set = set() + dns_regexes = [] + for t in dns_targets: + if not any(x in dns_targets_set for x in self.helpers.domain_parents(t, include_self=True)): + dns_targets_set.add(t) + dns_regexes.append(re.compile(r"((?:(?:[\w-]+)\.)+" + re.escape(t) + ")", re.I)) + self._dns_regexes = dns_regexes + + return self._dns_regexes + @property def useragent(self): + """ + Convenient shortcut to the HTTP user-agent configured for the scan + """ return self.config.get("user_agent", "BBOT") @property def json(self): + """ + A dictionary representation of the scan including its name, ID, targets, whitelist, blacklist, and modules + """ j = dict() for i in ("id", "name"): v = getattr(self, i, "") @@ -612,8 +944,15 @@ def critical(self, *args, trace=True, **kwargs): self.trace() @property - def log_handlers(self): - if self._log_handlers is None: + def log_level(self): + """ + Return the current log level, e.g. logging.INFO + """ + return get_log_level() + + @property + def _log_handlers(self): + if self.__log_handlers is None: self.helpers.mkdir(self.home) main_handler = logging.handlers.TimedRotatingFileHandler( str(self.home / "scan.log"), when="d", interval=1, backupCount=14 @@ -625,12 +964,12 @@ def log_handlers(self): str(self.home / "debug.log"), when="d", interval=1, backupCount=14 ) debug_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.DEBUG) - self._log_handlers = [main_handler, debug_handler] - return self._log_handlers + self.__log_handlers = [main_handler, debug_handler] + return self.__log_handlers - def start_log_handlers(self): + def _start_log_handlers(self): # add log handlers - for handler in self.log_handlers: + for handler in self._log_handlers: add_log_handler(handler) # temporarily disable main ones for handler_name in ("file_main", "file_debug"): @@ -639,9 +978,9 @@ def start_log_handlers(self): self._log_handler_backup.append(handler) remove_log_handler(handler) - def stop_log_handlers(self): + def _stop_log_handlers(self): # remove log handlers - for handler in self.log_handlers: + for handler in self._log_handlers: remove_log_handler(handler) # restore main ones for handler in self._log_handler_backup: @@ -652,67 +991,7 @@ def _internal_modules(self): if self.config.get(modname, True): yield modname - async def load_modules(self): - if not self._modules_loaded: - all_modules = list(set(self._scan_modules + self._output_modules + self._internal_modules)) - if not all_modules: - self.warning(f"No modules to load") - return - - if not self._scan_modules: - self.warning(f"No scan modules to load") - - # install module dependencies - succeeded, failed = await self.helpers.depsinstaller.install( - *self._scan_modules, *self._output_modules, *self._internal_modules - ) - if failed: - msg = f"Failed to install dependencies for {len(failed):,} modules: {','.join(failed)}" - self.fail_setup(msg) - modules = sorted([m for m in self._scan_modules if m in succeeded]) - output_modules = sorted([m for m in self._output_modules if m in succeeded]) - internal_modules = sorted([m for m in self._internal_modules if m in succeeded]) - - # Load scan modules - self.verbose(f"Loading {len(modules):,} scan modules: 
{','.join(modules)}") - loaded_modules, failed = self._load_modules(modules) - self.modules.update(loaded_modules) - if len(failed) > 0: - msg = f"Failed to load {len(failed):,} scan modules: {','.join(failed)}" - self.fail_setup(msg) - if loaded_modules: - self.info( - f"Loaded {len(loaded_modules):,}/{len(self._scan_modules):,} scan modules ({','.join(loaded_modules)})" - ) - - # Load internal modules - self.verbose(f"Loading {len(internal_modules):,} internal modules: {','.join(internal_modules)}") - loaded_internal_modules, failed_internal = self._load_modules(internal_modules) - self.modules.update(loaded_internal_modules) - if len(failed_internal) > 0: - msg = f"Failed to load {len(loaded_internal_modules):,} internal modules: {','.join(loaded_internal_modules)}" - self.fail_setup(msg) - if loaded_internal_modules: - self.info( - f"Loaded {len(loaded_internal_modules):,}/{len(self._internal_modules):,} internal modules ({','.join(loaded_internal_modules)})" - ) - - # Load output modules - self.verbose(f"Loading {len(output_modules):,} output modules: {','.join(output_modules)}") - loaded_output_modules, failed_output = self._load_modules(output_modules) - self.modules.update(loaded_output_modules) - if len(failed_output) > 0: - msg = f"Failed to load {len(failed_output):,} output modules: {','.join(failed_output)}" - self.fail_setup(msg) - if loaded_output_modules: - self.info( - f"Loaded {len(loaded_output_modules):,}/{len(self._output_modules):,} output modules, ({','.join(loaded_output_modules)})" - ) - - self.modules = OrderedDict(sorted(self.modules.items(), key=lambda x: getattr(x[-1], "_priority", 0))) - self._modules_loaded = True - - def fail_setup(self, msg): + def _fail_setup(self, msg): msg = str(msg) if not self.force_start: msg += " (--force to run module anyway)" @@ -721,10 +1000,6 @@ def fail_setup(self, msg): else: raise ScanError(msg) - @property - def log_level(self): - return get_log_level() - @property def _loop(self): if self.__loop is None: @@ -749,13 +1024,13 @@ def _load_modules(self, modules): return loaded_modules, failed async def _status_ticker(self, interval=15): - async with self.acatch(): + async with self._acatch(): while 1: await asyncio.sleep(interval) self.manager.modules_status(_log=True) @contextlib.contextmanager - def catch(self, context="scan", finally_callback=None): + def _catch(self, context="scan", finally_callback=None): """ Handle common errors by stopping scan, logging tracebacks, etc. 
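For context on the rename below, the now-private `_catch()` / `_acatch()` context managers are used like this. A minimal sketch; `scan` is assumed to be an existing Scanner instance and the context label is a placeholder:

```python
# synchronous flavor, as exercised in test_manager.py: the exception is routed
# to the scan's exception handler instead of propagating to the caller
with scan._catch(context="my_task"):
    raise ValueError("Ignore this error, it belongs here")

# async flavor, as used throughout ScanManager:
# async with scan._acatch(context="my_task"):
#     await my_coroutine()
```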
@@ -768,7 +1043,7 @@ def catch(self, context="scan", finally_callback=None): self._handle_exception(e, context=context) @contextlib.asynccontextmanager - async def acatch(self, context="scan", finally_callback=None): + async def _acatch(self, context="scan", finally_callback=None): """ Async version of catch() @@ -780,40 +1055,6 @@ async def acatch(self, context="scan", finally_callback=None): except BaseException as e: self._handle_exception(e, context=context) - def run_in_executor(self, callback, *args, **kwargs): - """ - Run a synchronous task in the event loop's default thread pool executor - """ - callback = partial(callback, **kwargs) - return self._loop.run_in_executor(None, callback, *args) - - def run_in_executor_mp(self, callback, *args, **kwargs): - """ - Same as run_in_executor() except with a process pool executor - """ - callback = partial(callback, **kwargs) - return self._loop.run_in_executor(self.process_pool, callback, *args) - - @property - def dns_regexes(self): - """ - Return a list of regexes for extracting target hostnames - """ - if self._dns_regexes is None: - dns_targets = set(t.host for t in self.target if t.host and isinstance(t.host, str)) - dns_whitelist = set(t.host for t in self.whitelist if t.host and isinstance(t.host, str)) - dns_targets.update(dns_whitelist) - dns_targets = sorted(dns_targets, key=len) - dns_targets_set = set() - dns_regexes = [] - for t in dns_targets: - if not any(x in dns_targets_set for x in self.helpers.domain_parents(t, include_self=True)): - dns_targets_set.add(t) - dns_regexes.append(re.compile(r"((?:(?:[\w-]+)\.)+" + re.escape(t) + ")", re.I)) - self._dns_regexes = dns_regexes - - return self._dns_regexes - def _handle_exception(self, e, context="scan", finally_callback=None): if callable(context): context = f"{context.__qualname__}()" diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index 8d1345d42..f733f8295 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -10,21 +10,110 @@ class Target: - make_in_scope = False + """ + A class representing a target. Can contain an unlimited number of hosts, IPs, IP ranges, URLs, etc. - def __init__(self, scan, *targets, strict_scope=False): + Attributes: + make_in_scope (bool): Specifies whether to mark contained events as in-scope. + scan (Scan): Reference to the Scan object that instantiated the Target. + _events (dict): Dictionary mapping hosts to events related to the target. + strict_scope (bool): Flag indicating whether to consider child domains in-scope. + If set to True, only the exact hosts specified and not their children are considered part of the target.
+ + Examples: + Basic usage + >>> target = Target(scan, "evilcorp.com", "1.2.3.0/24") + >>> len(target) + 257 + >>> list(target.events) + [ + DNS_NAME("evilcorp.com", module=TARGET, tags={'domain', 'distance-1', 'target'}), + IP_RANGE("1.2.3.0/24", module=TARGET, tags={'ipv4', 'distance-1', 'target'}) + ] + >>> "www.evilcorp.com" in target + True + >>> "1.2.3.4" in target + True + >>> "4.3.2.1" in target + False + >>> "https://admin.evilcorp.com" in target + True + >>> "bob@evilcorp.com" in target + True + + Event correlation + >>> target.get("www.evilcorp.com") + DNS_NAME("evilcorp.com", module=TARGET, tags={'domain', 'distance-1', 'target'}) + >>> target.get("1.2.3.4") + IP_RANGE("1.2.3.0/24", module=TARGET, tags={'ipv4', 'distance-1', 'target'}) + + Target comparison + >>> target2 = Target(scan, "www.evilcorp.com") + >>> target2 == target + False + >>> target2 in target + True + >>> target in target2 + False + + Notes: + - Targets are only precise down to the individual host. Ports and protocols are not considered in scope calculations. + - If you specify "https://evilcorp.com:8443" as a target, all of evilcorp.com (including subdomains and other ports and protocols) will be considered part of the target + - If you do not want to include child subdomains, use `strict_scope=True` + """ + + def __init__(self, scan, *targets, strict_scope=False, make_in_scope=False): + """ + Initialize a Target object. + + Args: + scan (Scan): Reference to the Scan object that instantiated the Target. + *targets: One or more targets (e.g., domain names, IP ranges) to be included in this Target. + strict_scope (bool, optional): Flag to control whether only the exact hosts are considered in-scope. + Defaults to False. + make_in_scope (bool, optional): Flag to control whether contained events are marked as in-scope. + Defaults to False. + + Attributes: + scan (Scan): Reference to the Scan object. + strict_scope (bool): Flag to control in-scope conditions. If True, only exact hosts are considered. + + Notes: + - If you are instantiating a target from within a BBOT module, use `self.helpers.make_target()` instead (this removes the need to pass in a scan object). + - The strict_scope flag can be set to restrict scope calculation to only exactly-matching hosts and not their child subdomains. + - Each target is processed and stored as an `Event` in the '_events' dictionary. + """ self.scan = scan - self.dummy_module = ScanTargetDummyModule(scan) + self.strict_scope = strict_scope + self.make_in_scope = make_in_scope + + self._dummy_module = TargetDummyModule(scan) self._events = dict() if len(targets) > 0: log.verbose(f"Creating events from {len(targets):,} targets") for t in targets: self.add_target(t) - self.strict_scope = strict_scope self._hash = None def add_target(self, t): + """ + Add a target or merge events from another Target object into this Target. + + Args: + t: The target to be added. It can be either a string, an event object, or another Target object. + + Attributes Modified: + _events (dict): The dictionary is updated to include the new target's events. + + Examples: + >>> target.add_target('example.com') + + Notes: + - If `t` is of the same class as this Target, all its events are merged. + - If `t` is an event, it is directly added to `_events`. + - If `make_in_scope` is True, the scope distance of the event is set to 0.
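+
+        Example of merging another Target (illustrative; `other_target` is assumed to be an existing Target object):
+            >>> target.add_target(other_target)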
+ """ if type(t) == self.__class__: for k, v in t._events.items(): try: @@ -35,7 +124,9 @@ def add_target(self, t): if is_event(t): event = t else: - event = self.scan.make_event(t, source=self.scan.root_event, module=self.dummy_module, tags=["target"]) + event = self.scan.make_event( + t, source=self.scan.root_event, module=self._dummy_module, tags=["target"] + ) if self.make_in_scope: event.set_scope_distance(0) try: @@ -47,18 +138,73 @@ def add_target(self, t): @property def events(self): + """ + A generator property that yields all events in the target. + + Yields: + Event object: One of the Event objects stored in the `_events` dictionary. + + Examples: + >>> target = Target(scan, "example.com") + >>> for event in target.events: + ... print(event) + + Notes: + - This property is read-only. + - Iterating over this property gives you one event at a time from the `_events` dictionary. + """ for _events in self._events.values(): yield from _events def copy(self): + """ + Creates and returns a copy of the Target object, including a shallow copy of the `_events` attribute. + + Returns: + Target: A new Target object with the same `scan` and `strict_scope` attributes as the original. + A shallow copy of the `_events` dictionary is made. + + Examples: + >>> original_target = Target(scan, "example.com") + >>> copied_target = original_target.copy() + >>> copied_target is original_target + False + >>> copied_target == original_target + True + >>> copied_target in original_target + True + >>> original_target in copied_target + True + + Notes: + - The `scan` object reference is kept intact in the copied Target object. + """ self_copy = self.__class__(self.scan, strict_scope=self.strict_scope) self_copy._events = dict(self._events) return self_copy def get(self, host): """ - Get the matching target for a specified host. If not found, return None + Gets the event associated with the specified host from the target's `_events` dictionary. + + Args: + host (Event, Target, or str): The hostname, IP, URL, or event to look for. + + Returns: + Event or None: Returns the Event object associated with the given host if it exists, otherwise returns None. + + Examples: + >>> target = Target(scan, "evilcorp.com", "1.2.3.0/24") + >>> target.get("www.evilcorp.com") + DNS_NAME("evilcorp.com", module=TARGET, tags={'domain', 'distance-1', 'target'}) + >>> target.get("1.2.3.4") + IP_RANGE("1.2.3.0/24", module=TARGET, tags={'ipv4', 'distance-1', 'target'}) + + Notes: + - The method returns the first event that matches the given host. + - If `strict_scope` is False, it will also consider parent domains and IP ranges. """ + try: other = make_event(host, dummy=True) except ValidationError: @@ -87,7 +233,7 @@ def __iter__(self): yield from self.events def __contains__(self, other): - # if "other" is a ScanTarget + # if "other" is a Target if type(other) == self.__class__: contained_in_self = [self._contains(e) for e in other.events] return all(contained_in_self) @@ -108,7 +254,19 @@ def __hash__(self): def __len__(self): """ - Returns the total number of HOSTS (not events) in the target + Calculates and returns the total number of hosts within this target, not counting duplicate events. + + Returns: + int: The total number of unique hosts present within the target's `_events`. + + Examples: + >>> target = Target(scan, "evilcorp.com", "1.2.3.0/24") + >>> len(target) + 257 + + Notes: + - If a host is represented as an IP network, all individual IP addresses in that network are counted. 
+ - For other types of hosts, each unique event is counted as one. """ num_hosts = 0 for host, _events in self._events.items(): @@ -119,11 +277,7 @@ def __len__(self): return num_hosts -class ScanTarget(Target): - make_in_scope = True - - -class ScanTargetDummyModule(BaseModule): +class TargetDummyModule(BaseModule): _type = "TARGET" name = "TARGET" diff --git a/bbot/scripts/docs.py b/bbot/scripts/docs.py index 9969a8989..b66488159 100755 --- a/bbot/scripts/docs.py +++ b/bbot/scripts/docs.py @@ -117,9 +117,10 @@ def update_md_files(keyword, s): bbot_docs_toc += f"- **{section_title}**\n" for subsection in subsections: for subsection_title, subsection_path in subsection.items(): - path = subsection_path.split("index.md")[0] - path = path.split(".md")[0] - bbot_docs_toc += f" - [{subsection_title}]({base_url}/{path})\n" + if isinstance(subsection_path, str): + path = subsection_path.split("index.md")[0] + path = path.split(".md")[0] + bbot_docs_toc += f" - [{subsection_title}]({base_url}/{path})\n" bbot_docs_toc = bbot_docs_toc.strip() assert len(bbot_docs_toc.splitlines()) > 5 update_md_files("BBOT DOCS TOC", bbot_docs_toc) diff --git a/bbot/test/test_step_1/test_agent.py b/bbot/test/test_step_1/test_agent.py index a4b8e447e..73bb50355 100644 --- a/bbot/test/test_step_1/test_agent.py +++ b/bbot/test/test_step_1/test_agent.py @@ -142,7 +142,7 @@ async def test_agent(agent): async with websockets.serve(_websocket_handler, "127.0.0.1", 8765): asyncio.create_task(agent.start()) # wait for 30 seconds - await asyncio.wait_for(scan_done.wait(), 10) + await asyncio.wait_for(scan_done.wait(), 30) assert success await agent.start_scan("scan_to_be_cancelled", targets=["127.0.0.1"], modules=["ipneighbor"]) diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 016e6d79f..f27f4d93b 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -113,6 +113,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") assert helpers.split_domain("test.notreal") == ("", "test.notreal") assert helpers.split_domain("notreal") == ("", "notreal") + assert helpers.split_domain("192.168.0.1") == ("", "192.168.0.1") + assert helpers.split_domain("dead::beef") == ("", "dead::beef") assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) @@ -285,7 +287,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https with pytest.raises(DirectoryCreationError, match="Failed to create.*"): helpers.mkdir(test_file) - helpers._rm_at_exit(test_file) + helpers.delete_file(test_file) assert not test_file.exists() timedelta = datetime.timedelta(hours=1, minutes=2, seconds=3) diff --git a/bbot/test/test_step_1/test_manager.py b/bbot/test/test_step_1/test_manager.py index 7804836f5..16e6db7f5 100644 --- a/bbot/test/test_step_1/test_manager.py +++ b/bbot/test/test_step_1/test_manager.py @@ -108,7 +108,7 @@ class DummyModule3: msg = "Ignore this error, it belongs here" exceptions = (Exception(msg), KeyboardInterrupt(msg), BrokenPipeError(msg)) for e in exceptions: - with manager.scan.catch(): + with manager.scan._catch(): raise e diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index e14f8c402..77c25a7a1 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ 
b/bbot/test/test_step_1/test_modules_basic.py
@@ -201,6 +201,47 @@ async def test_modules_basic_perhostonly(scan, helpers, events, bbot_config, bbo
             assert valid_1 == True
             assert valid_2 == False
             assert hash("http://evilcorp.com/") in module._per_host_tracker
+            assert reason_2 == "per_host_only enabled and already seen host"
+
+        else:
+            assert valid_1 == True
+            assert valid_2 == True
+
+
+@pytest.mark.asyncio
+async def test_modules_basic_perdomainonly(scan, helpers, events, bbot_config, bbot_scanner, httpx_mock, monkeypatch):
+    per_domain_scan = bbot_scanner(
+        "evilcorp.com",
+        modules=list(set(available_modules + available_internal_modules)),
+        config=bbot_config,
+    )
+
+    await per_domain_scan.load_modules()
+    await per_domain_scan.setup_modules()
+    per_domain_scan.status = "RUNNING"
+
+    # ensure that multiple events to the same domain (e.g. different subdomains of evilcorp.com) are blocked, and check the per domain tracker
+
+    for module_name, module in sorted(per_domain_scan.modules.items()):
+        monkeypatch.setattr(module, "filter_event", BaseModule(per_domain_scan).filter_event)
+
+        if "URL" in module.watched_events:
+            url_1 = per_domain_scan.make_event(
+                "http://www.evilcorp.com/1", event_type="URL", source=per_domain_scan.root_event, tags=["status-200"]
+            )
+            url_1.set_scope_distance(0)
+            url_2 = per_domain_scan.make_event(
+                "http://mail.evilcorp.com/2", event_type="URL", source=per_domain_scan.root_event, tags=["status-200"]
+            )
+            url_2.set_scope_distance(0)
+            valid_1, reason_1 = await module._event_postcheck(url_1)
+            valid_2, reason_2 = await module._event_postcheck(url_2)
+
+            if module.per_domain_only == True:
+                assert valid_1 == True
+                assert valid_2 == False
+                assert hash("evilcorp.com") in module._per_host_tracker
+                assert reason_2 == "per_domain_only enabled and already seen domain"
 
         else:
             assert valid_1 == True
diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py
index 1392e557a..a4562cfc7 100644
--- a/bbot/test/test_step_2/module_tests/base.py
+++ b/bbot/test/test_step_2/module_tests/base.py
@@ -105,7 +105,7 @@ async def module_test(self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, mo
         module_test = self.ModuleTest(self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request)
         module_test.log.info(f"Starting {self.name} module test")
         await self.setup_before_prep(module_test)
-        await module_test.scan.prep()
+        await module_test.scan._prep()
         await self.setup_after_prep(module_test)
         module_test.events = [e async for e in module_test.scan.async_start()]
         yield module_test
diff --git a/bbot/test/test_step_2/module_tests/test_module_filedownload.py b/bbot/test/test_step_2/module_tests/test_module_filedownload.py
new file mode 100644
index 000000000..e4471d159
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_filedownload.py
@@ -0,0 +1,68 @@
+from .base import ModuleTestBase
+
+
+class TestFileDownload(ModuleTestBase):
+    targets = ["http://127.0.0.1:8888"]
+    modules_overrides = ["filedownload", "httpx", "excavate", "speculate"]
+    config_overrides = {"web_spider_distance": 2, "web_spider_depth": 2}
+
+    pdf_data = """%PDF-1.
+1 0 obj<>endobj
+2 0 obj<>endobj
+3 0 obj<>endobj
+trailer <>"""
+
+    async def setup_before_prep(self, module_test):
+        module_test.httpx_mock.add_response(
+            url="https://raw.githubusercontent.com/jshttp/mime-db/master/db.json",
+            json={
+                "application/pdf": {"source": "iana", "compressible": False, "extensions": ["pdf"]},
+            },
+        )
+
+    async def setup_after_prep(self, module_test):
+        module_test.set_expect_requests(
+            dict(uri="/"),
+            dict(
+                response_data='<a href="/Test_File.txt"/><a href="/Test_PDF"/><a href="/test.html"/><a href="/test2"/>'
+            ),
+        )
+        module_test.set_expect_requests(
+            dict(uri="/Test_File.txt"),
+            dict(
+                response_data="juicy stuff",
+            ),
+        )
+        module_test.set_expect_requests(
+            dict(uri="/Test_PDF"),
+            dict(response_data=self.pdf_data, headers={"Content-Type": "application/pdf"}),
+        )
+        module_test.set_expect_requests(
+            dict(uri="/test.html"),
+            dict(response_data="", headers={"Content-Type": "text/html"}),
+        )
+        module_test.set_expect_requests(
+            dict(uri="/test2"),
+            dict(response_data="", headers={"Content-Type": "text/html"}),
+        )
+
+    def check(self, module_test, events):
+        download_dir = module_test.scan.home / "filedownload"
+
+        # text file
+        text_files = list(download_dir.glob("*test-file.txt"))
+        assert len(text_files) == 1, f"No text file found at {download_dir}"
+        file = text_files[0]
+        assert file.is_file(), f"File not found at {file}"
+        assert open(file).read() == "juicy stuff", f"File at {file} does not contain the correct content"
+
+        # PDF file (no extension)
+        pdf_files = list(download_dir.glob("*test-pdf.pdf"))
+        assert len(pdf_files) == 1, f"No PDF file found at {download_dir}"
+        file = pdf_files[0]
+        assert file.is_file(), f"File not found at {file}"
+        assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content"
+
+        # we don't want html files
+        html_files = list(download_dir.glob("*.html"))
+        assert len(html_files) == 0, "HTML files were erroneously downloaded"
diff --git a/bbot/test/test_step_2/module_tests/test_module_leakix.py b/bbot/test/test_step_2/module_tests/test_module_leakix.py
index b6bfbd8bf..aad4a095c 100644
--- a/bbot/test/test_step_2/module_tests/test_module_leakix.py
+++ b/bbot/test/test_step_2/module_tests/test_module_leakix.py
@@ -24,3 +24,22 @@ async def setup_before_prep(self, module_test):
 
     def check(self, module_test, events):
         assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain"
+
+
+class TestLeakIX_NoAPIKey(ModuleTestBase):
+    modules_overrides = ["leakix"]
+
+    async def setup_before_prep(self, module_test):
+        module_test.httpx_mock.add_response(
+            url="https://leakix.net/api/subdomains/blacklanternsecurity.com",
+            json=[
+                {
+                    "subdomain": "asdf.blacklanternsecurity.com",
+                    "distinct_ips": 3,
+                    "last_seen": "2023-04-02T09:38:30.02Z",
+                },
+            ],
+        )
+
+    def check(self, module_test, events):
+        assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain"
diff --git a/docs/contribution.md b/docs/contribution.md
index 2bc585898..65b074adb 100644
--- a/docs/contribution.md
+++ b/docs/contribution.md
@@ -94,7 +94,7 @@ This will produce the output:
 [INFO] Finishing scan
 ```
 
-But something's wrong! We're emitting `IP_ADDRESS` [events](./scanning/events/), but they're not showing up in the output. This is because by default, BBOT only shows in-scope [events](./scanning/events/). To see them, we need to increase the report distance:
+But something's wrong! We're emitting `IP_ADDRESS` [events](./scanning/events.md), but they're not showing up in the output.
This is because by default, BBOT only shows in-scope [events](./scanning/events.md). To see them, we need to increase the report distance:
 
 ```bash
 # run the module again but with a higher report distance
@@ -123,9 +123,9 @@ Now, with the `report_distance=1`:
 
 ### `handle_event()` and `emit_event()`
 
-The `handle_event()` method is the most important part of the module. By overriding this method, you control what the module does. During a scan, when an [event](./scanning/events/) from your `watched_events` is encountered (a `DNS_NAME` in this example), `handle_event()` is automatically called with that [event](./scanning/events/).
+The `handle_event()` method is the most important part of the module. By overriding this method, you control what the module does. During a scan, when an [event](./scanning/events.md) from your `watched_events` is encountered (a `DNS_NAME` in this example), `handle_event()` is automatically called with that event.
 
-The `emit_event()` method is how modules return data. When you call `emit_event()`, it creates an [event](./scanning/events/) and prints it to the console. It also distributes it any modules that are interested in that data type.
+The `emit_event()` method is how modules return data. When you call `emit_event()`, it creates an [event](./scanning/events.md) and prints it to the console. It also distributes it to any modules that are interested in that data type.
 
 ### Module Dependencies
 
diff --git a/docs/dev/basemodule.md b/docs/dev/basemodule.md
new file mode 100644
index 000000000..04e59042e
--- /dev/null
+++ b/docs/dev/basemodule.md
@@ -0,0 +1 @@
+::: bbot.modules.base.BaseModule
diff --git a/docs/dev/event.md b/docs/dev/event.md
new file mode 100644
index 000000000..79f0cc7cb
--- /dev/null
+++ b/docs/dev/event.md
@@ -0,0 +1,16 @@
+This is a developer reference. For a high-level description of BBOT events, including a full list of event types, see [Events](../../scanning/events)
+
+::: bbot.core.event.base.make_event
+::: bbot.core.event.base.event_from_json
+
+::: bbot.core.event.base.BaseEvent
+    options:
+      members:
+        - __init__
+        - json
+        - from_json
+        - pretty_string
+        - module_sequence
+        - make_internal
+        - unmake_internal
+        - set_scope_distance
diff --git a/docs/dev/helpers/misc.md b/docs/dev/helpers/misc.md
new file mode 100644
index 000000000..3a95dc0d9
--- /dev/null
+++ b/docs/dev/helpers/misc.md
@@ -0,0 +1,7 @@
+# Misc Helpers
+
+These are miscellaneous helpers, used throughout BBOT and its modules for simple tasks such as parsing domains, ports, URLs, etc.
+
+::: bbot.core.helpers.misc
+    options:
+      show_root_heading: false
diff --git a/docs/dev/scanner.md b/docs/dev/scanner.md
new file mode 100644
index 000000000..a03de4e4b
--- /dev/null
+++ b/docs/dev/scanner.md
@@ -0,0 +1 @@
+::: bbot.scanner.Scanner
diff --git a/docs/dev/target.md b/docs/dev/target.md
new file mode 100644
index 000000000..b2e4bffe3
--- /dev/null
+++ b/docs/dev/target.md
@@ -0,0 +1 @@
+::: bbot.scanner.target.Target
diff --git a/docs/how_it_works.md b/docs/how_it_works.md
index 3423e28a8..1d42389d4 100644
--- a/docs/how_it_works.md
+++ b/docs/how_it_works.md
@@ -40,4 +40,4 @@ This allows for some interesting chains of events. Given a single target such as
 
 This is a simple example with only a few modules, but you can begin to see how, if 30 or 40 modules were enabled, they could feed each other exponentially to produce an immense amount of data. This recursion is exactly how BBOT is able to outperform other tools.
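+To make that feedback loop concrete, here is a toy sketch of the idea (not BBOT's actual engine; the module names and handlers are made up for illustration):
+
+```python
+# Toy illustration of a recursive event loop with deduplication; not BBOT's real implementation.
+from collections import deque
+
+# hypothetical modules: each watches certain event types and emits new events
+MODULES = {
+    "dns_brute": ({"DNS_NAME"}, lambda data: [("DNS_NAME", "mail.evilcorp.com")]),
+    "resolver": ({"DNS_NAME"}, lambda data: [("IP_ADDRESS", "1.2.3.4")]),
+    "portscan": ({"IP_ADDRESS"}, lambda data: [("OPEN_TCP_PORT", data + ":443")]),
+}
+
+queue = deque([("DNS_NAME", "evilcorp.com")])
+seen = set()  # deduplication is what keeps the recursion finite
+while queue:
+    event = queue.popleft()
+    if event in seen:
+        continue
+    seen.add(event)
+    event_type, data = event
+    print(event)
+    for watched, handler in MODULES.values():
+        if event_type in watched:
+            # emitted events feed back into the queue for every interested module
+            queue.extend(handler(data))
+```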
-For a full list of event types and which modules consume/produce them, see [List of Event Types](../scanning/events/#list-of-event-types). +For a full list of event types and which modules consume/produce them, see [List of Event Types](scanning/events.md#list-of-event-types). diff --git a/docs/index.md b/docs/index.md index ac0017212..f5ef3b332 100644 --- a/docs/index.md +++ b/docs/index.md @@ -113,6 +113,6 @@ Or on the command-line: bbot -t evilcorp.com -f subdomain-enum -c modules.shodan_dns.api_key=deadbeef modules.virustotal.api_key=cafebabe ``` -For more information, see [Configuration](./scanning/configuration/). For a full list of modules, including which ones require API keys, see [List of Modules](./modules/list_of_modules/). +For more information, see [Configuration](./scanning/configuration.md). For a full list of modules, including which ones require API keys, see [List of Modules](./modules/list_of_modules.md). -[Next Up: Scanning -->](./scanning/){ .md-button .md-button--primary } +[Next Up: Scanning -->](./scanning/index.md){ .md-button .md-button--primary } diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 391f74447..59401954d 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -105,4 +105,4 @@ | speculate | internal | No | Derive certain event types from others by common sense | passive | DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, STORAGE_BUCKET, URL, URL_UNVERIFIED | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT | -For a list of module config options, see [Module Options](../configurations/#module-config-options). +For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 66d86991c..c85796860 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -7,7 +7,7 @@ For a list of all possible config options, see: - [Global Options](#global-config-options) - [Module Options](#module-config-options) -For examples of common config changes, see [Tips and Tricks](../tips_and_tricks/). +For examples of common config changes, see [Tips and Tricks](tips_and_tricks.md). ## Configuration Files diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 0f0487073..bf6f5de73 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -42,6 +42,8 @@ These attributes allow us to construct a visual graph of events (e.g. in [Neo4j] } ``` +For a more detailed description of BBOT events, see [Developer Documentation - Event](../../dev/event). + Below is a full list of event types along with which modules produce/consume them. ## List of Event Types diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 7aed1e006..f47c50ba9 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -40,13 +40,13 @@ https://www.evilcorp.co.uk $ bbot -t targets.txt fsociety.com 5.6.7.0/24 -m nmap ``` -On start, BBOT automatically converts Targets into [Events](./events/). +On start, BBOT automatically converts Targets into [Events](events.md). ## Modules (`-m`) -To see a full list of modules and their descriptions, use `bbot -l` or see [List of Modules](./list_of_modules/). +To see a full list of modules and their descriptions, use `bbot -l` or see [List of Modules](../modules/list_of_modules.md). -Modules are the part of BBOT that does the work -- port scanning, subdomain brute-forcing, API querying, etc. 
Modules consume [Events](./events/) (`IP_ADDRESS`, `DNS_NAME`, etc.) from each other, process the data in a useful way, then emit the results as new events. You can enable individual modules with `-m`.
+Modules are the part of BBOT that does the work -- port scanning, subdomain brute-forcing, API querying, etc. Modules consume [Events](events.md) (`IP_ADDRESS`, `DNS_NAME`, etc.) from each other, process the data in a useful way, then emit the results as new events. You can enable individual modules with `-m`.
 
 ```bash
 # Enable modules: nmap, sslcert, and httpx
@@ -60,14 +60,14 @@ Modules fall into three categories:
 
 - **Scan Modules**:
     - These make up the majority of modules. Examples are `nmap`, `sslcert`, `httpx`, etc. Enable with `-m`.
 - **Output Modules**:
-    - These output scan data to different formats/destinations. `human`, `json`, and `csv` are enabled by default. Enable others with `-om`. (See: [Output](./output/))
+    - These output scan data to different formats/destinations. `human`, `json`, and `csv` are enabled by default. Enable others with `-om`. (See: [Output](output.md))
 - **Internal Modules**:
     - These modules perform essential, common-sense tasks. They are always enabled, unless explicitly disabled via the config (e.g. `-c speculate=false`).
         - `aggregate`: Summarizes results at the end of a scan
        - `excavate`: Extracts useful data such as subdomains from webpages, etc.
        - `speculate`: Intelligently infers new events, e.g. `OPEN_TCP_PORT` from `URL` or `IP_ADDRESS` from `IP_NETWORK`.
 
-For details in the inner workings of modules, see [Creating a Module](../contribution/#creating-a-module).
+For details on the inner workings of modules, see [Creating a Module](../contribution.md#creating-a-module).
 
 ## Flags (`-f`)
 
@@ -141,7 +141,7 @@ BBOT modules have external dependencies ranging from OS packages (`openssl`) to
 
 - `--ignore-failed-deps` - Run modules even if they have failed dependencies
 - `--install-all-deps` - Install dependencies for all modules (useful if you are provisioning a pentest system and want to install everything ahead of time)
 
-For details on how Ansible playbooks are attached to BBOT modules, see [How to Write a Module](../contribution/#module-dependencies).
+For details on how Ansible playbooks are attached to BBOT modules, see [How to Write a Module](../contribution.md#module-dependencies).
 
 ## Scope
 
@@ -151,15 +151,15 @@ By default, scope is whatever you specify with `-t`. This includes child subdoma
 
 ### Scope Distance
 
-Since BBOT is recursive, it would quickly resort to scanning the entire internet without some kind of restraining mechanism. To solve this problem, every [event](./events/) discovered by BBOT is assigned a **Scope Distance**. Scope distance represents how far out from the main scope that data was discovered.
+Since BBOT is recursive, it would quickly resort to scanning the entire internet without some kind of restraining mechanism. To solve this problem, every [event](events.md) discovered by BBOT is assigned a **Scope Distance**. Scope distance represents how far out from the main scope that data was discovered.
 
 For example, if your target is `evilcorp.com`, `www.evilcorp.com` would have a scope distance of `0` (i.e. in-scope). If BBOT discovers that `www.evilcorp.com` resolves to `1.2.3.4`, `1.2.3.4` is one hop away, which means it would have a scope distance of `1`. If `1.2.3.4` has a PTR record that points to `ecorp.blob.core.windows.net`, `ecorp.blob.core.windows.net` is two hops away, so its scope distance is `2`.
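+A minimal sketch of this bookkeeping (not BBOT's actual implementation; the function and variable names are made up for illustration):
+
+```python
+# Minimal sketch of scope-distance propagation; not BBOT's actual implementation.
+def scope_distance(new_host, source_host, source_distance, in_scope_hosts):
+    """A child event inherits its source's distance, plus 1 if the host changed."""
+    if new_host in in_scope_hosts:
+        return 0  # in-scope data always snaps back to distance 0
+    if new_host == source_host:
+        return source_distance
+    return source_distance + 1
+
+scope = {"evilcorp.com", "www.evilcorp.com"}
+d0 = scope_distance("www.evilcorp.com", "evilcorp.com", 0, scope)         # 0 (in-scope)
+d1 = scope_distance("1.2.3.4", "www.evilcorp.com", d0, scope)             # 1 (one hop out)
+d2 = scope_distance("ecorp.blob.core.windows.net", "1.2.3.4", d1, scope)  # 2 (two hops out)
+```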
-Scope distance continues to increase the further out you get. Most modules (e.g. `nuclei` and `nmap`) only consume in-scope events. Certain other passive modules such as `asn` accept out to distance `1`. By default, DNS resolution happens out to a distance of `2`. Upon its discovery, any [event](./events/) that's determined to be in-scope (e.g. `www.evilcorp.com`) immediately becomes distance `0`, and the cycle starts over.
+Scope distance continues to increase the further out you get. Most modules (e.g. `nuclei` and `nmap`) only consume in-scope events. Certain other passive modules such as `asn` accept out to distance `1`. By default, DNS resolution happens out to a distance of `2`. Upon its discovery, any [event](events.md) that's determined to be in-scope (e.g. `www.evilcorp.com`) immediately becomes distance `0`, and the cycle starts over.
 
 #### Displaying Out-of-scope Events
 
-By default, BBOT only displays in-scope events (with a few exceptions such as `STORAGE_BUCKET`s). If you want to see more, you must increase the [config](./configuration/) value of `scope_report_distance`:
+By default, BBOT only displays in-scope events (with a few exceptions such as `STORAGE_BUCKET`s). If you want to see more, you must increase the [config](configuration.md) value of `scope_report_distance`:
 
 ```bash
 # display out-of-scope events up to one hop away from the main scope
@@ -206,7 +206,7 @@ Wildcard hosts are collapsed into a single host beginning with `_wildcard`:
     ^^^^^^^^^
 ```
 
-If you don't want this, you can disable wildcard detection on a domain-to-domain basis in the [config](./configuration/):
+If you don't want this, you can disable wildcard detection on a domain-to-domain basis in the [config](configuration.md):
 
 ```yaml title="~/.bbot/config/bbot.yml"
 dns_wildcard_ignore:
diff --git a/docs/scanning/output.md b/docs/scanning/output.md
index e1c042a27..a425bb4fd 100644
--- a/docs/scanning/output.md
+++ b/docs/scanning/output.md
@@ -42,7 +42,7 @@ If you manually enable the `json` output module, it will go to stdout:
 bbot -t evilcorp.com -om json | jq
 ```
 
-You will then see [events](./events) like this:
+You will then see [events](events.md) like this:
 
 ```json
 {
@@ -114,7 +114,7 @@ output_modules:
 
 ### HTTP
 
-The `http` output module sends [events](./events) in JSON format to a desired HTTP endpoint.
+The `http` output module sends [events](events.md) in JSON format to a desired HTTP endpoint.
 
 ```bash
 # POST scan results to localhost
diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md
index f8afedbfd..aaafb15ca 100644
--- a/docs/scanning/tips_and_tricks.md
+++ b/docs/scanning/tips_and_tricks.md
@@ -24,9 +24,9 @@ The web spider is great for finding juicy data like subdomains, email addresses,
 
 The web spider is controlled with three config values:
 
-- `web_spider_distance` (`0` == all spidering disabled, default: `0`): the maximum number of links that can be followed in a row. This is designed to limit the spider in cases where `web_spider_depth` fails (e.g. for an ecommerce website with thousands of base-level URLs).
 - `web_spider_depth` (default: `1`): the maximum directory depth allowed. This is to prevent the spider from delving too deep into a website.
-- `web_spider_links_per_page` (default: `25`): the maximum number of links per page that can be followed. This is designed specifically for cases where a single page has hundreds or thousands of links.
+- `web_spider_distance` (`0` == all spidering disabled, default: `0`): the maximum number of links that can be followed in a row. This is designed to limit the spider in cases where `web_spider_depth` fails (e.g. for an ecommerce website with thousands of base-level URLs).
+- `web_spider_links_per_page` (default: `25`): the maximum number of links per page that can be followed. This is designed to save you in cases where a single page has hundreds or thousands of links.
 
 Here is a typical example:
 
@@ -87,7 +87,7 @@ bbot -m httpx gowitness wappalyzer -t urls.txt -c dns_resolution=false
 
 `URL_UNVERIFIED` events are URLs that haven't yet been visited by `httpx`. Once `httpx` visits them, it reraises them as `URL`s, tagged with their resulting status code.
 
-For example, when [`excavate`](../#types-of-modules) gets an `HTTP_RESPONSE` event, it extracts links from the raw HTTP response as `URL_UNVERIFIED`s and then passes them back to `httpx` to be visited.
+For example, when [`excavate`](index.md#types-of-modules) gets an `HTTP_RESPONSE` event, it extracts links from the raw HTTP response as `URL_UNVERIFIED`s and then passes them back to `httpx` to be visited.
 
 By default, `URL_UNVERIFIED`s are hidden from output. If you want to see all of them including the out-of-scope ones, you can do it by changing `omit_event_types` and `scope_report_distance` in the config like so:
diff --git a/mkdocs.yml b/mkdocs.yml
index a6ae600fa..4bec84f24 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -7,6 +7,10 @@ site_description: >-
 # Repository
 repo_name: blacklanternsecurity/bbot
 repo_url: https://github.com/blacklanternsecurity/bbot
+watch:
+  - "mkdocs.yml"
+  - "bbot"
+  - "docs"
 
 # Page tree
 nav:
@@ -26,6 +30,19 @@ nav:
     - Nuclei: modules/nuclei.md
   - Contribution:
     - How to Write a Module: contribution.md
+  - Developer Reference:
+    - Scanner: dev/scanner.md
+    - Event: dev/event.md
+    - Target: dev/target.md
+    - BaseModule: dev/basemodule.md
+    - Helpers:
+      # dev/helpers/index.md
+      - Command: dev/helpers/command.md
+      - DNS: dev/helpers/dns.md
+      - Interactsh: dev/helpers/interactsh.md
+      - Miscellaneous: dev/helpers/misc.md
+      - Web: dev/helpers/web.md
+      - Word Cloud: dev/helpers/wordcloud.md
   - Misc:
     - Release History: release_history.md
     - Troubleshooting: troubleshooting.md
@@ -46,6 +63,24 @@ theme:
 plugins:
   - search
   - extra-sass
+  - mkdocstrings:
+      enable_inventory: true
+      handlers:
+        python:
+          options:
+            heading_level: 1
+            show_signature_annotations: true
+            show_root_toc_entry: false
+            show_root_heading: true
+            show_root_full_path: false
+            separate_signature: true
+            docstring_section_style: "list"
+            filters:
+              - "!^_"
+              - "^__init__$"
+          import:
+            - https://docs.python.org/3.11/objects.inv
+            - https://omegaconf.readthedocs.io/en/latest/objects.inv
 
 markdown_extensions:
   - attr_list
diff --git a/poetry.lock b/poetry.lock
index edda4d005..2f1daa6c8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,27 +2,27 @@
 
 [[package]]
 name = "ansible"
-version = "7.5.0"
+version = "7.7.0"
 description = "Radically simple IT automation"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "ansible-7.5.0-py3-none-any.whl", hash = "sha256:a2deadeb8a199abfbd7c1960bc126697be517ac4310b2f59eb2190706e6a2637"},
-    {file = "ansible-7.5.0.tar.gz", hash = "sha256:4f08ca25bb29005c1afc4125e837882ad7a2c67ff0cc9d1a361b89ad09cf8c44"},
+    {file = "ansible-7.7.0-py3-none-any.whl", hash = "sha256:4f8d346db1b6cec0f30b77935b3dce5633d76881186da839b58b34b48a089b92"},
+    {file = "ansible-7.7.0.tar.gz", hash =
"sha256:9c206ba515f13a0cc9c919d496218ba26df581755bdc39be85b074066c699a02"}, ] [package.dependencies] -ansible-core = ">=2.14.5,<2.15.0" +ansible-core = ">=2.14.7,<2.15.0" [[package]] name = "ansible-core" -version = "2.14.6" +version = "2.14.10" description = "Radically simple IT automation" optional = false python-versions = ">=3.9" files = [ - {file = "ansible-core-2.14.6.tar.gz", hash = "sha256:0cddb0df454561981f1c541db7ac5398d5e9de452ea1f01847acbd031fd7d2b2"}, - {file = "ansible_core-2.14.6-py3-none-any.whl", hash = "sha256:08963309f44cd98862aba8d887ac5c2b4159cd1c2e31ac9cf47d661e985e6bb9"}, + {file = "ansible-core-2.14.10.tar.gz", hash = "sha256:2c5d26d0f8d152020dd92d98f595f63c248a1997b8d74c5c3fb0d2408ec5a487"}, + {file = "ansible_core-2.14.10-py3-none-any.whl", hash = "sha256:97ab6c2d62940a7a02c4f2505aad5c5e30bb486e8122899c5c147b16e98ef51e"}, ] [package.dependencies] @@ -34,16 +34,17 @@ resolvelib = ">=0.5.3,<0.9.0" [[package]] name = "ansible-runner" -version = "2.3.2" +version = "2.3.4" description = "\"Consistent Ansible Python API and CLI with container and process isolation runtime capabilities\"" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "ansible-runner-2.3.2.tar.gz", hash = "sha256:c420e76ba18311d6350c8982fc3c0519b00624654053e538b0ea630651b08921"}, - {file = "ansible_runner-2.3.2-py3-none-any.whl", hash = "sha256:21f94eeaa536e19ab3913ad882c0722c86aad9cb371eebf99361b8c1fb38ee8c"}, + {file = "ansible-runner-2.3.4.tar.gz", hash = "sha256:79a1bd134d813c8ea3740599c6fd961a11425ce7757f2fd725cf56d6a1a7236c"}, + {file = "ansible_runner-2.3.4-py3-none-any.whl", hash = "sha256:73cca4fe509c8f4f0e93bf8ae13492c98454c62152685e43bdbd0f51907682bb"}, ] [package.dependencies] +importlib-metadata = {version = ">=4.6,<6.3", markers = "python_version < \"3.10\""} packaging = "*" pexpect = ">=4.5" python-daemon = "*" @@ -110,6 +111,17 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -176,28 +188,38 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cattrs" -version = "22.2.0" +version = "23.1.2" description = "Composable complex class support for attrs and dataclasses." 
optional = false python-versions = ">=3.7" files = [ - {file = "cattrs-22.2.0-py3-none-any.whl", hash = "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21"}, - {file = "cattrs-22.2.0.tar.gz", hash = "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d"}, + {file = "cattrs-23.1.2-py3-none-any.whl", hash = "sha256:b2bb14311ac17bed0d58785e5a60f022e5431aca3932e3fc5cc8ed8639de50a4"}, + {file = "cattrs-23.1.2.tar.gz", hash = "sha256:db1c821b8c537382b2c7c66678c3790091ca0275ac486c76f3c8f3920e83c657"}, ] [package.dependencies] attrs = ">=20" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.2.0,<5.0.0)"] +cbor2 = ["cbor2 (>=5.4.6,<6.0.0)"] +msgpack = ["msgpack (>=1.0.2,<2.0.0)"] +orjson = ["orjson (>=3.5.2,<4.0.0)"] +pyyaml = ["PyYAML (>=6.0,<7.0)"] +tomlkit = ["tomlkit (>=0.11.4,<0.12.0)"] +ujson = ["ujson (>=5.4.0,<6.0.0)"] [[package]] name = "certifi" -version = "2023.5.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] @@ -289,97 +311,97 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + 
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "click" -version = "8.1.3" +version = 
"8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -413,62 +435,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.5" +version = "7.3.1" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = 
"coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = 
"coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, ] [package.dependencies] @@ -479,30 +502,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
=3.6"">
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 files = [
-    {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"},
-    {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"},
-    {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"},
-    {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"},
-    {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"},
-    {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"},
-    {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"},
-    {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"},
-    {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"},
-    {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"},
-    {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"},
-    {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"},
-    {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"},
-    {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"},
-    {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"},
-    {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"},
-    {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"},
-    {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"},
-    {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"},
+    {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"},
+    {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"},
+    {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"},
+    {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"},
+    {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"},
+    {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"},
+    {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"},
+    {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"},
+    {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"},
+    {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"},
+    {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"},
+    {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"},
+    {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"},
+    {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"},
+    {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"},
+    {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"},
+    {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"},
+    {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"},
+    {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"},
+    {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"},
+    {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"},
+    {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"},
+    {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"},
 ]
 
 [package.dependencies]
@@ -511,29 +538,29 @@ cffi = ">=1.12"
 [package.extras]
 docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
 docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
-pep8test = ["black", "check-manifest", "mypy", "ruff"]
-sdist = ["setuptools-rust (>=0.11.4)"]
+nox = ["nox"]
+pep8test = ["black", "check-sdist", "mypy", "ruff"]
+sdist = ["build"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"]
+test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
 test-randomorder = ["pytest-randomly"]
-tox = ["tox"]
 
 [[package]]
 name = "deepdiff"
-version = "6.3.0"
+version = "6.5.0"
 description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other."
 optional = false
 python-versions = ">=3.7"
files = [
-    {file = "deepdiff-6.3.0-py3-none-any.whl", hash = "sha256:15838bd1cbd046ce15ed0c41e837cd04aff6b3e169c5e06fca69d7aa11615ceb"},
-    {file = "deepdiff-6.3.0.tar.gz", hash = "sha256:6a3bf1e7228ac5c71ca2ec43505ca0a743ff54ec77aa08d7db22de6bc7b2b644"},
+    {file = "deepdiff-6.5.0-py3-none-any.whl", hash = "sha256:acdc1651a3e802415e0337b7e1192df5cd7c17b72fbab480466fdd799b9a72e7"},
+    {file = "deepdiff-6.5.0.tar.gz", hash = "sha256:080b1359d6128f3f5f1738c6be3064f0ad9b0cc41994aa90a028065f6ad11f25"},
 ]
 
 [package.dependencies]
 ordered-set = ">=4.0.2,<4.2.0"
 
 [package.extras]
-cli = ["click (==8.1.3)", "pyyaml (==6.0)"]
+cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"]
 optimize = ["orjson"]
 
 [[package]]
@@ -579,13 +606,13 @@ files = [
 
 [[package]]
 name = "dunamai"
-version = "1.17.0"
+version = "1.18.0"
 description = "Dynamic version generation"
 optional = false
 python-versions = ">=3.5,<4.0"
 files = [
-    {file = "dunamai-1.17.0-py3-none-any.whl", hash = "sha256:5aa4ac1085de10691269af021b10497261a5dd644f277e2a21822212604d877b"},
-    {file = "dunamai-1.17.0.tar.gz", hash = "sha256:459381b585a1e78e4070f0d38a6afb4d67de2ee95064bf6b0438ec620dde0820"},
+    {file = "dunamai-1.18.0-py3-none-any.whl", hash = "sha256:f9284a9f4048f0b809d11539896e78bde94c05b091b966a04a44ab4c48df03ce"},
+    {file = "dunamai-1.18.0.tar.gz", hash = "sha256:5200598561ea5ba956a6174c36e402e92206c6a6aa4a93a6c5cb8003ee1e0997"},
 ]
 
 [package.dependencies]
@@ -593,13 +620,13 @@ packaging = ">=20.9"
 
 [[package]]
 name = "exceptiongroup"
-version = "1.1.1"
+version = "1.1.3"
 description = "Backport of PEP 654 (exception groups)"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
-    {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
+    {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
+    {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
 ]
 
 [package.extras]
@@ -607,18 +634,19 @@ test = ["pytest (>=6)"]
 
 [[package]]
 name = "filelock"
-version = "3.12.0"
+version = "3.12.4"
 description = "A platform independent file lock."
=3.7"">
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"},
-    {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"},
+    {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
+    {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
 ]
 
 [package.extras]
-docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
+docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
+typing = ["typing-extensions (>=4.7.1)"]
 
 [[package]]
 name = "flake8"
@@ -636,6 +664,37 @@ mccabe = ">=0.7.0,<0.8.0"
 pycodestyle = ">=2.11.0,<2.12.0"
 pyflakes = ">=3.1.0,<3.2.0"
 
+[[package]]
+name = "ghp-import"
+version = "2.1.0"
+description = "Copy your docs directly to the gh-pages branch."
+optional = false
+python-versions = "*"
+files = [
+    {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
+    {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.8.1"
+
+[package.extras]
+dev = ["flake8", "markdown", "twine", "wheel"]
+
+[[package]]
+name = "griffe"
+version = "0.36.2"
+description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "griffe-0.36.2-py3-none-any.whl", hash = "sha256:ba71895a3f5f606b18dcd950e8a1f8e7332a37f90f24caeb002546593f2e0eee"},
+    {file = "griffe-0.36.2.tar.gz", hash = "sha256:333ade7932bb9096781d83092602625dfbfe220e87a039d2801259a1bd41d1c2"},
+]
+
+[package.dependencies]
+colorama = ">=0.4"
+
 [[package]]
 name = "h11"
 version = "0.14.0"
@@ -754,6 +813,25 @@ files = [
     {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
 ]
 
+[[package]]
+name = "importlib-metadata"
+version = "6.2.1"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "importlib_metadata-6.2.1-py3-none-any.whl", hash = "sha256:f65e478a7c2177bd19517a3a15dac094d253446d8690c5f3e71e735a04312374"},
+    {file = "importlib_metadata-6.2.1.tar.gz", hash = "sha256:5a66966b39ff1c14ef5b2d60c1d842b0141fefff0f4cc6365b4bc9446c652807"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+
 [[package]]
 name = "iniconfig"
 version = "2.0.0"
@@ -782,6 +860,20 @@ MarkupSafe = ">=2.0"
 [package.extras]
 i18n = ["Babel (>=2.7)"]
 
+[[package]]
+name = "libsass"
+version = "0.22.0"
+description = "Sass for Python: A straightforward binding of libsass for Python."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "libsass-0.22.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f1efc1b612299c88aec9e39d6ca0c266d360daa5b19d9430bdeaffffa86993f9"},
+    {file = "libsass-0.22.0-cp37-abi3-macosx_10_15_x86_64.whl", hash = "sha256:081e256ab3c5f3f09c7b8dea3bf3bf5e64a97c6995fd9eea880639b3f93a9f9a"},
+    {file = "libsass-0.22.0-cp37-abi3-win32.whl", hash = "sha256:89c5ce497fcf3aba1dd1b19aae93b99f68257e5f2026b731b00a872f13324c7f"},
+    {file = "libsass-0.22.0-cp37-abi3-win_amd64.whl", hash = "sha256:65455a2728b696b62100eb5932604aa13a29f4ac9a305d95773c14aaa7200aaf"},
+    {file = "libsass-0.22.0.tar.gz", hash = "sha256:3ab5ad18e47db560f4f0c09e3d28cf3bb1a44711257488ac2adad69f4f7f8425"},
+]
+
 [[package]]
 name = "lockfile"
 version = "0.12.2"
@@ -795,153 +887,196 @@ files = [
 
 [[package]]
 name = "lxml"
-version = "4.9.2"
+version = "4.9.3"
 description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
=2.7,">
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
 files = [
-    {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"},
-    {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"},
-    {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"},
-    {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"},
-    {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"},
-    {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"},
-    {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"},
-    {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"},
-    {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"},
-    {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"},
-    {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"},
-    {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"},
-    {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"},
-    {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"},
-    {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"},
-    {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"},
-    {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"},
-    {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"},
-    {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"},
-    {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"},
-    {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"},
-    {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"},
-    {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"},
-    {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"},
-    {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"},
-    {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"},
-    {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"},
-    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"},
-    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"},
-    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"},
-    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"},
-    {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"},
-    {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"},
-    {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"},
-    {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"},
-    {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"},
-    {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"},
-    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"},
-    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"},
-    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"},
-    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"},
-    {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"},
-    {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"},
-    {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"},
-    {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"},
-    {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"},
-    {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"},
-    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"},
-    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"},
-    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"},
-    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"},
-    {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"},
-    {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"},
-    {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"},
-    {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"},
-    {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"},
-    {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"},
-    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"},
-    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"},
-    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"},
-    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"},
-    {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"},
-    {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"},
-    {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"},
-    {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"},
-    {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"},
-    {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"},
-    {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"},
-    {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"},
-    {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"},
-    {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"},
-    {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"},
-    {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"},
-    {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"},
-    {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"},
-    {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"},
-    {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"},
+    {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"},
+    {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"},
+    {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"},
+    {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"},
+    {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"},
+    {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"},
+    {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"},
+    {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"},
+    {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"},
+    {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"},
+    {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"},
+    {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"},
+    {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"},
+    {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"},
+    {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"},
+    {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"},
+    {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"},
+    {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"},
+    {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"},
+    {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"},
+    {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"},
+    {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"},
+    {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"},
+    {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"},
+    {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"},
+    {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"},
+    {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"},
+    {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"},
+    {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"},
+    {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"},
+    {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"},
+    {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"},
+    {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"},
+    {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"},
+    {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"},
+    {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"},
+    {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"},
+    {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"},
+    {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"},
+    {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"},
+    {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"},
+    {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"},
+    {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"},
+    {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"},
+    {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"},
+    {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"},
+    {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"},
+    {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"},
+    {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"},
+    {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"},
+    {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"},
+    {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"},
+    {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"},
+    {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"},
+    {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"},
+    {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"},
+    {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"},
+    {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"},
+    {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"},
+    {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"},
+    {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"},
+    {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"},
+    {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"},
+    {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"},
+    {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"},
+    {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"},
+    {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"},
+    {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"},
+    {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"},
+    {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"},
+    {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"},
+    {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"},
+    {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"},
+    {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"},
+    {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"},
+    {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"},
+    {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"},
+    {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"},
+    {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"},
+    {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"},
+    {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"},
+    {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"},
+    {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"},
+    {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"},
+    {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"},
+    {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"},
+    {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"},
+    {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"},
+    {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"},
+    {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"},
+    {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"},
+    {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"},
 ]
 
 [package.extras]
 cssselect = ["cssselect (>=0.7)"]
 html5 = ["html5lib"]
 htmlsoup = ["BeautifulSoup4"]
-source = ["Cython (>=0.29.7)"]
+source = ["Cython (>=0.29.35)"]
+
+[[package]]
+name = "markdown"
+version = "3.4.4"
+description = "Python implementation of John Gruber's Markdown."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"},
+    {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"]
+testing = ["coverage", "pyyaml"]
 
 [[package]]
 name = "markupsafe"
-version = "2.1.2"
+version = "2.1.3"
 description = "Safely add untrusted strings to HTML/XML markup."
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] @@ -955,6 +1090,160 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "mkdocs" +version = "1.5.3" +description = "Project documentation with Markdown." +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +jinja2 = ">=2.11.1" +markdown = ">=3.2.1" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +pathspec = ">=0.11.1" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.5.0" +description = "Automatically link across pages in MkDocs." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"}, + {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"}, +] + +[package.dependencies] +Markdown = ">=3.3" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-extra-sass-plugin" +version = "0.1.0" +description = "This plugin adds stylesheets to your mkdocs site from `Sass`/`SCSS`." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "mkdocs-extra-sass-plugin-0.1.0.tar.gz", hash = "sha256:cca7ae778585514371b22a63bcd69373d77e474edab4b270cf2924e05c879219"}, + {file = "mkdocs_extra_sass_plugin-0.1.0-py3-none-any.whl", hash = "sha256:10aa086fa8ef1fc4650f7bb6927deb7bf5bbf5a2dd3178f47e4ef44546b156db"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.6.3" +libsass = ">=0.15" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-material" +version = "9.3.2" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material-9.3.2-py3-none-any.whl", hash = "sha256:f2fd5cef6f0266b4caad6414f31c6a51e3183dbdd341995ad8fa7f33bc998c3d"}, + {file = "mkdocs_material-9.3.2.tar.gz", hash = "sha256:7b3a35a7731af02d70d120224fcec053ce09bebbf83dff3366ab72abc4d5fc89"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.5,<2.0" +mkdocs-material-extensions = ">=1.1,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4,<2023.0" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material-extensions" +version = "1.1.1" +description = "Extension pack for Python Markdown and MkDocs Material." +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, + {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.22.0" +description = "Automatic documentation from sources, for MkDocs." +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocstrings-0.22.0-py3-none-any.whl", hash = "sha256:2d4095d461554ff6a778fdabdca3c00c468c2f1459d469f7a7f622a2b23212ba"}, + {file = "mkdocstrings-0.22.0.tar.gz", hash = "sha256:82a33b94150ebb3d4b5c73bab4598c3e21468c79ec072eff6931c8f3bfc38256"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.2" +mkdocs-autorefs = ">=0.3.1" +pymdown-extensions = ">=6.3" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.7.0" +description = "A Python handler for mkdocstrings." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocstrings_python-1.7.0-py3-none-any.whl", hash = "sha256:85c5f009a5a0ebb6076b7818c82a2bb0eebd0b54662628fa8b25ee14a6207951"}, + {file = "mkdocstrings_python-1.7.0.tar.gz", hash = "sha256:5dac2712bd38a3ff0812b8650a68b232601d1474091b380a8b5bc102c8c0d80a"}, +] + +[package.dependencies] +griffe = ">=0.35" +mkdocstrings = ">=0.20" + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1020,15 +1309,25 @@ files = [ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -1047,28 +1346,28 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1173,88 +1472,88 @@ files = [ [[package]] name = "pycryptodome" -version = "3.18.0" +version = "3.19.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.18.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d1497a8cd4728db0e0da3c304856cb37c0c4e3d0b36fcbabcc1600f18504fc54"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:928078c530da78ff08e10eb6cada6e0dff386bf3d9fa9871b4bbc9fbc1efe024"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:157c9b5ba5e21b375f052ca78152dd309a09ed04703fd3721dce3ff8ecced148"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:d20082bdac9218649f6abe0b885927be25a917e29ae0502eaf2b53f1233ce0c2"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad74044e5f5d2456c11ed4cfd3e34b8d4898c0cb201c4038fe41458a82ea27"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win32.whl", hash = "sha256:62a1e8847fabb5213ccde38915563140a5b338f0d0a0d363f996b51e4a6165cf"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win_amd64.whl", hash = "sha256:16bfd98dbe472c263ed2821284118d899c76968db1a6665ade0c46805e6b29a4"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7a3d22c8ee63de22336679e021c7f2386f7fc465477d59675caa0e5706387944"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:78d863476e6bad2a592645072cc489bb90320972115d8995bcfbee2f8b209918"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b6a610f8bfe67eab980d6236fdc73bfcdae23c9ed5548192bb2d530e8a92780e"}, - {file = 
"pycryptodome-3.18.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:422c89fd8df8a3bee09fb8d52aaa1e996120eafa565437392b781abec2a56e14"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:9ad6f09f670c466aac94a40798e0e8d1ef2aa04589c29faa5b9b97566611d1d1"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:53aee6be8b9b6da25ccd9028caf17dcdce3604f2c7862f5167777b707fbfb6cb"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:10da29526a2a927c7d64b8f34592f461d92ae55fc97981aab5bbcde8cb465bb6"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f21efb8438971aa16924790e1c3dba3a33164eb4000106a55baaed522c261acf"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4944defabe2ace4803f99543445c27dd1edbe86d7d4edb87b256476a91e9ffa4"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:51eae079ddb9c5f10376b4131be9589a6554f6fd84f7f655180937f611cd99a2"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:83c75952dcf4a4cebaa850fa257d7a860644c70a7cd54262c237c9f2be26f76e"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:957b221d062d5752716923d14e0926f47670e95fead9d240fa4d4862214b9b2f"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win32.whl", hash = "sha256:795bd1e4258a2c689c0b1f13ce9684fa0dd4c0e08680dcf597cf9516ed6bc0f3"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win_amd64.whl", hash = "sha256:b1d9701d10303eec8d0bd33fa54d44e67b8be74ab449052a8372f12a66f93fb9"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:cb1be4d5af7f355e7d41d36d8eec156ef1382a88638e8032215c215b82a4b8ec"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-win32.whl", hash = "sha256:fc0a73f4db1e31d4a6d71b672a48f3af458f548059aa05e83022d5f61aac9c08"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f022a4fd2a5263a5c483a2bb165f9cb27f2be06f2f477113783efe3fe2ad887b"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:363dd6f21f848301c2dcdeb3c8ae5f0dee2286a5e952a0f04954b82076f23825"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12600268763e6fec3cefe4c2dcdf79bde08d0b6dc1813887e789e495cb9f3403"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4604816adebd4faf8810782f137f8426bf45fee97d8427fa8e1e49ea78a52e2c"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01489bbdf709d993f3058e2996f8f40fee3f0ea4d995002e5968965fa2fe89fb"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3811e31e1ac3069988f7a1c9ee7331b942e605dfc0f27330a9ea5997e965efb2"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4b967bb11baea9128ec88c3d02f55a3e338361f5e4934f5240afcb667fdaec"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9c8eda4f260072f7dbe42f473906c659dcbadd5ae6159dfb49af4da1293ae380"}, - {file = "pycryptodome-3.18.0.tar.gz", hash = "sha256:c9adee653fc882d98956e33ca2c1fb582e23a8af7ac82fee75bd6113c55a0413"}, + {file = 
"pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = 
"sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, + {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, ] [[package]] name = "pydantic" -version = "1.10.7" +version = "1.10.12" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, - {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, - {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, - {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, - {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, - {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, - {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, - {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, - {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, - {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", 
hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, ] [package.dependencies] @@ -1275,15 +1574,29 @@ files = [ {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyjwt" -version = "2.7.0" +version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" files = [ - {file = "PyJWT-2.7.0-py3-none-any.whl", hash = "sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1"}, - {file = "PyJWT-2.7.0.tar.gz", hash = "sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074"}, + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] [package.extras] @@ -1292,15 +1605,33 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pymdown-extensions" +version = "10.3" +description = "Extension pack for Python Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pymdown_extensions-10.3-py3-none-any.whl", hash = "sha256:77a82c621c58a83efc49a389159181d570e370fff9f810d3a4766a75fc678b66"}, + {file = "pymdown_extensions-10.3.tar.gz", hash = "sha256:94a0d8a03246712b64698af223848fd80aaf1ae4c4be29c8c61939b0467b5722"}, +] + +[package.dependencies] +markdown = ">=3.2" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.12)"] + [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -1316,13 +1647,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.21.0" +version = "0.21.1" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, - {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, ] [package.dependencies] @@ -1334,13 +1665,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring 
coverage." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -1448,53 +1779,188 @@ setuptools = ">=62.4.0" devel = ["coverage", "docutils", "isort", "testscenarios (>=0.4)", "testtools", "twine"] test = ["coverage", "docutils", "testscenarios (>=0.4)", "testtools"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash 
= "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "regex" +version = "2022.10.31" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.6" +files = [ + {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, + {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"}, + {file = 
"regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"}, + {file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"}, + {file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"}, + {file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"}, + {file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"}, + {file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"}, + {file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"}, + {file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"}, + {file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"}, + {file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"}, + {file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"}, + {file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = 
"sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"}, + {file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"}, + {file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"}, + {file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"}, + {file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"}, + {file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"}, + {file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"}, + {file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"}, + {file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"}, + {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, + {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, ] [[package]] @@ -1582,19 +2048,19 @@ test = ["commentjson", "packaging", "pytest"] [[package]] name = "setuptools" -version = "67.8.0" +version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1620,13 +2086,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -1645,13 +2111,13 @@ widechars = ["wcwidth"] [[package]] name = "tldextract" -version = "3.4.4" +version = "3.6.0" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
optional = false python-versions = ">=3.7" files = [ - {file = "tldextract-3.4.4-py3-none-any.whl", hash = "sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2"}, - {file = "tldextract-3.4.4.tar.gz", hash = "sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234"}, + {file = "tldextract-3.6.0-py3-none-any.whl", hash = "sha256:30a492de80f4de215aa998588ba5c2e625ee74ace3a2705cfb52b0021053bcbe"}, + {file = "tldextract-3.6.0.tar.gz", hash = "sha256:a5d8b6583791daca268a7592ebcf764152fa49617983c49916ee9de99b366222"}, ] [package.dependencies] @@ -1673,24 +2139,24 @@ files = [ [[package]] name = "tomlkit" -version = "0.11.8" +version = "0.12.1" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] [[package]] name = "typing-extensions" -version = "4.5.0" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -1709,13 +2175,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.0.2" +version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, - {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, + {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, + {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, ] [package.extras] @@ -1726,23 +2192,62 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.24.1" +version = "20.24.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.1-py3-none-any.whl", hash = "sha256:01aacf8decd346cf9a865ae85c0cdc7f64c8caa07ff0d8b1dfc1733d10677442"}, - {file = "virtualenv-20.24.1.tar.gz", hash = "sha256:2ef6a237c31629da6442b0bcaa3999748108c7166318d1f55cc9f8d7294e97bd"}, + {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, + {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, ] [package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.12,<4" -platformdirs = ">=3.5.1,<4" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] +watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "websockets" @@ -1825,13 +2330,13 @@ files = [ [[package]] name = "werkzeug" -version = "2.3.4" +version = "2.3.7" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.3.4-py3-none-any.whl", hash = "sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f"}, - {file = "Werkzeug-2.3.4.tar.gz", hash = "sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76"}, + {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, + {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, ] [package.dependencies] @@ -1875,7 +2380,22 @@ files = [ [package.dependencies] xmltodict = ">=0.12.0,<0.13.0" +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "caa5eee14e9087c9a47b3f25af5e0a453269fe68eaa8617d141c6b1169250c3f" +content-hash = "fefc413a0132045bbdb1665144f272f9ca328dfc0db48926c2585a8927dd0af1" diff --git a/pyproject.toml b/pyproject.toml index 3b8ed842b..f8436f3bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,14 @@ pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" +[tool.poetry.group.docs.dependencies] +mkdocs = "^1.5.2" +mkdocs-extra-sass-plugin = "^0.1.0" +mkdocs-material = "^9.2.5" +mkdocs-material-extensions = "^1.1.1" +mkdocstrings = "^0.22.0" +mkdocstrings-python = "^1.6.0" + [tool.pytest.ini_options] env = [ "BBOT_TESTING = True",