diff --git a/.flake8 b/.flake8
index bbe8b7581..a6f057338 100644
--- a/.flake8
+++ b/.flake8
@@ -2,4 +2,4 @@
 select = F,E722
 ignore = F403,F405,F541
 per-file-ignores =
-    */__init__.py:F401,F403
\ No newline at end of file
+    */__init__.py:F401,F403
diff --git a/.github/workflows/distro_tests.yml b/.github/workflows/distro_tests.yml
new file mode 100644
index 000000000..7b2bd51dd
--- /dev/null
+++ b/.github/workflows/distro_tests.yml
@@ -0,0 +1,64 @@
+name: Tests (Linux Distros)
+on:
+  pull_request:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  test-distros:
+    runs-on: ubuntu-latest
+    container:
+      image: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ["ubuntu:22.04", "ubuntu:24.04", "debian", "archlinux", "fedora", "kalilinux/kali-rolling", "parrotsec/security"]
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install Python and Poetry
+        run: |
+          if [ -f /etc/os-release ]; then
+            . /etc/os-release
+            if [ "$ID" = "ubuntu" ] || [ "$ID" = "debian" ] || [ "$ID" = "kali" ] || [ "$ID" = "parrotsec" ]; then
+              export DEBIAN_FRONTEND=noninteractive
+              apt-get update
+              apt-get -y install curl git bash build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev
+            elif [ "$ID" = "alpine" ]; then
+              apk add --no-cache bash gcc g++ musl-dev libffi-dev curl git make openssl-dev bzip2-dev zlib-dev xz-dev sqlite-dev
+            elif [ "$ID" = "arch" ]; then
+              pacman -Syu --noconfirm curl git bash base-devel
+            elif [ "$ID" = "fedora" ]; then
+              dnf install -y curl git bash gcc make openssl-devel bzip2-devel libffi-devel zlib-devel xz-devel tk-devel gdbm-devel readline-devel sqlite-devel
+            elif [ "$ID" = "gentoo" ]; then
+              echo "media-libs/libglvnd X" >> /etc/portage/package.use/libglvnd
+              emerge-webrsync
+              emerge --update --newuse dev-vcs/git media-libs/mesa curl bash
+            fi
+          fi
+
+          # Re-run the script with bash
+          exec bash -c "
+            curl https://pyenv.run | bash
+            export PATH=\"$HOME/.pyenv/bin:\$PATH\"
+            export PATH=\"$HOME/.local/bin:\$PATH\"
+            eval \"\$(pyenv init --path)\"
+            eval \"\$(pyenv init -)\"
+            eval \"\$(pyenv virtualenv-init -)\"
+            pyenv install 3.11
+            pyenv global 3.11
+            pyenv rehash
+            python3.11 -m pip install --user pipx
+            python3.11 -m pipx ensurepath
+            pipx install poetry
+          "
+      - name: Run tests
+        run: |
+          export PATH="$HOME/.local/bin:$PATH"
+          export PATH="$HOME/.pyenv/bin:$PATH"
+          export PATH="$HOME/.pyenv/shims:$PATH"
+          export BBOT_DISTRO_TESTS=true
+          poetry env use python3.11
+          poetry install
+          poetry run pytest --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=INFO .
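The install step above keys off the `ID` field of `/etc/os-release` to pick each distro's package manager. As a rough standalone illustration of that detection logic (not part of this diff; the mapping below is hypothetical and collapses the workflow's full install commands down to the package-manager name):

```python
# Illustrative sketch of the /etc/os-release detection used in the workflow above.
# The mapping is hypothetical; the real workflow runs full install command lines.
from pathlib import Path

PACKAGE_MANAGERS = {
    "ubuntu": "apt-get", "debian": "apt-get", "kali": "apt-get", "parrotsec": "apt-get",
    "alpine": "apk", "arch": "pacman", "fedora": "dnf", "gentoo": "emerge",
}

def detect_package_manager(os_release="/etc/os-release"):
    # /etc/os-release is a simple KEY=value file; ID names the distro
    for line in Path(os_release).read_text().splitlines():
        if line.startswith("ID="):
            distro_id = line.split("=", 1)[1].strip().strip('"')
            return PACKAGE_MANAGERS.get(distro_id)
    return None
```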
diff --git a/.github/workflows/docs_updater.yml b/.github/workflows/docs_updater.yml
index fdc585718..58afcbf3d 100644
--- a/.github/workflows/docs_updater.yml
+++ b/.github/workflows/docs_updater.yml
@@ -2,7 +2,8 @@ name: Daily Docs Update
 on:
   schedule:
-    - cron: '0 0 * * *' # Runs daily at midnight UTC
+    - cron: '30 2 * * *' # Runs daily at 2:30 AM UTC, a less congested time
+  workflow_dispatch: # Allows manual triggering of the workflow
 
 jobs:
   update_docs:
@@ -23,14 +24,17 @@ jobs:
       - name: Generate docs
         run: |
           poetry run bbot/scripts/docs.py
-      - name: Commit changes
-        uses: EndBug/add-and-commit@v9
-        with:
-          add: '["*.md", "docs/data/chord_graph/*.json"]'
-          author_name: "BBOT Docs Autopublish"
-          author_email: info@blacklanternsecurity.com
-          message: "Refresh module docs"
-      - name: Create Pull Request
+      - name: Commit and Push Changes
+        run: |
+          git config user.name "BBOT Docs Autopublish"
+          git config user.email "info@blacklanternsecurity.com"
+          git checkout -b update-docs
+          git add "*.md" "docs/data/chord_graph/*.json"
+          git commit -m "Refresh module docs"
+          git push -u origin update-docs --force
+        env:
+          GITHUB_TOKEN: ${{ secrets.BBOT_DOCS_UPDATER_PAT }}
+      - name: Create or Update Pull Request
         uses: peter-evans/create-pull-request@v7
         with:
           token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }}
@@ -38,3 +42,4 @@ jobs:
           base: dev
           title: "Daily Docs Update"
           body: "This is an automated pull request to update the documentation."
+          update-existing: true
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 799c0101e..78b0cbd93 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -1,4 +1,4 @@
-name: tests
+name: Tests (Python Versions)
 on:
   push:
     branches:
@@ -14,12 +14,12 @@ jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: psf/black@stable
         with:
           options: "--check"
       - name: Install Python 3
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - name: Install dependencies
@@ -27,7 +27,7 @@ jobs:
           pip install flake8
       - name: flake8
         run: |
-          flake8 --select F,E722 --ignore F403,F405,F541 --per-file-ignores="*/__init__.py:F401,F403"
+          flake8
   test:
     needs: lint
     runs-on: ubuntu-latest
@@ -37,9 +37,9 @@ jobs:
       matrix:
         python-version: ["3.9", "3.10", "3.11", "3.12"]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install dependencies
@@ -48,95 +48,29 @@ jobs:
           poetry install
       - name: Run tests
         run: |
-          poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=DEBUG --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot .
+          poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=INFO --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot .
+      - name: Upload Debug Logs
+        uses: actions/upload-artifact@v3
+        with:
+          name: pytest-debug-logs
+          path: pytest_debug.log
       - name: Upload Code Coverage
         uses: codecov/codecov-action@v3
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           files: ./cov.xml
           verbose: true
-  update_docs:
-    needs: test
-    runs-on: ubuntu-latest
-    if: github.event_name == 'push' && (github.ref != 'refs/heads/dev' && github.ref != 'refs/heads/stable')
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }}
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.x"
-      - name: Install dependencies
-        run: |
-          pip install poetry
-          poetry install
-      - name: Generate docs
-        run: |
-          poetry run bbot/scripts/docs.py
-      - name: Commit docs
-        uses: EndBug/add-and-commit@v9
-        continue-on-error: true
-        with:
-          add: '["*.md", "docs/data/chord_graph/*.json"]'
-          author_name: "BBOT Docs Autopublish"
-          author_email: info@blacklanternsecurity.com
-          message: "Refresh module docs"
-  publish_docs:
-    needs: test
-    runs-on: ubuntu-latest
-    if: github.event_name == 'push' && (github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/dev')
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          token: ${{ secrets.BBOT_DOCS_UPDATER_PAT }}
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.x"
-      - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
-      - uses: actions/cache@v3
-        with:
-          key: mkdocs-material-${{ env.cache_id }}
-          path: .cache
-          restore-keys: |
-            mkdocs-material-
-      - name: Install dependencies
-        run: |
-          pip install poetry
-          poetry install --only=docs
-      - name: Configure Git
-        run: |
-          git config user.name github-actions
-          git config user.email github-actions@github.com
-          git fetch origin gh-pages:refs/remotes/origin/gh-pages
-          if git show-ref --verify --quiet refs/heads/gh-pages; then
-            git branch -f gh-pages origin/gh-pages
-          else
-            git branch --track gh-pages origin/gh-pages
-          fi
-      - name: Generate docs (stable branch)
-        if: github.ref == 'refs/heads/stable'
-        run: |
-          poetry run mike deploy Stable
-      - name: Generate docs (dev branch)
-        if: github.ref == 'refs/heads/dev'
-        run: |
-          poetry run mike deploy Dev
-      - name: Publish docs
-        run: |
-          git switch gh-pages
-          git push
   publish_code:
     needs: test
     runs-on: ubuntu-latest
     if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable')
     continue-on-error: true
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
       - name: Set up Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - name: Install dependencies
@@ -185,7 +119,7 @@ jobs:
 #     runs-on: ubuntu-latest
 #     if: github.event_name == 'push' && github.ref == 'refs/heads/stable'
 #     steps:
-#       - uses: actions/checkout@v3
+#       - uses: actions/checkout@v4
 #         with:
 #           ref: ${{ github.head_ref }}
 #           fetch-depth: 0 # Fetch all history for all tags and branches
diff --git a/bbot/core/config/logger.py b/bbot/core/config/logger.py
index 6a213d42d..2e42ef8de 100644
--- a/bbot/core/config/logger.py
+++ b/bbot/core/config/logger.py
@@ -5,6 +5,7 @@
 import multiprocessing
 import logging.handlers
 from pathlib import Path
+from contextlib import suppress
 
 from ..helpers.misc import mkdir, error_and_exit
 from ...logger import colorize, loglevel_mapping
@@ -71,10 +72,36 @@ def __init__(self, core):
         # Start the QueueListener
         self.listener = logging.handlers.QueueListener(self.queue, *self.log_handlers.values())
         self.listener.start()
-        atexit.register(self.listener.stop)
+        atexit.register(self.cleanup_logging)
 
         self.log_level = logging.INFO
 
+    def cleanup_logging(self):
+        # Close the queue handler
+        with suppress(Exception):
+            self.queue_handler.close()
+
+        # Clean root logger
+        root_logger = logging.getLogger()
+        for handler in list(root_logger.handlers):
+            with suppress(Exception):
+                root_logger.removeHandler(handler)
+            with suppress(Exception):
+                handler.close()
+
+        # Clean all other loggers
+        for logger in logging.Logger.manager.loggerDict.values():
+            if hasattr(logger, "handlers"):  # Logger, not PlaceHolder
+                for handler in list(logger.handlers):
+                    with suppress(Exception):
+                        logger.removeHandler(handler)
+                    with suppress(Exception):
+                        handler.close()
+
+        # Stop queue listener
+        with suppress(Exception):
+            self.listener.stop()
+
     def setup_queue_handler(self, logging_queue=None, log_level=logging.DEBUG):
         if logging_queue is None:
             logging_queue = self.queue
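For context, `cleanup_logging` tears things down in roughly the reverse order they were created: handlers are detached and closed first, and the `QueueListener` is stopped last so queued records can still drain. A minimal standalone sketch of the same queue-based logging lifecycle, using only the standard library (names here are illustrative, not BBOT's):

```python
# Minimal sketch of the QueueHandler/QueueListener pattern used above (stdlib only).
import logging
import logging.handlers
import queue

log_queue = queue.Queue()
queue_handler = logging.handlers.QueueHandler(log_queue)  # producers push records here
stream_handler = logging.StreamHandler()                  # real output handler
listener = logging.handlers.QueueListener(log_queue, stream_handler)

logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
logger.addHandler(queue_handler)

listener.start()
logger.info("routed through the queue to the stream handler")

# Teardown mirrors cleanup_logging: detach and close handlers, then stop the
# listener last so any queued records are flushed before it exits.
logger.removeHandler(queue_handler)
queue_handler.close()
listener.stop()
stream_handler.close()
```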
diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py
index 8df5f05cf..80ded37a6 100644
--- a/bbot/core/helpers/depsinstaller/installer.py
+++ b/bbot/core/helpers/depsinstaller/installer.py
@@ -342,7 +342,15 @@ def install_core_deps(self):
         # ensure tldextract data is cached
         self.parent_helper.tldextract("evilcorp.co.uk")
         # command: package_name
-        core_deps = {"unzip": "unzip", "curl": "curl"}
+        core_deps = {
+            "unzip": "unzip",
+            "zipinfo": "unzip",
+            "curl": "curl",
+            "git": "git",
+            "make": "make",
+            "gcc": "gcc",
+            "bash": "bash",
+        }
         for command, package_name in core_deps.items():
             if not self.parent_helper.which(command):
                 to_install.add(package_name)
diff --git a/bbot/modules/bufferoverrun.py b/bbot/modules/bufferoverrun.py
new file mode 100644
index 000000000..1eba8ad4c
--- /dev/null
+++ b/bbot/modules/bufferoverrun.py
@@ -0,0 +1,48 @@
+from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey
+
+
+class BufferOverrun(subdomain_enum_apikey):
+    watched_events = ["DNS_NAME"]
+    produced_events = ["DNS_NAME"]
+    flags = ["subdomain-enum", "passive", "safe"]
+    meta = {
+        "description": "Query BufferOverrun's TLS API for subdomains",
+        "created_date": "2024-10-23",
+        "author": "@TheTechromancer",
+        "auth_required": True,
+    }
+    options = {"api_key": "", "commercial": False}
+    options_desc = {"api_key": "BufferOverrun API key", "commercial": "Use commercial API"}
+
+    base_url = "https://tls.bufferover.run/dns"
+    commercial_base_url = "https://bufferover-run-tls.p.rapidapi.com/ipv4/dns"
+
+    async def setup(self):
+        self.commercial = self.config.get("commercial", False)
+        return await super().setup()
+
+    def prepare_api_request(self, url, kwargs):
+        if self.commercial:
+            kwargs["headers"]["x-rapidapi-host"] = "bufferover-run-tls.p.rapidapi.com"
+            kwargs["headers"]["x-rapidapi-key"] = self.api_key
+        else:
+            kwargs["headers"]["x-api-key"] = self.api_key
+        return url, kwargs
+
+    async def request_url(self, query):
+        url = f"{self.commercial_base_url if self.commercial else self.base_url}?q=.{query}"
+        return await self.api_request(url)
+
+    def parse_results(self, r, query):
+        j = r.json()
+        subdomains_set = set()
+        if isinstance(j, dict):
+            results = j.get("Results", [])
+            for result in results:
+                parts = result.split(",")
+                if len(parts) > 4:
+                    subdomain = parts[4].strip()
+                    if subdomain and subdomain.endswith(f".{query}"):
+                        subdomains_set.add(subdomain)
+        for subdomain in subdomains_set:
+            yield subdomain
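The non-commercial BufferOver endpoint returns each result as a flat comma-separated string, and `parse_results` splits on commas to pull the hostname out of the fifth field. A self-contained illustration of that parsing (the sample record below is made up):

```python
# Standalone illustration of the CSV-style record parsing in parse_results above.
# The sample record is invented; real records come from tls.bufferover.run.
sample = {"Results": ["1.2.3.4,example.com,*,*,sub.blacklanternsecurity.com"]}
query = "blacklanternsecurity.com"

subdomains = set()
for record in sample.get("Results", []):
    parts = record.split(",")
    if len(parts) > 4:  # the hostname lives in the fifth field
        hostname = parts[4].strip()
        if hostname and hostname.endswith(f".{query}"):
            subdomains.add(hostname)

print(subdomains)  # {'sub.blacklanternsecurity.com'}
```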
diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py
index 506db6f0e..1eb10cb23 100644
--- a/bbot/modules/deadly/nuclei.py
+++ b/bbot/modules/deadly/nuclei.py
@@ -15,7 +15,7 @@ class nuclei(BaseModule):
     }
 
     options = {
-        "version": "3.3.4",
+        "version": "3.3.5",
         "tags": "",
         "templates": "",
         "severity": "",
diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py
index 9d6d57483..08edfaaf3 100644
--- a/bbot/modules/gowitness.py
+++ b/bbot/modules/gowitness.py
@@ -1,5 +1,5 @@
 import asyncio
-import sqlite3
+import aiosqlite
 import multiprocessing
 from pathlib import Path
 from contextlib import suppress
@@ -34,6 +34,7 @@ class gowitness(BaseModule):
         "idle_timeout": "Skip the current gowitness batch if it stalls for longer than this many seconds",
     }
     deps_common = ["chromium"]
+    deps_pip = ["aiosqlite"]
     deps_ansible = [
         {
             "name": "Download gowitness",
@@ -72,7 +73,7 @@ async def setup(self):
 
         # make sure we have a working chrome install
         chrome_test_pass = False
-        for binary in ("chrome", "chromium", custom_chrome_path):
+        for binary in ("chrome", "chromium", "chromium-browser", custom_chrome_path):
             binary_path = self.helpers.which(binary)
             if binary_path and Path(binary_path).is_file():
                 chrome_test_proc = await self.run_process([binary_path, "--version"])
@@ -136,7 +137,8 @@ async def handle_batch(self, *events):
             return
 
         # emit web screenshots
-        for filename, screenshot in self.new_screenshots.items():
+        new_screenshots = await self.get_new_screenshots()
+        for filename, screenshot in new_screenshots.items():
             url = screenshot["url"]
             final_url = screenshot["final_url"]
             filename = self.screenshot_path / screenshot["filename"]
@@ -150,7 +152,8 @@ async def handle_batch(self, *events):
             )
 
         # emit URLs
-        for url, row in self.new_network_logs.items():
+        new_network_logs = await self.get_new_network_logs()
+        for url, row in new_network_logs.items():
             ip = row["ip"]
             status_code = row["status_code"]
             tags = [f"status-{status_code}", f"ip-{ip}", "spider-danger"]
@@ -168,7 +171,8 @@ async def handle_batch(self, *events):
             )
 
         # emit technologies
-        for _, row in self.new_technologies.items():
+        new_technologies = await self.get_new_technologies()
+        for _, row in new_technologies.items():
             parent_id = row["url_id"]
             parent_url = self.screenshots_taken[parent_id]
             parent_event = event_dict[parent_url]
@@ -207,59 +211,53 @@ def construct_command(self):
             command += ["--timeout", str(self.timeout)]
         return command
 
-    @property
-    def new_screenshots(self):
+    async def get_new_screenshots(self):
         screenshots = {}
         if self.db_path.is_file():
-            with sqlite3.connect(str(self.db_path)) as con:
-                con.row_factory = sqlite3.Row
+            async with aiosqlite.connect(str(self.db_path)) as con:
+                con.row_factory = aiosqlite.Row
                 con.text_factory = self.helpers.smart_decode
-                cur = con.cursor()
-                res = self.cur_execute(cur, "SELECT * FROM urls")
-                for row in res:
-                    row = dict(row)
-                    _id = row["id"]
-                    if _id not in self.screenshots_taken:
-                        self.screenshots_taken[_id] = row["url"]
-                        screenshots[_id] = row
+                async with con.execute("SELECT * FROM urls") as cur:
+                    async for row in cur:
+                        row = dict(row)
+                        _id = row["id"]
+                        if _id not in self.screenshots_taken:
+                            self.screenshots_taken[_id] = row["url"]
+                            screenshots[_id] = row
         return screenshots
 
-    @property
-    def new_network_logs(self):
+    async def get_new_network_logs(self):
         network_logs = dict()
         if self.db_path.is_file():
-            with sqlite3.connect(str(self.db_path)) as con:
-                con.row_factory = sqlite3.Row
-                cur = con.cursor()
-                res = self.cur_execute(cur, "SELECT * FROM network_logs")
-                for row in res:
-                    row = dict(row)
-                    url = row["final_url"]
-                    if url not in self.connections_logged:
-                        self.connections_logged.add(url)
-                        network_logs[url] = row
+            async with aiosqlite.connect(str(self.db_path)) as con:
+                con.row_factory = aiosqlite.Row
+                async with con.execute("SELECT * FROM network_logs") as cur:
+                    async for row in cur:
+                        row = dict(row)
+                        url = row["final_url"]
+                        if url not in self.connections_logged:
+                            self.connections_logged.add(url)
+                            network_logs[url] = row
         return network_logs
 
-    @property
-    def new_technologies(self):
+    async def get_new_technologies(self):
         technologies = dict()
         if self.db_path.is_file():
-            with sqlite3.connect(str(self.db_path)) as con:
-                con.row_factory = sqlite3.Row
-                cur = con.cursor()
-                res = self.cur_execute(cur, "SELECT * FROM technologies")
-                for row in res:
-                    _id = row["id"]
-                    if _id not in self.technologies_found:
-                        self.technologies_found.add(_id)
-                        row = dict(row)
-                        technologies[_id] = row
+            async with aiosqlite.connect(str(self.db_path)) as con:
+                con.row_factory = aiosqlite.Row
+                async with con.execute("SELECT * FROM technologies") as cur:
+                    async for row in cur:
+                        _id = row["id"]
+                        if _id not in self.technologies_found:
+                            self.technologies_found.add(_id)
+                            row = dict(row)
+                            technologies[_id] = row
         return technologies
 
-    def cur_execute(self, cur, query):
+    async def cur_execute(self, cur, query):
         try:
-            return cur.execute(query)
-        except sqlite3.OperationalError as e:
+            return await cur.execute(query)
+        except aiosqlite.OperationalError as e:
             self.warning(f"Error executing query: {query}: {e}")
             return []
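The gowitness change swaps blocking `sqlite3` reads for `aiosqlite`, so polling the results database no longer stalls the event loop; the `@property` accessors become awaitable `get_*` methods accordingly. A minimal self-contained sketch of the same pattern, against a throwaway in-memory database rather than gowitness's real schema:

```python
# Minimal sketch of the sqlite3 -> aiosqlite pattern used above,
# using an in-memory database (not gowitness's actual schema).
import asyncio
import aiosqlite


async def main():
    async with aiosqlite.connect(":memory:") as con:
        con.row_factory = aiosqlite.Row  # rows become dict-convertible
        await con.execute("CREATE TABLE urls (id INTEGER PRIMARY KEY, url TEXT)")
        await con.execute("INSERT INTO urls (url) VALUES ('https://example.com')")
        await con.commit()
        # execute() doubles as an async context manager yielding a cursor
        async with con.execute("SELECT * FROM urls") as cur:
            async for row in cur:
                print(dict(row))  # {'id': 1, 'url': 'https://example.com'}


asyncio.run(main())
```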
diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py
index e185e265e..148d62100 100644
--- a/bbot/modules/trufflehog.py
+++ b/bbot/modules/trufflehog.py
@@ -13,7 +13,7 @@ class trufflehog(BaseModule):
     }
 
     options = {
-        "version": "3.82.11",
+        "version": "3.83.1",
         "config": "",
         "only_verified": True,
         "concurrency": 8,
diff --git a/bbot/modules/wpscan.py b/bbot/modules/wpscan.py
index d9c43905e..980d24fb5 100644
--- a/bbot/modules/wpscan.py
+++ b/bbot/modules/wpscan.py
@@ -33,7 +33,7 @@ class wpscan(BaseModule):
     deps_apt = ["curl", "make", "gcc"]
     deps_ansible = [
         {
-            "name": "Install Ruby Deps (Debian/Ubuntu)",
+            "name": "Install Ruby Deps (Debian)",
             "package": {"name": ["ruby-rubygems", "ruby-dev"], "state": "present"},
             "become": True,
             "when": "ansible_facts['os_family'] == 'Debian'",
@@ -48,7 +48,13 @@ class wpscan(BaseModule):
             "name": "Install Ruby Deps (Fedora)",
             "package": {"name": ["rubygems", "ruby-devel"], "state": "present"},
             "become": True,
-            "when": "ansible_facts['os_family'] == 'Fedora'",
+            "when": "ansible_facts['os_family'] == 'RedHat'",
+        },
+        {
+            "name": "Install Ruby Deps (Alpine)",
+            "package": {"name": ["ruby-dev", "ruby-bundler"], "state": "present"},
+            "become": True,
+            "when": "ansible_facts['os_family'] == 'Alpine'",
         },
         {
             "name": "Install wpscan gem",
diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py
index 7cccc950d..c2e8b3448 100644
--- a/bbot/test/conftest.py
+++ b/bbot/test/conftest.py
@@ -13,17 +13,31 @@
 from bbot.core.helpers.misc import execute_sync_or_async
 from bbot.core.helpers.interactsh import server_list as interactsh_servers
 
+# silence stdout + trace
+root_logger = logging.getLogger()
+pytest_debug_file = Path(__file__).parent.parent.parent / "pytest_debug.log"
+print(f"pytest_debug_file: {pytest_debug_file}")
+debug_handler = logging.FileHandler(pytest_debug_file)
+debug_handler.setLevel(logging.DEBUG)
+debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)s %(message)s")
+debug_handler.setFormatter(debug_format)
+root_logger.addHandler(debug_handler)
+
 test_config = OmegaConf.load(Path(__file__).parent / "test.conf")
-if test_config.get("debug", False):
-    os.environ["BBOT_DEBUG"] = "True"
-    logging.getLogger("bbot").setLevel(logging.DEBUG)
-    CORE.logger.log_level = logging.DEBUG
-else:
-    # silence stdout + trace
-    root_logger = logging.getLogger()
-    for h in root_logger.handlers:
-        h.addFilter(lambda x: x.levelname not in ("STDOUT", "TRACE"))
+
+os.environ["BBOT_DEBUG"] = "True"
+CORE.logger.log_level = logging.DEBUG
+
+# silence all stderr output:
+stderr_handler = CORE.logger.log_handlers["stderr"]
+stderr_handler.setLevel(logging.CRITICAL)
+handlers = list(CORE.logger.listener.handlers)
+handlers.remove(stderr_handler)
+CORE.logger.listener.handlers = tuple(handlers)
+
+for h in root_logger.handlers:
+    h.addFilter(lambda x: x.levelname not in ("STDOUT", "TRACE"))
+
 CORE.merge_default(test_config)
@@ -33,6 +47,13 @@ def assert_all_responses_were_requested() -> bool:
     return False
 
 
+@pytest.fixture(autouse=True)
+def silence_live_logging():
+    for handler in logging.getLogger().handlers:
+        if type(handler).__name__ == "_LiveLoggingStreamHandler":
+            handler.setLevel(logging.CRITICAL)
+
+
 @pytest.fixture
 def bbot_httpserver():
     server = HTTPServer(host="127.0.0.1", port=8888, threaded=True)
@@ -202,20 +223,20 @@ def pytest_terminal_summary(terminalreporter, exitstatus, config):  # pragma: no
     errors = len(stats.get("error", []))
     failed = stats.get("failed", [])
 
-    print("\nTest Session Summary:")
-    print(f"Total tests run: {total_tests}")
-    print(
-        f"{GREEN}Passed: {passed}{RESET}, {RED}Failed: {len(failed)}{RESET}, {YELLOW}Skipped: {skipped}{RESET}, Errors: {errors}"
+    terminalreporter.write("\nTest Session Summary:")
+    terminalreporter.write(f"\nTotal tests run: {total_tests}")
+    terminalreporter.write(
+        f"\n{GREEN}Passed: {passed}{RESET}, {RED}Failed: {len(failed)}{RESET}, {YELLOW}Skipped: {skipped}{RESET}, Errors: {errors}"
     )
 
     if failed:
-        print(f"\n{RED}Detailed failed test report:{RESET}")
+        terminalreporter.write(f"\n{RED}Detailed failed test report:{RESET}")
         for item in failed:
             test_name = item.nodeid.split("::")[-1] if "::" in item.nodeid else item.nodeid
             file_and_line = f"{item.location[0]}:{item.location[1]}"  # File path and line number
-            print(f"{BLUE}Test Name: {test_name}{RESET} {CYAN}({file_and_line}){RESET}")
-            print(f"{RED}Location: {item.nodeid} at {item.location[0]}:{item.location[1]}{RESET}")
-            print(f"{RED}Failure details:\n{item.longreprtext}{RESET}")
+            terminalreporter.write(f"\n{BLUE}Test Name: {test_name}{RESET} {CYAN}({file_and_line}){RESET}")
+            terminalreporter.write(f"\n{RED}Location: {item.nodeid} at {item.location[0]}:{item.location[1]}{RESET}")
+            terminalreporter.write(f"\n{RED}Failure details:\n{item.longreprtext}{RESET}")
 
 
 # BELOW: debugging for frozen/hung tests
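Switching from `print` to `terminalreporter.write` routes the custom summary through pytest's own terminal writer, so it cooperates with output capture and other reporting plugins. For reference, a bare-bones version of the same hook as it might appear in any project's `conftest.py` (illustrative only; BBOT's full hook is shown in the diff above):

```python
# Bare-bones pytest_terminal_summary hook for a conftest.py.
# Illustrative only; BBOT's real implementation appears in the diff above.
def pytest_terminal_summary(terminalreporter, exitstatus, config):
    stats = terminalreporter.stats
    passed = len(stats.get("passed", []))
    failed = len(stats.get("failed", []))
    # terminalreporter.write goes through pytest's terminal writer rather than
    # raw stdout, so it respects capture settings and plugin ordering.
    terminalreporter.write(f"\npassed={passed} failed={failed} exit={exitstatus}\n")
```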
diff --git a/bbot/test/run_tests.sh b/bbot/test/run_tests.sh
index 3490f5b65..39458dbf9 100755
--- a/bbot/test/run_tests.sh
+++ b/bbot/test/run_tests.sh
@@ -10,7 +10,7 @@ echo
 
 echo "[+] Linting with flake8"
 echo "======================="
-flake8 --select F,E722 --ignore F403,F405,F541 --per-file-ignores="*/__init__.py:F401,F403" "$bbot_dir" || exit 1
+flake8 "$bbot_dir" || exit 1
 echo
 
 if [ "${1}x" != "x" ] ; then
diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py
index f5d4255f6..bb63b57e5 100644
--- a/bbot/test/test_step_2/module_tests/base.py
+++ b/bbot/test/test_step_2/module_tests/base.py
@@ -89,6 +89,10 @@ def module(self):
     async def module_test(
         self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys
     ):
+        # Skip dastardly test if we're in the distro tests (because dastardly uses docker)
+        if os.getenv("BBOT_DISTRO_TESTS") and self.name == "dastardly":
+            pytest.skip("Skipping module_test for dastardly module due to BBOT_DISTRO_TESTS environment variable")
+
         self.log.info(f"Starting {self.name} module test")
         module_test = self.ModuleTest(
             self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys
diff --git a/bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py b/bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py
new file mode 100644
index 000000000..b8a8137e2
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_bufferoverrun.py
@@ -0,0 +1,35 @@
+from .base import ModuleTestBase
+
+
+class TestBufferOverrun(ModuleTestBase):
+    config_overrides = {"modules": {"bufferoverrun": {"api_key": "asdf", "commercial": False}}}
+
+    async def setup_before_prep(self, module_test):
+        # Mock response for non-commercial API
+        module_test.httpx_mock.add_response(
+            url="https://tls.bufferover.run/dns?q=.blacklanternsecurity.com",
+            match_headers={"x-api-key": "asdf"},
+            json={"Results": ["1.2.3.4,example.com,*,*,sub.blacklanternsecurity.com"]},
+        )
+
+    def check(self, module_test, events):
+        assert any(e.data == "sub.blacklanternsecurity.com" for e in events), "Failed to detect subdomain for free API"
+
+
+class TestBufferOverrunCommercial(ModuleTestBase):
+    modules_overrides = ["bufferoverrun"]
+    module_name = "bufferoverrun"
+    config_overrides = {"modules": {"bufferoverrun": {"api_key": "asdf", "commercial": True}}}
+
+    async def setup_before_prep(self, module_test):
+        # Mock response for commercial API
+        module_test.httpx_mock.add_response(
+            url="https://bufferover-run-tls.p.rapidapi.com/ipv4/dns?q=.blacklanternsecurity.com",
+            match_headers={"x-rapidapi-host": "bufferover-run-tls.p.rapidapi.com", "x-rapidapi-key": "asdf"},
+            json={"Results": ["5.6.7.8,blacklanternsecurity.com,*,*,sub.blacklanternsecurity.com"]},
+        )
+
+    def check(self, module_test, events):
+        assert any(
+            e.data == "sub.blacklanternsecurity.com" for e in events
+        ), "Failed to detect subdomain for commercial API"
diff --git a/poetry.lock b/poetry.lock
index 6e6dd87f1..8da0a4305 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1278,13 +1278,13 @@ pyyaml = ">=5.1"
 
 [[package]]
 name = "mkdocs-material"
-version = "9.5.41"
+version = "9.5.42"
 description = "Documentation that simply works"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "mkdocs_material-9.5.41-py3-none-any.whl", hash = "sha256:990bc138c33342b5b73e7545915ebc0136e501bfbd8e365735144f5120891d83"},
-    {file = "mkdocs_material-9.5.41.tar.gz", hash = "sha256:30fa5d459b4b8130848ecd8e1c908878345d9d8268f7ddbc31eebe88d462d97b"},
+    {file = "mkdocs_material-9.5.42-py3-none-any.whl", hash = "sha256:452a7c5d21284b373f36b981a2cbebfff59263feebeede1bc28652e9c5bbe316"},
+    {file = "mkdocs_material-9.5.42.tar.gz", hash = "sha256:92779b5e9b5934540c574c11647131d217dc540dce72b05feeda088c8eb1b8f2"},
 ]
 
 [package.dependencies]
@@ -1346,13 +1346,13 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
 
 [[package]]
 name = "mkdocstrings-python"
-version = "1.12.1"
+version = "1.12.2"
 description = "A Python handler for mkdocstrings."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "mkdocstrings_python-1.12.1-py3-none-any.whl", hash = "sha256:205244488199c9aa2a39787ad6a0c862d39b74078ea9aa2be817bc972399563f"},
-    {file = "mkdocstrings_python-1.12.1.tar.gz", hash = "sha256:60d6a5ca912c9af4ad431db6d0111ce9f79c6c48d33377dde6a05a8f5f48d792"},
+    {file = "mkdocstrings_python-1.12.2-py3-none-any.whl", hash = "sha256:7f7d40d6db3cb1f5d19dbcd80e3efe4d0ba32b073272c0c0de9de2e604eda62a"},
+    {file = "mkdocstrings_python-1.12.2.tar.gz", hash = "sha256:7a1760941c0b52a2cd87b960a9e21112ffe52e7df9d0b9583d04d47ed2e186f3"},
 ]
 
 [package.dependencies]
@@ -1645,31 +1645,33 @@ virtualenv = ">=20.10.0"
 
 [[package]]
 name = "psutil"
-version = "5.9.8"
+version = "6.1.0"
 description = "Cross-platform lib for process and system monitoring in Python."
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
-files = [
-    {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"},
-    {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"},
-    {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"},
-    {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"},
-    {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"},
-    {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"},
-    {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"},
-    {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"},
-    {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"},
-    {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"},
-    {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"},
-    {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"},
-    {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"},
-    {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"},
-    {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"},
-    {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"},
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+    {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"},
+    {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"},
+    {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"},
+    {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"},
+    {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"},
+    {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"},
+    {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"},
+    {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"},
+    {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"},
+    {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"},
+    {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"},
+    {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"},
+    {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"},
+    {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"},
+    {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"},
+    {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"},
+    {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"},
 ]
 
 [package.extras]
-test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"]
+test = ["pytest", "pytest-xdist", "setuptools"]
 
 [[package]]
 name = "ptyprocess"
@@ -1967,17 +1969,17 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
 
 [[package]]
 name = "pytest-asyncio"
-version = "0.23.8"
+version = "0.24.0"
 description = "Pytest support for asyncio"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"},
-    {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"},
+    {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"},
+    {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"},
 ]
 
 [package.dependencies]
-pytest = ">=7.0.0,<9"
+pytest = ">=8.2,<9"
 
 [package.extras]
 docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
@@ -2943,28 +2945,28 @@ files = [
 
 [[package]]
 name = "xmltodict"
-version = "0.12.0"
+version = "0.14.2"
 description = "Makes working with XML feel like you are working with JSON"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
 files = [
-    {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"},
-    {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"},
+    {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"},
+    {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
 ]
 
 [[package]]
 name = "xmltojson"
-version = "2.0.2"
+version = "2.0.3"
 description = "A Python module and cli tool to quickly convert xml text or files into json"
 optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = "<4.0,>=3.7"
 files = [
-    {file = "xmltojson-2.0.2-py3-none-any.whl", hash = "sha256:8ba5c8b33a5a0f824ad754ed62367d841ce91f7deaf82e118c28e42a0e24454c"},
-    {file = "xmltojson-2.0.2.tar.gz", hash = "sha256:10719660409bd1825507e04d2fa4848c10591a092613bcd66651c7e0774f5405"},
+    {file = "xmltojson-2.0.3-py3-none-any.whl", hash = "sha256:1b68519bd14fbf3e28baa630b8c9116b5d3aa8976648f277a78ae3448498889a"},
+    {file = "xmltojson-2.0.3.tar.gz", hash = "sha256:68a0022272adf70b8f2639186172c808e9502cd03c0b851a65e0760561c7801d"},
 ]
 
 [package.dependencies]
-xmltodict = ">=0.12.0,<0.13.0"
+xmltodict = "0.14.2"
 
 [[package]]
 name = "yara-python"
@@ -3077,4 +3079,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "056fa05ead5abab1767c7c410539a4536659d1b6420bd13375aab965f98e326e"
+content-hash = "b87b1a5bccf2a50f548ae9470eabff8a8a9e870387562308a7b217b0fa2d84f0"
diff --git a/pyproject.toml b/pyproject.toml
index 9158de2cc..f762e3312 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "bbot"
-version = "2.1.0"
+version = "2.1.1"
 description = "OSINT automation for hackers."
 authors = [
     "TheTechromancer",
@@ -27,7 +27,7 @@ bbot = 'bbot.cli:main'
 [tool.poetry.dependencies]
 python = "^3.9"
 omegaconf = "^2.3.0"
-psutil = "^5.9.4"
+psutil = ">=5.9.4,<7.0.0"
 wordninja = "^2.0.0"
 ansible-runner = "^2.3.2"
 deepdiff = ">=6.2.3,<8.0.0"
@@ -70,7 +70,7 @@ pytest-timeout = "^2.3.1"
 pytest-httpx = "^0.30.0"
 pytest-httpserver = "^1.0.11"
 pytest = "^8.3.1"
-pytest-asyncio = "0.23.8"
+pytest-asyncio = "0.24.0"
 
 [tool.poetry.group.docs.dependencies]
 mkdocs = "^1.5.2"
@@ -100,7 +100,7 @@ extend-exclude = "(test_step_1/test_manager_*)"
 [tool.poetry-dynamic-versioning]
 enable = true
 metadata = false
-format-jinja = 'v2.1.0{% if branch == "dev" %}.{{ distance }}rc{% endif %}'
+format-jinja = 'v2.1.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}'
 
 [tool.poetry-dynamic-versioning.substitution]
 files = ["*/__init__.py"]