From c0be122b718577fb00ff55e0db6ae963dab62473 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 9 Nov 2024 06:33:10 -0500 Subject: [PATCH 01/40] add postgres module --- .github/workflows/tests.yml | 6 +++ bbot/db/sql/models.py | 3 +- bbot/modules/output/postgres.py | 49 +++++++++++++++++ bbot/modules/output/sqlite.py | 2 +- bbot/modules/templates/sql.py | 25 ++++----- bbot/test/test_step_2/module_tests/base.py | 4 +- .../module_tests/test_module_postgres.py | 53 +++++++++++++++++++ .../module_tests/test_module_sqlite.py | 12 ++--- docs/scanning/output.md | 20 +++++++ 9 files changed, 153 insertions(+), 21 deletions(-) create mode 100644 bbot/modules/output/postgres.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_postgres.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 507b7ac54..ca4c7c851 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -42,6 +42,12 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + - name: Install Docker + run: | + export DEBIAN_FRONTEND=noninteractive + apt-get update + apt-get -y install docker.io + systemctl enable docker --now - name: Install dependencies run: | pip install poetry diff --git a/bbot/db/sql/models.py b/bbot/db/sql/models.py index 7677a181e..b15f4abfa 100644 --- a/bbot/db/sql/models.py +++ b/bbot/db/sql/models.py @@ -3,9 +3,9 @@ import json import logging -from datetime import datetime from pydantic import ConfigDict from typing import List, Optional +from datetime import datetime, timezone from typing_extensions import Annotated from pydantic.functional_validators import AfterValidator from sqlmodel import inspect, Column, Field, SQLModel, JSON, String, DateTime as SQLADateTime @@ -114,6 +114,7 @@ def _get_data(data, type): discovery_context: str = "" discovery_path: List[str] = Field(default=[], sa_type=JSON) parent_chain: List[str] = Field(default=[], sa_type=JSON) + inserted_at: NaiveUTC = Field(default_factory=lambda: datetime.now(timezone.utc)) ### SCAN ### diff --git a/bbot/modules/output/postgres.py b/bbot/modules/output/postgres.py new file mode 100644 index 000000000..b1c8c2659 --- /dev/null +++ b/bbot/modules/output/postgres.py @@ -0,0 +1,49 @@ +from bbot.modules.templates.sql import SQLTemplate + + +class Postgres(SQLTemplate): + watched_events = ["*"] + meta = {"description": "Output scan data to a SQLite database"} + options = { + "username": "postgres", + "password": "bbotislife", + "host": "localhost", + "port": 5432, + "database": "bbot", + } + options_desc = { + "username": "The username to connect to Postgres", + "password": "The password to connect to Postgres", + "host": "The server running Postgres", + "port": "The port to connect to Postgres", + "database": "The database name to connect to", + } + deps_pip = ["sqlmodel", "asyncpg"] + protocol = "postgresql+asyncpg" + + async def create_database(self): + import asyncpg + from sqlalchemy import text + from sqlalchemy.ext.asyncio import create_async_engine + + # Create the engine for the initial connection to the server + initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0]) + + async with initial_engine.connect() as conn: + # Check if the database exists + result = await conn.execute(text(f"SELECT 1 FROM pg_database WHERE datname = '{self.database}'")) + database_exists = result.scalar() is not None + + # Create the database if it does not exist + if not database_exists: + # Use asyncpg directly to create the database + raw_conn 
= await asyncpg.connect( + user=self.username, + password=self.password, + host=self.host, + port=self.port, + ) + try: + await raw_conn.execute(f"CREATE DATABASE {self.database}") + finally: + await raw_conn.close() diff --git a/bbot/modules/output/sqlite.py b/bbot/modules/output/sqlite.py index 68ac60daf..5926c961e 100644 --- a/bbot/modules/output/sqlite.py +++ b/bbot/modules/output/sqlite.py @@ -12,7 +12,7 @@ class SQLite(SQLTemplate): options_desc = { "database": "The path to the sqlite database file", } - deps_pip = ["sqlmodel", "sqlalchemy-utils", "aiosqlite"] + deps_pip = ["sqlmodel", "aiosqlite"] async def setup(self): db_file = self.config.get("database", "") diff --git a/bbot/modules/templates/sql.py b/bbot/modules/templates/sql.py index b075753d3..fa00ad828 100644 --- a/bbot/modules/templates/sql.py +++ b/bbot/modules/templates/sql.py @@ -1,7 +1,6 @@ from sqlmodel import SQLModel from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession -from sqlalchemy_utils.functions import database_exists, create_database from bbot.db.sql.models import Event, Scan, Target from bbot.modules.output.base import BaseOutputModule @@ -10,7 +9,6 @@ class SQLTemplate(BaseOutputModule): meta = {"description": "SQL output module template"} options = { - "protocol": "", "database": "bbot", "username": "", "password": "", @@ -18,7 +16,6 @@ class SQLTemplate(BaseOutputModule): "port": 0, } options_desc = { - "protocol": "The protocol to use to connect to the database", "database": "The database to use", "username": "The username to use to connect to the database", "password": "The password to use to connect to the database", @@ -26,6 +23,8 @@ class SQLTemplate(BaseOutputModule): "port": "The port to use to connect to the database", } + protocol = "" + async def setup(self): self.database = self.config.get("database", "bbot") self.username = self.config.get("username", "") @@ -33,11 +32,6 @@ async def setup(self): self.host = self.config.get("host", "127.0.0.1") self.port = self.config.get("port", 0) - self.log.info(f"Connecting to {self.connection_string(mask_password=True)}") - - self.engine = create_async_engine(self.connection_string()) - # Create a session factory bound to the engine - self.async_session = sessionmaker(self.engine, expire_on_commit=False, class_=AsyncSession) await self.init_database() return True @@ -65,12 +59,19 @@ async def handle_event(self, event): await session.commit() + async def create_database(self): + pass + async def init_database(self): + await self.create_database() + + # Now create the engine for the actual database + self.engine = create_async_engine(self.connection_string()) + # Create a session factory bound to the engine + self.async_session = sessionmaker(self.engine, expire_on_commit=False, class_=AsyncSession) + + # Use the engine directly to create all tables async with self.engine.begin() as conn: - # Check if the database exists using the connection's engine URL - if not await conn.run_sync(lambda sync_conn: database_exists(sync_conn.engine.url)): - await conn.run_sync(lambda sync_conn: create_database(sync_conn.engine.url)) - # Create all tables await conn.run_sync(SQLModel.metadata.create_all) def connection_string(self, mask_password=False): diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index bb63b57e5..acdb37c50 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -112,7 +112,9 @@ async def 
module_test( @pytest.mark.asyncio async def test_module_run(self, module_test): - self.check(module_test, module_test.events) + from bbot.core.helpers.misc import execute_sync_or_async + + await execute_sync_or_async(self.check, module_test, module_test.events) module_test.log.info(f"Finished {self.name} module test") current_task = asyncio.current_task() tasks = [t for t in asyncio.all_tasks() if t != current_task] diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py new file mode 100644 index 000000000..5632db296 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -0,0 +1,53 @@ +import asyncio + +from .base import ModuleTestBase + + +class TestPostgres(ModuleTestBase): + targets = ["evilcorp.com"] + + async def setup_before_prep(self, module_test): + process = await asyncio.create_subprocess_exec( + "docker", + "run", + "--name", + "bbot-test-postgres", + "--rm", + "-e", + "POSTGRES_PASSWORD=bbotislife", + "-e", + "POSTGRES_USER=postgres", + "-p", + "5432:5432", + "-d", + "postgres", + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await process.communicate() + + if process.returncode != 0: + self.log.error(f"Failed to start PostgreSQL server: {stderr.decode()}") + + async def check(self, module_test, events): + import asyncpg + + # Connect to the PostgreSQL database + conn = await asyncpg.connect(user="postgres", password="bbotislife", database="bbot", host="localhost") + + try: + events = await conn.fetch("SELECT * FROM event") + assert len(events) == 3, "No events found in PostgreSQL database" + scans = await conn.fetch("SELECT * FROM scan") + assert len(scans) == 1, "No scans found in PostgreSQL database" + targets = await conn.fetch("SELECT * FROM target") + assert len(targets) == 1, "No targets found in PostgreSQL database" + finally: + await conn.close() + process = await asyncio.create_subprocess_exec( + "docker", "stop", "bbot-test-postgres", stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + + if process.returncode != 0: + raise Exception(f"Failed to stop PostgreSQL server: {stderr.decode()}") diff --git a/bbot/test/test_step_2/module_tests/test_module_sqlite.py b/bbot/test/test_step_2/module_tests/test_module_sqlite.py index 809d68c47..ec80b7555 100644 --- a/bbot/test/test_step_2/module_tests/test_module_sqlite.py +++ b/bbot/test/test_step_2/module_tests/test_module_sqlite.py @@ -10,9 +10,9 @@ def check(self, module_test, events): assert sqlite_output_file.exists(), "SQLite output file not found" with sqlite3.connect(sqlite_output_file) as db: cursor = db.cursor() - cursor.execute("SELECT * FROM event") - assert len(cursor.fetchall()) > 0, "No events found in SQLite database" - cursor.execute("SELECT * FROM scan") - assert len(cursor.fetchall()) > 0, "No scans found in SQLite database" - cursor.execute("SELECT * FROM target") - assert len(cursor.fetchall()) > 0, "No targets found in SQLite database" + results = cursor.execute("SELECT * FROM event").fetchall() + assert len(results) == 3, "No events found in SQLite database" + results = cursor.execute("SELECT * FROM scan").fetchall() + assert len(results) == 1, "No scans found in SQLite database" + results = cursor.execute("SELECT * FROM target").fetchall() + assert len(results) == 1, "No targets found in SQLite database" diff --git a/docs/scanning/output.md b/docs/scanning/output.md index 7efdf4862..e49e10857 
100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -187,6 +187,26 @@ The `sqlite` output module produces a SQLite database containing all events, sca bbot -t evilcorp.com -om sqlite -c modules.sqlite.database=/tmp/bbot.sqlite ``` +### Postgres + +The `postgres` output module allows you to ingest events, scans, and targets into a Postgres database. By default, it will connect to the server on `localhost` with a username of `postgres` and password of `bbotislife`. You can change this behavior in the config. + +```bash +# specifying an alternate database +bbot -t evilcorp.com -om postgres -c modules.postgres.database=custom_bbot_db +``` + +```yaml title="postgres_preset.yml" +config: + modules: + postgres: + host: psq.fsociety.local + database: custom_bbot_db + port: 5432 + username: postgres + password: bbotislife +``` + ### Subdomains The `subdomains` output module produces simple text file containing only in-scope and resolved subdomains: From 2b88e109b482fd0b9ec606149ecafbba1abfb183 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 9 Nov 2024 18:14:44 -0500 Subject: [PATCH 02/40] fix tests --- .../module_tests/test_module_postgres.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py index 5632db296..2a723bd71 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postgres.py +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -1,3 +1,4 @@ +import time import asyncio from .base import ModuleTestBase @@ -26,6 +27,28 @@ async def setup_before_prep(self, module_test): ) stdout, stderr = await process.communicate() + import asyncpg + + # wait for the container to start + start_time = time.time() + while True: + try: + # Connect to the default 'postgres' database to create 'bbot' + conn = await asyncpg.connect( + user="postgres", password="bbotislife", database="postgres", host="localhost" + ) + await conn.execute("CREATE DATABASE bbot") + await conn.close() + break + except asyncpg.exceptions.DuplicateDatabaseError: + # If the database already exists, break the loop + break + except Exception as e: + if time.time() - start_time > 30: # timeout after 30 seconds + self.log.error("PostgreSQL server did not start in time.") + raise e + await asyncio.sleep(1) + if process.returncode != 0: self.log.error(f"Failed to start PostgreSQL server: {stderr.decode()}") From 0fc6bc71a671aca3b08e9ea3bea9208ce350197c Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Nov 2024 15:42:27 -0500 Subject: [PATCH 03/40] troubleshooting tests --- bbot/test/test_step_2/module_tests/test_module_postgres.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py index 2a723bd71..54db0e3e2 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postgres.py +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -22,10 +22,7 @@ async def setup_before_prep(self, module_test): "5432:5432", "-d", "postgres", - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, ) - stdout, stderr = await process.communicate() import asyncpg @@ -35,7 +32,7 @@ async def setup_before_prep(self, module_test): try: # Connect to the default 'postgres' database to create 'bbot' conn = await asyncpg.connect( - user="postgres", password="bbotislife", database="postgres", host="localhost" + 
user="postgres", password="bbotislife", database="postgres", host="127.0.0.1" ) await conn.execute("CREATE DATABASE bbot") await conn.close() @@ -56,7 +53,7 @@ async def check(self, module_test, events): import asyncpg # Connect to the PostgreSQL database - conn = await asyncpg.connect(user="postgres", password="bbotislife", database="bbot", host="localhost") + conn = await asyncpg.connect(user="postgres", password="bbotislife", database="bbot", host="127.0.0.1") try: events = await conn.fetch("SELECT * FROM event") From 7ca8b6dadf10bfd174c3463384bf40cf10b5bfdf Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Nov 2024 15:44:05 -0500 Subject: [PATCH 04/40] flake --- bbot/test/test_step_2/module_tests/test_module_postgres.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py index 54db0e3e2..c511a89af 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postgres.py +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -41,13 +41,13 @@ async def setup_before_prep(self, module_test): # If the database already exists, break the loop break except Exception as e: - if time.time() - start_time > 30: # timeout after 30 seconds + if time.time() - start_time > 60: # timeout after 60 seconds self.log.error("PostgreSQL server did not start in time.") raise e await asyncio.sleep(1) if process.returncode != 0: - self.log.error(f"Failed to start PostgreSQL server: {stderr.decode()}") + self.log.error(f"Failed to start PostgreSQL server") async def check(self, module_test, events): import asyncpg From af13af44ca50b1aaea00e028bd38c5abb1441bff Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Nov 2024 15:53:59 -0500 Subject: [PATCH 05/40] force apt --- .github/workflows/distro_tests.yml | 10 +++++----- .github/workflows/tests.yml | 16 ++++++++++++---- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/.github/workflows/distro_tests.yml b/.github/workflows/distro_tests.yml index 95f9d7b5f..4e3f268d7 100644 --- a/.github/workflows/distro_tests.yml +++ b/.github/workflows/distro_tests.yml @@ -24,17 +24,17 @@ jobs: if [ "$ID" = "ubuntu" ] || [ "$ID" = "debian" ] || [ "$ID" = "kali" ] || [ "$ID" = "parrotsec" ]; then export DEBIAN_FRONTEND=noninteractive apt-get update - apt-get -y install curl git bash build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev + apt-get -y install curl git bash build-essential docker.io libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev elif [ "$ID" = "alpine" ]; then - apk add --no-cache bash gcc g++ musl-dev libffi-dev curl git make openssl-dev bzip2-dev zlib-dev xz-dev sqlite-dev + apk add --no-cache bash gcc g++ musl-dev libffi-dev docker curl git make openssl-dev bzip2-dev zlib-dev xz-dev sqlite-dev elif [ "$ID" = "arch" ]; then - pacman -Syu --noconfirm curl git bash base-devel + pacman -Syu --noconfirm curl docker git bash base-devel elif [ "$ID" = "fedora" ]; then - dnf install -y curl git bash gcc make openssl-devel bzip2-devel libffi-devel zlib-devel xz-devel tk-devel gdbm-devel readline-devel sqlite-devel python3-libdnf5 + dnf install -y curl docker git bash gcc make openssl-devel bzip2-devel libffi-devel zlib-devel xz-devel tk-devel gdbm-devel readline-devel sqlite-devel 
python3-libdnf5 elif [ "$ID" = "gentoo" ]; then echo "media-libs/libglvnd X" >> /etc/portage/package.use/libglvnd emerge-webrsync - emerge --update --newuse dev-vcs/git media-libs/mesa curl bash + emerge --update --newuse dev-vcs/git media-libs/mesa curl docker bash fi fi diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ca4c7c851..69f0adf17 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,16 +45,24 @@ jobs: - name: Install Docker run: | export DEBIAN_FRONTEND=noninteractive - apt-get update - apt-get -y install docker.io - systemctl enable docker --now + # Kill any running apt processes + sudo killall -9 apt apt-get || true + # Remove lock files + sudo rm -f /var/lib/apt/lists/lock || true + sudo rm -f /var/cache/apt/archives/lock || true + sudo rm -f /var/lib/dpkg/lock* || true + # Reconfigure dpkg + sudo dpkg --configure -a || true + # Update and install Docker + sudo apt-get update + sudo apt-get -y install docker.io - name: Install dependencies run: | pip install poetry poetry install - name: Run tests run: | - poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=INFO --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot . + poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --pable-warnings --log-cli-level=INFO --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot . - name: Upload Debug Logs uses: actions/upload-artifact@v3 with: From 7c61bb82096b40644f4598a4333ccf740484becf Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Nov 2024 16:02:52 -0500 Subject: [PATCH 06/40] don't bother installing docker --- .github/workflows/tests.yml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 69f0adf17..8400d3ad8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -42,20 +42,6 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Install Docker - run: | - export DEBIAN_FRONTEND=noninteractive - # Kill any running apt processes - sudo killall -9 apt apt-get || true - # Remove lock files - sudo rm -f /var/lib/apt/lists/lock || true - sudo rm -f /var/cache/apt/archives/lock || true - sudo rm -f /var/lib/dpkg/lock* || true - # Reconfigure dpkg - sudo dpkg --configure -a || true - # Update and install Docker - sudo apt-get update - sudo apt-get -y install docker.io - name: Install dependencies run: | pip install poetry From e69b561bedc14b2143c17a3658ac9235a25aa105 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Nov 2024 16:04:49 -0500 Subject: [PATCH 07/40] wat --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8400d3ad8..507b7ac54 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -48,7 +48,7 @@ jobs: poetry install - name: Run tests run: | - poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --pable-warnings --log-cli-level=INFO --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot . + poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 1200 --disable-warnings --log-cli-level=INFO --cov-config=bbot/test/coverage.cfg --cov-report xml:cov.xml --cov=bbot . 
- name: Upload Debug Logs uses: actions/upload-artifact@v3 with: From 167303b84bc2914579679fc1f8d8d3ebd134ce20 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 12 Nov 2024 09:32:45 -0500 Subject: [PATCH 08/40] skip distro tests --- bbot/test/test_step_2/module_tests/base.py | 4 +++- bbot/test/test_step_2/module_tests/test_module_dastardly.py | 1 + bbot/test/test_step_2/module_tests/test_module_postgres.py | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index acdb37c50..47038e9ae 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -20,6 +20,8 @@ class ModuleTestBase: config_overrides = {} modules_overrides = None log = logging.getLogger("bbot") + # if True, the test will be skipped (useful for tests that require docker) + skip_distro_tests = False class ModuleTest: def __init__( @@ -90,7 +92,7 @@ async def module_test( self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys ): # Skip dastardly test if we're in the distro tests (because dastardly uses docker) - if os.getenv("BBOT_DISTRO_TESTS") and self.name == "dastardly": + if os.getenv("BBOT_DISTRO_TESTS") and self.skip_distro_tests: pytest.skip("Skipping module_test for dastardly module due to BBOT_DISTRO_TESTS environment variable") self.log.info(f"Starting {self.name} module test") diff --git a/bbot/test/test_step_2/module_tests/test_module_dastardly.py b/bbot/test/test_step_2/module_tests/test_module_dastardly.py index cb4a501b8..98fa02453 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dastardly.py +++ b/bbot/test/test_step_2/module_tests/test_module_dastardly.py @@ -7,6 +7,7 @@ class TestDastardly(ModuleTestBase): targets = ["http://127.0.0.1:5556/"] modules_overrides = ["httpx", "dastardly"] + skip_distro_tests = True web_response = """ diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py index c511a89af..874acdb19 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postgres.py +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -6,6 +6,7 @@ class TestPostgres(ModuleTestBase): targets = ["evilcorp.com"] + skip_distro_tests = True async def setup_before_prep(self, module_test): process = await asyncio.create_subprocess_exec( From dd8743cd94a2bc1113f862f8bf7c5fb302e1ddba Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 9 Nov 2024 18:03:39 -0500 Subject: [PATCH 09/40] mysql --- bbot/db/sql/models.py | 10 +-- bbot/modules/output/mysql.py | 51 +++++++++++++ bbot/modules/templates/sql.py | 5 ++ .../module_tests/test_module_mysql.py | 75 +++++++++++++++++++ docs/scanning/output.md | 20 +++++ 5 files changed, 156 insertions(+), 5 deletions(-) create mode 100644 bbot/modules/output/mysql.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_mysql.py diff --git a/bbot/db/sql/models.py b/bbot/db/sql/models.py index b15f4abfa..e937fad1e 100644 --- a/bbot/db/sql/models.py +++ b/bbot/db/sql/models.py @@ -141,8 +141,8 @@ class Target(BBOTBaseModel, table=True): seeds: List = Field(default=[], sa_type=JSON) whitelist: List = Field(default=None, sa_type=JSON) blacklist: List = Field(default=[], sa_type=JSON) - hash: str = Field(sa_column=Column("hash", String, unique=True, primary_key=True, index=True)) - scope_hash: str = Field(sa_column=Column("scope_hash", String, index=True)) - seed_hash: str = 
Field(sa_column=Column("seed_hashhash", String, index=True)) - whitelist_hash: str = Field(sa_column=Column("whitelist_hash", String, index=True)) - blacklist_hash: str = Field(sa_column=Column("blacklist_hash", String, index=True)) + hash: str = Field(sa_column=Column("hash", String(length=255), unique=True, primary_key=True, index=True)) + scope_hash: str = Field(sa_column=Column("scope_hash", String(length=255), index=True)) + seed_hash: str = Field(sa_column=Column("seed_hashhash", String(length=255), index=True)) + whitelist_hash: str = Field(sa_column=Column("whitelist_hash", String(length=255), index=True)) + blacklist_hash: str = Field(sa_column=Column("blacklist_hash", String(length=255), index=True)) diff --git a/bbot/modules/output/mysql.py b/bbot/modules/output/mysql.py new file mode 100644 index 000000000..69856a8a3 --- /dev/null +++ b/bbot/modules/output/mysql.py @@ -0,0 +1,51 @@ +from bbot.modules.templates.sql import SQLTemplate + + +class MySQL(SQLTemplate): + watched_events = ["*"] + meta = {"description": "Output scan data to a MySQL database"} + options = { + "username": "root", + "password": "bbotislife", + "host": "localhost", + "port": 3306, + "database": "bbot", + } + options_desc = { + "username": "The username to connect to MySQL", + "password": "The password to connect to MySQL", + "host": "The server running MySQL", + "port": "The port to connect to MySQL", + "database": "The database name to connect to", + } + deps_pip = ["sqlmodel", "aiomysql"] + protocol = "mysql+aiomysql" + + async def create_database(self): + from sqlalchemy import text + from sqlalchemy.ext.asyncio import create_async_engine + + # Create the engine for the initial connection to the server + initial_engine = create_async_engine(self.connection_string().rsplit("/", 1)[0]) + + async with initial_engine.connect() as conn: + # Check if the database exists + result = await conn.execute(text(f"SHOW DATABASES LIKE '{self.database}'")) + database_exists = result.scalar() is not None + + # Create the database if it does not exist + if not database_exists: + # Use aiomysql directly to create the database + import aiomysql + + raw_conn = await aiomysql.connect( + user=self.username, + password=self.password, + host=self.host, + port=self.port, + ) + try: + async with raw_conn.cursor() as cursor: + await cursor.execute(f"CREATE DATABASE {self.database}") + finally: + await raw_conn.ensure_closed() diff --git a/bbot/modules/templates/sql.py b/bbot/modules/templates/sql.py index fa00ad828..39b4e6f00 100644 --- a/bbot/modules/templates/sql.py +++ b/bbot/modules/templates/sql.py @@ -1,3 +1,4 @@ +from contextlib import suppress from sqlmodel import SQLModel from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession @@ -88,3 +89,7 @@ def connection_string(self, mask_password=False): if self.database: connection_string += f"/{self.database}" return connection_string + + async def cleanup(self): + with suppress(Exception): + await self.engine.dispose() diff --git a/bbot/test/test_step_2/module_tests/test_module_mysql.py b/bbot/test/test_step_2/module_tests/test_module_mysql.py new file mode 100644 index 000000000..46165c995 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_mysql.py @@ -0,0 +1,75 @@ +import asyncio +import time + +from .base import ModuleTestBase + + +class TestMySQL(ModuleTestBase): + targets = ["evilcorp.com"] + + async def setup_before_prep(self, module_test): + process = await asyncio.create_subprocess_exec( + "docker", + 
"run", + "--name", + "bbot-test-mysql", + "--rm", + "-e", + "MYSQL_ROOT_PASSWORD=bbotislife", + "-e", + "MYSQL_DATABASE=bbot", + "-p", + "3306:3306", + "-d", + "mysql", + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await process.communicate() + + import aiomysql + + # wait for the container to start + start_time = time.time() + while True: + try: + conn = await aiomysql.connect(user="root", password="bbotislife", db="bbot", host="localhost") + conn.close() + break + except Exception as e: + if time.time() - start_time > 60: # timeout after 60 seconds + self.log.error("MySQL server did not start in time.") + raise e + await asyncio.sleep(1) + + if process.returncode != 0: + self.log.error(f"Failed to start MySQL server: {stderr.decode()}") + + async def check(self, module_test, events): + import aiomysql + + # Connect to the MySQL database + conn = await aiomysql.connect(user="root", password="bbotislife", db="bbot", host="localhost") + + try: + async with conn.cursor() as cur: + await cur.execute("SELECT * FROM event") + events = await cur.fetchall() + assert len(events) == 3, "No events found in MySQL database" + + await cur.execute("SELECT * FROM scan") + scans = await cur.fetchall() + assert len(scans) == 1, "No scans found in MySQL database" + + await cur.execute("SELECT * FROM target") + targets = await cur.fetchall() + assert len(targets) == 1, "No targets found in MySQL database" + finally: + conn.close() + process = await asyncio.create_subprocess_exec( + "docker", "stop", "bbot-test-mysql", stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE + ) + stdout, stderr = await process.communicate() + + if process.returncode != 0: + raise Exception(f"Failed to stop MySQL server: {stderr.decode()}") diff --git a/docs/scanning/output.md b/docs/scanning/output.md index e49e10857..2a1edbc7c 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -207,6 +207,26 @@ config: password: bbotislife ``` +### MySQL + +The `mysql` output module allows you to ingest events, scans, and targets into a MySQL database. By default, it will connect to the server on `localhost` with a username of `root` and password of `bbotislife`. You can change this behavior in the config. 
+ +```bash +# specifying an alternate database +bbot -t evilcorp.com -om mysql -c modules.mysql.database=custom_bbot_db +``` + +```yaml title="mysql_preset.yml" +config: + modules: + mysql: + host: mysql.fsociety.local + database: custom_bbot_db + port: 3306 + username: root + password: bbotislife +``` + ### Subdomains The `subdomains` output module produces simple text file containing only in-scope and resolved subdomains: From 66dc0f3e8417099cee2a2418c2b47bde7776b2c1 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 9 Nov 2024 18:08:44 -0500 Subject: [PATCH 10/40] update readme --- README.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/README.md b/README.md index 50e26da26..9db6529d6 100644 --- a/README.md +++ b/README.md @@ -236,6 +236,24 @@ Click the graph below to explore the [inner workings](https://www.blacklanternse [![image](https://github.com/blacklanternsecurity/bbot/assets/20261699/e55ba6bd-6d97-48a6-96f0-e122acc23513)](https://www.blacklanternsecurity.com/bbot/Stable/how_it_works/) +## Output Modules + +- [Neo4j](docs/scanning/output.md#neo4j) +- [Teams](docs/scanning/output.md#teams) +- [Discord](docs/scanning/output.md#discord) +- [Slack](docs/scanning/output.md#slack) +- [Postgres](docs/scanning/output.md#postgres) +- [MySQL](docs/scanning/output.md#mysql) +- [SQLite](docs/scanning/output.md#sqlite) +- [Splunk](docs/scanning/output.md#splunk) +- [Elasticsearch](docs/scanning/output.md#elasticsearch) +- [CSV](docs/scanning/output.md#csv) +- [JSON](docs/scanning/output.md#json) +- [HTTP](docs/scanning/output.md#http) +- [Websocket](docs/scanning/output.md#websocket) + +...and [more](docs/scanning/output.md)! + ## BBOT as a Python Library #### Synchronous From c3271b9ae4e563f50580560d48d99269d5f6d7c9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 12 Nov 2024 09:34:13 -0500 Subject: [PATCH 11/40] skip --- bbot/test/test_step_2/module_tests/test_module_mysql.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/test/test_step_2/module_tests/test_module_mysql.py b/bbot/test/test_step_2/module_tests/test_module_mysql.py index 46165c995..4867c568d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_mysql.py +++ b/bbot/test/test_step_2/module_tests/test_module_mysql.py @@ -6,6 +6,7 @@ class TestMySQL(ModuleTestBase): targets = ["evilcorp.com"] + skip_distro_tests = True async def setup_before_prep(self, module_test): process = await asyncio.create_subprocess_exec( From d83db49f6e2cef704e211754dad125bda8adcc16 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 12 Nov 2024 09:50:05 -0500 Subject: [PATCH 12/40] sort things --- bbot/scanner/preset/args.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index 986fd909f..314fd42f0 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -217,7 +217,7 @@ def create_parser(self, *args, **kwargs): "--modules", nargs="+", default=[], - help=f'Modules to enable. Choices: {",".join(self.preset.module_loader.scan_module_choices)}', + help=f'Modules to enable. Choices: {",".join(sorted(self.preset.module_loader.scan_module_choices))}', metavar="MODULE", ) modules.add_argument("-l", "--list-modules", action="store_true", help=f"List available modules.") @@ -232,7 +232,7 @@ def create_parser(self, *args, **kwargs): "--flags", nargs="+", default=[], - help=f'Enable modules by flag. Choices: {",".join(self.preset.module_loader.flag_choices)}', + help=f'Enable modules by flag. 
Choices: {",".join(sorted(self.preset.module_loader.flag_choices))}', metavar="FLAG", ) modules.add_argument("-lf", "--list-flags", action="store_true", help=f"List available flags.") From cabc4931ed6a77ba3cde7a1d50eb81dd4861dc7d Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 15 Nov 2024 22:33:14 -0500 Subject: [PATCH 13/40] enable filtering open ports based on cdn --- bbot/modules/portscan.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py index 5ff23dc7b..a034046b7 100644 --- a/bbot/modules/portscan.py +++ b/bbot/modules/portscan.py @@ -6,6 +6,9 @@ from bbot.modules.base import BaseModule +# TODO: this module is getting big. It should probably be two modules: one for ping and one for SYN. + + class portscan(BaseModule): flags = ["active", "portscan", "safe"] watched_events = ["IP_ADDRESS", "IP_RANGE", "DNS_NAME"] @@ -27,6 +30,7 @@ class portscan(BaseModule): "adapter_ip": "", "adapter_mac": "", "router_mac": "", + "allowed_cdn_ports": None, } options_desc = { "top_ports": "Top ports to scan (default 100) (to override, specify 'ports')", @@ -39,6 +43,7 @@ class portscan(BaseModule): "adapter_ip": "Send packets using this IP address. Not needed unless masscan's autodetection fails", "adapter_mac": "Send packets using this as the source MAC address. Not needed unless masscan's autodetection fails", "router_mac": "Send packets to this MAC address as the destination. Not needed unless masscan's autodetection fails", + "allowed_cdn_ports": "Comma-separated list of ports that are allowed to be scanned for CDNs", } deps_common = ["masscan"] batch_size = 1000000 @@ -60,7 +65,14 @@ async def setup(self): try: self.helpers.parse_port_string(self.ports) except ValueError as e: - return False, f"Error parsing ports: {e}" + return False, f"Error parsing ports '{self.ports}': {e}" + self.allowed_cdn_ports = self.config.get("allowed_cdn_ports", None) + if self.allowed_cdn_ports is not None: + try: + self.allowed_cdn_ports = [int(p.strip()) for p in self.allowed_cdn_ports.split(",")] + except Exception as e: + return False, f"Error parsing allowed CDN ports '{self.allowed_cdn_ports}': {e}" + # whether we've finished scanning our original scan targets self.scanned_initial_targets = False # keeps track of individual scanned IPs and their open ports @@ -227,9 +239,19 @@ async def emit_open_port(self, ip, port, parent_event): parent=parent_event, context=f"{{module}} executed a {scan_type} scan against {parent_event.data} and found: {{event.type}}: {{event.data}}", ) - await self.emit_event(event) + + await self.emit_event(event, abort_if=self.abort_if) return event + def abort_if(self, event): + if self.allowed_cdn_ports is not None: + # if the host is a CDN + if any(t.startswith("cdn-") for t in event.tags): + # and if its port isn't in the list of allowed CDN ports + if event.port not in self.allowed_cdn_ports: + return True, "event is a CDN and port is not in the allowed list" + return False + def parse_json_line(self, line): try: j = json.loads(line) From d469faaef987e982d2e6e6b410f2d34161c4949d Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 15 Nov 2024 22:48:03 -0500 Subject: [PATCH 14/40] add to tips and tricks --- docs/scanning/tips_and_tricks.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 32b55448f..7b5d0cbdf 100644 --- a/docs/scanning/tips_and_tricks.md +++ 
b/docs/scanning/tips_and_tricks.md @@ -77,7 +77,15 @@ You can also pair the web spider with subdomain enumeration: bbot -t evilcorp.com -f subdomain-enum -c spider.yml ``` -### Ingesting BBOT Data Into SIEM (Elastic, Splunk) +### Exclude CDNs from Port Scan + +If you want to exclude CDNs (e.g. Cloudflare) from port scanning, you can set the `allowed_cdn_ports` config option in the `portscan` module. For example, to allow only port 80 (HTTP) and 443 (HTTPS), you can do the following: + +```bash +bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 +``` + +### Ingest BBOT Data Into SIEM (Elastic, Splunk) If your goal is to feed BBOT data into a SIEM such as Elastic, be sure to enable this option when scanning: From c873822d8708f7558e9c56456b037b3459a5825e Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 17 Nov 2024 21:57:34 -0500 Subject: [PATCH 15/40] allow configurable cdns --- bbot/modules/portscan.py | 12 ++++++++---- docs/scanning/tips_and_tricks.md | 23 +++++++++++++++++++++++ 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py index a034046b7..75b216612 100644 --- a/bbot/modules/portscan.py +++ b/bbot/modules/portscan.py @@ -30,6 +30,7 @@ class portscan(BaseModule): "adapter_ip": "", "adapter_mac": "", "router_mac": "", + "cdn_tags": "cdn", "allowed_cdn_ports": None, } options_desc = { @@ -43,6 +44,7 @@ class portscan(BaseModule): "adapter_ip": "Send packets using this IP address. Not needed unless masscan's autodetection fails", "adapter_mac": "Send packets using this as the source MAC address. Not needed unless masscan's autodetection fails", "router_mac": "Send packets to this MAC address as the destination. Not needed unless masscan's autodetection fails", + "cdn_tags": "Comma-separated list of tags to skip, e.g. 'cdn,cloud'", "allowed_cdn_ports": "Comma-separated list of ports that are allowed to be scanned for CDNs", } deps_common = ["masscan"] @@ -66,6 +68,7 @@ async def setup(self): self.helpers.parse_port_string(self.ports) except ValueError as e: return False, f"Error parsing ports '{self.ports}': {e}" + self.cdn_tags = [t.strip() for t in self.config.get("cdn_tags", "").split(",")] self.allowed_cdn_ports = self.config.get("allowed_cdn_ports", None) if self.allowed_cdn_ports is not None: try: @@ -246,10 +249,11 @@ async def emit_open_port(self, ip, port, parent_event): def abort_if(self, event): if self.allowed_cdn_ports is not None: # if the host is a CDN - if any(t.startswith("cdn-") for t in event.tags): - # and if its port isn't in the list of allowed CDN ports - if event.port not in self.allowed_cdn_ports: - return True, "event is a CDN and port is not in the allowed list" + for cdn_tag in self.cdn_tags: + if any(t.startswith(f"{cdn_tag}-") for t in event.tags): + # and if its port isn't in the list of allowed CDN ports + if event.port not in self.allowed_cdn_ports: + return True, "event is a CDN and port is not in the allowed list" return False def parse_json_line(self, line): diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 7b5d0cbdf..7a9f66204 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -85,6 +85,29 @@ If you want to exclude CDNs (e.g. Cloudflare) from port scanning, you can set th bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 ``` +By default, if you set `allowed_cdn_ports`, it will skip only providers marked as CDNs. 
If you want to skip cloud providers as well, you can set `cdn_tags`:
+
+```bash
+bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 modules.portscan.cdn_tags=cdn,cloud
+```
+
+...or via a preset:
+
+```yaml title="skip_cdns.yml"
+modules:
+  - portscan
+
+config:
+  modules:
+    portscan:
+      allowed_cdn_ports: 80,443
+      cdn_tags: cdn,cloud
+```
+
+```bash
+bbot -t evilcorp.com -p skip_cdns.yml
+```
+
 ### Ingest BBOT Data Into SIEM (Elastic, Splunk)
 
 If your goal is to feed BBOT data into a SIEM such as Elastic, be sure to enable this option when scanning:

From 7030aa22fb00b2cb16c8e28476cfc16e54835d09 Mon Sep 17 00:00:00 2001
From: Josh Smith
Date: Sun, 17 Nov 2024 21:26:46 -0700
Subject: [PATCH 16/40] Fix preservation of scan id at scan init

I noticed this change here:
https://github.com/blacklanternsecurity/bbot/commit/02acd94d67c26821b8b8aa79ccc01d2aa6ae09ad#diff-c6756c35a5fb1d7b87dc0ed3d33a2d211374dc196f245d26a362c82c8c346d5cR131

I can only assume it was unintentional, given `id` is a python builtin
function, and `str(id)` returns:
```py
cmyui@Joshs-MacBook-Pro ~ % python3
Python 3.13.0 (main, Oct 7 2024, 05:02:14) [Clang 15.0.0 (clang-1500.3.9.4)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>> str(id)
'<built-in function id>'
```
---
 bbot/scanner/scanner.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py
index 34ef29c38..0c515b2b9 100644
--- a/bbot/scanner/scanner.py
+++ b/bbot/scanner/scanner.py
@@ -126,7 +126,7 @@ def __init__(
         self._success = False
 
         if scan_id is not None:
-            self.id = str(id)
+            self.id = str(scan_id)
         else:
             self.id = f"SCAN:{sha1(rand_string(20)).hexdigest()}"

From b206ae16c94f97ce560ed1317f013771b44847af Mon Sep 17 00:00:00 2001
From: github-actions
Date: Mon, 18 Nov 2024 14:57:20 -0500
Subject: [PATCH 17/40] bump version

---
 pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4aea71259..b99d9f344 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "bbot"
-version = "2.2.0"
+version = "2.3.0"
 description = "OSINT automation for hackers."
authors = [ "TheTechromancer", @@ -103,7 +103,7 @@ extend-exclude = "(test_step_1/test_manager_*)" [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v2.2.0{% if branch == "dev" %}.{{ distance }}rc{% endif %}' +format-jinja = 'v2.3.0{% if branch == "dev" %}.{{ distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From a6ab266a99fe37fe92d2435afbe1384011ef355e Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 18 Nov 2024 21:01:07 -0500 Subject: [PATCH 18/40] fix target docs --- docs/dev/target.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/dev/target.md b/docs/dev/target.md index b2e4bffe3..6740cfb74 100644 --- a/docs/dev/target.md +++ b/docs/dev/target.md @@ -1 +1,9 @@ -::: bbot.scanner.target.Target +::: bbot.scanner.target.BaseTarget + +::: bbot.scanner.target.ScanSeeds + +::: bbot.scanner.target.ScanWhitelist + +::: bbot.scanner.target.ScanBlacklist + +::: bbot.scanner.target.BBOTTarget From 4c732c56c2d2acf53f5643efb71860c529895283 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 10:40:52 -0500 Subject: [PATCH 19/40] more precise cloud tags --- bbot/modules/internal/cloudcheck.py | 23 ++++++++++++++++--- bbot/test/bbot_fixtures.py | 14 +++++------ bbot/test/conftest.py | 2 +- .../module_tests/test_module_cloudcheck.py | 4 ++++ 4 files changed, 32 insertions(+), 11 deletions(-) diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 392c8e0c5..49d507330 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -1,3 +1,5 @@ +from contextlib import suppress + from bbot.modules.base import BaseInterceptModule @@ -29,12 +31,27 @@ async def handle_event(self, event, **kwargs): self.make_dummy_modules() # cloud tagging by hosts hosts_to_check = set(str(s) for s in event.resolved_hosts) - # we use the original host, since storage buckets hostnames might be collapsed to _wildcard - hosts_to_check.add(str(event.host_original)) + event_host = event.host_original + event_is_ip = self.helpers.is_ip(event_host) + with suppress(KeyError): + hosts_to_check.remove(event_host) + for provider, provider_type, subnet in self.helpers.cloudcheck(event_host): + if provider: + event.add_tag(f"{provider_type}-{provider}") + if event_is_ip: + event.add_tag(f"{provider_type}-ip") + else: + event.add_tag(f"{provider_type}-domain") + for host in hosts_to_check: + host_is_ip = self.helpers.is_ip(host) for provider, provider_type, subnet in self.helpers.cloudcheck(host): if provider: event.add_tag(f"{provider_type}-{provider}") + if host_is_ip: + event.add_tag(f"{provider_type}-ip") + elif not event_is_ip: + event.add_tag(f"{provider_type}-cname") found = set() # look for cloud assets in hosts, http responses @@ -54,7 +71,7 @@ async def handle_event(self, event, **kwargs): if event.type == "HTTP_RESPONSE": matches = await self.helpers.re.findall(sig, event.data.get("body", "")) elif event.type.startswith("DNS_NAME"): - for host in hosts_to_check: + for host in hosts_to_check.union([event_host]): match = sig.match(host) if match: matches.append(match.groups()) diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index e1e3aa1b8..94df335a8 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -224,12 +224,12 @@ class bbot_events: return bbot_events -@pytest.fixture(scope="session", autouse=True) -def install_all_python_deps(): - deps_pip = set() - for module in 
DEFAULT_PRESET.module_loader.preloaded().values(): - deps_pip.update(set(module.get("deps", {}).get("pip", []))) +# @pytest.fixture(scope="session", autouse=True) +# def install_all_python_deps(): +# deps_pip = set() +# for module in DEFAULT_PRESET.module_loader.preloaded().values(): +# deps_pip.update(set(module.get("deps", {}).get("pip", []))) - constraint_file = tempwordlist(get_python_constraints()) +# constraint_file = tempwordlist(get_python_constraints()) - subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file] + list(deps_pip)) +# subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file] + list(deps_pip)) diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 93d635e42..3a4901b12 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -337,7 +337,7 @@ def pytest_sessionfinish(session, exitstatus): logger.removeHandler(handler) # Wipe out BBOT home dir - shutil.rmtree("/tmp/.bbot_test", ignore_errors=True) + # shutil.rmtree("/tmp/.bbot_test", ignore_errors=True) yield diff --git a/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py b/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py index b95e7455d..902f2df35 100644 --- a/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py +++ b/bbot/test/test_step_2/module_tests/test_module_cloudcheck.py @@ -51,6 +51,10 @@ async def setup_after_prep(self, module_test): await module.handle_event(event) assert "cloud-amazon" in event.tags, f"{event} was not properly cloud-tagged" + assert "cloud-domain" in aws_event1.tags + assert "cloud-ip" in other_event2.tags + assert "cloud-cname" in other_event3.tags + for event in (aws_event3, other_event1): await module.handle_event(event) assert "cloud-amazon" not in event.tags, f"{event} was improperly cloud-tagged" From bbee6705e3540681b941a71f3ba70c7657400b3f Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 10:41:24 -0500 Subject: [PATCH 20/40] blacked --- bbot/test/bbot_fixtures.py | 14 +++++++------- bbot/test/conftest.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index 94df335a8..e1e3aa1b8 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -224,12 +224,12 @@ class bbot_events: return bbot_events -# @pytest.fixture(scope="session", autouse=True) -# def install_all_python_deps(): -# deps_pip = set() -# for module in DEFAULT_PRESET.module_loader.preloaded().values(): -# deps_pip.update(set(module.get("deps", {}).get("pip", []))) +@pytest.fixture(scope="session", autouse=True) +def install_all_python_deps(): + deps_pip = set() + for module in DEFAULT_PRESET.module_loader.preloaded().values(): + deps_pip.update(set(module.get("deps", {}).get("pip", []))) -# constraint_file = tempwordlist(get_python_constraints()) + constraint_file = tempwordlist(get_python_constraints()) -# subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file] + list(deps_pip)) + subprocess.run([sys.executable, "-m", "pip", "install", "--constraint", constraint_file] + list(deps_pip)) diff --git a/bbot/test/conftest.py b/bbot/test/conftest.py index 3a4901b12..93d635e42 100644 --- a/bbot/test/conftest.py +++ b/bbot/test/conftest.py @@ -337,7 +337,7 @@ def pytest_sessionfinish(session, exitstatus): logger.removeHandler(handler) # Wipe out BBOT home dir - # shutil.rmtree("/tmp/.bbot_test", ignore_errors=True) + shutil.rmtree("/tmp/.bbot_test", ignore_errors=True) yield From 
afd6f1d9fd4d72f465f25a705060e783a8cc6385 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 10:45:46 -0500 Subject: [PATCH 21/40] tag updated --- bbot/modules/portscan.py | 4 ++-- docs/scanning/tips_and_tricks.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py index 75b216612..674242169 100644 --- a/bbot/modules/portscan.py +++ b/bbot/modules/portscan.py @@ -30,7 +30,7 @@ class portscan(BaseModule): "adapter_ip": "", "adapter_mac": "", "router_mac": "", - "cdn_tags": "cdn", + "cdn_tags": "cdn-", "allowed_cdn_ports": None, } options_desc = { @@ -250,7 +250,7 @@ def abort_if(self, event): if self.allowed_cdn_ports is not None: # if the host is a CDN for cdn_tag in self.cdn_tags: - if any(t.startswith(f"{cdn_tag}-") for t in event.tags): + if any(t.startswith(str(cdn_tag)) for t in event.tags): # and if its port isn't in the list of allowed CDN ports if event.port not in self.allowed_cdn_ports: return True, "event is a CDN and port is not in the allowed list" diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index 7a9f66204..f81b5ec81 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -85,10 +85,10 @@ If you want to exclude CDNs (e.g. Cloudflare) from port scanning, you can set th bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 ``` -By default, if you set `allowed_cdn_ports`, it will skip only providers marked as CDNs. If you want to skip cloud providers as well, you can set `cdn_tags`: +By default, if you set `allowed_cdn_ports`, it will skip only providers marked as CDNs. If you want to skip cloud providers as well, you can set `cdn_tags`, which is a comma-separated list of tags to skip (matched against the beginning of each tag): ```bash -bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 modules.portscan.cdn_tags=cdn,cloud +bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 modules.portscan.cdn_tags=cdn-,cloud- ``` ...or via a preset: From 3ac32bd9bf25cec6ac91dfa0b33663f632f1f79e Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 10:46:57 -0500 Subject: [PATCH 22/40] update --- docs/scanning/tips_and_tricks.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md index f81b5ec81..913ccca6f 100644 --- a/docs/scanning/tips_and_tricks.md +++ b/docs/scanning/tips_and_tricks.md @@ -85,7 +85,7 @@ If you want to exclude CDNs (e.g. Cloudflare) from port scanning, you can set th bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 ``` -By default, if you set `allowed_cdn_ports`, it will skip only providers marked as CDNs. If you want to skip cloud providers as well, you can set `cdn_tags`, which is a comma-separated list of tags to skip (matched against the beginning of each tag): +By default, if you set `allowed_cdn_ports`, it will skip only providers marked as CDNs. If you want to skip cloud providers as well, you can set `cdn_tags`, which is a comma-separated list of tags to skip (matched against the beginning of each tag). 
```bash bbot -t evilcorp.com -m portscan -c modules.portscan.allowed_cdn_ports=80,443 modules.portscan.cdn_tags=cdn-,cloud- @@ -101,7 +101,7 @@ config: modules: portscan: allowed_cdn_ports: 80,443 - cdn_tags: cdn,cloud + cdn_tags: cdn-,cloud- ``` ```bash From 5041c166730a313f4c7d90ea5146f89e3e7c9905 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 18:48:02 -0500 Subject: [PATCH 23/40] organize --- bbot/modules/internal/cloudcheck.py | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 49d507330..285d48188 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -30,28 +30,25 @@ async def handle_event(self, event, **kwargs): if self.dummy_modules is None: self.make_dummy_modules() # cloud tagging by hosts - hosts_to_check = set(str(s) for s in event.resolved_hosts) - event_host = event.host_original - event_is_ip = self.helpers.is_ip(event_host) + hosts_to_check = set(event.resolved_hosts) with suppress(KeyError): - hosts_to_check.remove(event_host) - for provider, provider_type, subnet in self.helpers.cloudcheck(event_host): - if provider: - event.add_tag(f"{provider_type}-{provider}") - if event_is_ip: - event.add_tag(f"{provider_type}-ip") - else: - event.add_tag(f"{provider_type}-domain") + hosts_to_check.remove(event.host_original) + hosts_to_check = [event.host_original] + list(hosts_to_check) - for host in hosts_to_check: + for i, host in enumerate(hosts_to_check): host_is_ip = self.helpers.is_ip(host) for provider, provider_type, subnet in self.helpers.cloudcheck(host): if provider: event.add_tag(f"{provider_type}-{provider}") if host_is_ip: event.add_tag(f"{provider_type}-ip") - elif not event_is_ip: - event.add_tag(f"{provider_type}-cname") + else: + # if the original hostname is a cloud domain, tag it as such + if i == 0: + event.add_tag(f"{provider_type}-domain") + # any children are tagged as CNAMEs + else: + event.add_tag(f"{provider_type}-cname") found = set() # look for cloud assets in hosts, http responses @@ -71,7 +68,7 @@ async def handle_event(self, event, **kwargs): if event.type == "HTTP_RESPONSE": matches = await self.helpers.re.findall(sig, event.data.get("body", "")) elif event.type.startswith("DNS_NAME"): - for host in hosts_to_check.union([event_host]): + for host in hosts_to_check: match = sig.match(host) if match: matches.append(match.groups()) From 87b9f75276eafe54bbaa294090887638512bda9a Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Nov 2024 18:48:31 -0500 Subject: [PATCH 24/40] fix portscan tests --- .../test_step_2/module_tests/test_module_portscan.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_portscan.py b/bbot/test/test_step_2/module_tests/test_module_portscan.py index 56536cb5d..d9f55c27f 100644 --- a/bbot/test/test_step_2/module_tests/test_module_portscan.py +++ b/bbot/test/test_step_2/module_tests/test_module_portscan.py @@ -109,10 +109,12 @@ def check(self, module_test, events): if e.type == "DNS_NAME" and e.data == "dummy.asdf.evilcorp.net" and str(e.module) == "dummy_module" ] ) - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 3 - assert 2 <= len([e for 
e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 3 + # the reason these numbers aren't exactly predictable is because we can't predict which one arrives first + # to the portscan module. Sometimes, one that would normally be deduped is force-emitted because it led to a new open port. + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 4 assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.8.8:443"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.5:80"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.6:631"]) From c760d0b371d98d773b36a23d8a33f4f8e3c9f48b Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Wed, 20 Nov 2024 02:48:40 +0000 Subject: [PATCH 25/40] [create-pull-request] automated change --- README.md | 4 ++++ docs/modules/list_of_modules.md | 1 + docs/scanning/advanced.md | 17 ++++++++++------- docs/scanning/configuration.md | 15 +++++++++++---- docs/scanning/events.md | 2 +- docs/scanning/presets_list.md | 33 +++++++++++++++++++++++++++++++++ 6 files changed, 60 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 50e26da26..7916346e0 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,10 @@ description: Recursive web spider modules: - httpx +blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" + config: web: # how many links to follow in a row diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index e9798983c..3eb87b75b 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -120,6 +120,7 @@ | http | output | No | Send every event to a custom URL via a web request | | * | | @TheTechromancer | 2022-04-13 | | json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | @TheTechromancer | 2022-04-07 | | neo4j | output | No | Output to Neo4j | | * | | @TheTechromancer | 2022-04-07 | +| postgres | output | No | Output scan data to a SQLite database | | * | | | | | python | output | No | Output via Python API | | * | | @TheTechromancer | 2022-09-13 | | slack | output | No | Message a Slack channel when certain events are encountered | | * | | @TheTechromancer | 2023-08-14 | | splunk | output | No | Send every event to a splunk instance through HTTP Event Collector | | * | | @w0Tx | 2024-02-17 | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 355aabc25..a5666e5fd 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -38,12 +38,13 @@ usage: bbot [-h] [-t TARGET [TARGET ...]] [-w WHITELIST [WHITELIST ...]] [-m MODULE [MODULE ...]] [-l] [-lmo] [-em MODULE [MODULE ...]] [-f FLAG [FLAG ...]] [-lf] [-rf FLAG [FLAG ...]] [-ef FLAG [FLAG ...]] [--allow-deadly] [-n SCAN_NAME] [-v] [-d] - [-s] [--force] [-y] [--dry-run] [--current-preset] - [--current-preset-full] [-o DIR] [-om MODULE [MODULE ...]] - [--json] [--brief] + [-s] [--force] [-y] [--fast-mode] [--dry-run] + [--current-preset] [--current-preset-full] [-o DIR] + [-om MODULE [MODULE ...]] [--json] [--brief] [--event-types EVENT_TYPES [EVENT_TYPES ...]] [--no-deps | 
--force-deps | --retry-deps | --ignore-failed-deps | --install-all-deps] - [--version] [-H CUSTOM_HEADERS [CUSTOM_HEADERS ...]] + [--version] [--proxy HTTP_PROXY] + [-H CUSTOM_HEADERS [CUSTOM_HEADERS ...]] [--custom-yara-rules CUSTOM_YARA_RULES] Bighuge BLS OSINT Tool @@ -69,14 +70,14 @@ Presets: Modules: -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. Choices: sitedossier,crt,postman,ipneighbor,bucket_amazon,baddns_direct,ipstack,extractous,bucket_google,host_header,internetdb,jadx,baddns_zone,bucket_azure,ajaxpro,skymem,censys,postman_download,dockerhub,generic_ssrf,ip2location,gitlab,url_manipulation,paramminer_getparams,builtwith,emailformat,gowitness,github_workflows,bevigil,wayback,subdomaincenter,nuclei,bucket_firebase,bucket_file_enum,badsecrets,httpx,apkpure,leakix,paramminer_headers,chaos,git,filedownload,git_clone,sslcert,virustotal,trufflehog,ffuf,pgp,shodan_dns,certspotter,ntlm,secretsdb,wappalyzer,c99,securitytrails,securitytxt,newsletters,urlscan,dnsdumpster,credshed,baddns,wafw00f,dastardly,azure_tenant,docker_pull,columbus,fullhunt,fingerprintx,hackertarget,github_codesearch,dnsbrute,zoomeye,affiliates,oauth,azure_realm,binaryedge,bufferoverrun,dehashed,dnscaa,dnscommonsrv,myssl,ffuf_shortnames,robots,rapiddns,digitorus,wpscan,hunterio,passivetotal,code_repository,bypass403,vhost,google_playstore,dnsbrute_mutations,anubisdb,viewdns,trickest,portscan,smuggler,iis_shortnames,paramminer_cookies,bucket_digitalocean,dnsbimi,social,otx,github_org,hunt,telerik,subdomainradar,dotnetnuke,asn + Modules to enable. Choices: affiliates,ajaxpro,anubisdb,apkpure,asn,azure_realm,azure_tenant,baddns,baddns_direct,baddns_zone,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,bufferoverrun,builtwith,bypass403,c99,censys,certspotter,chaos,code_repository,columbus,credshed,crt,dastardly,dehashed,digitorus,dnsbimi,dnsbrute,dnsbrute_mutations,dnscaa,dnscommonsrv,dnsdumpster,docker_pull,dockerhub,dotnetnuke,emailformat,extractous,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,git_clone,github_codesearch,github_org,github_workflows,gitlab,google_playstore,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,jadx,leakix,myssl,newsletters,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,portscan,postman,postman_download,rapiddns,robots,secretsdb,securitytrails,securitytxt,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomaincenter,subdomainradar,telerik,trickest,trufflehog,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,wpscan,zoomeye -l, --list-modules List available modules. -lmo, --list-module-options Show all module config options -em MODULE [MODULE ...], --exclude-modules MODULE [MODULE ...] Exclude these modules. -f FLAG [FLAG ...], --flags FLAG [FLAG ...] - Enable modules by flag. Choices: email-enum,affiliates,web-screenshots,subdomain-hijack,baddns,portscan,iis-shortnames,safe,web-thorough,active,cloud-enum,web-basic,passive,report,code-enum,subdomain-enum,slow,aggressive,social-enum,deadly,web-paramminer,service-enum + Enable modules by flag. Choices: active,affiliates,aggressive,baddns,cloud-enum,code-enum,deadly,email-enum,iis-shortnames,passive,portscan,report,safe,service-enum,slow,social-enum,subdomain-enum,subdomain-hijack,web-basic,web-paramminer,web-screenshots,web-thorough -lf, --list-flags List available flags. 
-rf FLAG [FLAG ...], --require-flags FLAG [FLAG ...] Only enable modules with these flags (e.g. -rf passive) @@ -92,6 +93,7 @@ Scan: -s, --silent Be quiet --force Run scan even in the case of condition violations or failed module setups -y, --yes Skip scan confirmation prompt + --fast-mode Scan only the provided targets as fast as possible, with no extra discovery --dry-run Abort before executing scan --current-preset Show the current preset in YAML format --current-preset-full @@ -101,7 +103,7 @@ Output: -o DIR, --output-dir DIR Directory to output scan results -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: asset_inventory,discord,python,slack,http,json,web_report,teams,subdomains,emails,websocket,sqlite,txt,csv,stdout,neo4j,splunk + Output module(s). Choices: asset_inventory,csv,discord,emails,http,json,neo4j,postgres,python,slack,splunk,sqlite,stdout,subdomains,teams,txt,web_report,websocket --json, -j Output scan data in JSON format --brief, -br Output only the data itself --event-types EVENT_TYPES [EVENT_TYPES ...] @@ -118,6 +120,7 @@ Module dependencies: Misc: --version show BBOT version and exit + --proxy HTTP_PROXY Use this proxy for all HTTP requests -H CUSTOM_HEADERS [CUSTOM_HEADERS ...], --custom-headers CUSTOM_HEADERS [CUSTOM_HEADERS ...] List of custom headers as key value pairs (header=value). --custom-yara-rules CUSTOM_YARA_RULES, -cy CUSTOM_YARA_RULES diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 51f9cc3f0..77349adf4 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -73,6 +73,9 @@ folder_blobs: false ### SCOPE ### scope: + # strict scope means only exact DNS names are considered in-scope + # subdomains are not included unless they are explicitly provided in the target list + strict: false # Filter by scope distance which events are displayed in the output # 0 == show only in-scope events (affiliates are always shown) # 1 == show all events up to distance-1 (1 hop from target) @@ -378,8 +381,7 @@ Many modules accept their own configuration options. These options have the abil | modules.builtwith.api_key | str | Builtwith API key | | | modules.builtwith.redirects | bool | Also look up inbound and outbound redirects | True | | modules.c99.api_key | str | c99.nl API key | | -| modules.censys.api_id | str | Censys.io API ID | | -| modules.censys.api_secret | str | Censys.io API Secret | | +| modules.censys.api_key | str | Censys.io API Key in the format of 'key:secret' | | | modules.censys.max_pages | int | Maximum number of pages to fetch (100 results per page) | 5 | | modules.chaos.api_key | str | Chaos API key | | | modules.credshed.credshed_url | str | URL of credshed server | | @@ -415,8 +417,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.ipstack.api_key | str | IPStack GeoIP API Key | | | modules.jadx.threads | int | Maximum jadx threads for extracting apk's, default: 4 | 4 | | modules.leakix.api_key | str | LeakIX API Key | | -| modules.passivetotal.api_key | str | RiskIQ API Key | | -| modules.passivetotal.username | str | RiskIQ Username | | +| modules.passivetotal.api_key | str | PassiveTotal API Key in the format of 'username:api_key' | | | modules.pgp.search_urls | list | PGP key servers to search |` ['https://keyserver.ubuntu.com/pks/lookup?fingerprint=on&op=vindex&search=', 'http://the.earth.li:11371/pks/lookup?fingerprint=on&op=vindex&search=', 'https://pgpkeys.eu/pks/lookup?search=&op=index', 'https://pgp.mit.edu/pks/lookup?search=&op=index'] `| | modules.postman_download.api_key | str | Postman API Key | | | modules.postman_download.output_folder | str | Folder to download postman workspaces to | | @@ -459,6 +460,11 @@ Many modules accept their own configuration options. These options have the abil | modules.neo4j.password | str | Neo4j password | bbotislife | | modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 | | modules.neo4j.username | str | Neo4j username | neo4j | +| modules.postgres.database | str | The database name to connect to | bbot | +| modules.postgres.host | str | The server running Postgres | localhost | +| modules.postgres.password | str | The password to connect to Postgres | bbotislife | +| modules.postgres.port | int | The port to connect to Postgres | 5432 | +| modules.postgres.username | str | The username to connect to Postgres | postgres | | modules.slack.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] | | modules.slack.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW | | modules.slack.webhook_url | str | Discord webhook URL | | @@ -487,6 +493,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.excavate.custom_yara_rules | str | Include custom Yara rules | | | modules.excavate.retain_querystring | bool | Keep the querystring intact on emitted WEB_PARAMETERS | False | | modules.excavate.yara_max_match_data | int | Sets the maximum amount of text that can extracted from a YARA regex | 2000 | +| modules.speculate.essential_only | bool | Only enable essential speculate features (no extra discovery) | False | | modules.speculate.max_hosts | int | Max number of IP_RANGE hosts to convert into IP_ADDRESS events | 65536 | | modules.speculate.ports | str | The set of ports to speculate on | 80,443 | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 48a98515a..6abe816f7 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -106,7 +106,7 @@ Below is a full list of event types along with which modules produce/consume the | Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | |---------------------|-----------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 16 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, neo4j, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | +| * | 17 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | | ASN | 0 | 1 | | asn | | AZURE_TENANT | 1 | 0 | speculate | | | CODE_REPOSITORY | 6 | 6 | docker_pull, git_clone, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, github_codesearch, github_org, gitlab, postman | diff --git a/docs/scanning/presets_list.md b/docs/scanning/presets_list.md index bc06e5be0..fc0cbc5f3 100644 --- a/docs/scanning/presets_list.md +++ b/docs/scanning/presets_list.md @@ -189,6 +189,34 @@ Enumerate email addresses from APIs, web crawling, etc. Modules: [7]("`dehashed`, `dnscaa`, `emailformat`, `hunterio`, `pgp`, `skymem`, `sslcert`") +## **fast** + +Scan only the provided targets as fast as possible - no extra discovery + +??? 
note "`fast.yml`" + ```yaml title="~/.bbot/presets/fast.yml" + description: Scan only the provided targets as fast as possible - no extra discovery + + exclude_modules: + - excavate + + config: + # only scan the exact targets specified + scope: + strict: true + # speed up dns resolution by doing A/AAAA only - not MX/NS/SRV/etc + dns: + minimal: true + # essential speculation only + modules: + speculate: + essential_only: true + ``` + + + +Modules: [0]("") + ## **iis-shortnames** Recursively enumerate IIS shortnames @@ -278,6 +306,10 @@ Recursive web spider modules: - httpx + blacklist: + # Prevent spider from invalidating sessions by logging out + - "RE:/.*(sign|log)[_-]?out" + config: web: # how many links to follow in a row @@ -406,6 +438,7 @@ Here is a the same data, but in a table: | dirbust-light | web | Basic web directory brute-force (surface-level directories only) | 4 | ffuf, ffuf_shortnames, httpx, iis_shortnames | | dotnet-audit | web | Comprehensive scan for all IIS/.NET specific modules and module settings | 8 | ajaxpro, badsecrets, dotnetnuke, ffuf, ffuf_shortnames, httpx, iis_shortnames, telerik | | email-enum | | Enumerate email addresses from APIs, web crawling, etc. | 7 | dehashed, dnscaa, emailformat, hunterio, pgp, skymem, sslcert | +| fast | | Scan only the provided targets as fast as possible - no extra discovery | 0 | | | iis-shortnames | web | Recursively enumerate IIS shortnames | 3 | ffuf_shortnames, httpx, iis_shortnames | | kitchen-sink | | Everything everywhere all at once | 86 | anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, docker_pull, dockerhub, emailformat, ffuf, ffuf_shortnames, filedownload, fullhunt, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, gowitness, hackertarget, httpx, hunterio, iis_shortnames, internetdb, ipneighbor, leakix, myssl, ntlm, oauth, otx, paramminer_cookies, paramminer_getparams, paramminer_headers, passivetotal, pgp, postman, postman_download, rapiddns, robots, secretsdb, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, virustotal, wappalyzer, wayback, zoomeye | | paramminer | web | Discover new web parameters via brute-force | 4 | httpx, paramminer_cookies, paramminer_getparams, paramminer_headers | From 0fd7102d3b55b751c943fdee5e62f1a7ec6c8b41 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 20 Nov 2024 11:36:11 -0500 Subject: [PATCH 26/40] fix cloudcheck bug --- bbot/modules/internal/cloudcheck.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 285d48188..42c51ec03 100644 --- a/bbot/modules/internal/cloudcheck.py +++ b/bbot/modules/internal/cloudcheck.py @@ -51,6 +51,7 @@ async def handle_event(self, event, **kwargs): event.add_tag(f"{provider_type}-cname") found = set() + str_hosts_to_check = [str(host) for host in hosts_to_check] # look for cloud assets in hosts, http responses # loop through each provider for provider in self.helpers.cloud.providers.values(): @@ -68,7 +69,7 @@ async def handle_event(self, event, **kwargs): if 
event.type == "HTTP_RESPONSE": matches = await self.helpers.re.findall(sig, event.data.get("body", "")) elif event.type.startswith("DNS_NAME"): - for host in hosts_to_check: + for host in str_hosts_to_check: match = sig.match(host) if match: matches.append(match.groups()) From 1286a9469ed921542551430258f3b9b0dc464c57 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 20 Nov 2024 11:49:12 -0500 Subject: [PATCH 27/40] fix portscan tests --- .../test_step_2/module_tests/test_module_portscan.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_portscan.py b/bbot/test/test_step_2/module_tests/test_module_portscan.py index 56536cb5d..d9f55c27f 100644 --- a/bbot/test/test_step_2/module_tests/test_module_portscan.py +++ b/bbot/test/test_step_2/module_tests/test_module_portscan.py @@ -109,10 +109,12 @@ def check(self, module_test, events): if e.type == "DNS_NAME" and e.data == "dummy.asdf.evilcorp.net" and str(e.module) == "dummy_module" ] ) - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 3 - assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 3 + # the reason these numbers aren't exactly predictable is because we can't predict which one arrives first + # to the portscan module. Sometimes, one that would normally be deduped is force-emitted because it led to a new open port. + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.8.8"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.4"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.5"]) <= 4 + assert 2 <= len([e for e in events if e.type == "IP_ADDRESS" and e.data == "8.8.4.6"]) <= 4 assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.8.8:443"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.5:80"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "8.8.4.6:631"]) From b054b156974ae83743db4837f2d6bcf7b3fe3a64 Mon Sep 17 00:00:00 2001 From: blsaccess Date: Thu, 21 Nov 2024 00:23:53 +0000 Subject: [PATCH 28/40] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index a57b3e689..2b71f973b 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.83.7", + "version": "3.84.0", "config": "", "only_verified": True, "concurrency": 8, From 0adf09dc0fa2ff51cd1699913144566b8a21346b Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Thu, 21 Nov 2024 10:52:33 +0100 Subject: [PATCH 29/40] Keep GitHub Actions up to date with GitHub's Dependabot Fixes software supply chain safety warnings like at the bottom right of https://github.com/blacklanternsecurity/bbot/actions/runs/11946699098 * [Keeping your actions up to date with Dependabot](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot) * [Configuration options for the dependabot.yml file - 
package-ecosystem](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem) --- .github/dependabot.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index bd3b2c06f..18e2537a5 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -6,3 +6,11 @@ updates: interval: "weekly" target-branch: "dev" open-pull-requests-limit: 10 + - package-ecosystem: github-actions + directory: / + groups: + github-actions: + patterns: + - "*" # Group all Actions updates into a single larger pull request + schedule: + interval: weekly From a51f0ef3aa76a3cc2eecd852caab2ebfc005844d Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Thu, 21 Nov 2024 11:37:26 +0100 Subject: [PATCH 30/40] Keep the dict sorted by keys to spot two duplicates --- bbot/core/helpers/libmagic.py | 70 +++++++++++++++++------------------ 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/bbot/core/helpers/libmagic.py b/bbot/core/helpers/libmagic.py index 77a9eebce..5e1279d9c 100644 --- a/bbot/core/helpers/libmagic.py +++ b/bbot/core/helpers/libmagic.py @@ -15,54 +15,52 @@ def get_compression(mime_type): mime_type = mime_type.lower() # from https://github.com/cdgriffith/puremagic/blob/master/puremagic/magic_data.json compression_map = { - "application/gzip": "gzip", # Gzip compressed file - "application/zip": "zip", # Zip archive - "application/x-bzip2": "bzip2", # Bzip2 compressed file - "application/x-xz": "xz", # XZ compressed file - "application/x-7z-compressed": "7z", # 7-Zip archive - "application/vnd.rar": "rar", # RAR archive - "application/x-lzma": "lzma", # LZMA compressed file - "application/x-compress": "compress", # Unix compress file - "application/zstd": "zstd", # Zstandard compressed file - "application/x-lz4": "lz4", # LZ4 compressed file - "application/x-tar": "tar", # Tar archive - "application/x-zip-compressed-fb2": "zip", # Zip archive (FB2) - "application/epub+zip": "zip", # EPUB book (Zip archive) - "application/pak": "pak", # PAK archive - "application/x-lha": "lha", # LHA archive "application/arj": "arj", # ARJ archive - "application/vnd.ms-cab-compressed": "cab", # Microsoft Cabinet archive - "application/x-sit": "sit", # StuffIt archive "application/binhex": "binhex", # BinHex encoded file - "application/x-lrzip": "lrzip", # Long Range ZIP - "application/x-alz": "alz", # ALZip archive - "application/x-tgz": "tgz", # Gzip compressed Tar archive - "application/x-gzip": "gzip", # Gzip compressed file - "application/x-lzip": "lzip", # Lzip compressed file - "application/x-zstd-compressed-tar": "zstd", # Zstandard compressed Tar archive - "application/x-lz4-compressed-tar": "lz4", # LZ4 compressed Tar archive - "application/vnd.comicbook+zip": "zip", # Comic book archive (Zip) - "application/vnd.palm": "palm", # Palm OS data + "application/epub+zip": "zip", # EPUB book (Zip archive) "application/fictionbook2+zip": "zip", # FictionBook 2.0 (Zip) "application/fictionbook3+zip": "zip", # FictionBook 3.0 (Zip) + "application/gzip": "gzip", # Gzip compressed file + "application/java-archive": "zip", # Java Archive (JAR) + "application/pak": "pak", # PAK archive + "application/vnd.android.package-archive": "zip", # Android package (APK) + "application/vnd.comicbook-rar": "rar", # Comic book archive (RAR) + "application/vnd.comicbook+zip": "zip", # Comic book archive (Zip) + "application/vnd.ms-cab-compressed": "cab", # Microsoft Cabinet archive + 
"application/vnd.palm": "palm", # Palm OS data + "application/vnd.rar": "rar", # RAR archive + "application/x-7z-compressed": "7z", # 7-Zip archive + "application/x-ace": "ace", # ACE archive + "application/x-alz": "alz", # ALZip archive + "application/x-arc": "arc", # ARC archive + "application/x-archive": "ar", # Unix archive + "application/x-bzip2": "bzip2", # Bzip2 compressed file + "application/x-compress": "compress", # Unix compress file "application/x-cpio": "cpio", # CPIO archive + "application/x-gzip": "gzip", # Gzip compressed file + "application/x-itunes-ipa": "zip", # iOS application archive (IPA) "application/x-java-pack200": "pack200", # Java Pack200 archive + "application/x-lha": "lha", # LHA archive + "application/x-lrzip": "lrzip", # Long Range ZIP + "application/x-lz4-compressed-tar": "lz4", # LZ4 compressed Tar archive + "application/x-lz4": "lz4", # LZ4 compressed file + "application/x-lzip": "lzip", # Lzip compressed file + "application/x-lzma": "lzma", # LZMA compressed file "application/x-par2": "par2", # PAR2 recovery file + "application/x-qpress": "qpress", # Qpress archive "application/x-rar-compressed": "rar", # RAR archive - "application/java-archive": "zip", # Java Archive (JAR) - "application/x-webarchive": "zip", # Web archive (Zip) - "application/vnd.android.package-archive": "zip", # Android package (APK) - "application/x-itunes-ipa": "zip", # iOS application archive (IPA) + "application/x-sit": "sit", # StuffIt archive "application/x-stuffit": "sit", # StuffIt archive - "application/x-archive": "ar", # Unix archive - "application/x-qpress": "qpress", # Qpress archive + "application/x-tar": "tar", # Tar archive + "application/x-tgz": "tgz", # Gzip compressed Tar archive + "application/x-webarchive": "zip", # Web archive (Zip) "application/x-xar": "xar", # XAR archive - "application/x-ace": "ace", # ACE archive + "application/x-xz": "xz", # XZ compressed file + "application/x-zip-compressed-fb2": "zip", # Zip archive (FB2) "application/x-zoo": "zoo", # Zoo archive - "application/x-arc": "arc", # ARC archive "application/x-zstd-compressed-tar": "zstd", # Zstandard compressed Tar archive - "application/x-lz4-compressed-tar": "lz4", # LZ4 compressed Tar archive - "application/vnd.comicbook-rar": "rar", # Comic book archive (RAR) + "application/zip": "zip", # Zip archive + "application/zstd": "zstd", # Zstandard compressed file } return compression_map.get(mime_type, "") From 46b679918510b0d80ba73950b1ab060642370e31 Mon Sep 17 00:00:00 2001 From: Sangharsha <67364489+noob6t5@users.noreply.github.com> Date: Thu, 21 Nov 2024 21:00:43 +0545 Subject: [PATCH 31/40] Update README.md Added option --allow-deadly --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 50e26da26..ad8fdd6e9 100644 --- a/README.md +++ b/README.md @@ -191,10 +191,10 @@ flags: ```bash # everything everywhere all at once -bbot -t evilcorp.com -p kitchen-sink +bbot -t evilcorp.com -p kitchen-sink --allow-deadly # roughly equivalent to: -bbot -t evilcorp.com -p subdomain-enum cloud-enum code-enum email-enum spider web-basic paramminer dirbust-light web-screenshots +bbot -t evilcorp.com -p subdomain-enum cloud-enum code-enum email-enum spider web-basic paramminer dirbust-light web-screenshots --allow-deadly ``` From 7554de3d5af1579d4d2b82f8df3ef6997b60c570 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 21 Nov 2024 10:48:19 -0500 Subject: [PATCH 32/40] fix subdomain enum bug --- bbot/modules/templates/subdomain_enum.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 913b6c2ed..0d5f347e9 100644 --- a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -169,7 +169,7 @@ async def filter_event(self, event): if any(t.startswith("cloud-") for t in event.tags): is_cloud = True # reject if it's a cloud resource and not in our target - if is_cloud and event not in self.scan.target: + if is_cloud and event not in self.scan.target.whitelist: return False, "Event is a cloud resource and not a direct target" # optionally reject events with wildcards / errors if self.reject_wildcards: From eaef028486c0e11cab78a1c032d3ecd68109b455 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 21 Nov 2024 10:54:04 -0500 Subject: [PATCH 33/40] fix conflict --- bbot/core/helpers/depsinstaller/installer.py | 3 ++- bbot/defaults.yml | 15 +++++++-------- bbot/test/test.conf | 2 ++ 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index 479c51c97..fce5f077d 100644 --- a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -49,7 +49,8 @@ def __init__(self, parent_helper): self.minimal_git_config.touch() os.environ["GIT_CONFIG_GLOBAL"] = str(self.minimal_git_config) - self.deps_behavior = self.parent_helper.config.get("deps_behavior", "abort_on_failure").lower() + self.deps_config = self.parent_helper.config.get("deps", {}) + self.deps_behavior = self.deps_config.get("behavior", "abort_on_failure").lower() self.ansible_debug = self.core.logger.log_level <= logging.DEBUG self.venv = "" if sys.prefix != sys.base_prefix: diff --git a/bbot/defaults.yml b/bbot/defaults.yml index e659a183b..63f5f7e68 100644 --- a/bbot/defaults.yml +++ b/bbot/defaults.yml @@ -112,6 +112,13 @@ engine: deps: ffuf: version: "2.1.0" + # How to handle installation of module dependencies + # Choices are: + # - abort_on_failure (default) - if a module dependency fails to install, abort the scan + # - retry_failed - try again to install failed dependencies + # - ignore_failed - run the scan regardless of what happens with dependency installation + # - disable - completely disable BBOT's dependency system (you are responsible for installing tools, pip packages, etc.) + behavior: abort_on_failure ### ADVANCED OPTIONS ### @@ -129,14 +136,6 @@ dnsresolve: True # Cloud provider tagging cloudcheck: True -# How to handle installation of module dependencies -# Choices are: -# - abort_on_failure (default) - if a module dependency fails to install, abort the scan -# - retry_failed - try again to install failed dependencies -# - ignore_failed - run the scan regardless of what happens with dependency installation -# - disable - completely disable BBOT's dependency system (you are responsible for installing tools, pip packages, etc.) 
-deps_behavior: abort_on_failure - # Strip querystring from URLs by default url_querystring_remove: True # When query string is retained, by default collapse parameter values down to a single value per parameter diff --git a/bbot/test/test.conf b/bbot/test/test.conf index 63914fe65..1c6a19dbf 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -36,6 +36,8 @@ dns: - example.com - evilcorp.com - one +deps: + behavior: retry_failed engine: debug: true agent_url: ws://127.0.0.1:8765 From 3b6aaa9695f85242db495e8e8a28f7af6c0e838e Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 21 Nov 2024 13:44:46 -0500 Subject: [PATCH 34/40] misc small bugfixes --- bbot/core/helpers/dns/brute.py | 9 ++++++--- bbot/modules/github_codesearch.py | 3 ++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/bbot/core/helpers/dns/brute.py b/bbot/core/helpers/dns/brute.py index 0c3799ca5..dd17ec5cc 100644 --- a/bbot/core/helpers/dns/brute.py +++ b/bbot/core/helpers/dns/brute.py @@ -41,10 +41,13 @@ async def dnsbrute(self, module, domain, subdomains, type=None): type = "A" type = str(type).strip().upper() - wildcard_rdtypes = await self.parent_helper.dns.is_wildcard_domain(domain, (type, "CNAME")) - if wildcard_rdtypes: + wildcard_domains = await self.parent_helper.dns.is_wildcard_domain(domain, (type, "CNAME")) + wildcard_rdtypes = set() + for domain, rdtypes in wildcard_domains.items(): + wildcard_rdtypes.update(rdtypes) + if wildcard_domains: self.log.hugewarning( - f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(wildcard_rdtypes)})" + f"Aborting massdns on {domain} because it's a wildcard domain ({','.join(sorted(wildcard_rdtypes))})" ) return [] diff --git a/bbot/modules/github_codesearch.py b/bbot/modules/github_codesearch.py index 39e1ee7b4..4c838a1c5 100644 --- a/bbot/modules/github_codesearch.py +++ b/bbot/modules/github_codesearch.py @@ -50,9 +50,10 @@ async def query(self, query): break status_code = getattr(r, "status_code", 0) if status_code == 429: - "Github is rate-limiting us (HTTP status: 429)" + self.info("Github is rate-limiting us (HTTP status: 429)") break if status_code != 200: + self.info(f"Unexpected response (HTTP status: {status_code})") break try: j = r.json() From 13d87c62189cfdda9d133deb5edc18c0e972beb8 Mon Sep 17 00:00:00 2001 From: blsaccess Date: Fri, 22 Nov 2024 00:24:15 +0000 Subject: [PATCH 35/40] Update nuclei --- bbot/modules/deadly/nuclei.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 1eb10cb23..f254f9bb0 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -15,7 +15,7 @@ class nuclei(BaseModule): } options = { - "version": "3.3.5", + "version": "3.3.6", "tags": "", "templates": "", "severity": "", From 97b3df757e06ea534a4462fd67587777b2bc233f Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Fri, 22 Nov 2024 02:49:37 +0000 Subject: [PATCH 36/40] [create-pull-request] automated change --- docs/modules/list_of_modules.md | 1 + docs/scanning/advanced.md | 2 +- docs/scanning/configuration.md | 24 +++++++++++++++--------- docs/scanning/events.md | 2 +- 4 files changed, 18 insertions(+), 11 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 3eb87b75b..4647950b5 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -119,6 +119,7 @@ | emails | output | No | Output any email addresses 
found belonging to the target domain | email-enum | EMAIL_ADDRESS | | @domwhewell-sage | 2023-12-23 | | http | output | No | Send every event to a custom URL via a web request | | * | | @TheTechromancer | 2022-04-13 | | json | output | No | Output to Newline-Delimited JSON (NDJSON) | | * | | @TheTechromancer | 2022-04-07 | +| mysql | output | No | Output scan data to a MySQL database | | * | | | | | neo4j | output | No | Output to Neo4j | | * | | @TheTechromancer | 2022-04-07 | | postgres | output | No | Output scan data to a SQLite database | | * | | | | | python | output | No | Output via Python API | | * | | @TheTechromancer | 2022-09-13 | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index a5666e5fd..6af93cf8c 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -103,7 +103,7 @@ Output: -o DIR, --output-dir DIR Directory to output scan results -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: asset_inventory,csv,discord,emails,http,json,neo4j,postgres,python,slack,splunk,sqlite,stdout,subdomains,teams,txt,web_report,websocket + Output module(s). Choices: asset_inventory,csv,discord,emails,http,json,mysql,neo4j,postgres,python,slack,splunk,sqlite,stdout,subdomains,teams,txt,web_report,websocket --json, -j Output scan data in JSON format --brief, -br Output only the data itself --event-types EVENT_TYPES [EVENT_TYPES ...] diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 77349adf4..be14d2fe0 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -171,6 +171,13 @@ engine: deps: ffuf: version: "2.1.0" + # How to handle installation of module dependencies + # Choices are: + # - abort_on_failure (default) - if a module dependency fails to install, abort the scan + # - retry_failed - try again to install failed dependencies + # - ignore_failed - run the scan regardless of what happens with dependency installation + # - disable - completely disable BBOT's dependency system (you are responsible for installing tools, pip packages, etc.) + behavior: abort_on_failure ### ADVANCED OPTIONS ### @@ -188,14 +195,6 @@ dnsresolve: True # Cloud provider tagging cloudcheck: True -# How to handle installation of module dependencies -# Choices are: -# - abort_on_failure (default) - if a module dependency fails to install, abort the scan -# - retry_failed - try again to install failed dependencies -# - ignore_failed - run the scan regardless of what happens with dependency installation -# - disable - completely disable BBOT's dependency system (you are responsible for installing tools, pip packages, etc.) -deps_behavior: abort_on_failure - # Strip querystring from URLs by default url_querystring_remove: True # When query string is retained, by default collapse parameter values down to a single value per parameter @@ -340,6 +339,8 @@ Many modules accept their own configuration options. These options have the abil | modules.portscan.adapter | str | Manually specify a network interface, such as "eth0" or "tun0". If not specified, the first network interface found with a default gateway will be used. | | | modules.portscan.adapter_ip | str | Send packets using this IP address. Not needed unless masscan's autodetection fails | | | modules.portscan.adapter_mac | str | Send packets using this as the source MAC address. 
Not needed unless masscan's autodetection fails | | +| modules.portscan.allowed_cdn_ports | NoneType | Comma-separated list of ports that are allowed to be scanned for CDNs | None | +| modules.portscan.cdn_tags | str | Comma-separated list of tags to skip, e.g. 'cdn,cloud' | cdn- | | modules.portscan.ping_first | bool | Only portscan hosts that reply to pings | False | | modules.portscan.ping_only | bool | Ping sweep only, no portscan | False | | modules.portscan.ports | str | Ports to scan | | @@ -431,7 +432,7 @@ Many modules accept their own configuration options. These options have the abil | modules.trufflehog.config | str | File path or URL to YAML trufflehog config | | | modules.trufflehog.deleted_forks | bool | Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours. | False | | modules.trufflehog.only_verified | bool | Only report credentials that have been verified | True | -| modules.trufflehog.version | str | trufflehog version | 3.83.7 | +| modules.trufflehog.version | str | trufflehog version | 3.84.0 | | modules.urlscan.urls | bool | Emit URLs in addition to DNS_NAMEs | False | | modules.virustotal.api_key | str | VirusTotal API Key | | | modules.wayback.garbage_threshold | int | Dedupe similar urls if they are in a group of this size or higher (lower values == less garbage data) | 10 | @@ -457,6 +458,11 @@ Many modules accept their own configuration options. These options have the abil | modules.http.username | str | Username (basic auth) | | | modules.json.output_file | str | Output to file | | | modules.json.siem_friendly | bool | Output JSON in a SIEM-friendly format for ingestion into Elastic, Splunk, etc. | False | +| modules.mysql.database | str | The database name to connect to | bbot | +| modules.mysql.host | str | The server running MySQL | localhost | +| modules.mysql.password | str | The password to connect to MySQL | bbotislife | +| modules.mysql.port | int | The port to connect to MySQL | 3306 | +| modules.mysql.username | str | The username to connect to MySQL | root | | modules.neo4j.password | str | Neo4j password | bbotislife | | modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 | | modules.neo4j.username | str | Neo4j username | neo4j | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 6abe816f7..967c5cbf2 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -106,7 +106,7 @@ Below is a full list of event types along with which modules produce/consume the | Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | 
|---------------------|-----------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 17 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | +| * | 18 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, mysql, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | | ASN | 0 | 1 | | asn | | AZURE_TENANT | 1 | 0 | speculate | | | CODE_REPOSITORY | 6 | 6 | docker_pull, git_clone, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, github_codesearch, github_org, gitlab, postman | From b6c96c780769330556b8f56c8b43b1560b243f05 Mon Sep 17 00:00:00 2001 From: Colin Stubbs <3059577+colin-stubbs@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:21:51 +0800 Subject: [PATCH 37/40] initial release of dnstlsrpt module --- bbot/modules/dnstlsrpt.py | 144 ++++++++++++++++++ .../module_tests/test_module_dnstlsrpt.py | 64 ++++++++ 2 files changed, 208 insertions(+) create mode 100644 bbot/modules/dnstlsrpt.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py diff --git a/bbot/modules/dnstlsrpt.py b/bbot/modules/dnstlsrpt.py new file mode 100644 index 000000000..c3b709c41 --- /dev/null +++ b/bbot/modules/dnstlsrpt.py @@ -0,0 +1,144 @@ +# dnstlsrpt.py +# +# Checks for and parses common TLS-RPT TXT records, e.g. _smtp._tls.target.domain +# +# TLS-RPT policies may contain email addresses or URL's for reporting destinations, typically the email addresses are software processed inboxes, but they may also be to individual humans or team inboxes. +# +# The domain portion of any email address or URL is also passively checked and added as appropriate, for additional inspection by other modules. +# +# Example records, +# _smtp._tls.example.com TXT "v=TLSRPTv1;rua=https://tlsrpt.azurewebsites.net/report" +# _smtp._tls.example.net TXT "v=TLSRPTv1; rua=mailto:sts-reports@example.net;" +# +# TODO: extract %{UNIQUE_ID}% from hosted services as ORG_STUB ? +# e.g. %{UNIQUE_ID}%@tlsrpt.hosted.service.provider is usually a tenant specific ID. +# e.g. tlsrpt@%{UNIQUE_ID}%.hosted.service.provider is usually a tenant specific ID. 
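+#
+# Illustrative walk-through (an editorial sketch, not verbatim module output): the rua=
+# destinations are what this module extracts. Given the example records above,
+# "rua=mailto:sts-reports@example.net;" would yield an EMAIL_ADDRESS event for
+# sts-reports@example.net, and "rua=https://tlsrpt.azurewebsites.net/report" would yield
+# a URL_UNVERIFIED event for that URL.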
+
+from bbot.modules.base import BaseModule
+from bbot.core.helpers.dns.helpers import service_record
+
+import re
+
+from bbot.core.helpers.regexes import email_regex, url_regexes
+
+_tlsrpt_regex = r"^v=(?P<v>TLSRPTv[0-9]+); *(?P<kvps>.*)$"
+tlsrpt_regex = re.compile(_tlsrpt_regex, re.I)
+
+_tlsrpt_kvp_regex = r"(?P<k>\w+)=(?P<v>[^;]+);*"
+tlsrpt_kvp_regex = re.compile(_tlsrpt_kvp_regex)
+
+_csul = r"(?P<uri>[^, ]+)"
+csul = re.compile(_csul)
+
+
+class dnstlsrpt(BaseModule):
+    watched_events = ["DNS_NAME"]
+    produced_events = ["EMAIL_ADDRESS", "URL_UNVERIFIED", "RAW_DNS_RECORD"]
+    flags = ["subdomain-enum", "cloud-enum", "email-enum", "passive", "safe"]
+    meta = {
+        "description": "Check for TLS-RPT records",
+        "author": "@colin-stubbs",
+        "created_date": "2024-07-26",
+    }
+    options = {
+        "emit_emails": True,
+        "emit_raw_dns_records": False,
+        "emit_urls": True,
+        "emit_vulnerabilities": True,
+    }
+    options_desc = {
+        "emit_emails": "Emit EMAIL_ADDRESS events",
+        "emit_raw_dns_records": "Emit RAW_DNS_RECORD events",
+        "emit_urls": "Emit URL_UNVERIFIED events",
+        "emit_vulnerabilities": "Emit VULNERABILITY events",
+    }
+
+    async def setup(self):
+        self.emit_emails = self.config.get("emit_emails", True)
+        self.emit_raw_dns_records = self.config.get("emit_raw_dns_records", False)
+        self.emit_urls = self.config.get("emit_urls", True)
+        self.emit_vulnerabilities = self.config.get("emit_vulnerabilities", True)
+        return await super().setup()
+
+    def _incoming_dedup_hash(self, event):
+        # dedupe by parent
+        parent_domain = self.helpers.parent_domain(event.data)
+        return hash(parent_domain), "already processed parent domain"
+
+    async def filter_event(self, event):
+        if "_wildcard" in str(event.host).split("."):
+            return False, "event is wildcard"
+
+        # there's no value in inspecting service records
+        if service_record(event.host):
+            return False, "service record detected"
+
+        return True
+
+    async def handle_event(self, event):
+        rdtype = "TXT"
+        tags = ["tlsrpt-record"]
+        hostname = f"_smtp._tls.{event.host}"
+
+        r = await self.helpers.resolve_raw(hostname, type=rdtype)
+
+        if r:
+            raw_results, errors = r
+            for answer in raw_results:
+                if self.emit_raw_dns_records:
+                    await self.emit_event(
+                        {"host": hostname, "type": rdtype, "answer": answer.to_text()},
+                        "RAW_DNS_RECORD",
+                        parent=event,
+                        tags=tags + [f"{rdtype.lower()}-record"],
+                        context=f"{rdtype} lookup on {hostname} produced {{event.type}}",
+                    )
+
+                # we need to fix TXT data that may have been split across two different rdata's
+                # e.g. we will get a single string, but within that string we may have two parts such as:
+                # answer = '"part 1 that was really long" "part 2 that did not fit in part 1"'
+                # NOTE: the leading and trailing double quotes are essential as part of a raw DNS TXT record, or another record type that contains a free form text string as a component.
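+                # Worked example (editorial note): the answer shown above would normalize to
+                # 'part 1 that was really longpart 2 that did not fit in part 1', i.e. the outer
+                # quotes are stripped and each '" "' separator between rdata parts is removed.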
+                s = answer.to_text().strip('"').replace('" "', "")
+
+                # validate TLSRPT record, tag appropriately
+                tlsrpt_match = tlsrpt_regex.search(s)
+
+                if (
+                    tlsrpt_match
+                    and tlsrpt_match.group("v")
+                    and tlsrpt_match.group("kvps")
+                    and tlsrpt_match.group("kvps") != ""
+                ):
+                    for kvp_match in tlsrpt_kvp_regex.finditer(tlsrpt_match.group("kvps")):
+                        key = kvp_match.group("k").lower()
+
+                        if key == "rua":
+                            for csul_match in csul.finditer(kvp_match.group("v")):
+                                if csul_match.group("uri"):
+                                    for match in email_regex.finditer(csul_match.group("uri")):
+                                        start, end = match.span()
+                                        email = csul_match.group("uri")[start:end]
+
+                                        if self.emit_emails:
+                                            await self.emit_event(
+                                                email,
+                                                "EMAIL_ADDRESS",
+                                                tags=tags + [f"tlsrpt-record-{key}"],
+                                                parent=event,
+                                            )
+
+                                    for url_regex in url_regexes:
+                                        for match in url_regex.finditer(csul_match.group("uri")):
+                                            start, end = match.span()
+                                            url = csul_match.group("uri")[start:end]
+
+                                            if self.emit_urls:
+                                                await self.emit_event(
+                                                    url,
+                                                    "URL_UNVERIFIED",
+                                                    tags=tags + [f"tlsrpt-record-{key}"],
+                                                    parent=event,
+                                                )
+
+
+# EOF
diff --git a/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py b/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py
new file mode 100644
index 000000000..a14a882fd
--- /dev/null
+++ b/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py
@@ -0,0 +1,64 @@
+from .base import ModuleTestBase
+
+raw_smtp_tls_txt = '"v=TLSRPTv1; rua=mailto:tlsrpt@sub.blacklanternsecurity.notreal,mailto:test@on.thirdparty.com, https://tlspost.example.com;"'
+
+
+class TestDNSTLSRPT(ModuleTestBase):
+    targets = ["blacklanternsecurity.notreal"]
+    modules_overrides = ["dnstlsrpt", "speculate"]
+    config_overrides = {"modules": {"dnstlsrpt": {"emit_raw_dns_records": True}}, "scope": {"report_distance": 1}}
+
+    async def setup_after_prep(self, module_test):
+        await module_test.mock_dns(
+            {
+                "blacklanternsecurity.notreal": {
+                    "A": ["127.0.0.11"],
+                },
+                "_tls.blacklanternsecurity.notreal": {
+                    "A": ["127.0.0.22"],
+                },
+                "_smtp._tls.blacklanternsecurity.notreal": {
+                    "A": ["127.0.0.33"],
+                    "TXT": [raw_smtp_tls_txt],
+                },
+                "_tls._smtp._tls.blacklanternsecurity.notreal": {
+                    "A": ["127.0.0.44"],
+                },
+                "_smtp._tls._smtp._tls.blacklanternsecurity.notreal": {
+                    "TXT": [raw_smtp_tls_txt],
+                },
+                "sub.blacklanternsecurity.notreal": {
+                    "A": ["127.0.0.55"],
+                },
+            }
+        )
+
+    def check(self, module_test, events):
+        assert any(
+            e.type == "RAW_DNS_RECORD" and e.data["answer"] == raw_smtp_tls_txt for e in events
+        ), "Failed to emit RAW_DNS_RECORD"
+        assert any(
+            e.type == "DNS_NAME" and e.data == "sub.blacklanternsecurity.notreal" for e in events
+        ), "Failed to detect sub-domain"
+        assert any(
+            e.type == "EMAIL_ADDRESS" and e.data == "tlsrpt@sub.blacklanternsecurity.notreal" for e in events
+        ), "Failed to detect email address"
+        assert any(
+            e.type == "EMAIL_ADDRESS" and e.data == "test@on.thirdparty.com" for e in events
+        ), "Failed to detect third party email address"
+        assert any(
+            e.type == "URL_UNVERIFIED" and e.data == "https://tlspost.example.com/" for e in events
+        ), "Failed to detect third party URL"
+
+
+class TestDNSTLSRPTRecursiveRecursion(TestDNSTLSRPT):
+    config_overrides = {
+        "scope": {"report_distance": 1},
+        "modules": {"dnstlsrpt": {"emit_raw_dns_records": True}},
+    }
+
+    def check(self, module_test, events):
+        assert not any(
+            e.type == "RAW_DNS_RECORD" and e.data["host"] == "_smtp._tls._smtp._tls.blacklanternsecurity.notreal"
+            for e in events
+        ), "Unwanted recursion occurring"
From 
835665ce99e6dcc6318fc9639238f2887144f4e0 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 23 Nov 2024 00:43:48 +0100 Subject: [PATCH 38/40] ruff check && ruff format --- .github/workflows/tests.yml | 17 ++------ bbot/cli.py | 4 -- bbot/core/config/files.py | 1 - bbot/core/event/base.py | 3 -- bbot/core/helpers/dns/dns.py | 1 - bbot/core/helpers/dns/engine.py | 2 - bbot/core/helpers/dns/mock.py | 1 - bbot/core/helpers/libmagic.py | 1 - bbot/core/helpers/process.py | 2 - bbot/core/helpers/validators.py | 1 - bbot/core/helpers/web/engine.py | 1 - bbot/core/helpers/web/web.py | 1 - bbot/core/modules.py | 1 - bbot/db/sql/models.py | 1 - bbot/modules/baddns.py | 2 - bbot/modules/baddns_direct.py | 1 - bbot/modules/dotnetnuke.py | 2 - bbot/modules/internal/excavate.py | 39 +++++++++---------- bbot/modules/paramminer_headers.py | 5 +-- bbot/scanner/preset/args.py | 1 - bbot/scanner/preset/environ.py | 1 - .../test_module_asset_inventory.py | 1 - .../module_tests/test_module_dnsbrute.py | 1 - .../test_module_dnsbrute_mutations.py | 1 - .../module_tests/test_module_dnscommonsrv.py | 1 - .../module_tests/test_module_dotnetnuke.py | 2 - .../module_tests/test_module_excavate.py | 20 ---------- .../test_module_paramminer_cookies.py | 1 - .../test_module_paramminer_getparams.py | 6 --- .../test_module_paramminer_headers.py | 7 ---- .../module_tests/test_module_portscan.py | 1 - pyproject.toml | 5 ++- 32 files changed, 26 insertions(+), 108 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index dbd9d53e3..b4efe9fdb 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -15,19 +15,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: psf/black@stable - with: - options: "--check" - - name: Install Python 3 - uses: actions/setup-python@v5 - with: - python-version: "3.x" - - name: Install dependencies - run: | - pip install flake8 - - name: flake8 - run: | - flake8 + - run: | + pipx install ruff + ruff check + ruff format test: needs: lint runs-on: ubuntu-latest diff --git a/bbot/cli.py b/bbot/cli.py index 4e2ce39a8..c1ec117b2 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -29,7 +29,6 @@ async def _main(): - import asyncio import traceback from contextlib import suppress @@ -45,7 +44,6 @@ async def _main(): global scan_name try: - # start by creating a default scan preset preset = Preset(_log=True, name="bbot_cli_main") # parse command line arguments and merge into preset @@ -81,7 +79,6 @@ async def _main(): # if we're listing modules or their options if options.list_modules or options.list_module_options: - # if no modules or flags are specified, enable everything if not (options.modules or options.output_modules or options.flags): for module, preloaded in preset.module_loader.preloaded().items(): @@ -172,7 +169,6 @@ async def _main(): log.trace(f"Command: {' '.join(sys.argv)}") if sys.stdin.isatty(): - # warn if any targets belong directly to a cloud provider for event in scan.target.seeds.events: if event.type == "DNS_NAME": diff --git a/bbot/core/config/files.py b/bbot/core/config/files.py index c66e92116..2be7bbaa1 100644 --- a/bbot/core/config/files.py +++ b/bbot/core/config/files.py @@ -10,7 +10,6 @@ class BBOTConfigFiles: - config_dir = (Path.home() / ".config" / "bbot").resolve() defaults_filename = (bbot_code_dir / "defaults.yml").resolve() config_filename = (config_dir / "bbot.yml").resolve() diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index ce627f695..bb6d92e91 100644 --- 
a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -1180,7 +1180,6 @@ def __init__(self, *args, **kwargs): self.num_redirects = getattr(self.parent, "num_redirects", 0) def _data_id(self): - data = super()._data_id() # remove the querystring for URL/URL_UNVERIFIED events, because we will conditionally add it back in (based on settings) @@ -1267,7 +1266,6 @@ def http_status(self): class URL(URL_UNVERIFIED): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -1309,7 +1307,6 @@ class URL_HINT(URL_UNVERIFIED): class WEB_PARAMETER(DictHostEvent): - def _data_id(self): # dedupe by url:name:param_type url = self.data.get("url", "") diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py index 43380b746..4526f084a 100644 --- a/bbot/core/helpers/dns/dns.py +++ b/bbot/core/helpers/dns/dns.py @@ -16,7 +16,6 @@ class DNSHelper(EngineClient): - SERVER_CLASS = DNSEngine ERROR_CLASS = DNSError diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index 219339c30..8f3c5a0b7 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -24,7 +24,6 @@ class DNSEngine(EngineServer): - CMDS = { 0: "resolve", 1: "resolve_raw", @@ -476,7 +475,6 @@ async def is_wildcard(self, query, rdtypes, raw_dns_records=None): # for every parent domain, starting with the shortest parents = list(domain_parents(query)) for parent in parents[::-1]: - # check if the parent domain is set up with wildcards wildcard_results = await self.is_wildcard_domain(parent, rdtypes_to_check) diff --git a/bbot/core/helpers/dns/mock.py b/bbot/core/helpers/dns/mock.py index 17ee2759a..3f6fd83ea 100644 --- a/bbot/core/helpers/dns/mock.py +++ b/bbot/core/helpers/dns/mock.py @@ -5,7 +5,6 @@ class MockResolver: - def __init__(self, mock_data=None, custom_lookup_fn=None): self.mock_data = mock_data if mock_data else {} self._custom_lookup_fn = custom_lookup_fn diff --git a/bbot/core/helpers/libmagic.py b/bbot/core/helpers/libmagic.py index 5e1279d9c..37612f558 100644 --- a/bbot/core/helpers/libmagic.py +++ b/bbot/core/helpers/libmagic.py @@ -2,7 +2,6 @@ def get_magic_info(file): - magic_detections = puremagic.magic_file(file) if magic_detections: magic_detections.sort(key=lambda x: x.confidence, reverse=True) diff --git a/bbot/core/helpers/process.py b/bbot/core/helpers/process.py index 06607659f..30f143985 100644 --- a/bbot/core/helpers/process.py +++ b/bbot/core/helpers/process.py @@ -7,7 +7,6 @@ class BBOTThread(threading.Thread): - default_name = "default bbot thread" def __init__(self, *args, **kwargs): @@ -24,7 +23,6 @@ def run(self): class BBOTProcess(SpawnProcess): - default_name = "bbot process pool" def __init__(self, *args, **kwargs): diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index 417683adf..bc6ca1372 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -299,7 +299,6 @@ def is_email(email): class Validators: - def __init__(self, parent_helper): self.parent_helper = parent_helper diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 6d13d775c..85805b2b7 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -14,7 +14,6 @@ class HTTPEngine(EngineServer): - CMDS = { 0: "request", 1: "request_batch", diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index a767945d0..33aa2d035 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -19,7 +19,6 @@ class 
WebHelper(EngineClient): - SERVER_CLASS = HTTPEngine ERROR_CLASS = WebError diff --git a/bbot/core/modules.py b/bbot/core/modules.py index 7fd38a33f..a5f4b30eb 100644 --- a/bbot/core/modules.py +++ b/bbot/core/modules.py @@ -337,7 +337,6 @@ def preload_module(self, module_file): # look for classes if type(root_element) == ast.ClassDef: for class_attr in root_element.body: - # class attributes that are dictionaries if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict: # module options diff --git a/bbot/db/sql/models.py b/bbot/db/sql/models.py index e937fad1e..d6e765610 100644 --- a/bbot/db/sql/models.py +++ b/bbot/db/sql/models.py @@ -69,7 +69,6 @@ def __eq__(self, other): class Event(BBOTBaseModel, table=True): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) data = self._get_data(self.data, self.type) diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py index 5e468b0d7..d0e4c6c1b 100644 --- a/bbot/modules/baddns.py +++ b/bbot/modules/baddns.py @@ -55,7 +55,6 @@ async def setup(self): return True async def handle_event(self, event): - tasks = [] for ModuleClass in self.select_modules(): kwargs = { @@ -75,7 +74,6 @@ async def handle_event(self, event): tasks.append((module_instance, task)) async for completed_task in self.helpers.as_completed([task for _, task in tasks]): - module_instance = next((m for m, t in tasks if t == completed_task), None) try: task_result = await completed_task diff --git a/bbot/modules/baddns_direct.py b/bbot/modules/baddns_direct.py index 33b6b9575..f8881dedb 100644 --- a/bbot/modules/baddns_direct.py +++ b/bbot/modules/baddns_direct.py @@ -51,7 +51,6 @@ async def handle_event(self, event): CNAME_direct_instance = CNAME_direct_module(event.host, **kwargs) if await CNAME_direct_instance.dispatch(): - results = CNAME_direct_instance.analyze() if results and len(results) > 0: for r in results: diff --git a/bbot/modules/dotnetnuke.py b/bbot/modules/dotnetnuke.py index d36b4a014..2e1d7390e 100644 --- a/bbot/modules/dotnetnuke.py +++ b/bbot/modules/dotnetnuke.py @@ -32,7 +32,6 @@ async def setup(self): self.interactsh_instance = None if self.scan.config.get("interactsh_disable", False) == False: - try: self.interactsh_instance = self.helpers.interactsh() self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback) @@ -114,7 +113,6 @@ async def handle_event(self, event): ) if "endpoint" not in event.tags: - # NewsArticlesSlider ImageHandler.ashx File Read result = await self.helpers.request( f'{event.data["url"]}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx?img=~/web.config' diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 94032c554..3e3df8643 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -62,7 +62,6 @@ def _exclude_key(original_dict, key_to_exclude): def extract_params_url(parsed_url): - params = parse_qs(parsed_url.query) flat_params = {k: v[0] for k, v in params.items()} @@ -94,7 +93,6 @@ def extract_params_location(location_header_value, original_parsed_url): class YaraRuleSettings: - def __init__(self, description, tags, emit_match): self.description = description self.tags = tags @@ -263,7 +261,6 @@ async def report( class CustomExtractor(ExcavateRule): - def __init__(self, excavate): super().__init__(excavate) @@ -358,7 +355,6 @@ def url_unparse(self, param_type, parsed_url): ) class ParameterExtractor(ExcavateRule): - yara_rules = {} class ParameterExtractorRule: @@ -372,7 
+368,6 @@ def __init__(self, excavate, result): self.result = result class GetJquery(ParameterExtractorRule): - name = "GET jquery" discovery_regex = r"/\$.get\([^\)].+\)/ nocase" extraction_regex = re.compile(r"\$.get\([\'\"](.+)[\'\"].+(\{.+\})\)") @@ -393,8 +388,12 @@ def extract(self): for action, extracted_parameters in extracted_results: extracted_parameters_dict = self.convert_to_dict(extracted_parameters) for parameter_name, original_value in extracted_parameters_dict.items(): - yield self.output_type, parameter_name, original_value, action, _exclude_key( - extracted_parameters_dict, parameter_name + yield ( + self.output_type, + parameter_name, + original_value, + action, + _exclude_key(extracted_parameters_dict, parameter_name), ) class PostJquery(GetJquery): @@ -418,8 +417,12 @@ def extract(self): k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in query_strings.items() } for parameter_name, original_value in query_strings_dict.items(): - yield self.output_type, parameter_name, original_value, url, _exclude_key( - query_strings_dict, parameter_name + yield ( + self.output_type, + parameter_name, + original_value, + url, + _exclude_key(query_strings_dict, parameter_name), ) class GetForm(ParameterExtractorRule): @@ -444,8 +447,12 @@ def extract(self): form_parameters[parameter_name] = original_value for parameter_name, original_value in form_parameters.items(): - yield self.output_type, parameter_name, original_value, form_action, _exclude_key( - form_parameters, parameter_name + yield ( + self.output_type, + parameter_name, + original_value, + form_action, + _exclude_key(form_parameters, parameter_name), ) class PostForm(GetForm): @@ -485,7 +492,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte endpoint, additional_params, ) in extracted_params: - self.excavate.debug( f"Found Parameter [{parameter_name}] in [{parameterExtractorSubModule.name}] ParameterExtractor Submodule" ) @@ -497,7 +503,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte ) if self.excavate.helpers.validate_parameter(parameter_name, parameter_type): - if self.excavate.in_bl(parameter_name) == False: parsed_url = urlparse(url) description = f"HTTP Extracted Parameter [{parameter_name}] ({parameterExtractorSubModule.name} Submodule)" @@ -532,7 +537,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte await self.report(domain, event, yara_rule_settings, discovery_context, event_type="DNS_NAME") class EmailExtractor(ExcavateRule): - yara_rules = { "email": 'rule email { meta: description = "contains email address" strings: $email = /[^\\W_][\\w\\-\\.\\+\']{0,100}@[a-zA-Z0-9\\-]{1,100}(\\.[a-zA-Z0-9\\-]{1,100})*\\.[a-zA-Z]{2,63}/ nocase fullword condition: $email }', } @@ -551,7 +555,6 @@ class JWTExtractor(ExcavateRule): } class ErrorExtractor(ExcavateRule): - signatures = { "PHP_1": r"/\.php on line [0-9]+/", "PHP_2": r"/\.php<\/b> on line [0-9]+/", @@ -589,7 +592,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") class SerializationExtractor(ExcavateRule): - regexes = { "Java": re.compile(r"[^a-zA-Z0-9\/+]rO0[a-zA-Z0-9+\/]+={0,2}"), "DOTNET": re.compile(r"[^a-zA-Z0-9\/+]AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"), @@ -619,7 +621,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte await self.report(event_data, event, yara_rule_settings, 
discovery_context, event_type="FINDING") class FunctionalityExtractor(ExcavateRule): - yara_rules = { "File_Upload_Functionality": r'rule File_Upload_Functionality { meta: description = "contains file upload functionality" strings: $fileuploadfunc = /<input[^>]+type=["\']?file["\']?[^>]+>/ nocase condition: $fileuploadfunc }', "Web_Service_WSDL": r'rule Web_Service_WSDL { meta: emit_match = "True" description = "contains a web service WSDL URL" strings: $wsdl = /https?:\/\/[^\s]*\.(wsdl)/ nocase condition: $wsdl }', @@ -704,7 +705,6 @@ class URLExtractor(ExcavateRule): tag_attribute_regex = bbot_regexes.tag_attribute_regex async def process(self, yara_results, event, yara_rule_settings, discovery_context): - for identifier, results in yara_results.items(): urls_found = 0 final_url = "" @@ -897,7 +897,6 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon decoded_data = await self.helpers.re.recursive_decode(data) if self.parameter_extraction: - content_type_lower = content_type.lower() if content_type else "" extraction_map = { "json": self.helpers.extract_params_json, @@ -934,7 +933,6 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") async def handle_event(self, event): - if event.type == "HTTP_RESPONSE": # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled if ( @@ -1023,7 +1021,6 @@ async def handle_event(self, event): # Try to extract parameters from the redirect URL if self.parameter_extraction: - for ( method, parsed_url, diff --git a/bbot/modules/paramminer_headers.py b/bbot/modules/paramminer_headers.py index 56090f6a2..c8e78de09 100644 --- a/bbot/modules/paramminer_headers.py +++ b/bbot/modules/paramminer_headers.py @@ -82,7 +82,6 @@ class paramminer_headers(BaseModule): header_regex = re.compile(r"^[!#$%&\'*+\-.^_`|~0-9a-zA-Z]+: [^\r\n]+$") async def setup(self): - self.recycle_words = self.config.get("recycle_words", True) self.event_dict = {} self.already_checked = set() @@ -157,7 +156,6 @@ async def process_results(self, event, results): ) async def handle_event(self, event): - # If recycle words is enabled, we will collect WEB_PARAMETERS we find to build our list in finish() # We also collect any parameters of type "SPECULATIVE" if event.type == "WEB_PARAMETER": @@ -201,7 +199,7 @@ async def count_test(self, url): return for count, args, kwargs in self.gen_count_args(url): r = await self.helpers.request(*args, **kwargs) - if r is not None and not ((str(r.status_code)[0] in ("4", "5"))): + if r is not None and not (str(r.status_code)[0] in ("4", "5")): return count def gen_count_args(self, url): @@ -240,7 +238,6 @@ async def check_batch(self, compare_helper, url, header_list): return await compare_helper.compare(url, headers=test_headers, check_reflection=(len(header_list) == 1)) async def finish(self): - untested_matches = sorted(list(self.extracted_words_master.copy())) for url, (event, batch_size) in list(self.event_dict.items()): try: diff --git a/bbot/scanner/preset/args.py b/bbot/scanner/preset/args.py index 591a52235..ad0267780 100644 --- a/bbot/scanner/preset/args.py +++ b/bbot/scanner/preset/args.py @@ -10,7 +10,6 @@ class BBOTArgs: - # module config options to exclude from validation exclude_from_validation = re.compile(r".*modules\.[a-z0-9_]+\.(?:batch_size|module_threads)$") diff --git a/bbot/scanner/preset/environ.py b/bbot/scanner/preset/environ.py index 66253de32..6dc5d8ada 100644
--- a/bbot/scanner/preset/environ.py +++ b/bbot/scanner/preset/environ.py @@ -65,7 +65,6 @@ def add_to_path(v, k="PATH", environ=None): class BBOTEnviron: - def __init__(self, preset): self.preset = preset diff --git a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py index 99af7d556..5cb2f3603 100644 --- a/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py +++ b/bbot/test/test_step_2/module_tests/test_module_asset_inventory.py @@ -10,7 +10,6 @@ class TestAsset_Inventory(ModuleTestBase): masscan_output = """{ "ip": "127.0.0.1", "timestamp": "1680197558", "ports": [ {"port": 9999, "proto": "tcp", "status": "open", "reason": "syn-ack", "ttl": 54} ] }""" async def setup_before_prep(self, module_test): - async def run_masscan(command, *args, **kwargs): if "masscan" in command[:2]: targets = open(command[11]).read().splitlines() diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py index 12427b050..a5ba1eba7 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py @@ -7,7 +7,6 @@ class TestDnsbrute(ModuleTestBase): config_overrides = {"modules": {"dnsbrute": {"wordlist": str(subdomain_wordlist), "max_depth": 3}}} async def setup_after_prep(self, module_test): - old_run_live = module_test.scan.helpers.run_live async def new_run_live(*command, check=False, text=True, **kwargs): diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py b/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py index 0c9b6baaa..4f4009825 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbrute_mutations.py @@ -11,7 +11,6 @@ class TestDnsbrute_mutations(ModuleTestBase): ] async def setup_after_prep(self, module_test): - old_run_live = module_test.scan.helpers.run_live async def new_run_live(*command, check=False, text=True, **kwargs): diff --git a/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py b/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py index 6c5023db1..848491511 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnscommonsrv.py @@ -8,7 +8,6 @@ class TestDNSCommonSRV(ModuleTestBase): config_overrides = {"dns": {"minimal": False}} async def setup_after_prep(self, module_test): - old_run_live = module_test.scan.helpers.run_live async def new_run_live(*command, check=False, text=True, **kwargs): diff --git a/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py b/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py index 2916c527a..fc666b64e 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py +++ b/bbot/test/test_step_2/module_tests/test_module_dotnetnuke.py @@ -146,14 +146,12 @@ def request_handler(self, request): return Response("alive", status=200) async def setup_before_prep(self, module_test): - self.interactsh_mock_instance = module_test.mock_interactsh("dotnetnuke_blindssrf") module_test.monkeypatch.setattr( module_test.scan.helpers, "interactsh", lambda *args, **kwargs: self.interactsh_mock_instance ) async def setup_after_prep(self, module_test): - # Simulate DotNetNuke Instance expect_args = {"method": "GET", "uri": "/"} respond_args = {"response_data": dotnetnuke_http_response} diff 
--git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 5b266a781..bccbe3d73 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -13,7 +13,6 @@ class TestExcavate(ModuleTestBase): config_overrides = {"web": {"spider_distance": 1, "spider_depth": 1}} async def setup_before_prep(self, module_test): - response_data = """ ftp://ftp.test.notreal \\nhttps://www1.test.notreal @@ -181,7 +180,6 @@ async def setup_before_prep(self, module_test): module_test.httpserver.no_handler_status_code = 404 def check(self, module_test, events): - assert 1 == len( [ e @@ -332,7 +330,6 @@ def check(self, module_test, events): class TestExcavateCSP(TestExcavate): - csp_test_header = "default-src 'self'; script-src asdf.test.notreal; object-src 'none';" async def setup_before_prep(self, module_test): @@ -356,7 +353,6 @@ def check(self, module_test, events): class TestExcavateURL_IP(TestExcavate): - targets = ["http://127.0.0.1:8888/", "127.0.0.2"] async def setup_before_prep(self, module_test): @@ -405,7 +401,6 @@ def check(self, module_test, events): class TestExcavateNonHttpScheme(TestExcavate): - targets = ["http://127.0.0.1:8888/", "test.notreal"] non_http_scheme_html = """ @@ -425,7 +420,6 @@ async def setup_before_prep(self, module_test): module_test.httpserver.expect_request("/").respond_with_data(self.non_http_scheme_html) def check(self, module_test, events): - found_hxxp_url = False found_ftp_url = False found_nonsense_url = False @@ -540,7 +534,6 @@ def check(self, module_test, events): class TestExcavateParameterExtraction_getparam(ModuleTestBase): - targets = ["http://127.0.0.1:8888/"] # hunt is added as parameter extraction is only activated by one or more modules that consume WEB_PARAMETER @@ -554,11 +547,9 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(respond_args=respond_args) def check(self, module_test, events): - excavate_getparam_extraction = False for e in events: if e.type == "WEB_PARAMETER": - if "HTTP Extracted Parameter [hack] (HTML Tags Submodule)" in e.data["description"]: excavate_getparam_extraction = True assert excavate_getparam_extraction, "Excavate failed to extract web parameter" @@ -626,7 +617,6 @@ class excavateTestRule(ExcavateRule): class TestExcavateYara(TestExcavate): - targets = ["http://127.0.0.1:8888/"] yara_test_html = """ @@ -641,12 +631,10 @@ class TestExcavateYara(TestExcavate): """ async def setup_before_prep(self, module_test): - self.modules_overrides = ["excavate", "httpx"] module_test.httpserver.expect_request("/").respond_with_data(self.yara_test_html) async def setup_after_prep(self, module_test): - excavate_module = module_test.scan.modules["excavate"] excavateruleinstance = excavateTestRule(excavate_module) excavate_module.add_yara_rule( @@ -665,7 +653,6 @@ def check(self, module_test, events): found_yara_string_1 = False found_yara_string_2 = False for e in events: - if e.type == "FINDING": if e.data["description"] == "HTTP response (body) Contains the text AAAABBBBCCCC": found_yara_string_1 = True @@ -677,7 +664,6 @@ def check(self, module_test, events): class TestExcavateYaraCustom(TestExcavateYara): - rule_file = [ 'rule SearchForText { meta: description = "Contains the text AAAABBBBCCCC" strings: $text = "AAAABBBBCCCC" condition: $text }', 'rule SearchForText2 { meta: description = "Contains the text DDDDEEEEFFFF" strings: $text2 = "DDDDEEEEFFFF" 
condition: $text2 }', @@ -711,7 +697,6 @@ async def setup_after_prep(self, module_test): module_test.httpserver.expect_request("/spider").respond_with_data("hi") def check(self, module_test, events): - found_url_unverified_spider_max = False found_url_unverified_dummy = False found_url_event = False @@ -803,7 +788,6 @@ def check(self, module_test, events): class TestExcavate_retain_querystring_not(TestExcavate_retain_querystring): - config_overrides = { "url_querystring_remove": False, "url_querystring_collapse": False, @@ -827,7 +811,6 @@ def check(self, module_test, events): class TestExcavate_webparameter_outofscope(ModuleTestBase): - html_body = "" targets = ["http://127.0.0.1:8888", "socialmediasite.com"] @@ -858,13 +841,11 @@ def check(self, module_test, events): class TestExcavateHeaders(ModuleTestBase): - targets = ["http://127.0.0.1:8888/"] modules_overrides = ["excavate", "httpx", "hunt"] config_overrides = {"web": {"spider_distance": 1, "spider_depth": 1}} async def setup_before_prep(self, module_test): - module_test.httpserver.expect_request("/").respond_with_data( "
<html>
test
</html>
", status=200, @@ -877,7 +858,6 @@ async def setup_before_prep(self, module_test): ) def check(self, module_test, events): - found_first_cookie = False found_second_cookie = False diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py index 58d76ff19..0e099e63d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_cookies.py @@ -36,7 +36,6 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(respond_args=respond_args) def check(self, module_test, events): - found_reflected_cookie = False false_positive_match = False diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py index 1bf290c41..e74e067a3 100644 --- a/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_getparams.py @@ -89,7 +89,6 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(respond_args=respond_args) def check(self, module_test, events): - emitted_boring_parameter = False for e in events: if e.type == "WEB_PARAMETER": @@ -106,7 +105,6 @@ class TestParamminer_Getparams_boring_on(TestParamminer_Getparams_boring_off): } def check(self, module_test, events): - emitted_boring_parameter = False for e in events: @@ -160,15 +158,12 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) def check(self, module_test, events): - excavate_extracted_web_parameter = False found_hidden_getparam_recycled = False emitted_excavate_paramminer_duplicate = False for e in events: - if e.type == "WEB_PARAMETER": - if ( "http://127.0.0.1:8888/test2.php" in e.data["url"] and "HTTP Extracted Parameter [abcd1234] (HTML Tags Submodule)" in e.data["description"] @@ -213,7 +208,6 @@ class TestParamminer_Getparams_xmlspeculative(Paramminer_Headers): """ async def setup_after_prep(self, module_test): - module_test.scan.modules["paramminer_getparams"].rand_string = lambda *args, **kwargs: "AAAAAAAAAAAAAA" module_test.monkeypatch.setattr( helper.HttpCompare, "gen_cache_buster", lambda *args, **kwargs: {"AAAAAA": "1"} diff --git a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py index 0f66e5e87..c2cdddffd 100644 --- a/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py +++ b/bbot/test/test_step_2/module_tests/test_module_paramminer_headers.py @@ -39,7 +39,6 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(respond_args=respond_args) def check(self, module_test, events): - found_reflected_header = False false_positive_match = False @@ -60,7 +59,6 @@ class TestParamminer_Headers(Paramminer_Headers): class TestParamminer_Headers_noreflection(Paramminer_Headers): - found_nonreflected_header = False headers_body_match = """ @@ -82,7 +80,6 @@ def check(self, module_test, events): class TestParamminer_Headers_extract(Paramminer_Headers): - modules_overrides = ["httpx", "paramminer_headers", "excavate"] config_overrides = { "modules": { @@ -123,7 +120,6 @@ async def setup_after_prep(self, module_test): module_test.set_expect_requests(respond_args=respond_args) def check(self, module_test, events): - 
excavate_extracted_web_parameter = False used_recycled_parameter = False @@ -139,17 +135,14 @@ def check(self, module_test, events): class TestParamminer_Headers_extract_norecycle(TestParamminer_Headers_extract): - modules_overrides = ["httpx", "excavate"] config_overrides = {} async def setup_after_prep(self, module_test): - respond_args = {"response_data": self.headers_body} module_test.set_expect_requests(respond_args=respond_args) def check(self, module_test, events): - excavate_extracted_web_parameter = False for e in events: diff --git a/bbot/test/test_step_2/module_tests/test_module_portscan.py b/bbot/test/test_step_2/module_tests/test_module_portscan.py index d9f55c27f..f6f7a62a8 100644 --- a/bbot/test/test_step_2/module_tests/test_module_portscan.py +++ b/bbot/test/test_step_2/module_tests/test_module_portscan.py @@ -21,7 +21,6 @@ class TestPortscan(ModuleTestBase): masscan_output_ping = """{ "ip": "8.8.8.8", "timestamp": "1719862594", "ports": [ {"port": 0, "proto": "icmp", "status": "open", "reason": "none", "ttl": 54} ] }""" async def setup_after_prep(self, module_test): - from bbot.modules.base import BaseModule class DummyModule(BaseModule): diff --git a/pyproject.toml b/pyproject.toml index 8315373da..d333855f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,9 +96,10 @@ asyncio_default_fixture_loop_scope = "function" requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] build-backend = "poetry_dynamic_versioning.backend" -[tool.black] +[tool.ruff] line-length = 119 -extend-exclude = "(test_step_1/test_manager_*)" +format.exclude = ["bbot/test/test_step_1/test_manager_*"] +lint.ignore = ["E402", "E711", "E712", "E713", "E721", "E731", "E741", "F401", "F403", "F405", "F541", "F601"] [tool.poetry-dynamic-versioning] enable = true From 0b18b9af4516b277437ef9118d699c506dbe4247 Mon Sep 17 00:00:00 2001 From: blsaccess Date: Sat, 23 Nov 2024 00:23:14 +0000 Subject: [PATCH 39/40] Update trufflehog --- bbot/modules/trufflehog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/trufflehog.py b/bbot/modules/trufflehog.py index 2b71f973b..cbd219bcf 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -13,7 +13,7 @@ class trufflehog(BaseModule): } options = { - "version": "3.84.0", + "version": "3.84.1", "config": "", "only_verified": True, "concurrency": 8, From 5243a6be251c6a5b7c149708ef4d5d9bd76f1def Mon Sep 17 00:00:00 2001 From: TheTechromancer <20261699+TheTechromancer@users.noreply.github.com> Date: Sat, 23 Nov 2024 02:46:06 +0000 Subject: [PATCH 40/40] [create-pull-request] automated change --- docs/data/chord_graph/entities.json | 643 ++++++++++++++-------------- docs/data/chord_graph/rels.json | 532 ++++++++++++----------- docs/modules/list_of_modules.md | 1 + docs/modules/nuclei.md | 2 +- docs/scanning/advanced.md | 2 +- docs/scanning/configuration.md | 6 +- docs/scanning/events.md | 70 +-- docs/scanning/index.md | 48 +-- docs/scanning/presets_list.md | 44 +- 9 files changed, 695 insertions(+), 653 deletions(-) diff --git a/docs/data/chord_graph/entities.json b/docs/data/chord_graph/entities.json index 7fb11654f..96d387594 100644 --- a/docs/data/chord_graph/entities.json +++ b/docs/data/chord_graph/entities.json @@ -23,11 +23,11 @@ ] }, { - "id": 128, + "id": 129, "name": "AZURE_TENANT", "parent": 88888888, "consumes": [ - 127 + 128 ], "produces": [] }, @@ -36,20 +36,20 @@ "name": "CODE_REPOSITORY", "parent": 88888888, "consumes": [ - 61, - 81, - 84, - 86, - 116, - 135 + 62, + 82, + 85, + 87, + 117, + 136 ], 
"produces": [ 42, - 62, - 82, + 63, 83, - 85, - 115 + 84, + 86, + 116 ] }, { @@ -87,35 +87,36 @@ 58, 59, 60, - 66, - 78, - 82, - 89, - 93, - 95, - 101, + 61, + 67, + 79, + 83, + 90, + 94, + 96, 102, - 106, + 103, 107, - 111, + 108, 112, 113, - 117, - 120, + 114, + 118, 121, 122, 123, 124, - 127, - 130, + 125, + 128, 131, 132, - 134, - 137, - 140, + 133, + 135, + 138, 141, - 144, - 147 + 142, + 145, + 148 ], "produces": [ 6, @@ -136,31 +137,31 @@ 58, 59, 60, - 78, - 89, - 93, - 95, - 101, + 79, + 90, + 94, + 96, 102, - 104, - 106, + 103, + 105, 107, - 111, - 117, - 120, - 122, + 108, + 112, + 118, + 121, 123, - 127, - 129, + 124, + 128, 130, 131, - 134, - 137, + 132, + 135, 138, - 140, + 139, 141, - 144, - 147 + 142, + 145, + 148 ] }, { @@ -169,8 +170,8 @@ "parent": 88888888, "consumes": [ 21, - 127, - 132 + 128, + 133 ], "produces": [] }, @@ -179,18 +180,19 @@ "name": "EMAIL_ADDRESS", "parent": 88888888, "consumes": [ - 67 + 68 ], "produces": [ 45, 52, 58, - 66, - 93, - 112, - 121, - 124, - 129 + 61, + 67, + 94, + 113, + 122, + 125, + 130 ] }, { @@ -198,18 +200,18 @@ "name": "FILESYSTEM", "parent": 88888888, "consumes": [ - 71, - 100, - 135 + 72, + 101, + 136 ], "produces": [ 8, - 61, - 75, - 81, - 84, - 100, - 116 + 62, + 76, + 82, + 85, + 101, + 117 ] }, { @@ -218,7 +220,7 @@ "parent": 88888888, "consumes": [ 14, - 145 + 146 ], "produces": [ 1, @@ -233,33 +235,33 @@ 34, 37, 51, - 80, - 85, - 90, - 92, - 95, - 103, + 81, + 86, + 91, + 93, + 96, 104, 105, - 108, + 106, 109, - 119, - 125, - 127, - 133, - 135, + 110, + 120, + 126, + 128, + 134, 136, - 146 + 137, + 147 ] }, { - "id": 97, + "id": 98, "name": "GEOLOCATION", "parent": 88888888, "consumes": [], "produces": [ - 96, - 99 + 97, + 100 ] }, { @@ -281,24 +283,24 @@ 14, 26, 51, - 65, - 68, - 75, - 85, - 90, - 103, + 66, + 69, + 76, + 86, + 91, 104, - 108, + 105, 109, 110, - 119, - 127, - 133, - 143, - 146 + 111, + 120, + 128, + 134, + 144, + 147 ], "produces": [ - 91 + 92 ] }, { @@ -308,26 +310,26 @@ "consumes": [ 11, 14, - 95, 96, - 98, + 97, 99, - 113, - 127 + 100, + 114, + 128 ], "produces": [ 14, - 98, - 127 + 99, + 128 ] }, { - "id": 114, + "id": 115, "name": "IP_RANGE", "parent": 88888888, "consumes": [ - 113, - 127 + 114, + 128 ], "produces": [] }, @@ -339,7 +341,7 @@ 8 ], "produces": [ - 86 + 87 ] }, { @@ -348,29 +350,29 @@ "parent": 88888888, "consumes": [ 14, - 76, - 91, - 129 + 77, + 92, + 130 ], "produces": [ 14, - 95, - 113, - 127 + 96, + 114, + 128 ] }, { - "id": 63, + "id": 64, "name": "ORG_STUB", "parent": 88888888, "consumes": [ - 62, - 83, - 86, - 115 + 63, + 84, + 87, + 116 ], "produces": [ - 127 + 128 ] }, { @@ -384,12 +386,12 @@ ] }, { - "id": 77, + "id": 78, "name": "PROTOCOL", "parent": 88888888, "consumes": [], "produces": [ - 76 + 77 ] }, { @@ -398,36 +400,37 @@ "parent": 88888888, "consumes": [], "produces": [ - 54 + 54, + 61 ] }, { - "id": 69, + "id": 70, "name": "RAW_TEXT", "parent": 88888888, "consumes": [ - 68 + 69 ], "produces": [ - 71 + 72 ] }, { - "id": 64, + "id": 65, "name": "SOCIAL", "parent": 88888888, "consumes": [ - 62, - 83, - 85, - 87, - 115, - 127 + 63, + 84, + 86, + 88, + 116, + 128 ], "produces": [ - 62, - 85, - 126 + 63, + 86, + 127 ] }, { @@ -442,7 +445,7 @@ 32, 33, 34, - 127 + 128 ], "produces": [ 29, @@ -458,19 +461,19 @@ "parent": 88888888, "consumes": [ 14, - 85, - 145, - 146 + 86, + 146, + 147 ], "produces": [ 26, - 65, - 85, - 87, - 95, - 105, - 143, - 146 + 66, + 86, + 88, + 96, + 106, + 144, + 147 ] }, { @@ -482,37 +485,37 @@ 14, 23, 37, - 72, - 79, + 73, 80, - 87, - 91, - 94, - 104, 
+ 81, + 88, + 92, + 95, 105, - 118, - 125, - 127, - 133, - 136, - 138, - 142, - 145 + 106, + 119, + 126, + 128, + 134, + 137, + 139, + 143, + 146 ], "produces": [ - 87, - 91 + 88, + 92 ] }, { - "id": 74, + "id": 75, "name": "URL_HINT", "parent": 88888888, "consumes": [ - 73 + 74 ], "produces": [ - 94 + 95 ] }, { @@ -521,11 +524,11 @@ "parent": 88888888, "consumes": [ 42, - 75, - 91, - 106, - 126, - 127 + 76, + 92, + 107, + 127, + 128 ], "produces": [ 18, @@ -533,18 +536,19 @@ 32, 54, 58, - 62, - 68, - 72, - 73, - 82, - 87, - 93, - 118, - 121, - 137, - 144, - 146 + 61, + 63, + 69, + 73, + 74, + 83, + 88, + 94, + 119, + 122, + 138, + 145, + 147 ] }, { @@ -552,7 +556,7 @@ "name": "USERNAME", "parent": 88888888, "consumes": [ - 127 + 128 ], "produces": [ 45, @@ -560,14 +564,14 @@ ] }, { - "id": 139, + "id": 140, "name": "VHOST", "parent": 88888888, "consumes": [ - 145 + 146 ], "produces": [ - 138 + 139 ] }, { @@ -576,7 +580,7 @@ "parent": 88888888, "consumes": [ 14, - 145 + 146 ], "produces": [ 1, @@ -585,13 +589,13 @@ 25, 26, 51, - 65, - 79, - 95, - 105, - 133, - 135, - 146 + 66, + 80, + 96, + 106, + 134, + 136, + 147 ] }, { @@ -602,33 +606,33 @@ 14 ], "produces": [ - 142 + 143 ] }, { - "id": 88, + "id": 89, "name": "WEBSCREENSHOT", "parent": 88888888, "consumes": [], "produces": [ - 87 + 88 ] }, { - "id": 70, + "id": 71, "name": "WEB_PARAMETER", "parent": 88888888, "consumes": [ - 92, - 108, + 93, 109, - 110 + 110, + 111 ], "produces": [ - 68, - 108, + 69, 109, - 110 + 110, + 111 ] }, { @@ -1101,6 +1105,19 @@ }, { "id": 61, + "name": "dnstlsrpt", + "parent": 99999999, + "consumes": [ + 7 + ], + "produces": [ + 46, + 55, + 19 + ] + }, + { + "id": 62, "name": "docker_pull", "parent": 99999999, "consumes": [ @@ -1111,21 +1128,21 @@ ] }, { - "id": 62, + "id": 63, "name": "dockerhub", "parent": 99999999, "consumes": [ - 63, - 64 + 64, + 65 ], "produces": [ 43, - 64, + 65, 19 ] }, { - "id": 65, + "id": 66, "name": "dotnetnuke", "parent": 99999999, "consumes": [ @@ -1137,7 +1154,7 @@ ] }, { - "id": 66, + "id": 67, "name": "emailformat", "parent": 99999999, "consumes": [ @@ -1148,7 +1165,7 @@ ] }, { - "id": 67, + "id": 68, "name": "emails", "parent": 99999999, "consumes": [ @@ -1157,31 +1174,31 @@ "produces": [] }, { - "id": 68, + "id": 69, "name": "excavate", "parent": 99999999, "consumes": [ 2, - 69 + 70 ], "produces": [ 19, - 70 + 71 ] }, { - "id": 71, + "id": 72, "name": "extractous", "parent": 99999999, "consumes": [ 10 ], "produces": [ - 69 + 70 ] }, { - "id": 72, + "id": 73, "name": "ffuf", "parent": 99999999, "consumes": [ @@ -1192,18 +1209,18 @@ ] }, { - "id": 73, + "id": 74, "name": "ffuf_shortnames", "parent": 99999999, "consumes": [ - 74 + 75 ], "produces": [ 19 ] }, { - "id": 75, + "id": 76, "name": "filedownload", "parent": 99999999, "consumes": [ @@ -1215,18 +1232,18 @@ ] }, { - "id": 76, + "id": 77, "name": "fingerprintx", "parent": 99999999, "consumes": [ 15 ], "produces": [ - 77 + 78 ] }, { - "id": 78, + "id": 79, "name": "fullhunt", "parent": 99999999, "consumes": [ @@ -1237,7 +1254,7 @@ ] }, { - "id": 79, + "id": 80, "name": "generic_ssrf", "parent": 99999999, "consumes": [ @@ -1248,7 +1265,7 @@ ] }, { - "id": 80, + "id": 81, "name": "git", "parent": 99999999, "consumes": [ @@ -1259,7 +1276,7 @@ ] }, { - "id": 81, + "id": 82, "name": "git_clone", "parent": 99999999, "consumes": [ @@ -1270,7 +1287,7 @@ ] }, { - "id": 82, + "id": 83, "name": "github_codesearch", "parent": 99999999, "consumes": [ @@ -1282,19 +1299,19 @@ ] }, { - "id": 83, + "id": 84, "name": "github_org", "parent": 
99999999, "consumes": [ - 63, - 64 + 64, + 65 ], "produces": [ 43 ] }, { - "id": 84, + "id": 85, "name": "github_workflows", "parent": 99999999, "consumes": [ @@ -1305,50 +1322,50 @@ ] }, { - "id": 85, + "id": 86, "name": "gitlab", "parent": 99999999, "consumes": [ 2, - 64, + 65, 16 ], "produces": [ 43, 4, - 64, + 65, 16 ] }, { - "id": 86, + "id": 87, "name": "google_playstore", "parent": 99999999, "consumes": [ 43, - 63 + 64 ], "produces": [ 9 ] }, { - "id": 87, + "id": 88, "name": "gowitness", "parent": 99999999, "consumes": [ - 64, + 65, 3 ], "produces": [ 16, 3, 19, - 88 + 89 ] }, { - "id": 89, + "id": 90, "name": "hackertarget", "parent": 99999999, "consumes": [ @@ -1359,7 +1376,7 @@ ] }, { - "id": 90, + "id": 91, "name": "host_header", "parent": 99999999, "consumes": [ @@ -1370,7 +1387,7 @@ ] }, { - "id": 91, + "id": 92, "name": "httpx", "parent": 99999999, "consumes": [ @@ -1384,18 +1401,18 @@ ] }, { - "id": 92, + "id": 93, "name": "hunt", "parent": 99999999, "consumes": [ - 70 + 71 ], "produces": [ 4 ] }, { - "id": 93, + "id": 94, "name": "hunterio", "parent": 99999999, "consumes": [ @@ -1408,18 +1425,18 @@ ] }, { - "id": 94, + "id": 95, "name": "iis_shortnames", "parent": 99999999, "consumes": [ 3 ], "produces": [ - 74 + 75 ] }, { - "id": 95, + "id": 96, "name": "internetdb", "parent": 99999999, "consumes": [ @@ -1435,18 +1452,18 @@ ] }, { - "id": 96, + "id": 97, "name": "ip2location", "parent": 99999999, "consumes": [ 12 ], "produces": [ - 97 + 98 ] }, { - "id": 98, + "id": 99, "name": "ipneighbor", "parent": 99999999, "consumes": [ @@ -1457,18 +1474,18 @@ ] }, { - "id": 99, + "id": 100, "name": "ipstack", "parent": 99999999, "consumes": [ 12 ], "produces": [ - 97 + 98 ] }, { - "id": 100, + "id": 101, "name": "jadx", "parent": 99999999, "consumes": [ @@ -1479,7 +1496,7 @@ ] }, { - "id": 101, + "id": 102, "name": "leakix", "parent": 99999999, "consumes": [ @@ -1490,7 +1507,7 @@ ] }, { - "id": 102, + "id": 103, "name": "myssl", "parent": 99999999, "consumes": [ @@ -1501,7 +1518,7 @@ ] }, { - "id": 103, + "id": 104, "name": "newsletters", "parent": 99999999, "consumes": [ @@ -1512,7 +1529,7 @@ ] }, { - "id": 104, + "id": 105, "name": "ntlm", "parent": 99999999, "consumes": [ @@ -1525,7 +1542,7 @@ ] }, { - "id": 105, + "id": 106, "name": "nuclei", "parent": 99999999, "consumes": [ @@ -1538,7 +1555,7 @@ ] }, { - "id": 106, + "id": 107, "name": "oauth", "parent": 99999999, "consumes": [ @@ -1550,7 +1567,7 @@ ] }, { - "id": 107, + "id": 108, "name": "otx", "parent": 99999999, "consumes": [ @@ -1561,45 +1578,45 @@ ] }, { - "id": 108, + "id": 109, "name": "paramminer_cookies", "parent": 99999999, "consumes": [ 2, - 70 + 71 ], "produces": [ 4, - 70 + 71 ] }, { - "id": 109, + "id": 110, "name": "paramminer_getparams", "parent": 99999999, "consumes": [ 2, - 70 + 71 ], "produces": [ 4, - 70 + 71 ] }, { - "id": 110, + "id": 111, "name": "paramminer_headers", "parent": 99999999, "consumes": [ 2, - 70 + 71 ], "produces": [ - 70 + 71 ] }, { - "id": 111, + "id": 112, "name": "passivetotal", "parent": 99999999, "consumes": [ @@ -1610,7 +1627,7 @@ ] }, { - "id": 112, + "id": 113, "name": "pgp", "parent": 99999999, "consumes": [ @@ -1621,32 +1638,32 @@ ] }, { - "id": 113, + "id": 114, "name": "portscan", "parent": 99999999, "consumes": [ 7, 12, - 114 + 115 ], "produces": [ 15 ] }, { - "id": 115, + "id": 116, "name": "postman", "parent": 99999999, "consumes": [ - 63, - 64 + 64, + 65 ], "produces": [ 43 ] }, { - "id": 116, + "id": 117, "name": "postman_download", "parent": 99999999, "consumes": [ @@ 
-1657,7 +1674,7 @@ ] }, { - "id": 117, + "id": 118, "name": "rapiddns", "parent": 99999999, "consumes": [ @@ -1668,7 +1685,7 @@ ] }, { - "id": 118, + "id": 119, "name": "robots", "parent": 99999999, "consumes": [ @@ -1679,7 +1696,7 @@ ] }, { - "id": 119, + "id": 120, "name": "secretsdb", "parent": 99999999, "consumes": [ @@ -1690,7 +1707,7 @@ ] }, { - "id": 120, + "id": 121, "name": "securitytrails", "parent": 99999999, "consumes": [ @@ -1701,7 +1718,7 @@ ] }, { - "id": 121, + "id": 122, "name": "securitytxt", "parent": 99999999, "consumes": [ @@ -1713,7 +1730,7 @@ ] }, { - "id": 122, + "id": 123, "name": "shodan_dns", "parent": 99999999, "consumes": [ @@ -1724,7 +1741,7 @@ ] }, { - "id": 123, + "id": 124, "name": "sitedossier", "parent": 99999999, "consumes": [ @@ -1735,7 +1752,7 @@ ] }, { - "id": 124, + "id": 125, "name": "skymem", "parent": 99999999, "consumes": [ @@ -1746,7 +1763,7 @@ ] }, { - "id": 125, + "id": 126, "name": "smuggler", "parent": 99999999, "consumes": [ @@ -1757,28 +1774,28 @@ ] }, { - "id": 126, + "id": 127, "name": "social", "parent": 99999999, "consumes": [ 19 ], "produces": [ - 64 + 65 ] }, { - "id": 127, + "id": 128, "name": "speculate", "parent": 99999999, "consumes": [ - 128, + 129, 7, 22, 2, 12, - 114, - 64, + 115, + 65, 24, 3, 19, @@ -1789,11 +1806,11 @@ 4, 12, 15, - 63 + 64 ] }, { - "id": 129, + "id": 130, "name": "sslcert", "parent": 99999999, "consumes": [ @@ -1805,7 +1822,7 @@ ] }, { - "id": 130, + "id": 131, "name": "subdomaincenter", "parent": 99999999, "consumes": [ @@ -1816,7 +1833,7 @@ ] }, { - "id": 131, + "id": 132, "name": "subdomainradar", "parent": 99999999, "consumes": [ @@ -1827,7 +1844,7 @@ ] }, { - "id": 132, + "id": 133, "name": "subdomains", "parent": 99999999, "consumes": [ @@ -1837,7 +1854,7 @@ "produces": [] }, { - "id": 133, + "id": 134, "name": "telerik", "parent": 99999999, "consumes": [ @@ -1850,7 +1867,7 @@ ] }, { - "id": 134, + "id": 135, "name": "trickest", "parent": 99999999, "consumes": [ @@ -1861,7 +1878,7 @@ ] }, { - "id": 135, + "id": 136, "name": "trufflehog", "parent": 99999999, "consumes": [ @@ -1874,7 +1891,7 @@ ] }, { - "id": 136, + "id": 137, "name": "url_manipulation", "parent": 99999999, "consumes": [ @@ -1885,7 +1902,7 @@ ] }, { - "id": 137, + "id": 138, "name": "urlscan", "parent": 99999999, "consumes": [ @@ -1897,7 +1914,7 @@ ] }, { - "id": 138, + "id": 139, "name": "vhost", "parent": 99999999, "consumes": [ @@ -1905,11 +1922,11 @@ ], "produces": [ 7, - 139 + 140 ] }, { - "id": 140, + "id": 141, "name": "viewdns", "parent": 99999999, "consumes": [ @@ -1920,7 +1937,7 @@ ] }, { - "id": 141, + "id": 142, "name": "virustotal", "parent": 99999999, "consumes": [ @@ -1931,7 +1948,7 @@ ] }, { - "id": 142, + "id": 143, "name": "wafw00f", "parent": 99999999, "consumes": [ @@ -1942,7 +1959,7 @@ ] }, { - "id": 143, + "id": 144, "name": "wappalyzer", "parent": 99999999, "consumes": [ @@ -1953,7 +1970,7 @@ ] }, { - "id": 144, + "id": 145, "name": "wayback", "parent": 99999999, "consumes": [ @@ -1965,20 +1982,20 @@ ] }, { - "id": 145, + "id": 146, "name": "web_report", "parent": 99999999, "consumes": [ 4, 16, 3, - 139, + 140, 5 ], "produces": [] }, { - "id": 146, + "id": 147, "name": "wpscan", "parent": 99999999, "consumes": [ @@ -1993,7 +2010,7 @@ ] }, { - "id": 147, + "id": 148, "name": "zoomeye", "parent": 99999999, "consumes": [ diff --git a/docs/data/chord_graph/rels.json b/docs/data/chord_graph/rels.json index 96980c3fd..7ebca1393 100644 --- a/docs/data/chord_graph/rels.json +++ b/docs/data/chord_graph/rels.json @@ 
-586,1117 +586,1137 @@ }, { "source": 61, - "target": 43, + "target": 7, "type": "consumes" }, { - "source": 10, + "source": 46, + "target": 61, + "type": "produces" + }, + { + "source": 55, + "target": 61, + "type": "produces" + }, + { + "source": 19, "target": 61, "type": "produces" }, { "source": 62, - "target": 63, + "target": 43, "type": "consumes" }, { - "source": 62, + "source": 10, + "target": 62, + "type": "produces" + }, + { + "source": 63, "target": 64, "type": "consumes" }, + { + "source": 63, + "target": 65, + "type": "consumes" + }, { "source": 43, - "target": 62, + "target": 63, "type": "produces" }, { - "source": 64, - "target": 62, + "source": 65, + "target": 63, "type": "produces" }, { "source": 19, - "target": 62, + "target": 63, "type": "produces" }, { - "source": 65, + "source": 66, "target": 2, "type": "consumes" }, { "source": 16, - "target": 65, + "target": 66, "type": "produces" }, { "source": 5, - "target": 65, + "target": 66, "type": "produces" }, { - "source": 66, + "source": 67, "target": 7, "type": "consumes" }, { "source": 46, - "target": 66, + "target": 67, "type": "produces" }, { - "source": 67, + "source": 68, "target": 46, "type": "consumes" }, { - "source": 68, + "source": 69, "target": 2, "type": "consumes" }, { - "source": 68, - "target": 69, + "source": 69, + "target": 70, "type": "consumes" }, { "source": 19, - "target": 68, + "target": 69, "type": "produces" }, { - "source": 70, - "target": 68, + "source": 71, + "target": 69, "type": "produces" }, { - "source": 71, + "source": 72, "target": 10, "type": "consumes" }, { - "source": 69, - "target": 71, + "source": 70, + "target": 72, "type": "produces" }, { - "source": 72, + "source": 73, "target": 3, "type": "consumes" }, { "source": 19, - "target": 72, + "target": 73, "type": "produces" }, { - "source": 73, - "target": 74, + "source": 74, + "target": 75, "type": "consumes" }, { "source": 19, - "target": 73, + "target": 74, "type": "produces" }, { - "source": 75, + "source": 76, "target": 2, "type": "consumes" }, { - "source": 75, + "source": 76, "target": 19, "type": "consumes" }, { "source": 10, - "target": 75, + "target": 76, "type": "produces" }, { - "source": 76, + "source": 77, "target": 15, "type": "consumes" }, { - "source": 77, - "target": 76, + "source": 78, + "target": 77, "type": "produces" }, { - "source": 78, + "source": 79, "target": 7, "type": "consumes" }, { "source": 7, - "target": 78, + "target": 79, "type": "produces" }, { - "source": 79, + "source": 80, "target": 3, "type": "consumes" }, { "source": 5, - "target": 79, + "target": 80, "type": "produces" }, { - "source": 80, + "source": 81, "target": 3, "type": "consumes" }, { "source": 4, - "target": 80, + "target": 81, "type": "produces" }, { - "source": 81, + "source": 82, "target": 43, "type": "consumes" }, { "source": 10, - "target": 81, + "target": 82, "type": "produces" }, { - "source": 82, + "source": 83, "target": 7, "type": "consumes" }, { "source": 43, - "target": 82, + "target": 83, "type": "produces" }, { "source": 19, - "target": 82, + "target": 83, "type": "produces" }, { - "source": 83, - "target": 63, + "source": 84, + "target": 64, "type": "consumes" }, { - "source": 83, - "target": 64, + "source": 84, + "target": 65, "type": "consumes" }, { "source": 43, - "target": 83, + "target": 84, "type": "produces" }, { - "source": 84, + "source": 85, "target": 43, "type": "consumes" }, { "source": 10, - "target": 84, + "target": 85, "type": "produces" }, { - "source": 85, + "source": 86, "target": 2, "type": "consumes" }, { 
- "source": 85, - "target": 64, + "source": 86, + "target": 65, "type": "consumes" }, { - "source": 85, + "source": 86, "target": 16, "type": "consumes" }, { "source": 43, - "target": 85, + "target": 86, "type": "produces" }, { "source": 4, - "target": 85, + "target": 86, "type": "produces" }, { - "source": 64, - "target": 85, + "source": 65, + "target": 86, "type": "produces" }, { "source": 16, - "target": 85, + "target": 86, "type": "produces" }, { - "source": 86, + "source": 87, "target": 43, "type": "consumes" }, { - "source": 86, - "target": 63, + "source": 87, + "target": 64, "type": "consumes" }, { "source": 9, - "target": 86, + "target": 87, "type": "produces" }, { - "source": 87, - "target": 64, + "source": 88, + "target": 65, "type": "consumes" }, { - "source": 87, + "source": 88, "target": 3, "type": "consumes" }, { "source": 16, - "target": 87, + "target": 88, "type": "produces" }, { "source": 3, - "target": 87, + "target": 88, "type": "produces" }, { "source": 19, - "target": 87, + "target": 88, "type": "produces" }, { - "source": 88, - "target": 87, + "source": 89, + "target": 88, "type": "produces" }, { - "source": 89, + "source": 90, "target": 7, "type": "consumes" }, { "source": 7, - "target": 89, + "target": 90, "type": "produces" }, { - "source": 90, + "source": 91, "target": 2, "type": "consumes" }, { "source": 4, - "target": 90, + "target": 91, "type": "produces" }, { - "source": 91, + "source": 92, "target": 15, "type": "consumes" }, { - "source": 91, + "source": 92, "target": 3, "type": "consumes" }, { - "source": 91, + "source": 92, "target": 19, "type": "consumes" }, { "source": 2, - "target": 91, + "target": 92, "type": "produces" }, { "source": 3, - "target": 91, + "target": 92, "type": "produces" }, { - "source": 92, - "target": 70, + "source": 93, + "target": 71, "type": "consumes" }, { "source": 4, - "target": 92, + "target": 93, "type": "produces" }, { - "source": 93, + "source": 94, "target": 7, "type": "consumes" }, { "source": 7, - "target": 93, + "target": 94, "type": "produces" }, { "source": 46, - "target": 93, + "target": 94, "type": "produces" }, { "source": 19, - "target": 93, + "target": 94, "type": "produces" }, { - "source": 94, + "source": 95, "target": 3, "type": "consumes" }, { - "source": 74, - "target": 94, + "source": 75, + "target": 95, "type": "produces" }, { - "source": 95, + "source": 96, "target": 7, "type": "consumes" }, { - "source": 95, + "source": 96, "target": 12, "type": "consumes" }, { "source": 7, - "target": 95, + "target": 96, "type": "produces" }, { "source": 4, - "target": 95, + "target": 96, "type": "produces" }, { "source": 15, - "target": 95, + "target": 96, "type": "produces" }, { "source": 16, - "target": 95, + "target": 96, "type": "produces" }, { "source": 5, - "target": 95, + "target": 96, "type": "produces" }, { - "source": 96, + "source": 97, "target": 12, "type": "consumes" }, { - "source": 97, - "target": 96, + "source": 98, + "target": 97, "type": "produces" }, { - "source": 98, + "source": 99, "target": 12, "type": "consumes" }, { "source": 12, - "target": 98, + "target": 99, "type": "produces" }, { - "source": 99, + "source": 100, "target": 12, "type": "consumes" }, { - "source": 97, - "target": 99, + "source": 98, + "target": 100, "type": "produces" }, { - "source": 100, + "source": 101, "target": 10, "type": "consumes" }, { "source": 10, - "target": 100, + "target": 101, "type": "produces" }, { - "source": 101, + "source": 102, "target": 7, "type": "consumes" }, { "source": 7, - "target": 101, + "target": 
102, "type": "produces" }, { - "source": 102, + "source": 103, "target": 7, "type": "consumes" }, { "source": 7, - "target": 102, + "target": 103, "type": "produces" }, { - "source": 103, + "source": 104, "target": 2, "type": "consumes" }, { "source": 4, - "target": 103, + "target": 104, "type": "produces" }, { - "source": 104, + "source": 105, "target": 2, "type": "consumes" }, { - "source": 104, + "source": 105, "target": 3, "type": "consumes" }, { "source": 7, - "target": 104, + "target": 105, "type": "produces" }, { "source": 4, - "target": 104, + "target": 105, "type": "produces" }, { - "source": 105, + "source": 106, "target": 3, "type": "consumes" }, { "source": 4, - "target": 105, + "target": 106, "type": "produces" }, { "source": 16, - "target": 105, + "target": 106, "type": "produces" }, { "source": 5, - "target": 105, + "target": 106, "type": "produces" }, { - "source": 106, + "source": 107, "target": 7, "type": "consumes" }, { - "source": 106, + "source": 107, "target": 19, "type": "consumes" }, { "source": 7, - "target": 106, + "target": 107, "type": "produces" }, { - "source": 107, + "source": 108, "target": 7, "type": "consumes" }, { "source": 7, - "target": 107, + "target": 108, "type": "produces" }, { - "source": 108, + "source": 109, "target": 2, "type": "consumes" }, { - "source": 108, - "target": 70, + "source": 109, + "target": 71, "type": "consumes" }, { "source": 4, - "target": 108, + "target": 109, "type": "produces" }, { - "source": 70, - "target": 108, + "source": 71, + "target": 109, "type": "produces" }, { - "source": 109, + "source": 110, "target": 2, "type": "consumes" }, { - "source": 109, - "target": 70, + "source": 110, + "target": 71, "type": "consumes" }, { "source": 4, - "target": 109, + "target": 110, "type": "produces" }, { - "source": 70, - "target": 109, + "source": 71, + "target": 110, "type": "produces" }, { - "source": 110, + "source": 111, "target": 2, "type": "consumes" }, { - "source": 110, - "target": 70, + "source": 111, + "target": 71, "type": "consumes" }, { - "source": 70, - "target": 110, + "source": 71, + "target": 111, "type": "produces" }, { - "source": 111, + "source": 112, "target": 7, "type": "consumes" }, { "source": 7, - "target": 111, + "target": 112, "type": "produces" }, { - "source": 112, + "source": 113, "target": 7, "type": "consumes" }, { "source": 46, - "target": 112, + "target": 113, "type": "produces" }, { - "source": 113, + "source": 114, "target": 7, "type": "consumes" }, { - "source": 113, + "source": 114, "target": 12, "type": "consumes" }, { - "source": 113, - "target": 114, + "source": 114, + "target": 115, "type": "consumes" }, { "source": 15, - "target": 113, + "target": 114, "type": "produces" }, { - "source": 115, - "target": 63, + "source": 116, + "target": 64, "type": "consumes" }, { - "source": 115, - "target": 64, + "source": 116, + "target": 65, "type": "consumes" }, { "source": 43, - "target": 115, + "target": 116, "type": "produces" }, { - "source": 116, + "source": 117, "target": 43, "type": "consumes" }, { "source": 10, - "target": 116, + "target": 117, "type": "produces" }, { - "source": 117, + "source": 118, "target": 7, "type": "consumes" }, { "source": 7, - "target": 117, + "target": 118, "type": "produces" }, { - "source": 118, + "source": 119, "target": 3, "type": "consumes" }, { "source": 19, - "target": 118, + "target": 119, "type": "produces" }, { - "source": 119, + "source": 120, "target": 2, "type": "consumes" }, { "source": 4, - "target": 119, + "target": 120, "type": "produces" }, { - 
"source": 120, + "source": 121, "target": 7, "type": "consumes" }, { "source": 7, - "target": 120, + "target": 121, "type": "produces" }, { - "source": 121, + "source": 122, "target": 7, "type": "consumes" }, { "source": 46, - "target": 121, + "target": 122, "type": "produces" }, { "source": 19, - "target": 121, + "target": 122, "type": "produces" }, { - "source": 122, + "source": 123, "target": 7, "type": "consumes" }, { "source": 7, - "target": 122, + "target": 123, "type": "produces" }, { - "source": 123, + "source": 124, "target": 7, "type": "consumes" }, { "source": 7, - "target": 123, + "target": 124, "type": "produces" }, { - "source": 124, + "source": 125, "target": 7, "type": "consumes" }, { "source": 46, - "target": 124, + "target": 125, "type": "produces" }, { - "source": 125, + "source": 126, "target": 3, "type": "consumes" }, { "source": 4, - "target": 125, + "target": 126, "type": "produces" }, { - "source": 126, + "source": 127, "target": 19, "type": "consumes" }, { - "source": 64, - "target": 126, + "source": 65, + "target": 127, "type": "produces" }, { - "source": 127, - "target": 128, + "source": 128, + "target": 129, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 7, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 22, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 2, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 12, "type": "consumes" }, { - "source": 127, - "target": 114, + "source": 128, + "target": 115, "type": "consumes" }, { - "source": 127, - "target": 64, + "source": 128, + "target": 65, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 24, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 3, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 19, "type": "consumes" }, { - "source": 127, + "source": 128, "target": 49, "type": "consumes" }, { "source": 7, - "target": 127, + "target": 128, "type": "produces" }, { "source": 4, - "target": 127, + "target": 128, "type": "produces" }, { "source": 12, - "target": 127, + "target": 128, "type": "produces" }, { "source": 15, - "target": 127, + "target": 128, "type": "produces" }, { - "source": 63, - "target": 127, + "source": 64, + "target": 128, "type": "produces" }, { - "source": 129, + "source": 130, "target": 15, "type": "consumes" }, { "source": 7, - "target": 129, + "target": 130, "type": "produces" }, { "source": 46, - "target": 129, + "target": 130, "type": "produces" }, { - "source": 130, + "source": 131, "target": 7, "type": "consumes" }, { "source": 7, - "target": 130, + "target": 131, "type": "produces" }, { - "source": 131, + "source": 132, "target": 7, "type": "consumes" }, { "source": 7, - "target": 131, + "target": 132, "type": "produces" }, { - "source": 132, + "source": 133, "target": 7, "type": "consumes" }, { - "source": 132, + "source": 133, "target": 22, "type": "consumes" }, { - "source": 133, + "source": 134, "target": 2, "type": "consumes" }, { - "source": 133, + "source": 134, "target": 3, "type": "consumes" }, { "source": 4, - "target": 133, + "target": 134, "type": "produces" }, { "source": 5, - "target": 133, + "target": 134, "type": "produces" }, { - "source": 134, + "source": 135, "target": 7, "type": "consumes" }, { "source": 7, - "target": 134, + "target": 135, "type": "produces" }, { - "source": 135, + "source": 136, "target": 43, "type": "consumes" }, { - "source": 135, + "source": 136, "target": 10, "type": "consumes" }, { "source": 4, - "target": 135, + 
"target": 136, "type": "produces" }, { "source": 5, - "target": 135, + "target": 136, "type": "produces" }, { - "source": 136, + "source": 137, "target": 3, "type": "consumes" }, { "source": 4, - "target": 136, + "target": 137, "type": "produces" }, { - "source": 137, + "source": 138, "target": 7, "type": "consumes" }, { "source": 7, - "target": 137, + "target": 138, "type": "produces" }, { "source": 19, - "target": 137, + "target": 138, "type": "produces" }, { - "source": 138, + "source": 139, "target": 3, "type": "consumes" }, { "source": 7, - "target": 138, + "target": 139, "type": "produces" }, { - "source": 139, - "target": 138, + "source": 140, + "target": 139, "type": "produces" }, { - "source": 140, + "source": 141, "target": 7, "type": "consumes" }, { "source": 7, - "target": 140, + "target": 141, "type": "produces" }, { - "source": 141, + "source": 142, "target": 7, "type": "consumes" }, { "source": 7, - "target": 141, + "target": 142, "type": "produces" }, { - "source": 142, + "source": 143, "target": 3, "type": "consumes" }, { "source": 17, - "target": 142, + "target": 143, "type": "produces" }, { - "source": 143, + "source": 144, "target": 2, "type": "consumes" }, { "source": 16, - "target": 143, + "target": 144, "type": "produces" }, { - "source": 144, + "source": 145, "target": 7, "type": "consumes" }, { "source": 7, - "target": 144, + "target": 145, "type": "produces" }, { "source": 19, - "target": 144, + "target": 145, "type": "produces" }, { - "source": 145, + "source": 146, "target": 4, "type": "consumes" }, { - "source": 145, + "source": 146, "target": 16, "type": "consumes" }, { - "source": 145, + "source": 146, "target": 3, "type": "consumes" }, { - "source": 145, - "target": 139, + "source": 146, + "target": 140, "type": "consumes" }, { - "source": 145, + "source": 146, "target": 5, "type": "consumes" }, { - "source": 146, + "source": 147, "target": 2, "type": "consumes" }, { - "source": 146, + "source": 147, "target": 16, "type": "consumes" }, { "source": 4, - "target": 146, + "target": 147, "type": "produces" }, { "source": 16, - "target": 146, + "target": 147, "type": "produces" }, { "source": 19, - "target": 146, + "target": 147, "type": "produces" }, { "source": 5, - "target": 146, + "target": 147, "type": "produces" }, { - "source": 147, + "source": 148, "target": 7, "type": "consumes" }, { "source": 7, - "target": 147, + "target": 148, "type": "produces" } ] \ No newline at end of file diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 4647950b5..44d57a0fb 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -74,6 +74,7 @@ | dnsbimi | scan | No | Check DNS_NAME's for BIMI records to find image and certificate hosting URL's | cloud-enum, passive, safe, subdomain-enum | DNS_NAME | RAW_DNS_RECORD, URL_UNVERIFIED | @colin-stubbs | 2024-11-15 | | dnscaa | scan | No | Check for CAA records | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | @colin-stubbs | 2024-05-26 | | dnsdumpster | scan | No | Query dnsdumpster for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @TheTechromancer | 2022-03-12 | +| dnstlsrpt | scan | No | Check for TLS-RPT records | cloud-enum, email-enum, passive, safe, subdomain-enum | DNS_NAME | EMAIL_ADDRESS, RAW_DNS_RECORD, URL_UNVERIFIED | @colin-stubbs | 2024-07-26 | | docker_pull | scan | No | Download images from a docker repository | code-enum, passive, safe, slow | CODE_REPOSITORY | FILESYSTEM | 
@domwhewell-sage | 2024-03-24 | | dockerhub | scan | No | Search for docker repositories of discovered orgs/usernames | code-enum, passive, safe | ORG_STUB, SOCIAL | CODE_REPOSITORY, SOCIAL, URL_UNVERIFIED | @domwhewell-sage | 2024-03-12 | | emailformat | scan | No | Query email-format.com for email addresses | email-enum, passive, safe | DNS_NAME | EMAIL_ADDRESS | @TheTechromancer | 2022-07-11 | diff --git a/docs/modules/nuclei.md b/docs/modules/nuclei.md index 516944ec9..c6836008e 100644 --- a/docs/modules/nuclei.md +++ b/docs/modules/nuclei.md @@ -51,7 +51,7 @@ The Nuclei module has many configuration options: | modules.nuclei.silent | bool | Don't display nuclei's banner or status messages | False | | modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | | | modules.nuclei.templates | str | template or template directory paths to include in the scan | | -| modules.nuclei.version | str | nuclei version | 3.3.5 | +| modules.nuclei.version | str | nuclei version | 3.3.6 | Most of these you probably will **NOT** want to change. In particular, we advise against changing the version of Nuclei, as it's possible the latest version won't work right with BBOT. diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 6af93cf8c..b990598a1 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -70,7 +70,7 @@ Presets: Modules: -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. Choices: affiliates,ajaxpro,anubisdb,apkpure,asn,azure_realm,azure_tenant,baddns,baddns_direct,baddns_zone,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,bufferoverrun,builtwith,bypass403,c99,censys,certspotter,chaos,code_repository,columbus,credshed,crt,dastardly,dehashed,digitorus,dnsbimi,dnsbrute,dnsbrute_mutations,dnscaa,dnscommonsrv,dnsdumpster,docker_pull,dockerhub,dotnetnuke,emailformat,extractous,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,git_clone,github_codesearch,github_org,github_workflows,gitlab,google_playstore,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,jadx,leakix,myssl,newsletters,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,portscan,postman,postman_download,rapiddns,robots,secretsdb,securitytrails,securitytxt,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomaincenter,subdomainradar,telerik,trickest,trufflehog,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,wpscan,zoomeye + Modules to enable. 
Choices: affiliates,ajaxpro,anubisdb,apkpure,asn,azure_realm,azure_tenant,baddns,baddns_direct,baddns_zone,badsecrets,bevigil,binaryedge,bucket_amazon,bucket_azure,bucket_digitalocean,bucket_file_enum,bucket_firebase,bucket_google,bufferoverrun,builtwith,bypass403,c99,censys,certspotter,chaos,code_repository,columbus,credshed,crt,dastardly,dehashed,digitorus,dnsbimi,dnsbrute,dnsbrute_mutations,dnscaa,dnscommonsrv,dnsdumpster,dnstlsrpt,docker_pull,dockerhub,dotnetnuke,emailformat,extractous,ffuf,ffuf_shortnames,filedownload,fingerprintx,fullhunt,generic_ssrf,git,git_clone,github_codesearch,github_org,github_workflows,gitlab,google_playstore,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,internetdb,ip2location,ipneighbor,ipstack,jadx,leakix,myssl,newsletters,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,portscan,postman,postman_download,rapiddns,robots,secretsdb,securitytrails,securitytxt,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomaincenter,subdomainradar,telerik,trickest,trufflehog,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,wpscan,zoomeye -l, --list-modules List available modules. -lmo, --list-module-options Show all module config options diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index be14d2fe0..14c2465d9 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -325,7 +325,7 @@ Many modules accept their own configuration options. These options have the abil | modules.nuclei.silent | bool | Don't display nuclei's banner or status messages | False | | modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | | | modules.nuclei.templates | str | template or template directory paths to include in the scan | | -| modules.nuclei.version | str | nuclei version | 3.3.5 | +| modules.nuclei.version | str | nuclei version | 3.3.6 | | modules.oauth.try_all | bool | Check for OAUTH/IODC on every subdomain and URL. | False | | modules.paramminer_cookies.recycle_words | bool | Attempt to use words found during the scan on all other endpoints | False | | modules.paramminer_cookies.skip_boring_words | bool | Remove commonly uninteresting words from the wordlist | True | @@ -397,6 +397,10 @@ Many modules accept their own configuration options. 
These options have the abil | modules.dnscaa.emails | bool | emit EMAIL_ADDRESS events | True | | modules.dnscaa.in_scope_only | bool | Only check in-scope domains | True | | modules.dnscaa.urls | bool | emit URL_UNVERIFIED events | True | +| modules.dnstlsrpt.emit_emails | bool | Emit EMAIL_ADDRESS events | True | +| modules.dnstlsrpt.emit_raw_dns_records | bool | Emit RAW_DNS_RECORD events | False | +| modules.dnstlsrpt.emit_urls | bool | Emit URL_UNVERIFIED events | True | +| modules.dnstlsrpt.emit_vulnerabilities | bool | Emit VULNERABILITY events | True | | modules.docker_pull.all_tags | bool | Download all tags from each registry (Default False) | False | | modules.docker_pull.output_folder | str | Folder to download docker repositories to | | | modules.extractous.extensions | list | File extensions to parse | ['bak', 'bash', 'bashrc', 'conf', 'cfg', 'crt', 'csv', 'db', 'sqlite', 'doc', 'docx', 'ica', 'indd', 'ini', 'key', 'pub', 'log', 'markdown', 'md', 'odg', 'odp', 'ods', 'odt', 'pdf', 'pem', 'pps', 'ppsx', 'ppt', 'pptx', 'ps1', 'rdp', 'sh', 'sql', 'swp', 'sxw', 'txt', 'vbs', 'wpd', 'xls', 'xlsx', 'xml', 'yml', 'yaml'] | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 967c5cbf2..e83741798 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -104,41 +104,41 @@ Below is a full list of event types along with which modules produce/consume the ## List of Event Types -| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | -|---------------------|-----------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 18 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, mysql, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | -| ASN | 0 | 1 | | asn | -| AZURE_TENANT | 1 | 0 | speculate | | -| CODE_REPOSITORY | 6 | 6 | docker_pull, git_clone, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, github_codesearch, github_org, gitlab, postman | -| DNS_NAME | 59 | 43 | anubisdb, asset_inventory, azure_realm, azure_tenant, baddns, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, 
emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, myssl, oauth, otx, passivetotal, pgp, portscan, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, fullhunt, hackertarget, hunterio, internetdb, leakix, myssl, ntlm, oauth, otx, passivetotal, rapiddns, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | -| DNS_NAME_UNRESOLVED | 3 | 0 | baddns, speculate, subdomains | | -| EMAIL_ADDRESS | 1 | 9 | emails | credshed, dehashed, dnscaa, emailformat, hunterio, pgp, securitytxt, skymem, sslcert | -| FILESYSTEM | 3 | 7 | extractous, jadx, trufflehog | apkpure, docker_pull, filedownload, git_clone, github_workflows, jadx, postman_download | -| FINDING | 2 | 29 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, gitlab, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, secretsdb, smuggler, speculate, telerik, trufflehog, url_manipulation, wpscan | -| GEOLOCATION | 0 | 2 | | ip2location, ipstack | -| HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | -| HTTP_RESPONSE | 19 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, dotnetnuke, excavate, filedownload, gitlab, host_header, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer, wpscan | httpx | -| IP_ADDRESS | 8 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, portscan, speculate | asset_inventory, ipneighbor, speculate | -| IP_RANGE | 2 | 0 | portscan, speculate | | -| MOBILE_APP | 1 | 1 | apkpure | google_playstore | -| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, portscan, speculate | -| ORG_STUB | 4 | 1 | dockerhub, github_org, google_playstore, postman | speculate | -| PASSWORD | 0 | 2 | | credshed, dehashed | -| PROTOCOL | 0 | 1 | | fingerprintx | -| RAW_DNS_RECORD | 0 | 1 | | dnsbimi | -| RAW_TEXT | 1 | 1 | excavate | extractous | -| SOCIAL | 6 | 3 | dockerhub, github_org, gitlab, gowitness, postman, speculate | dockerhub, gitlab, social | -| STORAGE_BUCKET | 8 | 5 | baddns_direct, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | -| TECHNOLOGY | 4 | 8 | asset_inventory, gitlab, web_report, wpscan | badsecrets, dotnetnuke, gitlab, gowitness, internetdb, nuclei, wappalyzer, wpscan | -| URL | 20 | 2 | ajaxpro, asset_inventory, baddns_direct, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | -| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | -| URL_UNVERIFIED | 6 | 17 | code_repository, filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, dnsbimi, dnscaa, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, 
gowitness, hunterio, robots, securitytxt, urlscan, wayback, wpscan | -| USERNAME | 1 | 2 | speculate | credshed, dehashed | -| VHOST | 1 | 1 | web_report | vhost | -| VULNERABILITY | 2 | 13 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, dastardly, dotnetnuke, generic_ssrf, internetdb, nuclei, telerik, trufflehog, wpscan | -| WAF | 1 | 1 | asset_inventory | wafw00f | -| WEBSCREENSHOT | 0 | 1 | | gowitness | -| WEB_PARAMETER | 4 | 4 | hunt, paramminer_cookies, paramminer_getparams, paramminer_headers | excavate, paramminer_cookies, paramminer_getparams, paramminer_headers | +| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | +|---------------------|-----------------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| * | 18 | 0 | affiliates, cloudcheck, csv, discord, dnsresolve, http, json, mysql, neo4j, postgres, python, slack, splunk, sqlite, stdout, teams, txt, websocket | | +| ASN | 0 | 1 | | asn | +| AZURE_TENANT | 1 | 0 | speculate | | +| CODE_REPOSITORY | 6 | 6 | docker_pull, git_clone, github_workflows, google_playstore, postman_download, trufflehog | code_repository, dockerhub, github_codesearch, github_org, gitlab, postman | +| DNS_NAME | 60 | 43 | anubisdb, asset_inventory, azure_realm, azure_tenant, baddns, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, emailformat, fullhunt, github_codesearch, hackertarget, hunterio, internetdb, leakix, myssl, oauth, otx, passivetotal, pgp, portscan, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, fullhunt, hackertarget, hunterio, internetdb, leakix, myssl, ntlm, oauth, otx, passivetotal, rapiddns, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | +| 
DNS_NAME_UNRESOLVED | 3 | 0 | baddns, speculate, subdomains | | +| EMAIL_ADDRESS | 1 | 10 | emails | credshed, dehashed, dnscaa, dnstlsrpt, emailformat, hunterio, pgp, securitytxt, skymem, sslcert | +| FILESYSTEM | 3 | 7 | extractous, jadx, trufflehog | apkpure, docker_pull, filedownload, git_clone, github_workflows, jadx, postman_download | +| FINDING | 2 | 29 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, git, gitlab, host_header, hunt, internetdb, newsletters, ntlm, nuclei, paramminer_cookies, paramminer_getparams, secretsdb, smuggler, speculate, telerik, trufflehog, url_manipulation, wpscan | +| GEOLOCATION | 0 | 2 | | ip2location, ipstack | +| HASHED_PASSWORD | 0 | 2 | | credshed, dehashed | +| HTTP_RESPONSE | 19 | 1 | ajaxpro, asset_inventory, badsecrets, dastardly, dotnetnuke, excavate, filedownload, gitlab, host_header, newsletters, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, telerik, wappalyzer, wpscan | httpx | +| IP_ADDRESS | 8 | 3 | asn, asset_inventory, internetdb, ip2location, ipneighbor, ipstack, portscan, speculate | asset_inventory, ipneighbor, speculate | +| IP_RANGE | 2 | 0 | portscan, speculate | | +| MOBILE_APP | 1 | 1 | apkpure | google_playstore | +| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, internetdb, portscan, speculate | +| ORG_STUB | 4 | 1 | dockerhub, github_org, google_playstore, postman | speculate | +| PASSWORD | 0 | 2 | | credshed, dehashed | +| PROTOCOL | 0 | 1 | | fingerprintx | +| RAW_DNS_RECORD | 0 | 2 | | dnsbimi, dnstlsrpt | +| RAW_TEXT | 1 | 1 | excavate | extractous | +| SOCIAL | 6 | 3 | dockerhub, github_org, gitlab, gowitness, postman, speculate | dockerhub, gitlab, social | +| STORAGE_BUCKET | 8 | 5 | baddns_direct, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, speculate | bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google | +| TECHNOLOGY | 4 | 8 | asset_inventory, gitlab, web_report, wpscan | badsecrets, dotnetnuke, gitlab, gowitness, internetdb, nuclei, wappalyzer, wpscan | +| URL | 20 | 2 | ajaxpro, asset_inventory, baddns_direct, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | +| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | +| URL_UNVERIFIED | 6 | 18 | code_repository, filedownload, httpx, oauth, social, speculate | azure_realm, bevigil, bucket_file_enum, dnsbimi, dnscaa, dnstlsrpt, dockerhub, excavate, ffuf, ffuf_shortnames, github_codesearch, gowitness, hunterio, robots, securitytxt, urlscan, wayback, wpscan | +| USERNAME | 1 | 2 | speculate | credshed, dehashed | +| VHOST | 1 | 1 | web_report | vhost | +| VULNERABILITY | 2 | 13 | asset_inventory, web_report | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, dastardly, dotnetnuke, generic_ssrf, internetdb, nuclei, telerik, trufflehog, wpscan | +| WAF | 1 | 1 | asset_inventory | wafw00f | +| WEBSCREENSHOT | 0 | 1 | | gowitness | +| WEB_PARAMETER | 4 | 4 | hunt, paramminer_cookies, paramminer_getparams, paramminer_headers | excavate, paramminer_cookies, paramminer_getparams, paramminer_headers | ## Findings Vs. 
Vulnerabilities diff --git a/docs/scanning/index.md b/docs/scanning/index.md index e82d9101f..e97986360 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -107,30 +107,30 @@ A single module can have multiple flags. For example, the `securitytrails` modul ### List of Flags -| Flag | # Modules | Description | Modules | -|------------------|-------------|----------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 91 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnscaa, dnscommonsrv, dnsdumpster, docker_pull, dockerhub, emailformat, extractous, filedownload, fingerprintx, fullhunt, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, jadx, leakix, myssl, newsletters, ntlm, oauth, otx, passivetotal, pgp, portscan, postman, postman_download, rapiddns, robots, secretsdb, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 66 | Never connects to target systems | affiliates, aggregate, anubisdb, apkpure, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnscaa, dnsdumpster, docker_pull, dockerhub, emailformat, excavate, extractous, fullhunt, git_clone, github_codesearch, github_org, github_workflows, google_playstore, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, jadx, leakix, myssl, otx, passivetotal, pgp, postman, postman_download, rapiddns, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 52 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, baddns_direct, baddns_zone, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, 
dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, sslcert, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, virustotal, wayback, zoomeye | -| active | 47 | Makes active connections to target systems | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnsbrute, dnsbrute_mutations, dnscommonsrv, dotnetnuke, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gitlab, gowitness, host_header, httpx, hunt, iis_shortnames, newsletters, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, portscan, robots, secretsdb, securitytxt, smuggler, sslcert, telerik, url_manipulation, vhost, wafw00f, wappalyzer, wpscan | -| aggressive | 20 | Generates a large amount of network traffic | bypass403, dastardly, dnsbrute, dnsbrute_mutations, dotnetnuke, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f, wpscan | -| web-basic | 18 | Basic, non-intrusive web scan functionality | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, securitytxt, sslcert, wappalyzer | -| cloud-enum | 15 | Enumerates cloud resources | azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, dnsbimi, httpx, oauth, securitytxt | -| code-enum | 14 | Find public code repositories and search them for secrets etc. 
| apkpure, code_repository, docker_pull, dockerhub, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, postman, postman_download, trufflehog | -| web-thorough | 12 | More advanced web scanning functionality | ajaxpro, bucket_digitalocean, bypass403, dastardly, dotnetnuke, ffuf_shortnames, generic_ssrf, host_header, hunt, smuggler, telerik, url_manipulation | -| slow | 11 | May take a long time to complete | bucket_digitalocean, dastardly, dnsbrute_mutations, docker_pull, fingerprintx, git_clone, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | -| affiliates | 9 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, trickest, viewdns, zoomeye | -| email-enum | 8 | Enumerates email addresses | dehashed, dnscaa, emailformat, emails, hunterio, pgp, skymem, sslcert | -| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | -| baddns | 3 | Runs all modules from the DNS auditing tool BadDNS | baddns, baddns_direct, baddns_zone | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| portscan | 2 | Discovers open ports | internetdb, portscan | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | baddns | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | Modules | +|------------------|-------------|----------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 92 | Non-intrusive, safe to run | affiliates, aggregate, ajaxpro, anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, docker_pull, dockerhub, emailformat, extractous, filedownload, fingerprintx, fullhunt, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, 
google_playstore, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, internetdb, ip2location, ipstack, jadx, leakix, myssl, newsletters, ntlm, oauth, otx, passivetotal, pgp, portscan, postman, postman_download, rapiddns, robots, secretsdb, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 67 | Never connects to target systems | affiliates, aggregate, anubisdb, apkpure, asn, azure_realm, azure_tenant, bevigil, binaryedge, bucket_file_enum, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, credshed, crt, dehashed, digitorus, dnsbimi, dnscaa, dnsdumpster, dnstlsrpt, docker_pull, dockerhub, emailformat, excavate, extractous, fullhunt, git_clone, github_codesearch, github_org, github_workflows, google_playstore, hackertarget, hunterio, internetdb, ip2location, ipneighbor, ipstack, jadx, leakix, myssl, otx, passivetotal, pgp, postman, postman_download, rapiddns, securitytrails, shodan_dns, sitedossier, skymem, social, speculate, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 53 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, baddns_direct, baddns_zone, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, sslcert, subdomaincenter, subdomainradar, subdomains, trickest, urlscan, virustotal, wayback, zoomeye | +| active | 47 | Makes active connections to target systems | ajaxpro, baddns, baddns_direct, baddns_zone, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dnsbrute, dnsbrute_mutations, dnscommonsrv, dotnetnuke, ffuf, ffuf_shortnames, filedownload, fingerprintx, generic_ssrf, git, gitlab, gowitness, host_header, httpx, hunt, iis_shortnames, newsletters, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, portscan, robots, secretsdb, securitytxt, smuggler, sslcert, telerik, url_manipulation, vhost, wafw00f, wappalyzer, wpscan | +| aggressive | 20 | Generates a large amount of network traffic | bypass403, dastardly, dnsbrute, dnsbrute_mutations, dotnetnuke, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f, wpscan | +| web-basic | 18 | Basic, non-intrusive web scan functionality | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, securitytxt, sslcert, wappalyzer | +| cloud-enum | 16 | Enumerates cloud resources | azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, dnsbimi, dnstlsrpt, httpx, oauth, securitytxt | +| code-enum | 14 | Find public code repositories and search them for secrets etc. 
| apkpure, code_repository, docker_pull, dockerhub, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, postman, postman_download, trufflehog | +| web-thorough | 12 | More advanced web scanning functionality | ajaxpro, bucket_digitalocean, bypass403, dastardly, dotnetnuke, ffuf_shortnames, generic_ssrf, host_header, hunt, smuggler, telerik, url_manipulation | +| slow | 11 | May take a long time to complete | bucket_digitalocean, dastardly, dnsbrute_mutations, docker_pull, fingerprintx, git_clone, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, vhost | +| affiliates | 9 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, trickest, viewdns, zoomeye | +| email-enum | 9 | Enumerates email addresses | dehashed, dnscaa, dnstlsrpt, emailformat, emails, hunterio, pgp, skymem, sslcert | +| deadly | 4 | Highly aggressive | dastardly, ffuf, nuclei, vhost | +| baddns | 3 | Runs all modules from the DNS auditing tool BadDNS | baddns, baddns_direct, baddns_zone | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| portscan | 2 | Discovers open ports | internetdb, portscan | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | baddns | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | ## Dependencies diff --git a/docs/scanning/presets_list.md b/docs/scanning/presets_list.md index fc0cbc5f3..93e1d3c8b 100644 --- a/docs/scanning/presets_list.md +++ b/docs/scanning/presets_list.md @@ -42,7 +42,7 @@ Enumerate cloud resources such as storage buckets, etc. 
-Modules: [59]("`anubisdb`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `baddns`, `bevigil`, `binaryedge`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_file_enum`, `bucket_firebase`, `bucket_google`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `columbus`, `crt`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `fullhunt`, `github_codesearch`, `github_org`, `hackertarget`, `httpx`, `hunterio`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `oauth`, `otx`, `passivetotal`, `postman_download`, `postman`, `rapiddns`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `urlscan`, `virustotal`, `wayback`, `zoomeye`") +Modules: [60]("`anubisdb`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `baddns`, `bevigil`, `binaryedge`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_file_enum`, `bucket_firebase`, `bucket_google`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `columbus`, `crt`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `dnstlsrpt`, `fullhunt`, `github_codesearch`, `github_org`, `hackertarget`, `httpx`, `hunterio`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `oauth`, `otx`, `passivetotal`, `postman_download`, `postman`, `rapiddns`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `urlscan`, `virustotal`, `wayback`, `zoomeye`") ## **code-enum** @@ -187,7 +187,7 @@ Enumerate email addresses from APIs, web crawling, etc. -Modules: [7]("`dehashed`, `dnscaa`, `emailformat`, `hunterio`, `pgp`, `skymem`, `sslcert`") +Modules: [8]("`dehashed`, `dnscaa`, `dnstlsrpt`, `emailformat`, `hunterio`, `pgp`, `skymem`, `sslcert`") ## **fast** @@ -269,7 +269,7 @@ Everything everywhere all at once -Modules: [86]("`anubisdb`, `apkpure`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `baddns`, `badsecrets`, `bevigil`, `binaryedge`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_file_enum`, `bucket_firebase`, `bucket_google`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `code_repository`, `columbus`, `crt`, `dehashed`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `docker_pull`, `dockerhub`, `emailformat`, `ffuf_shortnames`, `ffuf`, `filedownload`, `fullhunt`, `git_clone`, `git`, `github_codesearch`, `github_org`, `github_workflows`, `gitlab`, `google_playstore`, `gowitness`, `hackertarget`, `httpx`, `hunterio`, `iis_shortnames`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `ntlm`, `oauth`, `otx`, `paramminer_cookies`, `paramminer_getparams`, `paramminer_headers`, `passivetotal`, `pgp`, `postman_download`, `postman`, `rapiddns`, `robots`, `secretsdb`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `skymem`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `trufflehog`, `urlscan`, `virustotal`, `wappalyzer`, `wayback`, `zoomeye`") +Modules: [87]("`anubisdb`, `apkpure`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `baddns`, `badsecrets`, `bevigil`, `binaryedge`, `bucket_amazon`, `bucket_azure`, `bucket_digitalocean`, `bucket_file_enum`, `bucket_firebase`, `bucket_google`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, 
`chaos`, `code_repository`, `columbus`, `crt`, `dehashed`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `dnstlsrpt`, `docker_pull`, `dockerhub`, `emailformat`, `ffuf_shortnames`, `ffuf`, `filedownload`, `fullhunt`, `git_clone`, `git`, `github_codesearch`, `github_org`, `github_workflows`, `gitlab`, `google_playstore`, `gowitness`, `hackertarget`, `httpx`, `hunterio`, `iis_shortnames`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `ntlm`, `oauth`, `otx`, `paramminer_cookies`, `paramminer_getparams`, `paramminer_headers`, `passivetotal`, `pgp`, `postman_download`, `postman`, `rapiddns`, `robots`, `secretsdb`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `skymem`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `trufflehog`, `urlscan`, `virustotal`, `wappalyzer`, `wayback`, `zoomeye`") ## **paramminer** @@ -356,7 +356,7 @@ Enumerate subdomains via APIs, brute-force -Modules: [52]("`anubisdb`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `bevigil`, `binaryedge`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `columbus`, `crt`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `fullhunt`, `github_codesearch`, `github_org`, `hackertarget`, `httpx`, `hunterio`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `oauth`, `otx`, `passivetotal`, `postman_download`, `postman`, `rapiddns`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `urlscan`, `virustotal`, `wayback`, `zoomeye`") +Modules: [53]("`anubisdb`, `asn`, `azure_realm`, `azure_tenant`, `baddns_direct`, `baddns_zone`, `bevigil`, `binaryedge`, `bufferoverrun`, `builtwith`, `c99`, `censys`, `certspotter`, `chaos`, `columbus`, `crt`, `digitorus`, `dnsbimi`, `dnsbrute_mutations`, `dnsbrute`, `dnscaa`, `dnscommonsrv`, `dnsdumpster`, `dnstlsrpt`, `fullhunt`, `github_codesearch`, `github_org`, `hackertarget`, `httpx`, `hunterio`, `internetdb`, `ipneighbor`, `leakix`, `myssl`, `oauth`, `otx`, `passivetotal`, `postman_download`, `postman`, `rapiddns`, `securitytrails`, `securitytxt`, `shodan_dns`, `sitedossier`, `social`, `sslcert`, `subdomaincenter`, `subdomainradar`, `trickest`, `urlscan`, `virustotal`, `wayback`, `zoomeye`") ## **web-basic** @@ -429,22 +429,22 @@ Modules: [30]("`ajaxpro`, `azure_realm`, `baddns`, `badsecrets`, `bucket_amazon` Here is the same data, but in a table: -| Preset | Category | Description | # Modules | Modules |
-|-----------------|------------|--------------------------------------------------------------------------|-------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| baddns-thorough | | Run all baddns modules and submodules. | 4 | baddns, baddns_direct, baddns_zone, httpx | -| cloud-enum | | Enumerate cloud resources such as storage buckets, etc. | 59 | anubisdb, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, social, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, virustotal, wayback, zoomeye | -| code-enum | | Enumerate Git repositories, Docker images, etc. | 16 | apkpure, code_repository, docker_pull, dockerhub, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, httpx, postman, postman_download, social, trufflehog | -| dirbust-heavy | web | Recursive web directory brute-force (aggressive) | 5 | ffuf, ffuf_shortnames, httpx, iis_shortnames, wayback | -| dirbust-light | web | Basic web directory brute-force (surface-level directories only) | 4 | ffuf, ffuf_shortnames, httpx, iis_shortnames | -| dotnet-audit | web | Comprehensive scan for all IIS/.NET specific modules and module settings | 8 | ajaxpro, badsecrets, dotnetnuke, ffuf, ffuf_shortnames, httpx, iis_shortnames, telerik | -| email-enum | | Enumerate email addresses from APIs, web crawling, etc. 
| 7 | dehashed, dnscaa, emailformat, hunterio, pgp, skymem, sslcert | -| fast | | Scan only the provided targets as fast as possible - no extra discovery | 0 | | -| iis-shortnames | web | Recursively enumerate IIS shortnames | 3 | ffuf_shortnames, httpx, iis_shortnames | -| kitchen-sink | | Everything everywhere all at once | 86 | anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, docker_pull, dockerhub, emailformat, ffuf, ffuf_shortnames, filedownload, fullhunt, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, gowitness, hackertarget, httpx, hunterio, iis_shortnames, internetdb, ipneighbor, leakix, myssl, ntlm, oauth, otx, paramminer_cookies, paramminer_getparams, paramminer_headers, passivetotal, pgp, postman, postman_download, rapiddns, robots, secretsdb, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, virustotal, wappalyzer, wayback, zoomeye | -| paramminer | web | Discover new web parameters via brute-force | 4 | httpx, paramminer_cookies, paramminer_getparams, paramminer_headers | -| spider | | Recursive web spider | 1 | httpx | -| subdomain-enum | | Enumerate subdomains via APIs, brute-force | 52 | anubisdb, asn, azure_realm, azure_tenant, baddns_direct, baddns_zone, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, social, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, virustotal, wayback, zoomeye | -| web-basic | | Quick web scan | 19 | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, ffuf_shortnames, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, securitytxt, sslcert, wappalyzer | -| web-screenshots | | Take screenshots of webpages | 3 | gowitness, httpx, social | -| web-thorough | | Aggressive web scan | 30 | ajaxpro, azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dotnetnuke, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, ntlm, oauth, robots, secretsdb, securitytxt, smuggler, sslcert, telerik, url_manipulation, wappalyzer | +| Preset | Category | Description | # Modules | Modules | 
+|-----------------|------------|--------------------------------------------------------------------------|-------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| baddns-thorough | | Run all baddns modules and submodules. | 4 | baddns, baddns_direct, baddns_zone, httpx | +| cloud-enum | | Enumerate cloud resources such as storage buckets, etc. | 60 | anubisdb, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, social, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, virustotal, wayback, zoomeye | +| code-enum | | Enumerate Git repositories, Docker images, etc. | 16 | apkpure, code_repository, docker_pull, dockerhub, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, httpx, postman, postman_download, social, trufflehog | +| dirbust-heavy | web | Recursive web directory brute-force (aggressive) | 5 | ffuf, ffuf_shortnames, httpx, iis_shortnames, wayback | +| dirbust-light | web | Basic web directory brute-force (surface-level directories only) | 4 | ffuf, ffuf_shortnames, httpx, iis_shortnames | +| dotnet-audit | web | Comprehensive scan for all IIS/.NET specific modules and module settings | 8 | ajaxpro, badsecrets, dotnetnuke, ffuf, ffuf_shortnames, httpx, iis_shortnames, telerik | +| email-enum | | Enumerate email addresses from APIs, web crawling, etc. 
| 8 | dehashed, dnscaa, dnstlsrpt, emailformat, hunterio, pgp, skymem, sslcert | +| fast | | Scan only the provided targets as fast as possible - no extra discovery | 0 | | +| iis-shortnames | web | Recursively enumerate IIS shortnames | 3 | ffuf_shortnames, httpx, iis_shortnames | +| kitchen-sink | | Everything everywhere all at once | 87 | anubisdb, apkpure, asn, azure_realm, azure_tenant, baddns, baddns_direct, baddns_zone, badsecrets, bevigil, binaryedge, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_file_enum, bucket_firebase, bucket_google, bufferoverrun, builtwith, c99, censys, certspotter, chaos, code_repository, columbus, crt, dehashed, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, docker_pull, dockerhub, emailformat, ffuf, ffuf_shortnames, filedownload, fullhunt, git, git_clone, github_codesearch, github_org, github_workflows, gitlab, google_playstore, gowitness, hackertarget, httpx, hunterio, iis_shortnames, internetdb, ipneighbor, leakix, myssl, ntlm, oauth, otx, paramminer_cookies, paramminer_getparams, paramminer_headers, passivetotal, pgp, postman, postman_download, rapiddns, robots, secretsdb, securitytrails, securitytxt, shodan_dns, sitedossier, skymem, social, sslcert, subdomaincenter, subdomainradar, trickest, trufflehog, urlscan, virustotal, wappalyzer, wayback, zoomeye | +| paramminer | web | Discover new web parameters via brute-force | 4 | httpx, paramminer_cookies, paramminer_getparams, paramminer_headers | +| spider | | Recursive web spider | 1 | httpx | +| subdomain-enum | | Enumerate subdomains via APIs, brute-force | 53 | anubisdb, asn, azure_realm, azure_tenant, baddns_direct, baddns_zone, bevigil, binaryedge, bufferoverrun, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnsbimi, dnsbrute, dnsbrute_mutations, dnscaa, dnscommonsrv, dnsdumpster, dnstlsrpt, fullhunt, github_codesearch, github_org, hackertarget, httpx, hunterio, internetdb, ipneighbor, leakix, myssl, oauth, otx, passivetotal, postman, postman_download, rapiddns, securitytrails, securitytxt, shodan_dns, sitedossier, social, sslcert, subdomaincenter, subdomainradar, trickest, urlscan, virustotal, wayback, zoomeye | +| web-basic | | Quick web scan | 19 | azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_firebase, bucket_google, ffuf_shortnames, filedownload, git, httpx, iis_shortnames, ntlm, oauth, robots, secretsdb, securitytxt, sslcert, wappalyzer | +| web-screenshots | | Take screenshots of webpages | 3 | gowitness, httpx, social | +| web-thorough | | Aggressive web scan | 30 | ajaxpro, azure_realm, baddns, badsecrets, bucket_amazon, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_google, bypass403, dastardly, dotnetnuke, ffuf_shortnames, filedownload, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, ntlm, oauth, robots, secretsdb, securitytxt, smuggler, sslcert, telerik, url_manipulation, wappalyzer |
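
For readers trying out the `dnstlsrpt` module documented above, here is a minimal sketch using BBOT's Python API. The `Scanner` class comes from `bbot.scanner`; the target domain and the exact keyword arguments (`presets=`, `modules=`) are illustrative assumptions, so verify them against the current Scanner docs:

```python
from bbot.scanner import Scanner

# Minimal sketch (assumptions: the `presets=` and `modules=` kwargs, and the
# placeholder target "evilcorp.com"). Enables the email-enum preset plus the
# newly documented dnstlsrpt module.
scan = Scanner("evilcorp.com", presets=["email-enum"], modules=["dnstlsrpt"])

for event in scan.start():
    # Per the events.md table above, dnstlsrpt consumes DNS_NAME and can
    # produce EMAIL_ADDRESS, RAW_DNS_RECORD, and URL_UNVERIFIED events.
    print(event.type, event.data)
```

On the command line, the same module can be enabled with `-m dnstlsrpt`, as reflected in the updated `--modules` choices in `docs/scanning/advanced.md`.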