Merge branch '3.0' into nats
TheTechromancer authored Nov 25, 2024
2 parents 792eab9 + bb86fb6 commit 3755647
Showing 61 changed files with 337 additions and 177 deletions.
2 changes: 1 addition & 1 deletion .gitattributes
@@ -5,4 +5,4 @@
*.txt text eol=lf
*.json text eol=lf
*.md text eol=lf
*.sh text eol=lf
*.sh text eol=lf
2 changes: 1 addition & 1 deletion .gitmodules
@@ -1,4 +1,4 @@
[submodule "bbot/modules/playground"]
path = bbot/modules/playground
url = https://github.com/blacklanternsecurity/bbot-module-playground
branch = main
branch = main
48 changes: 48 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,48 @@
# Learn more about this config here: https://pre-commit.com/

# To enable these pre-commit hooks run:
# `pipx install pre-commit` or `brew install pre-commit`
# Then in the project root directory run `pre-commit install`

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-added-large-files
- id: check-ast
- id: check-builtin-literals
- id: check-byte-order-marker
- id: check-case-conflict
# - id: check-docstring-first
# - id: check-executables-have-shebangs
- id: check-json
- id: check-merge-conflict
# - id: check-shebang-scripts-are-executable
- id: check-symlinks
- id: check-toml
- id: check-vcs-permalinks
- id: check-xml
# - id: check-yaml
- id: debug-statements
- id: destroyed-symlinks
# - id: detect-private-key
- id: end-of-file-fixer
- id: file-contents-sorter
- id: fix-byte-order-marker
- id: forbid-new-submodules
- id: forbid-submodules
- id: mixed-line-ending
- id: requirements-txt-fixer
- id: sort-simple-yaml
- id: trailing-whitespace

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.0
hooks:
- id: ruff
- id: ruff-format

- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.23
hooks:
- id: validate-pyproject
2 changes: 1 addition & 1 deletion bbot/core/event/base.py
@@ -814,7 +814,7 @@ def json(self, mode="json"):
if parent_uuid:
j["parent_uuid"] = parent_uuid
# tags
j.update({"tags": list(self.tags)})
j.update({"tags": sorted(self.tags)})
# parent module
if self.module:
j.update({"module": str(self.module)})
2 changes: 1 addition & 1 deletion bbot/defaults.yml
@@ -74,7 +74,7 @@ dns:

web:
# HTTP proxy
http_proxy:
http_proxy:
# Web user-agent
user_agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.2151.97
# Set the maximum number of HTTP links that can be followed in a row (0 == no spidering allowed)
3 changes: 0 additions & 3 deletions bbot/models/pydantic.py
@@ -50,7 +50,6 @@ def _indexed_fields(cls):

### EVENT ###


class Event(BBOTBaseModel):
uuid: Annotated[str, "indexed", "unique"]
id: Annotated[str, "indexed"]
@@ -93,7 +92,6 @@ def get_data(self):

### SCAN ###


class Scan(BBOTBaseModel):
id: Annotated[str, "indexed", "unique"]
name: str
@@ -117,7 +115,6 @@ def from_scan(cls, scan):

### TARGET ###


class Target(BBOTBaseModel):
name: str = "Default Target"
strict_scope: bool = False
3 changes: 0 additions & 3 deletions bbot/models/sql.py
@@ -61,7 +61,6 @@ def __eq__(self, other):

### EVENT ###


class Event(BBOTBaseModel, table=True):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -106,7 +105,6 @@ def get_data(self):

### SCAN ###


class Scan(BBOTBaseModel, table=True):
id: str = Field(primary_key=True)
name: str
@@ -121,7 +119,6 @@ class Scan(BBOTBaseModel, table=True):

### TARGET ###


class Target(BBOTBaseModel, table=True):
name: str = "Default Target"
strict_scope: bool = False
4 changes: 3 additions & 1 deletion bbot/modules/internal/cloudcheck.py
@@ -57,7 +57,9 @@ async def handle_event(self, event, **kwargs):
for provider in self.helpers.cloud.providers.values():
provider_name = provider.name.lower()
base_kwargs = {
"parent": event, "tags": [f"{provider.provider_type}-{provider_name}"], "_provider": provider_name
"parent": event,
"tags": [f"{provider.provider_type}-{provider_name}"],
"_provider": provider_name,
}
# loop through the provider's regex signatures, if any
for event_type, sigs in provider.signatures.items():
4 changes: 1 addition & 3 deletions bbot/modules/internal/dnsresolve.py
@@ -306,9 +306,7 @@ def get_dns_parent(self, event):
@property
def emit_raw_records(self):
if self._emit_raw_records is None:
watching_raw_records = any(
"RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values()
)
watching_raw_records = any("RAW_DNS_RECORD" in m.get_watched_events() for m in self.scan.modules.values())
omitted_event_types = self.scan.config.get("omit_event_types", [])
omit_raw_records = "RAW_DNS_RECORD" in omitted_event_types
self._emit_raw_records = watching_raw_records or not omit_raw_records
14 changes: 12 additions & 2 deletions bbot/modules/output/elastic.py
@@ -2,16 +2,20 @@


class Elastic(HTTP):
"""
docker run -d -p 9200:9200 --name=bbot-elastic -v "$(pwd)/elastic_data:/usr/share/elasticsearch/data" -e ELASTIC_PASSWORD=bbotislife -m 1GB docker.elastic.co/elasticsearch/elasticsearch:8.16.0
"""

watched_events = ["*"]
metadata = {
"description": "Send scan results to Elasticsearch",
"created_date": "2022-11-21",
"author": "@TheTechromancer",
}
options = {
"url": "",
"url": "https://localhost:9200/bbot_events/_doc",
"username": "elastic",
"password": "changeme",
"password": "bbotislife",
"timeout": 10,
}
options_desc = {
@@ -20,3 +24,9 @@ class Elastic(HTTP):
"password": "Elastic password",
"timeout": "HTTP timeout",
}

async def cleanup(self):
# refresh the index
doc_regex = self.helpers.re.compile(r"/[^/]+$")
refresh_url = doc_regex.sub("/_refresh", self.url)
await self.helpers.request(refresh_url, auth=self.auth)
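For reference, a minimal read-back sketch (not part of this commit): it queries a few indexed events out of the bbot_events index, assuming the default URL, username, and password from this module's options and the self-signed certificate used by the Elasticsearch container shown in the docstring above.

# Hypothetical verification snippet; URL and credentials mirror the module defaults above.
import requests

resp = requests.get(
    "https://localhost:9200/bbot_events/_search",
    auth=("elastic", "bbotislife"),
    params={"size": 5},
    verify=False,  # the container above uses a self-signed certificate
)
resp.raise_for_status()
for hit in resp.json()["hits"]["hits"]:
    print(hit["_source"]["type"], hit["_source"].get("data"))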
2 changes: 1 addition & 1 deletion bbot/modules/output/kafka.py
@@ -8,7 +8,7 @@ class Kafka(BaseOutputModule):
watched_events = ["*"]
meta = {
"description": "Output scan data to a Kafka topic",
"created_date": "2024-11-17",
"created_date": "2024-11-22",
"author": "@TheTechromancer",
}
options = {
8 changes: 7 additions & 1 deletion bbot/modules/output/mongo.py
@@ -59,7 +59,13 @@ async def setup(self):
async def handle_event(self, event):
event_json = event.json()
event_pydantic = Event(**event_json)
await self.events_collection.insert_one(event_pydantic.model_dump())
while 1:
try:
await self.events_collection.insert_one(event_pydantic.model_dump())
break
except Exception as e:
self.warning(f"Error inserting event into MongoDB: {e}, retrying...")
await self.helpers.sleep(1)

if event.type == "SCAN":
scan_json = Scan(**event.data_json).model_dump()
56 changes: 56 additions & 0 deletions bbot/modules/output/rabbitmq.py
@@ -0,0 +1,56 @@
import json
import aio_pika

from bbot.modules.output.base import BaseOutputModule


class RabbitMQ(BaseOutputModule):
watched_events = ["*"]
meta = {
"description": "Output scan data to a RabbitMQ queue",
"created_date": "2024-11-22",
"author": "@TheTechromancer",
}
options = {
"url": "amqp://guest:guest@localhost/",
"queue": "bbot_events",
}
options_desc = {
"url": "The RabbitMQ connection URL",
"queue": "The RabbitMQ queue to publish events to",
}
deps_pip = ["aio_pika~=9.5.0"]

async def setup(self):
self.rabbitmq_url = self.config.get("url", "amqp://guest:guest@localhost/")
self.queue_name = self.config.get("queue", "bbot_events")

# Connect to RabbitMQ
self.connection = await aio_pika.connect_robust(self.rabbitmq_url)
self.channel = await self.connection.channel()

# Declare the queue
self.queue = await self.channel.declare_queue(self.queue_name, durable=True)
self.verbose("RabbitMQ connection and queue setup successfully")
return True

async def handle_event(self, event):
event_json = event.json()
event_data = json.dumps(event_json).encode("utf-8")

# Publish the message to the queue
while 1:
try:
await self.channel.default_exchange.publish(
aio_pika.Message(body=event_data),
routing_key=self.queue_name,
)
break
except Exception as e:
self.error(f"Error publishing message to RabbitMQ: {e}, rerying...")
await self.helpers.sleep(1)

async def cleanup(self):
# Close the connection
await self.connection.close()
self.verbose("RabbitMQ connection closed successfully")
9 changes: 8 additions & 1 deletion bbot/modules/report/asn.py
@@ -207,7 +207,14 @@ async def get_asn_bgpview(self, ip):
return False
asns_tried.add(asn)
asns.append(
{"asn": asn, "subnet": subnet, "name": name, "description": description, "country": country, "emails": emails}
{
"asn": asn,
"subnet": subnet,
"name": name,
"description": description,
"country": country,
"emails": emails,
}
)
if not asns:
self.debug(f'No results for "{ip}"')
2 changes: 0 additions & 2 deletions bbot/presets/kitchen-sink.yml
@@ -16,5 +16,3 @@ config:
modules:
baddns:
enable_references: True


1 change: 0 additions & 1 deletion bbot/presets/web/dotnet-audit.yml
@@ -19,4 +19,3 @@ config:
extensions: asp,aspx,ashx,asmx,ascx
telerik:
exploit_RAU_crypto: True

4 changes: 3 additions & 1 deletion bbot/scanner/preset/args.py
@@ -175,7 +175,9 @@ def preset_from_args(self):
def create_parser(self, *args, **kwargs):
kwargs.update(
{
"description": "Bighuge BLS OSINT Tool", "formatter_class": argparse.RawTextHelpFormatter, "epilog": self.epilog
"description": "Bighuge BLS OSINT Tool",
"formatter_class": argparse.RawTextHelpFormatter,
"epilog": self.epilog,
}
)
p = argparse.ArgumentParser(*args, **kwargs)
2 changes: 1 addition & 1 deletion bbot/scanner/preset/preset.py
@@ -967,7 +967,7 @@ def presets_table(self, include_modules=True):
header = ["Preset", "Category", "Description", "# Modules"]
if include_modules:
header.append("Modules")
for (loaded_preset, category, preset_path, original_file) in self.all_presets.values():
for loaded_preset, category, preset_path, original_file in self.all_presets.values():
loaded_preset = loaded_preset.bake()
num_modules = f"{len(loaded_preset.scan_modules):,}"
row = [loaded_preset.name, category, loaded_preset.description, num_modules]
1 change: 0 additions & 1 deletion bbot/test/test_step_1/test__module__tests.py
@@ -15,7 +15,6 @@


def test__module__tests():

preset = Preset()

# make sure each module has a .py file
3 changes: 0 additions & 3 deletions bbot/test/test_step_1/test_bbot_fastapi.py
@@ -17,7 +17,6 @@ def run_bbot_multiprocess(queue):


def test_bbot_multiprocess(bbot_httpserver):

bbot_httpserver.expect_request("/").respond_with_data("[email protected]")

queue = multiprocessing.Queue()
@@ -32,12 +31,10 @@ def test_bbot_multiprocess(bbot_httpserver):


def test_bbot_fastapi(bbot_httpserver):

bbot_httpserver.expect_request("/").respond_with_data("[email protected]")
fastapi_process = start_fastapi_server()

try:

# wait for the server to start with a timeout of 60 seconds
start_time = time.time()
while True:
1 change: 0 additions & 1 deletion bbot/test/test_step_1/test_bloom_filter.py
@@ -6,7 +6,6 @@

@pytest.mark.asyncio
async def test_bloom_filter():

def generate_random_strings(n, length=10):
"""Generate a list of n random strings."""
return ["".join(random.choices(string.ascii_letters + string.digits, k=length)) for _ in range(n)]
3 changes: 0 additions & 3 deletions bbot/test/test_step_1/test_dns.py
@@ -185,7 +185,6 @@ async def test_dns_resolution(bbot_scanner):

@pytest.mark.asyncio
async def test_wildcards(bbot_scanner):

scan = bbot_scanner("1.1.1.1")
helpers = scan.helpers

@@ -634,7 +633,6 @@ def custom_lookup(query, rdtype):

@pytest.mark.asyncio
async def test_wildcard_deduplication(bbot_scanner):

custom_lookup = """
def custom_lookup(query, rdtype):
if rdtype == "TXT" and query.strip(".").endswith("evilcorp.com"):
@@ -670,7 +668,6 @@ async def handle_event(self, event):

@pytest.mark.asyncio
async def test_dns_raw_records(bbot_scanner):

from bbot.modules.base import BaseModule

class DummyModule(BaseModule):
2 changes: 0 additions & 2 deletions bbot/test/test_step_1/test_engine.py
@@ -14,7 +14,6 @@ async def test_engine():
return_errored = False

class TestEngineServer(EngineServer):

CMDS = {
0: "return_thing",
1: "yield_stuff",
@@ -54,7 +53,6 @@ async def yield_stuff(self, n):
raise

class TestEngineClient(EngineClient):

SERVER_CLASS = TestEngineServer

async def return_thing(self, n):