Skip to content

Commit

Permalink
Merge branch 'dev' into readme-updates
Browse files Browse the repository at this point in the history
  • Loading branch information
TheTechromancer authored Jul 31, 2024
2 parents d265cb9 + 0f93179 commit a4eaaf9
Show file tree
Hide file tree
Showing 16 changed files with 227 additions and 110 deletions.
19 changes: 17 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ jobs:
publish_docs:
needs: update_docs
runs-on: ubuntu-latest
if: github.event_name == 'push' && (github.ref == 'refs/heads/stable')
if: github.event_name == 'push' && (github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/dev')
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
Expand All @@ -102,7 +102,22 @@ jobs:
run: |
pip install poetry
poetry install --only=docs
- run: poetry run mkdocs gh-deploy --force
- name: Publish docs (stable branch)
if: github.ref == 'refs/heads/stable'
run: |
mkdocs build -f mkdocs.yml
mkdocs gh-deploy --force --dir=site
- name: Publish docs (dev branch)
if: github.ref == 'refs/heads/dev'
run: |
mkdocs build -f mkdocs-dev.yml -d site/dev_branch
git config user.name github-actions
git config user.email [email protected]
git checkout gh-pages
mv site/dev_branch .
git add dev_branch
git commit -m "Update dev documentation"
git push
publish_code:
needs: update_docs
runs-on: ubuntu-latest
Expand Down
74 changes: 41 additions & 33 deletions bbot/core/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@

from bbot.core import CORE
from bbot.errors import BBOTEngineError
from bbot.core.helpers.misc import rand_string
from bbot.core.helpers.async_helpers import get_event_loop
from bbot.core.helpers.misc import rand_string, in_exception_chain


error_sentinel = object()
Expand All @@ -41,6 +41,7 @@ class EngineBase:
ERROR_CLASS = BBOTEngineError

def __init__(self):
self._shutdown_status = False
self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}")

def pickle(self, obj):
Expand All @@ -62,7 +63,7 @@ def unpickle(self, binary):

async def _infinite_retry(self, callback, *args, **kwargs):
interval = kwargs.pop("_interval", 10)
while 1:
while not self._shutdown_status:
try:
return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval)
except (TimeoutError, asyncio.TimeoutError):
Expand Down Expand Up @@ -107,7 +108,6 @@ class EngineClient(EngineBase):
SERVER_CLASS = None

def __init__(self, **kwargs):
self._shutdown = False
super().__init__()
self.name = f"EngineClient {self.__class__.__name__}"
self.process = None
Expand Down Expand Up @@ -135,7 +135,7 @@ def check_error(self, message):
async def run_and_return(self, command, *args, **kwargs):
fn_str = f"{command}({args}, {kwargs})"
self.log.debug(f"{self.name}: executing run-and-return {fn_str}")
if self._shutdown and not command == "_shutdown":
if self._shutdown_status and not command == "_shutdown":
self.log.verbose(f"{self.name} has been shut down and is not accepting new tasks")
return
async with self.new_socket() as socket:
Expand Down Expand Up @@ -163,7 +163,7 @@ async def run_and_return(self, command, *args, **kwargs):
async def run_and_yield(self, command, *args, **kwargs):
fn_str = f"{command}({args}, {kwargs})"
self.log.debug(f"{self.name}: executing run-and-yield {fn_str}")
if self._shutdown:
if self._shutdown_status:
self.log.verbose("Engine has been shut down and is not accepting new tasks")
return
message = self.make_message(command, args=args, kwargs=kwargs)
Expand Down Expand Up @@ -213,14 +213,16 @@ async def send_shutdown_message(self):
async with self.new_socket() as socket:
# -99 == special shutdown message
message = pickle.dumps({"c": -99})
await self._infinite_retry(socket.send, message)
while 1:
response = await self._infinite_retry(socket.recv)
response = pickle.loads(response)
if isinstance(response, dict):
response = response.get("m", "")
if response == "SHUTDOWN_OK":
break
with suppress(TimeoutError, asyncio.TimeoutError):
await asyncio.wait_for(socket.send(message), 0.5)
with suppress(TimeoutError, asyncio.TimeoutError):
while 1:
response = await asyncio.wait_for(socket.recv(), 0.5)
response = pickle.loads(response)
if isinstance(response, dict):
response = response.get("m", "")
if response == "SHUTDOWN_OK":
break

def check_stop(self, message):
if isinstance(message, dict) and len(message) == 1 and "_s" in message:
Expand Down Expand Up @@ -280,7 +282,7 @@ def server_process(server_class, socket_path, **kwargs):
else:
asyncio.run(engine_server.worker())
except (asyncio.CancelledError, KeyboardInterrupt, CancelledError):
pass
return
except Exception:
import traceback

Expand All @@ -306,9 +308,9 @@ async def new_socket(self):
socket.close()

async def shutdown(self):
self.log.debug(f"{self.name}: shutting down...")
if not self._shutdown:
self._shutdown = True
if not self._shutdown_status:
self._shutdown_status = True
self.log.verbose(f"{self.name}: shutting down...")
# send shutdown signal
await self.send_shutdown_message()
# then terminate context
Expand Down Expand Up @@ -446,6 +448,7 @@ def check_error(self, message):
return True

async def worker(self):
self.log.debug(f"{self.name}: starting worker")
try:
while 1:
client_id, binary = await self.socket.recv_multipart()
Expand All @@ -462,8 +465,8 @@ async def worker(self):
# -1 == cancel task
if cmd == -1:
self.log.debug(f"{self.name} got cancel signal")
await self.cancel_task(client_id)
await self.send_socket_multipart(client_id, {"m": "CANCEL_OK"})
await self.cancel_task(client_id)
continue

# -99 == shutdown task
Expand Down Expand Up @@ -500,24 +503,28 @@ async def worker(self):
task = asyncio.create_task(coroutine)
self.tasks[client_id] = task, command_fn, args, kwargs
# self.log.debug(f"{self.name}: finished creating task for {command_name}() coroutine")
except Exception as e:
self.log.error(f"{self.name}: error in EngineServer worker: {e}")
self.log.trace(traceback.format_exc())
except BaseException as e:
await self._shutdown()
if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
self.log.error(f"{self.name}: error in EngineServer worker: {e}")
self.log.trace(traceback.format_exc())
finally:
self.log.debug(f"{self.name}: finished worker()")

async def _shutdown(self):
self.log.debug(f"{self.name}: shutting down...")
await self.cancel_all_tasks()
try:
self.context.destroy(linger=0)
except Exception:
self.log.trace(traceback.format_exc())
try:
self.context.term()
except Exception:
self.log.trace(traceback.format_exc())
self.log.debug(f"{self.name}: finished shutting down")
if not self._shutdown_status:
self.log.verbose(f"{self.name}: shutting down...")
self._shutdown_status = True
await self.cancel_all_tasks()
try:
self.context.destroy(linger=0)
except Exception:
self.log.trace(traceback.format_exc())
try:
self.context.term()
except Exception:
self.log.trace(traceback.format_exc())
self.log.debug(f"{self.name}: finished shutting down")

def new_child_task(self, client_id, coro):
task = asyncio.create_task(coro)
Expand Down Expand Up @@ -554,8 +561,9 @@ async def _cancel_task(self, task):
await asyncio.wait_for(task, timeout=10)
except (TimeoutError, asyncio.TimeoutError):
self.log.debug(f"{self.name}: Timeout cancelling task")
return
except (KeyboardInterrupt, asyncio.CancelledError):
pass
return
except BaseException as e:
self.log.error(f"Unhandled error in {task.get_coro().__name__}(): {e}")
self.log.trace(traceback.format_exc())
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -1478,7 +1478,7 @@ async def _worker(self):
self.scan.stats.event_consumed(event, self)
self.debug(f"Intercepting {event}")
async with self.scan._acatch(context), self._task_counter.count(context):
forward_event = await self.handle_event(event, kwargs)
forward_event = await self.handle_event(event, **kwargs)
with suppress(ValueError, TypeError):
forward_event, forward_event_reason = forward_event

Expand Down
18 changes: 8 additions & 10 deletions bbot/modules/dnsbrute_mutations.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ class dnsbrute_mutations(BaseModule):

async def setup(self):
self.found = {}
self.parent_events = self.helpers.make_target()
self.parent_events = {}
self.max_mutations = self.config.get("max_mutations", 500)
# 800M bits == 100MB bloom filter == 10M entries before false positives start emerging
self.mutations_tried = self.helpers.bloom_filter(800000000)
Expand All @@ -30,11 +30,8 @@ async def setup(self):

async def handle_event(self, event):
# here we don't brute-force, we just add the subdomain to our end-of-scan
self.add_found(event)

def add_found(self, event):
self.parent_events.add(event)
host = str(event.host)
self.parent_events[host] = event
if self.helpers.is_subdomain(host):
subdomain, domain = host.split(".", 1)
if not self.helpers.dns.brute.has_excessive_digits(subdomain):
Expand All @@ -43,6 +40,10 @@ def add_found(self, event):
except KeyError:
self.found[domain] = {subdomain}

def get_parent_event(self, subdomain):
    """Return the stored parent event whose host best matches *subdomain*.

    ``self.parent_events`` maps host strings to the events recorded earlier
    (one per host seen in handle_event); this resolves a brute-forced
    subdomain back to the event it should be attributed to.
    """
    # closest_match presumably does a fuzzy/closest-host lookup over the
    # recorded host keys — semantics live in self.helpers; verify there.
    # NOTE(review): assumes self.parent_events is non-empty and that
    # closest_match always returns one of its keys — TODO confirm,
    # otherwise the lookup below raises KeyError.
    parent_host = self.helpers.closest_match(subdomain, self.parent_events)
    return self.parent_events[parent_host]

async def finish(self):
found = sorted(self.found.items(), key=lambda x: len(x[-1]), reverse=True)
# if we have a lot of rounds to make, don't try mutations on less-populated domains
Expand Down Expand Up @@ -119,18 +120,15 @@ def add_mutation(m):
self._mutation_run_counter[domain] = mutation_run = 1
self._mutation_run_counter[domain] += 1
for hostname in results:
parent_event = self.parent_events.get_host(hostname)
if parent_event is None:
self.warning(f"Could not correlate parent event from: {hostname}")
parent_event = self.scan.root_event
parent_event = self.get_parent_event(hostname)
mutation_run_ordinal = self.helpers.integer_to_ordinal(mutation_run)
await self.emit_event(
hostname,
"DNS_NAME",
parent=parent_event,
tags=[f"mutation-{mutation_run}"],
abort_if=self.abort_if,
context=f'{{module}} found a mutated subdomain of "{domain}" on its {mutation_run_ordinal} run: {{event.type}}: {{event.data}}',
context=f'{{module}} found a mutated subdomain of "{parent_event.host}" on its {mutation_run_ordinal} run: {{event.type}}: {{event.data}}',
)
if results:
continue
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/internal/cloudcheck.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ async def filter_event(self, event):
return False, "event does not have host attribute"
return True

async def handle_event(self, event, kwargs):
async def handle_event(self, event, **kwargs):
# don't hold up the event loop loading cloud IPs etc.
if self.dummy_modules is None:
self.make_dummy_modules()
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/internal/dnsresolve.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ async def filter_event(self, event):
return False, "event does not have host attribute"
return True

async def handle_event(self, event, kwargs):
async def handle_event(self, event, **kwargs):
dns_tags = set()
dns_children = dict()
event_whitelisted = False
Expand Down
16 changes: 8 additions & 8 deletions bbot/modules/output/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ def human_event_str(self, event):
return event_str

def _event_precheck(self, event):
reason = "precheck succeeded"
# special signal event types
if event.type in ("FINISHED",):
return True, "its type is FINISHED"
Expand All @@ -42,24 +43,23 @@ def _event_precheck(self, event):
if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags:
return False, (f"Omitting {event} from output because it's marked as httpx-only")

if event._omit:
return False, "_omit is True"

# omit certain event types
if event.type in self.scan.omitted_event_types:
if event._omit:
if "target" in event.tags:
self.debug(f"Allowing omitted event: {event} because it's a target")
reason = "it's a target"
self.debug(f"Allowing omitted event: {event} because {reason}")
elif event.type in self.get_watched_events():
self.debug(f"Allowing omitted event: {event} because its type is explicitly in watched_events")
reason = "its type is explicitly in watched_events"
self.debug(f"Allowing omitted event: {event} because {reason}")
else:
return False, "its type is omitted in the config"
return False, "_omit is True"

# internal events like those from speculate, ipneighbor
# or events that are over our report distance
if event._internal:
return False, "_internal is True"

return True, "precheck succeeded"
return True, reason

async def _event_postcheck(self, event):
acceptable, reason = await super()._event_postcheck(event)
Expand Down
Loading

0 comments on commit a4eaaf9

Please sign in to comment.