DEF CON Update #2 #1617

Merged Aug 8, 2024

Changes from all commits (25 commits):
3b30b55  fix Dastardly error (Aug 2, 2024)
1ea03fd  blacked (Aug 2, 2024)
cda85df  add timeout for resolve_raw_batch() (Aug 2, 2024)
cbe4746  fix error (Aug 2, 2024)
959b916  more engine debugging (Aug 2, 2024)
278496b  default 5-minute timeout on engine interface (Aug 3, 2024)
eb326e0  LFU cache for wildcard checks (Aug 3, 2024)
d9815d3  LFU cache for DNS engine (Aug 3, 2024)
522e39c  clearer log messages (Aug 3, 2024)
55b6d3c  timeout errors (Aug 4, 2024)
471e159  support presets= in python API (Aug 4, 2024)
a7bcea8  fix keyboardinterrupt errors (Aug 5, 2024)
38d544f  raw dns tweak (Aug 5, 2024)
56eef29  fix ctrl+c error (Aug 5, 2024)
82dd0f4  dns discovery path troubleshooting (Aug 6, 2024)
20abca1  fix dns discovery path duplication (Aug 6, 2024)
9499e03  merge dev (Aug 7, 2024)
9949408  DNS todo (Aug 7, 2024)
3e78bf0  remove old link (Aug 7, 2024)
40d2d10  Update README.md (TheTechromancer, Aug 7, 2024)
fdf9838  fix context formatting error (Aug 8, 2024)
999399b  cleaned context (Aug 8, 2024)
0b0f03c  fix discovery path, write tests (Aug 8, 2024)
43651fd  blacked (Aug 8, 2024)
9cad808  suppress duplicate storage buckets (Aug 8, 2024)
4 changes: 2 additions & 2 deletions README.md
@@ -66,7 +66,7 @@ config:

<!-- END BBOT SUBDOMAIN-ENUM PRESET EXPANDABLE -->

-BBOT consistently finds 20-50% more subdomains than other tools. The bigger the domain, the bigger the difference. To learn how this is possible, see [How It Works](https://www.blacklanternsecurity.com/bbot/Stable/how_it_works/).
+BBOT consistently finds 20-50% more subdomains than other tools. The bigger the domain, the bigger the difference. To learn how this is possible, see [How It Works](https://www.blacklanternsecurity.com/bbot/Dev/how_it_works/).

![subdomain-stats-ebay](https://github.com/blacklanternsecurity/bbot/assets/20261699/de3e7f21-6f52-4ac4-8eab-367296cd385f)

@@ -394,7 +394,7 @@ Thanks to these amazing people for contributing to BBOT! :heart:

Special thanks to:

-- @TheTechromancer for creating [BBOT](https://github.com/blacklanternsecurity/bbot)
+- @TheTechromancer for creating BBOT
- @liquidsec for his extensive work on BBOT's web hacking features, including [badsecrets](https://github.com/blacklanternsecurity/badsecrets) and [baddns](https://github.com/blacklanternsecurity/baddns)
- Steve Micallef (@smicallef) for creating Spiderfoot
- @kerrymilan for his Neo4j and Ansible expertise
7 changes: 0 additions & 7 deletions bbot/cli.py
@@ -24,13 +24,6 @@
www.blacklanternsecurity.com/bbot
"""
print(ascii_art, file=sys.stderr)
-    log_to_stderr(
-        "This is a pre-release of BBOT 2.0. If you upgraded from version 1, we recommend cleaning your old configs etc. before running this version!",
-        level="WARNING",
-    )
-    log_to_stderr(
-        "For details, see https://github.com/blacklanternsecurity/bbot/discussions/1540", level="WARNING"
-    )

scan_name = ""

69 changes: 47 additions & 22 deletions bbot/core/engine.py
@@ -62,15 +62,21 @@ def unpickle(self, binary):
return error_sentinel

async def _infinite_retry(self, callback, *args, **kwargs):
interval = kwargs.pop("_interval", 10)
interval = kwargs.pop("_interval", 15)
context = kwargs.pop("_context", "")
# default overall timeout of 5 minutes (15 second interval * 20 iterations)
max_retries = kwargs.pop("_max_retries", 4 * 5)
if not context:
context = f"{callback.__name__}({args}, {kwargs})"
retries = 0
while not self._shutdown_status:
try:
return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval)
except (TimeoutError, asyncio.TimeoutError):
self.log.debug(f"{self.name}: Timeout waiting for response for {context}, retrying...")
except (TimeoutError, asyncio.exceptions.TimeoutError):
self.log.debug(f"{self.name}: Timeout after {interval:,} seconds{context}, retrying...")
retries += 1
if max_retries is not None and retries > max_retries:
raise TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}")


class EngineClient(EngineBase):
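Note: the capped retry is the heart of this hunk. Each attempt gets `interval` seconds, and after `max_retries` consecutive timeouts the client raises instead of spinning forever. A minimal standalone sketch of the pattern (names and defaults assumed, without the engine's shutdown flag or logging):

```python
import asyncio


async def retry_with_cap(callback, *args, interval=15, max_retries=20):
    """Retry callback until it succeeds; raise after ~interval * max_retries seconds."""
    retries = 0
    while True:
        try:
            # each attempt is abandoned after `interval` seconds
            return await asyncio.wait_for(callback(*args), timeout=interval)
        except (TimeoutError, asyncio.TimeoutError):
            retries += 1
            if retries > max_retries:
                raise TimeoutError(f"timed out after {interval * max_retries:,} seconds")
```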
@@ -205,7 +211,9 @@ async def send_cancel_message(self, socket, context):
message = pickle.dumps({"c": -1})
await self._infinite_retry(socket.send, message)
while 1:
-            response = await self._infinite_retry(socket.recv, _context=f"waiting for CANCEL_OK from {context}")
+            response = await self._infinite_retry(
+                socket.recv, _context=f"waiting for CANCEL_OK from {context}", _max_retries=4
+            )
response = pickle.loads(response)
if isinstance(response, dict):
response = response.get("m", "")
@@ -216,9 +224,9 @@ async def send_shutdown_message(self):
async with self.new_socket() as socket:
# -99 == special shutdown message
message = pickle.dumps({"c": -99})
-            with suppress(TimeoutError, asyncio.TimeoutError):
+            with suppress(TimeoutError, asyncio.exceptions.TimeoutError):
                await asyncio.wait_for(socket.send(message), 0.5)
-            with suppress(TimeoutError, asyncio.TimeoutError):
+            with suppress(TimeoutError, asyncio.exceptions.TimeoutError):
while 1:
response = await asyncio.wait_for(socket.recv(), 0.5)
response = pickle.loads(response)
@@ -390,18 +398,21 @@ async def run_and_return(self, client_id, command_fn, *args, **kwargs):
with self.client_id_context(client_id):
try:
self.log.debug(f"{self.name} run-and-return {fn_str}")
+                result = error_sentinel
try:
result = await command_fn(*args, **kwargs)
except BaseException as e:
error = f"Error in {self.name}.{fn_str}: {e}"
self.log.debug(error)
trace = traceback.format_exc()
self.log.debug(trace)
result = {"_e": (error, trace)}
if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
error = f"Error in {self.name}.{fn_str}: {e}"
self.log.debug(error)
trace = traceback.format_exc()
self.log.debug(trace)
result = {"_e": (error, trace)}
finally:
self.tasks.pop(client_id, None)
self.log.debug(f"{self.name}: Sending response to {fn_str}: {result}")
await self.send_socket_multipart(client_id, result)
if result is not error_sentinel:
self.log.debug(f"{self.name}: Sending response to {fn_str}: {result}")
await self.send_socket_multipart(client_id, result)
except BaseException as e:
self.log.critical(
f"Unhandled exception in {self.name}.run_and_return({client_id}, {command_fn}, {args}, {kwargs}): {e}"
@@ -417,14 +428,16 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
self.log.debug(f"{self.name} run-and-yield {fn_str}")
try:
async for _ in command_fn(*args, **kwargs):
self.log.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}")
await self.send_socket_multipart(client_id, _)
except BaseException as e:
error = f"Error in {self.name}.{fn_str}: {e}"
trace = traceback.format_exc()
self.log.debug(error)
self.log.debug(trace)
result = {"_e": (error, trace)}
await self.send_socket_multipart(client_id, result)
if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
error = f"Error in {self.name}.{fn_str}: {e}"
trace = traceback.format_exc()
self.log.debug(error)
self.log.debug(trace)
result = {"_e": (error, trace)}
await self.send_socket_multipart(client_id, result)
finally:
self.log.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()")
# _s == special signal that means StopIteration
@@ -537,9 +550,21 @@ def new_child_task(self, client_id, coro):
self.child_tasks[client_id] = {task}
return task

-    async def finished_tasks(self, client_id):
+    async def finished_tasks(self, client_id, timeout=None):
        child_tasks = self.child_tasks.get(client_id, set())
-        done, pending = await asyncio.wait(child_tasks, return_when=asyncio.FIRST_COMPLETED)
+        try:
+            done, pending = await asyncio.wait(child_tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
+        except BaseException as e:
+            if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
+                done = set()
+                self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({child_tasks})")
+                for task in child_tasks:
+                    task.cancel()
+            else:
+                if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                    self.log.error(f"{self.name}: Unhandled exception in finished_tasks({child_tasks}): {e}")
+                    self.log.trace(traceback.format_exc())
+                raise
self.child_tasks[client_id] = pending
return done
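One subtlety: `asyncio.wait()` reports a timeout by returning early with an empty `done` set rather than raising, so a standalone version of this wait-and-cancel pattern looks roughly like this (helper name assumed):

```python
import asyncio


async def wait_first(tasks, timeout=None):
    # Wait for the first task to complete; asyncio.wait() does not raise on
    # timeout, it just returns (done=set(), pending=tasks), so cancel the rest.
    done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
    if not done:
        for task in pending:
            task.cancel()
    return done, pending
```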

@@ -562,7 +587,7 @@ async def cancel_task(self, client_id):
async def _cancel_task(self, task):
try:
await asyncio.wait_for(task, timeout=10)
-        except (TimeoutError, asyncio.TimeoutError):
+        except (TimeoutError, asyncio.exceptions.TimeoutError):
self.log.debug(f"{self.name}: Timeout cancelling task")
return
except (KeyboardInterrupt, asyncio.CancelledError):
26 changes: 21 additions & 5 deletions bbot/core/event/base.py
@@ -113,6 +113,9 @@ class BaseEvent:
_data_validator = None
# Whether to increment scope distance if the child and parent hosts are the same
_scope_distance_increment_same_host = False
+    # Don't allow duplicates to occur within a parent chain
+    # In other words, don't emit the event if the same one already exists in its discovery context
+    _suppress_chain_dupes = False

def __init__(
self,
@@ -169,6 +172,7 @@ def __init__(
self._resolved_hosts = set()
self.dns_children = dict()
self._discovery_context = ""
+        self._discovery_context_regex = re.compile(r"\{(?:event|module)[^}]*\}")
self.web_spider_distance = 0

# for creating one-off events without enforcing parent requirement
@@ -339,19 +343,25 @@ def discovery_context(self):

@discovery_context.setter
def discovery_context(self, context):
+        def replace(match):
+            s = match.group()
+            return s.format(module=self.module, event=self)
+
        try:
-            self._discovery_context = context.format(module=self.module, event=self)
+            self._discovery_context = self._discovery_context_regex.sub(replace, context)
        except Exception as e:
-            log.warning(f"Error formatting discovery context for {self}: {e} (context: '{context}')")
+            log.trace(f"Error formatting discovery context for {self}: {e} (context: '{context}')")
            self._discovery_context = context

    @property
    def discovery_path(self):
        """
        This event's full discovery context, including those of all its parents
        """
-        full_event_chain = list(reversed(self.get_parents())) + [self]
-        return [[e.id, e.discovery_context] for e in full_event_chain if e.type != "SCAN"]
+        parent_path = []
+        if self.parent is not None and self != self.parent:
+            parent_path = self.parent.discovery_path
+        return parent_path + [[self.id, self.discovery_context]]

@property
def words(self):
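The setter change above is what the "fix context formatting error" commit refers to: instead of passing the whole context string through `str.format()` (which raises on stray braces in scan data, e.g. raw JSON), only `{event...}` and `{module...}` placeholders are substituted. A standalone sketch of the idea (function name hypothetical):

```python
import re

PLACEHOLDER = re.compile(r"\{(?:event|module)[^}]*\}")


def format_context(context, module, event):
    # Expand only {event...}/{module...} placeholders; all other braces
    # in the string are left untouched instead of crashing str.format().
    def replace(match):
        return match.group().format(module=module, event=event)

    return PLACEHOLDER.sub(replace, context)
```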
@@ -870,6 +880,10 @@ class SCAN(BaseEvent):
def _data_human(self):
return f"{self.data['name']} ({self.data['id']})"

+    @property
+    def discovery_path(self):
+        return []


class FINISHED(BaseEvent):
"""
@@ -1165,6 +1179,7 @@ def pretty_string(self):

class STORAGE_BUCKET(DictEvent, URL_UNVERIFIED):
_always_emit = True
+    _suppress_chain_dupes = True

class _data_validator(BaseModel):
name: str
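`STORAGE_BUCKET` is the first event type to opt into the new `_suppress_chain_dupes` flag, which backs the "suppress duplicate storage buckets" commit: a bucket rediscovered downstream of itself is dropped rather than re-emitted. Conceptually the rule looks something like this (hypothetical sketch; the actual check lives in the event-processing pipeline, not in this file):

```python
def is_chain_duplicate(event):
    # True if an identical event already exists in this event's parent
    # chain; the root event is its own parent, which terminates the walk.
    parent = event.parent
    while parent is not None:
        if parent == event:
            return True
        if parent is parent.parent:
            break
        parent = parent.parent
    return False
```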
@@ -1436,7 +1451,8 @@ class FILESYSTEM(DictPathEvent):


class RAW_DNS_RECORD(DictHostEvent):
-    pass
+    # don't emit raw DNS records for affiliates
+    _always_emit_tags = ["target"]


def make_event(
26 changes: 24 additions & 2 deletions bbot/core/helpers/async_helpers.py
@@ -2,9 +2,10 @@
import random
import asyncio
import logging
+import functools
from datetime import datetime
-from cachetools import LRUCache
from .misc import human_timedelta
+from cachetools import keys, LRUCache
from contextlib import asynccontextmanager

log = logging.getLogger("bbot.core.helpers.async_helpers")
@@ -33,7 +34,7 @@ class NamedLock:
E.g. simultaneous DNS lookups on the same hostname
"""

-    def __init__(self, max_size=1000):
+    def __init__(self, max_size=10000):
self._cache = LRUCache(maxsize=max_size)

@asynccontextmanager
@@ -105,3 +106,24 @@ def async_to_sync_gen(async_gen):
yield loop.run_until_complete(async_gen.__anext__())
except StopAsyncIteration:
        pass
+
+
+def async_cachedmethod(cache, key=keys.hashkey):
+    def decorator(method):
+        async def wrapper(self, *args, **kwargs):
+            method_cache = cache(self)
+            k = key(*args, **kwargs)
+            try:
+                return method_cache[k]
+            except KeyError:
+                pass
+            ret = await method(self, *args, **kwargs)
+            try:
+                method_cache[k] = ret
+            except ValueError:
+                pass
+            return ret
+
+        return functools.wraps(method)(wrapper)
+
+    return decorator
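The new decorator is essentially `cachetools.cachedmethod` adapted for coroutines: the cache is looked up per instance, hits return without awaiting the method, and misses await it once and store the result (a value too large for the cache raises `ValueError` and is silently skipped). A hypothetical usage sketch:

```python
from cachetools import LFUCache


class Resolver:
    def __init__(self):
        # per-instance cache; evicts the least-frequently-used entry when full
        self._cache = LFUCache(maxsize=1000)

    @async_cachedmethod(lambda self: self._cache)
    async def lookup(self, hostname):
        ...  # expensive DNS query runs only on a cache miss
```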
7 changes: 7 additions & 0 deletions bbot/core/helpers/dns/dns.py
@@ -2,10 +2,12 @@
import logging
import dns.exception
import dns.asyncresolver
+from cachetools import LFUCache
from radixtarget import RadixTarget

from bbot.errors import DNSError
from bbot.core.engine import EngineClient
+from bbot.core.helpers.async_helpers import async_cachedmethod
from ..misc import clean_dns_record, is_ip, is_domain, is_dns_name

from .engine import DNSEngine
@@ -79,6 +81,9 @@ def __init__(self, parent_helper):
# brute force helper
self._brute = None

+        self._is_wildcard_cache = LFUCache(maxsize=1000)
+        self._is_wildcard_domain_cache = LFUCache(maxsize=1000)

async def resolve(self, query, **kwargs):
return await self.run_and_return("resolve", query=query, **kwargs)

@@ -111,6 +116,7 @@ def brute(self):
self._brute = DNSBrute(self.parent_helper)
return self._brute

+    @async_cachedmethod(lambda self: self._is_wildcard_cache)
async def is_wildcard(self, query, ips=None, rdtype=None):
"""
Use this method to check whether a *host* is a wildcard entry
@@ -156,6 +162,7 @@ async def is_wildcard(self, query, ips=None, rdtype=None):

return await self.run_and_return("is_wildcard", query=query, ips=ips, rdtype=rdtype)

+    @async_cachedmethod(lambda self: self._is_wildcard_domain_cache)
async def is_wildcard_domain(self, domain, log_info=False):
domain = self._wildcard_prevalidation(domain)
if not domain:
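With the two caches above, repeated wildcard checks for the same query are answered client-side instead of crossing the engine boundary each time, avoiding redundant DNS traffic. Illustratively (helper and hostname hypothetical):

```python
# First call round-trips to the DNS engine and does the real lookups;
# the result is cached under the key (query, ips, rdtype).
await dns_helper.is_wildcard("www.evilcorp.com")

# An identical second call is served from the LFU cache.
await dns_helper.is_wildcard("www.evilcorp.com")
```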