Skip to content

Commit

Permalink
Merge pull request #1761 from blacklanternsecurity/scan-finish-event
Browse files Browse the repository at this point in the history
Emit SCAN event on finish
  • Loading branch information
TheTechromancer authored Sep 23, 2024
2 parents b2f4988 + c58cf71 commit 8a3916d
Show file tree
Hide file tree
Showing 20 changed files with 138 additions and 92 deletions.
97 changes: 70 additions & 27 deletions bbot/scanner/scanner.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,13 @@ def __init__(
**kwargs (list[str], optional): Additional keyword arguments (passed through to `Preset`).
"""
self._root_event = None
self.start_time = None
self.end_time = None
self.duration = None
self.duration_human = None
self.duration_seconds = None

self._success = False

if scan_id is not None:
self.id = str(id)
Expand Down Expand Up @@ -306,13 +313,13 @@ async def async_start_without_generator(self):

async def async_start(self):
""" """
failed = True
scan_start_time = datetime.now()
self.start_time = datetime.now()
self.root_event.data["started_at"] = self.start_time.isoformat()
try:
await self._prep()

self._start_log_handlers()
self.trace(f'Ran BBOT {__version__} at {scan_start_time}, command: {" ".join(sys.argv)}')
self.trace(f'Ran BBOT {__version__} at {self.start_time}, command: {" ".join(sys.argv)}')
self.trace(f"Target: {self.preset.target.json}")
self.trace(f"Preset: {self.preset.to_dict(redact_secrets=True)}")

Expand Down Expand Up @@ -363,16 +370,19 @@ async def async_start(self):
if self._finished_init and self.modules_finished:
new_activity = await self.finish()
if not new_activity:
self._success = True
await self._mark_finished()
yield self.root_event
break

await asyncio.sleep(0.1)

failed = False
self._success = True

except BaseException as e:
if self.helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
self.stop()
failed = False
self._success = True
else:
try:
raise
Expand All @@ -396,24 +406,46 @@ async def async_start(self):
await self._report()
await self._cleanup()

log_fn = self.hugesuccess
if self.status == "ABORTING":
self.status = "ABORTED"
log_fn = self.hugewarning
elif failed:
self.status = "FAILED"
log_fn = self.critical
else:
self.status = "FINISHED"

scan_run_time = datetime.now() - scan_start_time
scan_run_time = self.helpers.human_timedelta(scan_run_time)
log_fn(f"Scan {self.name} completed in {scan_run_time} with status {self.status}")

await self.dispatcher.on_finish(self)

self._stop_log_handlers()

async def _mark_finished(self):
    """Finalize the scan and emit the final SCAN event.

    Determines the terminal status from current state (ABORTED if the scan is
    aborting, FAILED if ``self._success`` was never set, FINISHED otherwise),
    records end time / duration fields, queues a final SCAN event on every
    output module (except the "python" module), waits until those modules have
    flushed it, then sets ``self.status`` and logs the completion message with
    a severity matching the outcome.
    """
    log_fn = self.hugesuccess
    if self.status == "ABORTING":
        status = "ABORTED"
        log_fn = self.hugewarning
    elif not self._success:
        status = "FAILED"
        log_fn = self.critical
    else:
        status = "FINISHED"

    # record timing; these fields are also surfaced in self.json
    self.end_time = datetime.now()
    self.duration = self.end_time - self.start_time
    self.duration_seconds = self.duration.total_seconds()
    self.duration_human = self.helpers.human_timedelta(self.duration)

    status_message = f"Scan {self.name} completed in {self.duration_human} with status {status}"

    scan_finish_event = self.make_root_event(status_message)
    scan_finish_event.data["status"] = status

    # queue final scan event with output modules
    output_modules = [m for m in self.modules.values() if m._type == "output" and m.name != "python"]
    for m in output_modules:
        await m.queue_event(scan_finish_event)
    # wait until output modules are flushed
    # (was: self.verbose(modules_finished) — leftover debug logging of a bare boolean)
    while not all(m.finished for m in output_modules):
        self.verbose("Waiting for output modules to flush final SCAN event")
        await asyncio.sleep(0.05)

    self.status = status
    log_fn(status_message)

def _start_modules(self):
self.verbose(f"Starting module worker loops")
for module in self.modules.values():
Expand Down Expand Up @@ -727,8 +759,8 @@ async def finish(self):
await module.queue_event(finished_event)
self.verbose("Completed finish()")
return True
# Return False if no new events were generated since last time
self.verbose("Completed final finish()")
# Return False if no new events were generated since last time
return False

def _drain_queues(self):
Expand Down Expand Up @@ -948,15 +980,18 @@ def root_event(self):
```
"""
if self._root_event is None:
root_event = self.make_event(data=self.json, event_type="SCAN", dummy=True)
root_event._id = self.id
root_event.scope_distance = 0
root_event.parent = root_event
root_event.module = self._make_dummy_module(name="TARGET", _type="TARGET")
root_event.discovery_context = f"Scan {self.name} started at {root_event.timestamp}"
self._root_event = root_event
self._root_event = self.make_root_event(f"Scan {self.name} started at {self.start_time}")
self._root_event.data["status"] = self.status
return self._root_event

def make_root_event(self, context):
    """Build a dummy SCAN-type root event for this scan.

    Args:
        context (str): Discovery context string attached to the event.

    Returns:
        The SCAN event: self-parented, at scope distance 0, carrying this
        scan's id and a dummy "TARGET" module.
    """
    event = self.make_event(data=self.json, event_type="SCAN", dummy=True, context=context)
    # a root event anchors the graph: it is its own parent, at distance zero
    event.scope_distance = 0
    event.parent = event
    event._id = self.id
    event.module = self._make_dummy_module(name="TARGET", _type="TARGET")
    return event

@property
def dns_strings(self):
"""
Expand Down Expand Up @@ -1031,6 +1066,14 @@ def json(self):
j.update({i: v})
j["target"] = self.preset.target.json
j["preset"] = self.preset.to_dict(redact_secrets=True)
if self.start_time is not None:
j["started_at"] = self.start_time.isoformat()
if self.end_time is not None:
j["finished_at"] = self.end_time.isoformat()
if self.duration is not None:
j["duration_seconds"] = self.duration_seconds
if self.duration_human is not None:
j["duration"] = self.duration_human
return j

def debug(self, *args, trace=False, **kwargs):
Expand Down
16 changes: 8 additions & 8 deletions bbot/test/test_step_1/test_dns.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def custom_lookup(query, rdtype):
await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup)

events = [e async for e in scan.async_start()]
assert len(events) == 11
assert len(events) == 12
assert len([e for e in events if e.type == "DNS_NAME"]) == 5
assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4
assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [
Expand Down Expand Up @@ -320,7 +320,7 @@ def custom_lookup(query, rdtype):
await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup)

events = [e async for e in scan.async_start()]
assert len(events) == 11
assert len(events) == 12
assert len([e for e in events if e.type == "DNS_NAME"]) == 5
assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4
assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [
Expand Down Expand Up @@ -418,7 +418,7 @@ def custom_lookup(query, rdtype):

events = [e async for e in scan.async_start()]

assert len(events) == 10
assert len(events) == 11
assert len([e for e in events if e.type == "DNS_NAME"]) == 5
assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4
assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [
Expand Down Expand Up @@ -546,8 +546,8 @@ def custom_lookup(query, rdtype):
)
await scan2.ingress_module.queue_event(other_event, {})
events = [e async for e in scan2.async_start()]
assert len(events) == 3
assert 1 == len([e for e in events if e.type == "SCAN"])
assert len(events) == 4
assert 2 == len([e for e in events if e.type == "SCAN"])
unmodified_wildcard_events = [
e for e in events if e.type == "DNS_NAME" and e.data == "asdfl.gashdgkjsadgsdf.github.io"
]
Expand Down Expand Up @@ -592,8 +592,8 @@ def custom_lookup(query, rdtype):
)
await scan2.ingress_module.queue_event(other_event, {})
events = [e async for e in scan2.async_start()]
assert len(events) == 3
assert 1 == len([e for e in events if e.type == "SCAN"])
assert len(events) == 4
assert 2 == len([e for e in events if e.type == "SCAN"])
unmodified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and "_wildcard" not in e.data]
assert len(unmodified_wildcard_events) == 2
assert 1 == len(
Expand Down Expand Up @@ -729,7 +729,7 @@ async def test_dns_graph_structure(bbot_scanner):
}
)
events = [e async for e in scan.async_start()]
assert len(events) == 5
assert len(events) == 6
non_scan_events = [e for e in events if e.type != "SCAN"]
assert sorted([e.type for e in non_scan_events]) == ["DNS_NAME", "DNS_NAME", "DNS_NAME", "URL_UNVERIFIED"]
events_by_data = {e.data: e for e in non_scan_events}
Expand Down
2 changes: 1 addition & 1 deletion bbot/test/test_step_1/test_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -656,7 +656,7 @@ async def handle_event(self, event):
)

events = [e async for e in scan.async_start()]
assert len(events) == 6
assert len(events) == 7

assert 1 == len(
[
Expand Down
2 changes: 1 addition & 1 deletion bbot/test/test_step_1/test_manager_deduplication.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs)
_dns_mock=dns_mock_chain,
)

assert len(events) == 21
assert len(events) == 22
assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "accept_dupes.test.notreal" and str(e.module) == "accept_dupes"])
assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "default_module.test.notreal" and str(e.module) == "default_module"])
assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "no_suppress_dupes.test.notreal" and str(e.module) == "no_suppress_dupes" and e.parent.data == "accept_dupes.test.notreal"])
Expand Down
Loading

0 comments on commit 8a3916d

Please sign in to comment.