From 9821cd861f53373435762a98efcfb38449e8c3f6 Mon Sep 17 00:00:00 2001
From: github-actions
Date: Fri, 9 Aug 2024 08:21:08 +0000
Subject: [PATCH] Deployed db565b4 to Stable with MkDocs 1.6.0 and mike 2.1.2

---
 Stable/404.html | 346 +-
 ...m.min.css => extra-style.c5k7f2d4.min.css} | 2 +-
 ...s.map => extra-style.c5k7f2d4.min.css.map} | 2 +-
 Stable/comparison/index.html | 136 +-
 Stable/contribution/index.html | 403 +-
 Stable/data/chord_graph/entities.json | 1857 -----
 Stable/data/chord_graph/rels.json | 1577 ----
 Stable/data/chord_graph/vega.json | 197 +
 .../architecture}/index.html | 318 +-
 Stable/dev/basemodule/index.html | 1423 ++--
 Stable/dev/core/index.html | 1212 +++
 Stable/dev/dev_environment/index.html | 659 ++
 Stable/dev/discord_bot/index.html | 672 ++
 Stable/dev/engine/index.html | 1888 +++++
 Stable/dev/event/index.html | 1411 ++--
 Stable/dev/helpers/command/index.html | 172 +-
 Stable/dev/helpers/dns/index.html | 3276 ++------
 Stable/dev/helpers/index.html | 138 +-
 Stable/dev/helpers/interactsh/index.html | 142 +-
 Stable/dev/helpers/misc/index.html | 6299 ++++++++--------
 Stable/dev/helpers/web/index.html | 2359 +++---
 Stable/dev/helpers/wordcloud/index.html | 152 +-
 Stable/dev/index.html | 337 +-
 Stable/dev/module_howto/index.html | 824 ++
 Stable/dev/presets/index.html | 4647 ++++++++++++
 Stable/dev/scanner/index.html | 1962 ++---
 Stable/dev/target/index.html | 1343 ++--
 Stable/dev/tests/index.html | 737 ++
 .../diagrams/engine-architecture-bbot-v1.png | Bin 0 -> 81814 bytes
 .../diagrams/engine-architecture-bbot-v2.png | Bin 0 -> 163680 bytes
 Stable/diagrams/engine-architecture.drawio | 141 +
 Stable/diagrams/event-flow.drawio | 135 +
 Stable/diagrams/event-flow.png | Bin 0 -> 1853882 bytes
 Stable/diagrams/module-recursion.drawio | 57 +
 Stable/diagrams/module-recursion.png | Bin 0 -> 44633 bytes
 Stable/how_it_works/index.html | 204 +-
 Stable/index.html | 171 +-
 Stable/javascripts/vega-embed@6.js | 7 +
 Stable/javascripts/vega-lite@5.js | 2 +
 Stable/javascripts/vega@5.js | 2 +
 Stable/modules/custom_yara_rules/index.html | 694 ++
 Stable/modules/internal_modules/index.html | 652 ++
 Stable/modules/list_of_modules/index.html | 605 +-
 Stable/modules/nuclei/index.html | 144 +-
 Stable/objects.inv | Bin 2613 -> 3703 bytes
 Stable/release_history/index.html | 136 +-
 Stable/scanning/advanced/index.html | 245 +-
 Stable/scanning/configuration/index.html | 891 ++-
 Stable/scanning/events/index.html | 248 +-
 Stable/scanning/index.html | 210 +-
 Stable/scanning/output/index.html | 215 +-
 Stable/scanning/presets/index.html | 795 ++
 Stable/scanning/presets_list/index.html | 1135 +++
 Stable/scanning/tips_and_tricks/index.html | 195 +-
 Stable/search/search_index.json | 2 +-
 Stable/site/404.html | 616 --
 Stable/site/assets/images/favicon.png | Bin 1870 -> 0 bytes
 .../assets/javascripts/bundle.b425cdc4.min.js | 29 -
 .../javascripts/bundle.b425cdc4.min.js.map | 8 -
 .../javascripts/lunr/min/lunr.ar.min.js | 1 -
 .../javascripts/lunr/min/lunr.da.min.js | 18 -
 .../javascripts/lunr/min/lunr.de.min.js | 18 -
 .../javascripts/lunr/min/lunr.du.min.js | 18 -
 .../javascripts/lunr/min/lunr.es.min.js | 18 -
 .../javascripts/lunr/min/lunr.fi.min.js | 18 -
 .../javascripts/lunr/min/lunr.fr.min.js | 18 -
 .../javascripts/lunr/min/lunr.hi.min.js | 1 -
 .../javascripts/lunr/min/lunr.hu.min.js | 18 -
 .../javascripts/lunr/min/lunr.hy.min.js | 1 -
 .../javascripts/lunr/min/lunr.it.min.js | 18 -
 .../javascripts/lunr/min/lunr.ja.min.js | 1 -
 .../javascripts/lunr/min/lunr.jp.min.js | 1 -
 .../javascripts/lunr/min/lunr.kn.min.js | 1 -
 .../javascripts/lunr/min/lunr.ko.min.js | 1 -
 .../javascripts/lunr/min/lunr.multi.min.js | 1 -
 .../javascripts/lunr/min/lunr.nl.min.js | 18 -
 .../javascripts/lunr/min/lunr.no.min.js | 18 -
 .../javascripts/lunr/min/lunr.pt.min.js | 18 -
 .../javascripts/lunr/min/lunr.ro.min.js | 18 -
 .../javascripts/lunr/min/lunr.ru.min.js | 18 -
 .../javascripts/lunr/min/lunr.sa.min.js | 1 -
 .../lunr/min/lunr.stemmer.support.min.js | 1 -
 .../javascripts/lunr/min/lunr.sv.min.js | 18 -
 .../javascripts/lunr/min/lunr.ta.min.js | 1 -
 .../javascripts/lunr/min/lunr.te.min.js | 1 -
 .../javascripts/lunr/min/lunr.th.min.js | 1 -
 .../javascripts/lunr/min/lunr.tr.min.js | 18 -
 .../javascripts/lunr/min/lunr.vi.min.js | 1 -
 .../javascripts/lunr/min/lunr.zh.min.js | 1 -
 .../site/assets/javascripts/lunr/tinyseg.js | 206 -
 .../site/assets/javascripts/lunr/wordcut.js | 6708 -----------------
 .../workers/search.208ed371.min.js | 42 -
 .../workers/search.208ed371.min.js.map | 8 -
 .../stylesheets/extra-style.bft70rkg.min.css | 3 -
 .../extra-style.bft70rkg.min.css.map | 12 -
 .../assets/stylesheets/main.26e3688c.min.css | 1 -
 .../stylesheets/main.26e3688c.min.css.map | 1 -
 .../stylesheets/palette.ecc896b0.min.css | 1 -
 .../stylesheets/palette.ecc896b0.min.css.map | 1 -
 Stable/site/bbot.png | Bin 13380 -> 0 bytes
 Stable/site/comparison/index.html | 293 -
 Stable/site/contribution/index.html | 470 --
 Stable/site/how_it_works/index.html | 341 -
 Stable/site/index.html | 405 -
 Stable/site/javascripts/tablesort.js | 6 -
 Stable/site/release_history/index.html | 397 -
 Stable/site/scanning/advanced/index.html | 429 --
 Stable/site/scanning/configuration/index.html | 1272 ----
 Stable/site/scanning/events/index.html | 527 --
 Stable/site/scanning/index.html | 648 --
 .../site/scanning/list_of_modules/index.html | 1018 ---
 Stable/site/scanning/output/index.html | 497 --
 .../site/scanning/tips_and_tricks/index.html | 383 -
 Stable/site/search/search_index.json | 1 -
 Stable/site/sitemap.xml | 68 -
 Stable/site/sitemap.xml.gz | Bin 338 -> 0 bytes
 Stable/site/troubleshooting/index.html | 345 -
 Stable/sitemap.xml | 113 +-
 Stable/sitemap.xml.gz | Bin 452 -> 517 bytes
 Stable/troubleshooting/index.html | 136 +-
 120 files changed, 27310 insertions(+), 29337 deletions(-)
 rename Stable/assets/stylesheets/{extra-style.xrba8u6m.min.css => extra-style.c5k7f2d4.min.css} (96%)
 rename Stable/assets/stylesheets/{extra-style.xrba8u6m.min.css.map => extra-style.c5k7f2d4.min.css.map} (98%)
 delete mode 100644 Stable/data/chord_graph/entities.json
 delete mode 100644 Stable/data/chord_graph/rels.json
 create mode 100644 Stable/data/chord_graph/vega.json
 rename Stable/{data/chord_graph/frontend_notes => dev/architecture}/index.html (62%)
 create mode 100644 Stable/dev/core/index.html
 create mode 100644 Stable/dev/dev_environment/index.html
 create mode 100644 Stable/dev/discord_bot/index.html
 create mode 100644 Stable/dev/engine/index.html
 create mode 100644 Stable/dev/module_howto/index.html
 create mode 100644 Stable/dev/presets/index.html
 create mode 100644 Stable/dev/tests/index.html
 create mode 100644 Stable/diagrams/engine-architecture-bbot-v1.png
 create mode 100644 Stable/diagrams/engine-architecture-bbot-v2.png
 create mode 100644 Stable/diagrams/engine-architecture.drawio
 create mode 100644 Stable/diagrams/event-flow.drawio
 create mode 100644 Stable/diagrams/event-flow.png
 create mode 100644 Stable/diagrams/module-recursion.drawio
 create mode 100644 Stable/diagrams/module-recursion.png
 create mode 100644 Stable/javascripts/vega-embed@6.js
 create mode 100644 Stable/javascripts/vega-lite@5.js
 create mode 100644 Stable/javascripts/vega@5.js
 create mode 100644 Stable/modules/custom_yara_rules/index.html
 create mode 100644 Stable/modules/internal_modules/index.html
 create mode 100644 Stable/scanning/presets/index.html
 create mode 100644 Stable/scanning/presets_list/index.html
 delete mode 100644 Stable/site/404.html
 delete mode 100644 Stable/site/assets/images/favicon.png
 delete mode 100644 Stable/site/assets/javascripts/bundle.b425cdc4.min.js
 delete mode 100644 Stable/site/assets/javascripts/bundle.b425cdc4.min.js.map
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.ar.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.da.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.de.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.du.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.es.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.fi.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.fr.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.hi.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.hu.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.hy.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.it.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.ja.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.jp.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.kn.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.ko.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.multi.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.nl.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.no.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.pt.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.ro.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.ru.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.sa.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.stemmer.support.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.sv.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.ta.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.te.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.th.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.tr.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.vi.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/min/lunr.zh.min.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/tinyseg.js
 delete mode 100644 Stable/site/assets/javascripts/lunr/wordcut.js
 delete mode 100644 Stable/site/assets/javascripts/workers/search.208ed371.min.js
 delete mode 100644 Stable/site/assets/javascripts/workers/search.208ed371.min.js.map
 delete mode 100644 Stable/site/assets/stylesheets/extra-style.bft70rkg.min.css
 delete mode 100644 Stable/site/assets/stylesheets/extra-style.bft70rkg.min.css.map
 delete mode 100644 Stable/site/assets/stylesheets/main.26e3688c.min.css
 delete mode 100644 Stable/site/assets/stylesheets/main.26e3688c.min.css.map
 delete mode 100644 Stable/site/assets/stylesheets/palette.ecc896b0.min.css
 delete mode 100644 Stable/site/assets/stylesheets/palette.ecc896b0.min.css.map
 delete mode 100644 Stable/site/bbot.png
 delete mode 100644 Stable/site/comparison/index.html
 delete mode 100644 Stable/site/contribution/index.html
 delete mode 100644 Stable/site/how_it_works/index.html
 delete mode 100644 Stable/site/index.html
 delete mode 100644 Stable/site/javascripts/tablesort.js
 delete mode 100644 Stable/site/release_history/index.html
 delete mode 100644 Stable/site/scanning/advanced/index.html
 delete mode 100644 Stable/site/scanning/configuration/index.html
 delete mode 100644 Stable/site/scanning/events/index.html
 delete mode 100644 Stable/site/scanning/index.html
 delete mode 100644 Stable/site/scanning/list_of_modules/index.html
 delete mode 100644 Stable/site/scanning/output/index.html
 delete mode 100644 Stable/site/scanning/tips_and_tricks/index.html
 delete mode 100644 Stable/site/search/search_index.json
 delete mode 100644 Stable/site/sitemap.xml
 delete mode 100644 Stable/site/sitemap.xml.gz
 delete mode 100644 Stable/site/troubleshooting/index.html

diff --git a/Stable/404.html b/Stable/404.html
index 5de38f3e28..22b693c552 100644
--- a/Stable/404.html
+++ b/Stable/404.html
@@ -228,7 +228,7 @@
  • - + Developer Manual @@ -509,6 +509,98 @@ + + + + + + + + + +
  • + + + + + + + + + + + + +
  • + + + + + + + + + +
  • @@ -756,6 +848,27 @@ +
  • + + + + + Contribution + + + + +
  • + + + + + + + + + +
  • @@ -856,11 +969,11 @@
  • - + - How to Write a Module + Development Overview @@ -877,11 +990,11 @@
  • - + - Development Overview + Setting Up a Dev Environment @@ -897,6 +1010,132 @@ +
  • + + + + + BBOT Internal Architecture + + + + +
  • + + + + + + + + + + +
  • + + + + + How to Write a BBOT Module + + + + +
  • + + + + + + + + + + +
  • + + + + + Unit Tests + + + + +
  • + + + + + + + + + + +
  • + + + + + Discord Bot Example + + + + +
  • + + + + + + + + + + + + + + + + + + + +
  • + + + + + + + + + + + + +
  • + + @@ -1226,15 +1536,27 @@

    404 - Not found

    - + - - + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Stable/assets/stylesheets/extra-style.xrba8u6m.min.css b/Stable/assets/stylesheets/extra-style.c5k7f2d4.min.css similarity index 96% rename from Stable/assets/stylesheets/extra-style.xrba8u6m.min.css rename to Stable/assets/stylesheets/extra-style.c5k7f2d4.min.css index c4a56c69d3..5863d7da1c 100644 --- a/Stable/assets/stylesheets/extra-style.xrba8u6m.min.css +++ b/Stable/assets/stylesheets/extra-style.c5k7f2d4.min.css @@ -1,3 +1,3 @@ :root{--bbot-orange: #ff8400}p img{max-width:60em !important}.demonic-jimmy{color:var(--bbot-orange)}.md-nav__link--active{font-weight:bold}.md-typeset__table td:first-child{font-weight:bold}a.md-source,.md-header__topic>span,a:hover{color:var(--bbot-orange)}article.md-content__inner h1{font-weight:500;color:var(--bbot-orange)}article.md-content__inner h1,article.md-content__inner h2{color:var(--bbot-orange)}article.md-content__inner h2,article.md-content__inner h3,article.md-content__inner h4,article.md-content__inner h5{font-weight:300}article.md-content__inner div.highlight{background-color:unset !important}table{font-family:monospace}table td{max-width:100em}[data-md-color-primary=black] p a.md-button--primary{background-color:black;border:none}[data-md-color-primary=black] p a.md-button--primary:hover{background-color:var(--bbot-orange)}[data-md-color-scheme="slate"] div.md-source__repository ul{color:white}[data-md-color-scheme="slate"] .md-nav__link{color:white}[data-md-color-scheme="slate"] .md-nav__link--active{font-weight:bold}[data-md-color-scheme="slate"] .md-typeset__table tr{background-color:#202027}[data-md-color-scheme="slate"] .md-nav__link.md-nav__link--active{color:var(--bbot-orange)}[data-md-color-scheme="slate"] .md-typeset__table thead tr{color:var(--bbot-orange);background-color:var(--md-primary-fg-color--dark)} -/*# sourceMappingURL=extra-style.xrba8u6m.min.css.map */ \ No newline at end of file +/*# sourceMappingURL=extra-style.c5k7f2d4.min.css.map */ \ No newline at end of file diff --git a/Stable/assets/stylesheets/extra-style.xrba8u6m.min.css.map b/Stable/assets/stylesheets/extra-style.c5k7f2d4.min.css.map similarity index 98% rename from Stable/assets/stylesheets/extra-style.xrba8u6m.min.css.map rename to Stable/assets/stylesheets/extra-style.c5k7f2d4.min.css.map index e0dc7f9f40..f721b43c90 100644 --- a/Stable/assets/stylesheets/extra-style.xrba8u6m.min.css.map +++ b/Stable/assets/stylesheets/extra-style.c5k7f2d4.min.css.map @@ -1,6 +1,6 @@ { "version": 3, - "file": "extra-style.xrba8u6m.min.css", + "file": "extra-style.c5k7f2d4.min.css", "sources": [ "extra_sass/style.css.scss" ], diff --git a/Stable/comparison/index.html b/Stable/comparison/index.html index a53e10467a..1a5496b6bb 100644 --- a/Stable/comparison/index.html +++ b/Stable/comparison/index.html @@ -20,7 +20,7 @@ - + @@ -117,7 +117,7 @@
  • - + Developer Manual @@ -259,6 +259,37 @@
  • +
  • + + + +
  • @@ -343,6 +374,13 @@
  • -
  • - -
  • + + -
  • -
  • - -
  • + + -
  • - + Developer Manual @@ -223,6 +223,37 @@
  • +
  • + + + +
  • @@ -307,6 +338,13 @@ + @@ -808,6 +905,14 @@

  • +deps_modules + (List) + – +
    +

    Other BBOT modules this module depends on. Empty list by default.

    +
    +
  • +
  • deps_pip (List) – @@ -925,7 +1030,7 @@

  • -max_event_handlers +module_threads (int) –
    @@ -2388,7 +2493,31 @@

    1390 1391 1392 -1393

    class BaseModule:
    +1393
    +1394
    +1395
    +1396
    +1397
    +1398
    +1399
    +1400
    +1401
    +1402
    +1403
    +1404
    +1405
    +1406
    +1407
    +1408
    +1409
    +1410
    +1411
    +1412
    +1413
    +1414
    +1415
    +1416
    +1417
    class BaseModule:
         """The base class for all BBOT modules.
     
         Attributes:
    @@ -2400,6 +2529,8 @@ 

    flags (List): Flags indicating the type of module (must have at least "safe" or "aggressive" and "passive" or "active"). + deps_modules (List): Other BBOT modules this module depends on. Empty list by default. + deps_pip (List): Python dependencies to install via pip. Empty list by default. deps_apt (List): APT package dependencies to install. Empty list by default. @@ -2434,7 +2565,7 @@

    options_desc (Dict): Descriptions for options, e.g., {"api_key": "API Key"}. Empty dict by default. - max_event_handlers (int): Maximum concurrent instances of handle_event() or handle_batch(). Default is 1. + module_threads (int): Maximum concurrent instances of handle_event() or handle_batch(). Default is 1. batch_size (int): Size of batches processed by handle_batch(). Default is 1. @@ -2462,6 +2593,7 @@

    options = {} options_desc = {} + deps_modules = [] deps_pip = [] deps_apt = [] deps_shell = [] @@ -2476,17 +2608,21 @@

    target_only = False in_scope_only = False - _max_event_handlers = 1 + _module_threads = 1 _batch_size = 1 batch_wait = 10 failed_request_abort_threshold = 5 + default_discovery_context = "{module} discovered {event.type}: {event.data}" + _preserve_graph = False _stats_exclude = False _qsize = 1000 _priority = 3 _name = "base" _type = "scan" + _intercept = False + _shuffle_incoming_queue = True def __init__(self, scan): """Initializes a module instance. @@ -2712,11 +2848,11 @@

    return batch_size @property - def max_event_handlers(self): - max_event_handlers = self.config.get("max_event_handlers", None) - if max_event_handlers is None: - max_event_handlers = self._max_event_handlers - return max_event_handlers + def module_threads(self): + module_threads = self.config.get("module_threads", None) + if module_threads is None: + module_threads = self._module_threads + return module_threads @property def auth_secret(self): @@ -2770,8 +2906,7 @@

    self.verbose(f"Handling batch of {len(events):,} events") submitted = True async with self.scan._acatch(f"{self.name}.handle_batch()"): - handle_batch_task = asyncio.create_task(self.handle_batch(*events)) - await handle_batch_task + await self.handle_batch(*events) self.verbose(f"Finished handling batch of {len(events):,} events") if finish: context = f"{self.name}.finish()" @@ -2790,7 +2925,7 @@

    raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False. Examples: - >>> new_event = self.make_event("1.2.3.4", source=event) + >>> new_event = self.make_event("1.2.3.4", parent=event) >>> await self.emit_event(new_event) Returns: @@ -2800,6 +2935,10 @@

    ValidationError: If the event could not be validated and raise_error is True. """ raise_error = kwargs.pop("raise_error", False) + module = kwargs.pop("module", None) + if module is None: + if (not args) or getattr(args[0], "module", None) is None: + kwargs["module"] = self try: event = self.scan.make_event(*args, **kwargs) except ValidationError as e: @@ -2807,8 +2946,6 @@

    raise self.warning(f"{e}") return - if not event.module: - event.module = self return event async def emit_event(self, *args, **kwargs): @@ -2829,9 +2966,9 @@

    ``` Examples: - >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + >>> await self.emit_event("www.evilcorp.com", parent=event, tags=["affiliate"]) - >>> new_event = self.make_event("1.2.3.4", source=event) + >>> new_event = self.make_event("1.2.3.4", parent=event) >>> await self.emit_event(new_event) Returns: @@ -2849,8 +2986,9 @@

    event = self.make_event(*args, **event_kwargs) if event: await self.queue_outgoing_event(event, **emit_kwargs) + return event - async def _events_waiting(self): + async def _events_waiting(self, batch_size=None): """ Asynchronously fetches events from the incoming_event_queue, up to a specified batch size. @@ -2868,10 +3006,12 @@

    - "FINISHED" events are handled differently and the finish flag is set to True. - If the queue is empty or the batch size is reached, the loop breaks. """ + if batch_size is None: + batch_size = self.batch_size events = [] finish = False while self.incoming_event_queue: - if len(events) > self.batch_size: + if batch_size != -1 and len(events) > self.batch_size: break try: event = self.incoming_event_queue.get_nowait() @@ -2898,7 +3038,8 @@

    def start(self): self._tasks = [ - asyncio.create_task(self._worker(), name=f"{self.name}._worker()") for _ in range(self.max_event_handlers) + asyncio.create_task(self._worker(), name=f"{self.scan.name}.{self.name}._worker()") + for _ in range(self.module_threads) ] async def _setup(self): @@ -2928,8 +3069,7 @@

    status = False self.debug(f"Setting up module {self.name}") try: - setup_task = asyncio.create_task(self.setup()) - result = await setup_task + result = await self.setup() if type(result) == tuple and len(result) == 2: status, msg = result else: @@ -2937,17 +3077,17 @@

    msg = status_codes[status] self.debug(f"Finished setting up module {self.name}") except Exception as e: - self.set_error_state() + self.set_error_state(f"Unexpected error during module setup: {e}", critical=True) msg = f"{e}" self.trace() - return self.name, status, str(msg) + return self, status, str(msg) async def _worker(self): """ The core worker loop for the module, responsible for handling events from the incoming event queue. This method is a coroutine and is run asynchronously. Multiple instances can run simultaneously based on - the 'max_event_handlers' configuration. The worker dequeues events from 'incoming_event_queue', performs + the 'module_threads' configuration. The worker dequeues events from 'incoming_event_queue', performs necessary prechecks, and passes the event to the appropriate handler function. Args: @@ -2966,7 +3106,7 @@

    - Each event is subject to a post-check via '_event_postcheck()' to decide whether it should be handled. - Special 'FINISHED' events trigger the 'finish()' method of the module. """ - async with self.scan._acatch(context=self._worker): + async with self.scan._acatch(context=self._worker, unhandled_is_critical=True): try: while not self.scan.stopping and not self.errored: # hold the reigns if our outgoing queue is full @@ -2996,28 +3136,34 @@

    if event.type == "FINISHED": context = f"{self.name}.finish()" async with self.scan._acatch(context), self._task_counter.count(context): - finish_task = asyncio.create_task(self.finish()) - await finish_task + await self.finish() else: context = f"{self.name}.handle_event({event})" self.scan.stats.event_consumed(event, self) self.debug(f"Handling {event}") async with self.scan._acatch(context), self._task_counter.count(context): - task_name = f"{self.name}.handle_event({event})" - handle_event_task = asyncio.create_task(self.handle_event(event), name=task_name) - await handle_event_task + await self.handle_event(event) self.debug(f"Finished handling {event}") else: self.debug(f"Not accepting {event} because {reason}") except asyncio.CancelledError: - self.log.trace("Worker cancelled") + # this trace was used for debugging leaked CancelledErrors from inside httpx + # self.log.trace("Worker cancelled") raise + except BaseException as e: + if self.helpers.in_exception_chain(e, (KeyboardInterrupt,)): + self.scan.stop() + else: + self.error(f"Critical failure in module {self.name}: {e}") + self.error(traceback.format_exc()) self.log.trace(f"Worker stopped") @property def max_scope_distance(self): if self.in_scope_only or self.target_only: return 0 + if self.scope_distance_modifier is None: + return 999 return max(0, self.scan.scope_search_distance + self.scope_distance_modifier) def _event_precheck(self, event): @@ -3060,7 +3206,9 @@

    if self.target_only: if "target" not in event.tags: return False, "it did not meet target_only filter criteria" + # exclude certain URLs (e.g. javascript): + # TODO: revisit this after httpx rework if event.type.startswith("URL") and self.name != "httpx" and "httpx-only" in event.tags: return False, "its extension was listed in url_extension_httpx_only" @@ -3070,16 +3218,19 @@

    """ A simple wrapper for dup tracking """ - acceptable, reason = await self.__event_postcheck(event) + # special exception for "FINISHED" event + if event.type in ("FINISHED",): + return True, "" + acceptable, reason = await self._event_postcheck_inner(event) if acceptable: # check duplicates is_incoming_duplicate, reason = self.is_incoming_duplicate(event, add=True) if is_incoming_duplicate and not self.accept_dupes: - return False, f"module has already seen {event}" + (f" ({reason})" if reason else "") + return False, f"module has already seen it" + (f" ({reason})" if reason else "") return acceptable, reason - async def __event_postcheck(self, event): + async def _event_postcheck_inner(self, event): """ Post-checks an event to determine if it should be accepted by the module for handling. @@ -3097,21 +3248,10 @@

    - This method also maintains host-based tracking when the `per_host_only` or similar flags are set. - The method will also update event production stats for output modules. """ - # special exception for "FINISHED" event - if event.type in ("FINISHED",): - return True, "" - # force-output certain events to the graph if self._is_graph_important(event): return True, "event is critical to the graph" - # don't send out-of-scope targets to active modules (excluding portscanners, because they can handle it) - # this only takes effect if your target and whitelist are different - # TODO: the logic here seems incomplete, it could probably use some work. - if "active" in self.flags and "portscan" not in self.flags: - if "target" in event.tags and event not in self.scan.whitelist: - return False, "it is not in whitelist and module has active flag" - # check scope distance filter_result, reason = self._scope_distance_check(event) if not filter_result: @@ -3119,7 +3259,12 @@

    # custom filtering async with self.scan._acatch(context=self.filter_event): - filter_result = await self.filter_event(event) + try: + filter_result = await self.filter_event(event) + except Exception as e: + msg = f"Unhandled exception in {self.name}.filter_event({event}): {e}" + self.error(msg) + return False, msg msg = str(self._custom_filter_criteria_msg) with suppress(ValueError, TypeError): filter_result, reason = filter_result @@ -3153,7 +3298,7 @@

    async with self.scan._acatch(context), self._task_counter.count(context): await self.helpers.execute_sync_or_async(callback) - async def queue_event(self, event, precheck=True): + async def queue_event(self, event): """ Asynchronously queues an incoming event to the module's event queue for further processing. @@ -3176,9 +3321,7 @@

    if self.incoming_event_queue is False: self.debug(f"Not in an acceptable state to queue incoming event") return - acceptable, reason = True, "precheck was skipped" - if precheck: - acceptable, reason = self._event_precheck(event) + acceptable, reason = self._event_precheck(event) if not acceptable: if reason and reason != "its type is not in watched_events": self.debug(f"Not queueing {event} because {reason}") @@ -3190,7 +3333,7 @@

    async with self._event_received: self._event_received.notify() if event.type != "FINISHED": - self.scan.manager._new_activity = True + self.scan._new_activity = True except AttributeError: self.debug(f"Not in an acceptable state to queue incoming event") @@ -3219,7 +3362,7 @@

    except AttributeError: self.debug(f"Not in an acceptable state to queue outgoing event") - def set_error_state(self, message=None, clear_outgoing_queue=False): + def set_error_state(self, message=None, clear_outgoing_queue=False, critical=False): """ Puts the module into an errored state where it cannot accept new events. Optionally logs a warning message. @@ -3244,7 +3387,11 @@

    log_msg = "Setting error state" if message is not None: log_msg += f": {message}" - self.warning(log_msg) + if critical: + log_fn = self.error + else: + log_fn = self.warning + log_fn(log_msg) self.errored = True # clear incoming queue if self.incoming_event_queue is not False: @@ -3265,7 +3412,12 @@

    if event.type in ("FINISHED",): return False, "" reason = "" - event_hash = self._incoming_dedup_hash(event) + try: + event_hash = self._incoming_dedup_hash(event) + except Exception as e: + msg = f"Unhandled exception in {self.name}._incoming_dedup_hash({event}): {e}" + self.error(msg) + return True, msg with suppress(TypeError, ValueError): event_hash, reason = event_hash is_dup = event_hash in self._incoming_dup_tracker @@ -3326,7 +3478,7 @@

    >>> event = self.make_event("https://example.com:8443") >>> self.get_per_hostport_hash(event) """ - parsed = getattr(event, "parsed", None) + parsed = getattr(event, "parsed_url", None) if parsed is None: to_hash = self.helpers.make_netloc(event.host, event.port) else: @@ -3447,6 +3599,10 @@

    self.set_error_state(f"Setting error state due to {self._request_failures:,} failed HTTP requests") return r + @property + def preset(self): + return self.scan.preset + @property def config(self): """Property that provides easy access to the module's configuration in the scan's config. @@ -3465,7 +3621,10 @@

    @property def incoming_event_queue(self): if self._incoming_event_queue is None: - self._incoming_event_queue = ShuffleQueue() + if self._shuffle_incoming_queue: + self._incoming_event_queue = ShuffleQueue() + else: + self._incoming_event_queue = asyncio.Queue() return self._incoming_event_queue @property @@ -3500,7 +3659,7 @@

    """ Convenience shortcut to `http_timeout` in the config """ - return self.scan.config.get("http_timeout", 10) + return self.scan.web_config.get("http_timeout", 10) @property def log(self): @@ -3543,9 +3702,14 @@

    >>> self.log_table(['Header1', 'Header2'], [['row1col1', 'row1col2'], ['row2col1', 'row2col2']], table_name="my_table") """ table_name = kwargs.pop("table_name", None) + max_log_entries = kwargs.pop("max_log_entries", None) table = self.helpers.make_table(*args, **kwargs) + lines_logged = 0 for line in table.splitlines(): + if max_log_entries is not None and lines_logged > max_log_entries: + break self.info(line) + lines_logged += 1 if table_name is not None: date = self.helpers.make_date() filename = self.scan.home / f"{self.helpers.tagify(table_name)}-table-{date}.txt" @@ -3555,7 +3719,7 @@

    return table def _is_graph_important(self, event): - return self.preserve_graph and getattr(event, "_graph_important", False) + return self.preserve_graph and getattr(event, "_graph_important", False) and not getattr(event, "_omit", False) @property def preserve_graph(self): @@ -3564,20 +3728,6 @@

    preserve_graph = self._preserve_graph return preserve_graph - def stdout(self, *args, **kwargs): - """Writes log messages directly to standard output. - - This is typically reserved for output modules only, e.g. `human` or `json`. - - Args: - *args: Variable length argument list to be passed to `self.log.stdout`. - **kwargs: Arbitrary keyword arguments to be passed to `self.log.stdout`. - - Examples: - >>> self.stdout("This will be printed to stdout") - """ - self.log.stdout(*args, extra={"scan_id": self.scan.id}, **kwargs) - def debug(self, *args, trace=False, **kwargs): """Logs debug messages and optionally the stack trace of the most recent exception. @@ -3738,7 +3888,7 @@

    if trace: self.trace() - def trace(self): + def trace(self, msg=None): """Logs the stack trace of the most recently caught exception. This method captures the type, value, and traceback of the most recent exception and logs it using the trace level. It is typically used for debugging purposes. @@ -3751,9 +3901,12 @@

    >>> except ZeroDivisionError: >>> self.trace() """ - e_type, e_val, e_traceback = exc_info() - if e_type is not None: - self.log.trace(traceback.format_exc()) + if msg is None: + e_type, e_val, e_traceback = exc_info() + if e_type is not None: + self.log.trace(traceback.format_exc()) + else: + self.log.trace(msg) def critical(self, *args, trace=True, **kwargs): """Logs a whole message in emboldened red text, and optionally the stack trace of the most recent exception. @@ -4010,14 +4163,7 @@

    Source code in bbot/modules/base.py -
    112
    -113
    -114
    -115
    -116
    -117
    -118
    -119
    +
    119
     120
     121
     122
    @@ -4050,7 +4196,14 @@ 

    149 150 151 -152

    def __init__(self, scan):
    +152
    +153
    +154
    +155
    +156
    +157
    +158
    +159
    def __init__(self, scan):
         """Initializes a module instance.
     
         Args:
    @@ -4122,18 +4275,18 @@ 

    Source code in bbot/modules/base.py -
    266
    -267
    -268
    -269
    -270
    -271
    -272
    -273
    +
    273
     274
     275
     276
    -277
    async def cleanup(self):
    +277
    +278
    +279
    +280
    +281
    +282
    +283
    +284
    async def cleanup(self):
         """Asynchronously performs final cleanup operations after the scan is complete.
     
         This method can be overridden to implement custom cleanup logic. It is called only once per scan and may not raise events.
    @@ -4190,21 +4343,21 @@ 

    Source code in bbot/modules/base.py -
    1379
    -1380
    -1381
    -1382
    -1383
    -1384
    -1385
    -1386
    -1387
    -1388
    -1389
    -1390
    -1391
    -1392
    -1393
    def critical(self, *args, trace=True, **kwargs):
    +
    1403
    +1404
    +1405
    +1406
    +1407
    +1408
    +1409
    +1410
    +1411
    +1412
    +1413
    +1414
    +1415
    +1416
    +1417
    def critical(self, *args, trace=True, **kwargs):
         """Logs a whole message in emboldened red text, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -4264,21 +4417,21 @@ 

    Source code in bbot/modules/base.py -
    1202
    -1203
    -1204
    -1205
    -1206
    -1207
    -1208
    -1209
    -1210
    -1211
    -1212
    -1213
    -1214
    -1215
    -1216
    def debug(self, *args, trace=False, **kwargs):
    +
    1223
    +1224
    +1225
    +1226
    +1227
    +1228
    +1229
    +1230
    +1231
    +1232
    +1233
    +1234
    +1235
    +1236
    +1237
    def debug(self, *args, trace=False, **kwargs):
         """Logs debug messages and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -4333,9 +4486,9 @@ 

    Examples:

    -
    >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"])
    +
    >>> await self.emit_event("www.evilcorp.com", parent=event, tags=["affiliate"])
     
    -
    >>> new_event = self.make_event("1.2.3.4", source=event)
    +
    >>> new_event = self.make_event("1.2.3.4", parent=event)
     >>> await self.emit_event(new_event)
     

    Returns:

    @@ -4350,7 +4503,7 @@

    Raises:

    • -ValidationError +ValidationError

      If the event cannot be validated (handled in self.make_event()).

      @@ -4359,15 +4512,7 @@

    Source code in bbot/modules/base.py -
    435
    -436
    -437
    -438
    -439
    -440
    -441
    -442
    -443
    +
    443
     444
     445
     446
    @@ -4396,7 +4541,16 @@ 

    469 470 471 -472

    async def emit_event(self, *args, **kwargs):
    +472
    +473
    +474
    +475
    +476
    +477
    +478
    +479
    +480
    +481
    async def emit_event(self, *args, **kwargs):
         """Emit an event to the event queue and distribute it to interested modules.
     
         This is how modules "return" data.
    @@ -4414,9 +4568,9 @@ 

    ``` Examples: - >>> await self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"]) + >>> await self.emit_event("www.evilcorp.com", parent=event, tags=["affiliate"]) - >>> new_event = self.make_event("1.2.3.4", source=event) + >>> new_event = self.make_event("1.2.3.4", parent=event) >>> await self.emit_event(new_event) Returns: @@ -4434,6 +4588,7 @@

    event = self.make_event(*args, **event_kwargs) if event: await self.queue_outgoing_event(event, **emit_kwargs) + return event

    @@ -4479,21 +4634,21 @@

    Source code in bbot/modules/base.py -
    1346
    -1347
    -1348
    -1349
    -1350
    -1351
    -1352
    -1353
    -1354
    -1355
    -1356
    -1357
    -1358
    -1359
    -1360
    def error(self, *args, trace=True, **kwargs):
    +
    1367
    +1368
    +1369
    +1370
    +1371
    +1372
    +1373
    +1374
    +1375
    +1376
    +1377
    +1378
    +1379
    +1380
    +1381
    def error(self, *args, trace=True, **kwargs):
         """Logs an error message, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -4550,21 +4705,21 @@ 

    Source code in bbot/modules/base.py -
    224
    -225
    -226
    -227
    -228
    -229
    -230
    -231
    +
    231
     232
     233
     234
     235
     236
     237
    -238
    async def filter_event(self, event):
    +238
    +239
    +240
    +241
    +242
    +243
    +244
    +245
    async def filter_event(self, event):
         """Asynchronously filters incoming events based on custom criteria.
     
         Override this method for more granular control over which events are accepted by your module. This method is called automatically before `handle_event()` for each incoming event that matches any in `watched_events`.
    @@ -4606,18 +4761,18 @@ 

    Source code in bbot/modules/base.py -
    240
    -241
    -242
    -243
    -244
    -245
    -246
    -247
    +
    247
     248
     249
     250
    -251
    async def finish(self):
    +251
    +252
    +253
    +254
    +255
    +256
    +257
    +258
    async def finish(self):
         """Asynchronously performs final tasks as the scan nears completion.
     
         This method can be overridden to execute any necessary finalization logic. For example, if the module relies on a word cloud, you might wait for the scan to finish to ensure the word cloud is most complete before running an operation.
    @@ -4668,24 +4823,24 @@ 

    Source code in bbot/modules/base.py -
    957
    -958
    -959
    -960
    -961
    -962
    -963
    -964
    -965
    -966
    -967
    -968
    -969
    -970
    -971
    -972
    -973
    -974
    def get_per_domain_hash(self, event):
    +
    980
    +981
    +982
    +983
    +984
    +985
    +986
    +987
    +988
    +989
    +990
    +991
    +992
    +993
    +994
    +995
    +996
    +997
    def get_per_domain_hash(self, event):
         """
         Computes a per-domain hash value for a given event. This method may be optionally overridden in subclasses.
     
    @@ -4742,35 +4897,35 @@ 

    Source code in bbot/modules/base.py -
    915
    -916
    -917
    -918
    -919
    -920
    -921
    -922
    -923
    -924
    -925
    -926
    -927
    -928
    -929
    -930
    -931
    def get_per_host_hash(self, event):
    -    """
    -    Computes a per-host hash value for a given event. This method may be optionally overridden in subclasses.
    -
    -    The function uses the event's `host` to create a string to be hashed.
    -
    -    Args:
    -        event (Event): The event object containing host information.
    -
    -    Returns:
    -        int: The hash value computed for the host.
    -
    -    Examples:
    +
    938
    +939
    +940
    +941
    +942
    +943
    +944
    +945
    +946
    +947
    +948
    +949
    +950
    +951
    +952
    +953
    +954
    def get_per_host_hash(self, event):
    +    """
    +    Computes a per-host hash value for a given event. This method may be optionally overridden in subclasses.
    +
    +    The function uses the event's `host` to create a string to be hashed.
    +
    +    Args:
    +        event (Event): The event object containing host information.
    +
    +    Returns:
    +        int: The hash value computed for the host.
    +
    +    Examples:
             >>> event = self.make_event("https://example.com:8443")
             >>> self.get_per_host_hash(event)
         """
    @@ -4815,29 +4970,29 @@ 

    Source code in bbot/modules/base.py -
    933
    -934
    -935
    -936
    -937
    -938
    -939
    -940
    -941
    -942
    -943
    -944
    -945
    -946
    -947
    -948
    -949
    -950
    -951
    -952
    -953
    -954
    -955
    def get_per_hostport_hash(self, event):
    +
    956
    +957
    +958
    +959
    +960
    +961
    +962
    +963
    +964
    +965
    +966
    +967
    +968
    +969
    +970
    +971
    +972
    +973
    +974
    +975
    +976
    +977
    +978
    def get_per_hostport_hash(self, event):
         """
         Computes a per-host:port hash value for a given event. This method may be optionally overridden in subclasses.
     
    @@ -4854,7 +5009,7 @@ 

    >>> event = self.make_event("https://example.com:8443") >>> self.get_per_hostport_hash(event) """ - parsed = getattr(event, "parsed", None) + parsed = getattr(event, "parsed_url", None) if parsed is None: to_hash = self.helpers.make_netloc(event.host, event.port) else: @@ -4884,17 +5039,17 @@

    Source code in bbot/modules/base.py -
    353
    -354
    -355
    -356
    -357
    -358
    -359
    -360
    +
    360
     361
     362
    -363
    def get_watched_events(self):
    +363
    +364
    +365
    +366
    +367
    +368
    +369
    +370
    def get_watched_events(self):
         """Retrieve the set of events that the module is interested in observing.
     
         Override this method if the set of events the module should watch needs to be determined dynamically, e.g., based on configuration options or other runtime conditions.
    @@ -4949,21 +5104,21 @@ 

    Source code in bbot/modules/base.py -
    208
    -209
    -210
    -211
    -212
    -213
    -214
    -215
    +
    215
     216
     217
     218
     219
     220
     221
    -222
    async def handle_batch(self, *events):
    +222
    +223
    +224
    +225
    +226
    +227
    +228
    +229
    async def handle_batch(self, *events):
         """Handles incoming events in batches for optimized processing.
     
         This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module.
    @@ -5020,21 +5175,21 @@ 

    Source code in bbot/modules/base.py -
    192
    -193
    -194
    -195
    -196
    -197
    -198
    -199
    +
    199
     200
     201
     202
     203
     204
     205
    -206
    async def handle_event(self, event):
    +206
    +207
    +208
    +209
    +210
    +211
    +212
    +213
    async def handle_event(self, event):
         """Asynchronously handles incoming events that the module is configured to watch.
     
         This method is automatically invoked when an event that matches any in `watched_events` is encountered during a scan. Override this method to implement custom event-handling logic for your module.
    @@ -5094,21 +5249,21 @@ 

    Source code in bbot/modules/base.py -
    1266
    -1267
    -1268
    -1269
    -1270
    -1271
    -1272
    -1273
    -1274
    -1275
    -1276
    -1277
    -1278
    -1279
    -1280
    def hugeinfo(self, *args, trace=False, **kwargs):
    +
    1287
    +1288
    +1289
    +1290
    +1291
    +1292
    +1293
    +1294
    +1295
    +1296
    +1297
    +1298
    +1299
    +1300
    +1301
    def hugeinfo(self, *args, trace=False, **kwargs):
         """Logs a whole message in emboldened blue text, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -5168,21 +5323,21 @@ 

    Source code in bbot/modules/base.py -
    1298
    -1299
    -1300
    -1301
    -1302
    -1303
    -1304
    -1305
    -1306
    -1307
    -1308
    -1309
    -1310
    -1311
    -1312
    def hugesuccess(self, *args, trace=False, **kwargs):
    +
    1319
    +1320
    +1321
    +1322
    +1323
    +1324
    +1325
    +1326
    +1327
    +1328
    +1329
    +1330
    +1331
    +1332
    +1333
    def hugesuccess(self, *args, trace=False, **kwargs):
         """Logs a whole message in emboldened green text, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -5242,21 +5397,21 @@ 

    Source code in bbot/modules/base.py -
    1234
    -1235
    -1236
    -1237
    -1238
    -1239
    -1240
    -1241
    -1242
    -1243
    -1244
    -1245
    -1246
    -1247
    -1248
    def hugeverbose(self, *args, trace=False, **kwargs):
    +
    1255
    +1256
    +1257
    +1258
    +1259
    +1260
    +1261
    +1262
    +1263
    +1264
    +1265
    +1266
    +1267
    +1268
    +1269
    def hugeverbose(self, *args, trace=False, **kwargs):
         """Logs a whole message in emboldened white text, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -5316,21 +5471,21 @@ 

    Source code in bbot/modules/base.py -
    1330
    -1331
    -1332
    -1333
    -1334
    -1335
    -1336
    -1337
    -1338
    -1339
    -1340
    -1341
    -1342
    -1343
    -1344
    def hugewarning(self, *args, trace=True, **kwargs):
    +
    1351
    +1352
    +1353
    +1354
    +1355
    +1356
    +1357
    +1358
    +1359
    +1360
    +1361
    +1362
    +1363
    +1364
    +1365
    def hugewarning(self, *args, trace=True, **kwargs):
         """Logs a whole message in emboldened orange text, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -5390,21 +5545,21 @@ 

    Source code in bbot/modules/base.py -
    1250
    -1251
    -1252
    -1253
    -1254
    -1255
    -1256
    -1257
    -1258
    -1259
    -1260
    -1261
    -1262
    -1263
    -1264
    def info(self, *args, trace=False, **kwargs):
    +
    1271
    +1272
    +1273
    +1274
    +1275
    +1276
    +1277
    +1278
    +1279
    +1280
    +1281
    +1282
    +1283
    +1284
    +1285
    def info(self, *args, trace=False, **kwargs):
         """Logs informational messages and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -5464,33 +5619,38 @@ 

    Source code in bbot/modules/base.py -
    1150
    -1151
    -1152
    -1153
    -1154
    -1155
    -1156
    -1157
    -1158
    -1159
    -1160
    -1161
    -1162
    -1163
    -1164
    -1165
    -1166
    -1167
    -1168
    -1169
    -1170
    -1171
    -1172
    -1173
    -1174
    -1175
    -1176
    def log_table(self, *args, **kwargs):
    +
    1180
    +1181
    +1182
    +1183
    +1184
    +1185
    +1186
    +1187
    +1188
    +1189
    +1190
    +1191
    +1192
    +1193
    +1194
    +1195
    +1196
    +1197
    +1198
    +1199
    +1200
    +1201
    +1202
    +1203
    +1204
    +1205
    +1206
    +1207
    +1208
    +1209
    +1210
    +1211
    def log_table(self, *args, **kwargs):
         """Logs a table to the console and optionally writes it to a file.
     
         This function generates a table using `self.helpers.make_table`, then logs each line
    @@ -5507,9 +5667,14 @@ 

    >>> self.log_table(['Header1', 'Header2'], [['row1col1', 'row1col2'], ['row2col1', 'row2col2']], table_name="my_table") """ table_name = kwargs.pop("table_name", None) + max_log_entries = kwargs.pop("max_log_entries", None) table = self.helpers.make_table(*args, **kwargs) + lines_logged = 0 for line in table.splitlines(): + if max_log_entries is not None and lines_logged > max_log_entries: + break self.info(line) + lines_logged += 1 if table_name is not None: date = self.helpers.make_date() filename = self.scan.home / f"{self.helpers.tagify(table_name)}-table-{date}.txt" @@ -5556,7 +5721,7 @@

    Examples:

    -
    >>> new_event = self.make_event("1.2.3.4", source=event)
    +
    >>> new_event = self.make_event("1.2.3.4", parent=event)
     >>> await self.emit_event(new_event)
     

    Returns:

    @@ -5571,7 +5736,7 @@

    Raises:

    • -ValidationError +ValidationError

      If the event could not be validated and raise_error is True.

      @@ -5580,13 +5745,7 @@

    Source code in bbot/modules/base.py -
    403
    -404
    -405
    -406
    -407
    -408
    -409
    +
    409
     410
     411
     412
    @@ -5610,7 +5769,15 @@ 

    430 431 432 -433

    def make_event(self, *args, **kwargs):
    +433
    +434
    +435
    +436
    +437
    +438
    +439
    +440
    +441
    def make_event(self, *args, **kwargs):
         """Create an event for the scan.
     
         Raises a validation error if the event could not be created, unless raise_error is set to False.
    @@ -5621,7 +5788,7 @@ 

    raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False. Examples: - >>> new_event = self.make_event("1.2.3.4", source=event) + >>> new_event = self.make_event("1.2.3.4", parent=event) >>> await self.emit_event(new_event) Returns: @@ -5631,6 +5798,10 @@

    ValidationError: If the event could not be validated and raise_error is True. """ raise_error = kwargs.pop("raise_error", False) + module = kwargs.pop("module", None) + if module is None: + if (not args) or getattr(args[0], "module", None) is None: + kwargs["module"] = self try: event = self.scan.make_event(*args, **kwargs) except ValidationError as e: @@ -5638,8 +5809,6 @@

    raise self.warning(f"{e}") return - if not event.module: - event.module = self return event

    @@ -5686,14 +5855,7 @@

    Source code in bbot/modules/base.py -
    306
    -307
    -308
    -309
    -310
    -311
    -312
    -313
    +
    313
     314
     315
     316
    @@ -5704,7 +5866,14 @@ 

    321 322 323 -324

    async def ping(self):
    +324
    +325
    +326
    +327
    +328
    +329
    +330
    +331
    async def ping(self):
         """Asynchronously checks the health of the configured API.
     
         This method is used in conjunction with require_api_key() to verify that the API is not just configured, but also responsive. This method should include an assert statement to validate the API's health, typically by making a test request to a known endpoint.
    @@ -5734,7 +5903,7 @@ 

    async

    -
    queue_event(event, precheck=True)
    +
    queue_event(event)
     

    Asynchronously queues an incoming event to the module's event queue for further processing.

    @@ -5774,23 +5943,7 @@

    Source code in bbot/modules/base.py -
    777
    -778
    -779
    -780
    -781
    -782
    -783
    -784
    -785
    -786
    -787
    -788
    -789
    -790
    -791
    -792
    -793
    +
    793
     794
     795
     796
    @@ -5813,7 +5966,21 @@ 

    813 814 815 -816

    async def queue_event(self, event, precheck=True):
    +816
    +817
    +818
    +819
    +820
    +821
    +822
    +823
    +824
    +825
    +826
    +827
    +828
    +829
    +830
    async def queue_event(self, event):
         """
         Asynchronously queues an incoming event to the module's event queue for further processing.
     
    @@ -5836,9 +6003,7 @@ 

    if self.incoming_event_queue is False: self.debug(f"Not in an acceptable state to queue incoming event") return - acceptable, reason = True, "precheck was skipped" - if precheck: - acceptable, reason = self._event_precheck(event) + acceptable, reason = self._event_precheck(event) if not acceptable: if reason and reason != "its type is not in watched_events": self.debug(f"Not queueing {event} because {reason}") @@ -5850,7 +6015,7 @@

    async with self._event_received: self._event_received.notify() if event.type != "FINISHED": - self.scan.manager._new_activity = True + self.scan._new_activity = True except AttributeError: self.debug(f"Not in an acceptable state to queue incoming event")

    @@ -5911,21 +6076,7 @@

    Source code in bbot/modules/base.py -
    818
    -819
    -820
    -821
    -822
    -823
    -824
    -825
    -826
    -827
    -828
    -829
    -830
    -831
    -832
    +
    832
     833
     834
     835
    @@ -5934,7 +6085,21 @@ 

    838 839 840 -841

    async def queue_outgoing_event(self, event, **kwargs):
    +841
    +842
    +843
    +844
    +845
    +846
    +847
    +848
    +849
    +850
    +851
    +852
    +853
    +854
    +855
    async def queue_outgoing_event(self, event, **kwargs):
         """
         Queues an outgoing event to the module's outgoing event queue for further processing.
     
    @@ -5989,18 +6154,18 @@ 

    Source code in bbot/modules/base.py -
    253
    -254
    -255
    -256
    -257
    -258
    -259
    -260
    +
    260
     261
     262
     263
    -264
    async def report(self):
    +264
    +265
    +266
    +267
    +268
    +269
    +270
    +271
    async def report(self):
         """Asynchronously executes a final task after the scan is complete but before cleanup.
     
         This method can be overridden to aggregate data and raise summary events at the end of the scan.
    @@ -6068,31 +6233,31 @@ 

    Source code in bbot/modules/base.py -
    1045
    -1046
    -1047
    -1048
    -1049
    -1050
    -1051
    -1052
    -1053
    -1054
    -1055
    -1056
    -1057
    -1058
    -1059
    -1060
    -1061
    -1062
    -1063
    -1064
    -1065
    -1066
    -1067
    -1068
    -1069
    async def request_with_fail_count(self, *args, **kwargs):
    +
    1068
    +1069
    +1070
    +1071
    +1072
    +1073
    +1074
    +1075
    +1076
    +1077
    +1078
    +1079
    +1080
    +1081
    +1082
    +1083
    +1084
    +1085
    +1086
    +1087
    +1088
    +1089
    +1090
    +1091
    +1092
    async def request_with_fail_count(self, *args, **kwargs):
         """Asynchronously perform an HTTP request while keeping track of consecutive failures.
     
         This function wraps the `self.helpers.request` method, incrementing a failure counter if
    @@ -6152,14 +6317,7 @@ 

    Source code in bbot/modules/base.py -
    279
    -280
    -281
    -282
    -283
    -284
    -285
    -286
    +
    286
     287
     288
     289
    @@ -6177,7 +6335,14 @@ 

    301 302 303 -304

    async def require_api_key(self):
    +304
    +305
    +306
    +307
    +308
    +309
    +310
    +311
    async def require_api_key(self):
         """
         Asynchronously checks if an API key is required and valid.
     
    @@ -6211,7 +6376,7 @@ 

    set_error_state

    -
    set_error_state(message=None, clear_outgoing_queue=False)
    +
    set_error_state(message=None, clear_outgoing_queue=False, critical=False)
     

    Puts the module into an errored state where it cannot accept new events. Optionally logs a warning message.

    @@ -6252,21 +6417,7 @@

    Source code in bbot/modules/base.py -
    843
    -844
    -845
    -846
    -847
    -848
    -849
    -850
    -851
    -852
    -853
    -854
    -855
    -856
    -857
    +
    857
     858
     859
     860
    @@ -6292,7 +6443,25 @@ 

    880 881 882 -883

    def set_error_state(self, message=None, clear_outgoing_queue=False):
    +883
    +884
    +885
    +886
    +887
    +888
    +889
    +890
    +891
    +892
    +893
    +894
    +895
    +896
    +897
    +898
    +899
    +900
    +901
    def set_error_state(self, message=None, clear_outgoing_queue=False, critical=False):
         """
         Puts the module into an errored state where it cannot accept new events. Optionally logs a warning message.
     
    @@ -6317,7 +6486,11 @@ 

    log_msg = "Setting error state" if message is not None: log_msg += f": {message}" - self.warning(log_msg) + if critical: + log_fn = self.error + else: + log_fn = self.warning + log_fn(log_msg) self.errored = True # clear incoming queue if self.incoming_event_queue is not False: @@ -6387,14 +6560,7 @@

    Source code in bbot/modules/base.py -
    154
    -155
    -156
    -157
    -158
    -159
    -160
    -161
    +
    161
     162
     163
     164
    @@ -6423,7 +6589,14 @@ 

    187 188 189 -190

    async def setup(self):
    +190
    +191
    +192
    +193
    +194
    +195
    +196
    +197
    async def setup(self):
         """
         Performs one-time setup tasks for the module.
     
    @@ -6465,66 +6638,6 @@ 

    -

    -stdout -

    -
    stdout(*args, **kwargs)
    -
    -
    -

    Writes log messages directly to standard output.

    -

    This is typically reserved for output modules only, e.g. human or json.

    -

    Parameters:

    -
      -
    • -*args - – -
      -

      Variable length argument list to be passed to self.log.stdout.

      -
      -
    • -
    • -**kwargs - – -
      -

      Arbitrary keyword arguments to be passed to self.log.stdout.

      -
      -
    • -
    -

    Examples:

    -
    >>> self.stdout("This will be printed to stdout")
    -
    -
    -Source code in bbot/modules/base.py -
    1188
    -1189
    -1190
    -1191
    -1192
    -1193
    -1194
    -1195
    -1196
    -1197
    -1198
    -1199
    -1200
    def stdout(self, *args, **kwargs):
    -    """Writes log messages directly to standard output.
    -
    -    This is typically reserved for output modules only, e.g. `human` or `json`.
    -
    -    Args:
    -        *args: Variable length argument list to be passed to `self.log.stdout`.
    -        **kwargs: Arbitrary keyword arguments to be passed to `self.log.stdout`.
    -
    -    Examples:
    -        >>> self.stdout("This will be printed to stdout")
    -    """
    -    self.log.stdout(*args, extra={"scan_id": self.scan.id}, **kwargs)
    -
    -
    -
    -
    -

    success

    @@ -6565,21 +6678,21 @@

    Source code in bbot/modules/base.py -
    1282
    -1283
    -1284
    -1285
    -1286
    -1287
    -1288
    -1289
    -1290
    -1291
    -1292
    -1293
    -1294
    -1295
    -1296
    def success(self, *args, trace=False, **kwargs):
    +
    1303
    +1304
    +1305
    +1306
    +1307
    +1308
    +1309
    +1310
    +1311
    +1312
    +1313
    +1314
    +1315
    +1316
    +1317
    def success(self, *args, trace=False, **kwargs):
         """Logs a success message, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -6602,7 +6715,7 @@ 

    trace

    -
    trace()
    +
    trace(msg=None)
     

    Logs the stack trace of the most recently caught exception.

    @@ -6616,22 +6729,25 @@

    Source code in bbot/modules/base.py -
    1362
    -1363
    -1364
    -1365
    -1366
    -1367
    -1368
    -1369
    -1370
    -1371
    -1372
    -1373
    -1374
    -1375
    -1376
    -1377
    def trace(self):
    +
    1383
    +1384
    +1385
    +1386
    +1387
    +1388
    +1389
    +1390
    +1391
    +1392
    +1393
    +1394
    +1395
    +1396
    +1397
    +1398
    +1399
    +1400
    +1401
    def trace(self, msg=None):
         """Logs the stack trace of the most recently caught exception.
     
         This method captures the type, value, and traceback of the most recent exception and logs it using the trace level. It is typically used for debugging purposes.
    @@ -6644,9 +6760,12 @@ 

        >>> except ZeroDivisionError:
        >>>     self.trace()
        """
-       e_type, e_val, e_traceback = exc_info()
-       if e_type is not None:
-           self.log.trace(traceback.format_exc())
+       if msg is None:
+           e_type, e_val, e_traceback = exc_info()
+           if e_type is not None:
+               self.log.trace(traceback.format_exc())
+       else:
+           self.log.trace(msg)
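A usage sketch based on the updated signature:

    try:
        1 / 0
    except ZeroDivisionError:
        self.trace()                    # logs the traceback of the caught exception
    self.trace("custom trace message")  # logs the message directly at TRACE level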

    @@ -6692,21 +6811,21 @@

    Source code in bbot/modules/base.py -
    1218
    -1219
    -1220
    -1221
    -1222
    -1223
    -1224
    -1225
    -1226
    -1227
    -1228
    -1229
    -1230
    -1231
    -1232
    def verbose(self, *args, trace=False, **kwargs):
    +
    1239
    +1240
    +1241
    +1242
    +1243
    +1244
    +1245
    +1246
    +1247
    +1248
    +1249
    +1250
    +1251
    +1252
    +1253
    def verbose(self, *args, trace=False, **kwargs):
         """Logs messages and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -6766,21 +6885,21 @@ 

    Source code in bbot/modules/base.py -
    1314
    -1315
    -1316
    -1317
    -1318
    -1319
    -1320
    -1321
    -1322
    -1323
    -1324
    -1325
    -1326
    -1327
    -1328
    def warning(self, *args, trace=True, **kwargs):
    +
    1335
    +1336
    +1337
    +1338
    +1339
    +1340
    +1341
    +1342
    +1343
    +1344
    +1345
    +1346
    +1347
    +1348
    +1349
    def warning(self, *args, trace=True, **kwargs):
         """Logs a warning message, and optionally the stack trace of the most recent exception.
     
         Args:
    @@ -6825,9 +6944,15 @@ 

\ No newline at end of file
diff --git a/Stable/dev/core/index.html b/Stable/dev/core/index.html
new file mode 100644
index 0000000000..c3ec482c54
--- /dev/null
+++ b/Stable/dev/core/index.html
@@ -0,0 +1,1212 @@
+BBOTCore - BBOT Docs
    +
    + +
    + +
    +
    + +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    +

    +BBOTCore +

    +
    +

    This is the first thing that loads when you import BBOT.

    +

    Unlike a Preset, BBOTCore holds only the config, not scan-specific stuff like targets, flags, modules, etc.

    +

    Its main jobs are:

    +
      +
    • set up logging
    • +
    • keep separation between the default and custom config (this allows presets to only display the config options that have changed)
    • +
    • allow for easy merging of configs
    • +
    • load quickly
    • +
    +
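Following the list above, a minimal usage sketch (the import path is assumed from the source file referenced below):

    from bbot.core.core import BBOTCore

    core = BBOTCore()
    core.merge_custom({"home": "/tmp/.bbot"})  # changes land in .custom_config
    print(core.config["home"])                 # merged view of default + custom
    print(core.custom_config)                  # only the values that differ from the defaults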
    +Source code in bbot/core/core.py +
     14
    + 15
    + 16
    + 17
    + 18
    + 19
    + 20
    + 21
    + 22
    + 23
    + 24
    + 25
    + 26
    + 27
    + 28
    + 29
    + 30
    + 31
    + 32
    + 33
    + 34
    + 35
    + 36
    + 37
    + 38
    + 39
    + 40
    + 41
    + 42
    + 43
    + 44
    + 45
    + 46
    + 47
    + 48
    + 49
    + 50
    + 51
    + 52
    + 53
    + 54
    + 55
    + 56
    + 57
    + 58
    + 59
    + 60
    + 61
    + 62
    + 63
    + 64
    + 65
    + 66
    + 67
    + 68
    + 69
    + 70
    + 71
    + 72
    + 73
    + 74
    + 75
    + 76
    + 77
    + 78
    + 79
    + 80
    + 81
    + 82
    + 83
    + 84
    + 85
    + 86
    + 87
    + 88
    + 89
    + 90
    + 91
    + 92
    + 93
    + 94
    + 95
    + 96
    + 97
    + 98
    + 99
    +100
    +101
    +102
    +103
    +104
    +105
    +106
    +107
    +108
    +109
    +110
    +111
    +112
    +113
    +114
    +115
    +116
    +117
    +118
    +119
    +120
    +121
    +122
    +123
    +124
    +125
    +126
    +127
    +128
    +129
    +130
    +131
    +132
    +133
    +134
    +135
    +136
    +137
    +138
    +139
    +140
    +141
    +142
    +143
    +144
    +145
    +146
    +147
    +148
    +149
    +150
    +151
    +152
    +153
    +154
    +155
    +156
    +157
    +158
    +159
    +160
    +161
    +162
    +163
    +164
    +165
    +166
    +167
    +168
    +169
    +170
    +171
    +172
    +173
    +174
    +175
    +176
    +177
    +178
    +179
    +180
    +181
    +182
    +183
    +184
    +185
    +186
    +187
    +188
    +189
    +190
    +191
    +192
    +193
    +194
    +195
    +196
    +197
    +198
    +199
    +200
    +201
    +202
    +203
    +204
    +205
    +206
    +207
    +208
    +209
    +210
    +211
    +212
    +213
    class BBOTCore:
    +    """
    +    This is the first thing that loads when you import BBOT.
    +
    +    Unlike a Preset, BBOTCore holds only the config, not scan-specific stuff like targets, flags, modules, etc.
    +
    +    Its main jobs are:
    +
    +    - set up logging
    +    - keep separation between the `default` and `custom` config (this allows presets to only display the config options that have changed)
    +    - allow for easy merging of configs
    +    - load quickly
    +    """
    +
    +    # used for filtering out sensitive config values
    +    secrets_strings = ["api_key", "username", "password", "token", "secret", "_id"]
    +    # don't filter/remove entries under this key
    +    secrets_exclude_keys = ["modules"]
    +
    +    def __init__(self):
    +        self._logger = None
    +        self._files_config = None
    +
    +        self.bbot_sudo_pass = None
    +
    +        self._config = None
    +        self._custom_config = None
    +
    +        # bare minimum == logging
    +        self.logger
    +        self.log = logging.getLogger("bbot.core")
    +
    +        import multiprocessing
    +
    +        self.process_name = multiprocessing.current_process().name
    +
    +    @property
    +    def home(self):
    +        return Path(self.config["home"]).expanduser().resolve()
    +
    +    @property
    +    def cache_dir(self):
    +        return self.home / "cache"
    +
    +    @property
    +    def tools_dir(self):
    +        return self.home / "tools"
    +
    +    @property
    +    def temp_dir(self):
    +        return self.home / "temp"
    +
    +    @property
    +    def lib_dir(self):
    +        return self.home / "lib"
    +
    +    @property
    +    def scans_dir(self):
    +        return self.home / "scans"
    +
    +    @property
    +    def config(self):
    +        """
    +        .config is just .default_config + .custom_config merged together
    +
    +        any new values should be added to custom_config.
    +        """
    +        if self._config is None:
    +            self._config = OmegaConf.merge(self.default_config, self.custom_config)
    +            # set read-only flag (change .custom_config instead)
    +            OmegaConf.set_readonly(self._config, True)
    +        return self._config
    +
    +    @property
    +    def default_config(self):
    +        """
    +        The default BBOT config (from `defaults.yml`). Read-only.
    +        """
    +        global DEFAULT_CONFIG
    +        if DEFAULT_CONFIG is None:
    +            self.default_config = self.files_config.get_default_config()
    +            # ensure bbot home dir
    +            if not "home" in self.default_config:
    +                self.default_config["home"] = "~/.bbot"
    +        return DEFAULT_CONFIG
    +
    +    @default_config.setter
    +    def default_config(self, value):
    +        # we temporarily clear out the config so it can be refreshed if/when default_config changes
    +        global DEFAULT_CONFIG
    +        self._config = None
    +        DEFAULT_CONFIG = value
    +        # set read-only flag (change .custom_config instead)
    +        OmegaConf.set_readonly(DEFAULT_CONFIG, True)
    +
    +    @property
    +    def custom_config(self):
    +        """
    +        Custom BBOT config (from `~/.config/bbot/bbot.yml`)
    +        """
    +        # we temporarily clear out the config so it can be refreshed if/when custom_config changes
    +        self._config = None
    +        if self._custom_config is None:
    +            self.custom_config = self.files_config.get_custom_config()
    +        return self._custom_config
    +
    +    @custom_config.setter
    +    def custom_config(self, value):
    +        # we temporarily clear out the config so it can be refreshed if/when custom_config changes
    +        self._config = None
    +        # ensure the modules key is always a dictionary
    +        modules_entry = value.get("modules", None)
    +        if modules_entry is not None and not OmegaConf.is_dict(modules_entry):
    +            value["modules"] = {}
    +        self._custom_config = value
    +
    +    def no_secrets_config(self, config):
    +        from .helpers.misc import clean_dict
    +
    +        with suppress(ValueError):
    +            config = OmegaConf.to_object(config)
    +
    +        return clean_dict(
    +            config,
    +            *self.secrets_strings,
    +            fuzzy=True,
    +            exclude_keys=self.secrets_exclude_keys,
    +        )
    +
    +    def secrets_only_config(self, config):
    +        from .helpers.misc import filter_dict
    +
    +        with suppress(ValueError):
    +            config = OmegaConf.to_object(config)
    +
    +        return filter_dict(
    +            config,
    +            *self.secrets_strings,
    +            fuzzy=True,
    +            exclude_keys=self.secrets_exclude_keys,
    +        )
    +
    +    def merge_custom(self, config):
    +        """
    +        Merge a config into the custom config.
    +        """
    +        self.custom_config = OmegaConf.merge(self.custom_config, OmegaConf.create(config))
    +
    +    def merge_default(self, config):
    +        """
    +        Merge a config into the default config.
    +        """
    +        self.default_config = OmegaConf.merge(self.default_config, OmegaConf.create(config))
    +
    +    def copy(self):
    +        """
    +        Return a semi-shallow copy of self. (`custom_config` is copied, but `default_config` stays the same)
    +        """
    +        core_copy = copy(self)
    +        core_copy._custom_config = self._custom_config.copy()
    +        return core_copy
    +
    +    @property
    +    def files_config(self):
    +        """
    +        Get the configs from `bbot.yml` and `defaults.yml`
    +        """
    +        if self._files_config is None:
    +            from .config import files
    +
    +            self.files = files
    +            self._files_config = files.BBOTConfigFiles(self)
    +        return self._files_config
    +
    +    def create_process(self, *args, **kwargs):
    +        if os.environ.get("BBOT_TESTING", "") == "True":
    +            process = self.create_thread(*args, **kwargs)
    +        else:
    +            if self.process_name == "MainProcess":
    +                from .helpers.process import BBOTProcess
    +
    +                process = BBOTProcess(*args, **kwargs)
    +            else:
    +                raise BBOTError(f"Tried to start server from process {self.process_name}")
    +        process.daemon = True
    +        return process
    +
    +    def create_thread(self, *args, **kwargs):
    +        from .helpers.process import BBOTThread
    +
    +        return BBOTThread(*args, **kwargs)
    +
    +    @property
    +    def logger(self):
    +        self.config
    +        if self._logger is None:
    +            from .config.logger import BBOTLogger
    +
    +            self._logger = BBOTLogger(self)
    +        return self._logger
    +
    +
    +
    +
    +

    +config + +property + +

    +
    config
    +
    +
    +

    .config is just .default_config + .custom_config merged together

    +

    any new values should be added to custom_config.

    +
    +
    +
    +

    +custom_config + +property +writable + +

    +
    custom_config
    +
    +
    +

    Custom BBOT config (from ~/.config/bbot/bbot.yml)

    +
    +
    +
    +

    +default_config + +property +writable + +

    +
    default_config
    +
    +
    +

    The default BBOT config (from defaults.yml). Read-only.

    +
    +
    +
    +

    +files_config + +property + +

    +
    files_config
    +
    +
    +

    Get the configs from bbot.yml and defaults.yml

    +
    +
    +
    +

    +copy +

    +
    copy()
    +
    +
    +

    Return a semi-shallow copy of self. (custom_config is copied, but default_config stays the same)
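A sketch of what "semi-shallow" means in practice (continuing from an existing BBOTCore instance named core):

    core_copy = core.copy()
    core_copy.merge_custom({"home": "/tmp/bbot-copy"})
    # core_copy.config reflects the change; core.custom_config is untouched,
    # but both objects still share the same (read-only) default_config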

    +
    +Source code in bbot/core/core.py +
    168
    +169
    +170
    +171
    +172
    +173
    +174
    def copy(self):
    +    """
    +    Return a semi-shallow copy of self. (`custom_config` is copied, but `default_config` stays the same)
    +    """
    +    core_copy = copy(self)
    +    core_copy._custom_config = self._custom_config.copy()
    +    return core_copy
    +
    +
    +
    +
    +
    +

    +merge_custom +

    +
    merge_custom(config)
    +
    +
    +

    Merge a config into the custom config.

    +
    +Source code in bbot/core/core.py +
    156
    +157
    +158
    +159
    +160
    def merge_custom(self, config):
    +    """
    +    Merge a config into the custom config.
    +    """
    +    self.custom_config = OmegaConf.merge(self.custom_config, OmegaConf.create(config))
    +
    +
    +
    +
    +
    +

    +merge_default +

    +
    merge_default(config)
    +
    +
    +

    Merge a config into the default config.

    +
    +Source code in bbot/core/core.py +
    162
    +163
    +164
    +165
    +166
    def merge_default(self, config):
    +    """
    +    Merge a config into the default config.
    +    """
    +    self.default_config = OmegaConf.merge(self.default_config, OmegaConf.create(config))
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + +
    +
    + +
    +
    +
    +
\ No newline at end of file
diff --git a/Stable/dev/dev_environment/index.html b/Stable/dev/dev_environment/index.html
new file mode 100644
index 0000000000..c7e825b9ea
--- /dev/null
+++ b/Stable/dev/dev_environment/index.html
@@ -0,0 +1,659 @@
+Setting Up a Dev Environment - BBOT Docs
    +
    + +
    + +
    +
    + +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +

    Setting Up a Dev Environment

    +

The following will show you how to set up a fully functioning Python environment for devving on BBOT.

    +

    Installation (Poetry)

    +

    Poetry is the recommended method of installation if you want to dev on BBOT. To set up a dev environment with Poetry, you can follow these steps:

    +
      +
    • Fork BBOT on GitHub
    • +
    • Clone your fork and set up a development environment with Poetry:
    • +
    +
    # clone your forked repo and cd into it
+git clone git@github.com:<username>/bbot.git
    +cd bbot
    +
    +# install poetry
    +curl -sSL https://install.python-poetry.org | python3 -
    +
    +# install pip dependencies
    +poetry install
    +# install pre-commit hooks, etc.
    +poetry run pre-commit install
    +
    +# enter virtual environment
    +poetry shell
    +
    +bbot --help
    +
    +
      +
    • Now, any changes you make in the code will be reflected in the bbot command.
    • +
    • After making your changes, run the tests locally to ensure they pass.
    • +
    +
    # auto-format code indentation, etc.
    +black .
    +
    +# run tests
    +./bbot/test/run_tests.sh
    +
    +
      +
• Finally, commit and push your changes, and create a pull request to the dev branch of the main BBOT repo (a sketch of the typical commands follows below).
    • +
    +
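A sketch of the typical commands for that last step (branch name and commit message are illustrative):

git checkout -b my-feature
git add -A
git commit -m "Describe your change"
git push -u origin my-feature
# then open a pull request on GitHub targeting the dev branch of the main BBOT repo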
    +
    + +
    +
    + +
    +
    +
    +
\ No newline at end of file
diff --git a/Stable/dev/discord_bot/index.html b/Stable/dev/discord_bot/index.html
new file mode 100644
index 0000000000..fd6145da4e
--- /dev/null
+++ b/Stable/dev/discord_bot/index.html
@@ -0,0 +1,672 @@
+Discord Bot Example - BBOT Docs
    +
    +
    +
    + +
    + +
    +
    + +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +

    Discord Bot Example

    +

    bbot-discord

    +

    Below is a simple Discord bot designed to run BBOT scans.

    +
    examples/discord_bot.py
    import discord
    +from discord.ext import commands
    +
    +from bbot.scanner import Scanner
    +from bbot.modules.output.discord import Discord
    +
    +
    +class BBOTDiscordBot(commands.Cog):
    +    """
    +    A simple Discord bot capable of running a BBOT scan.
    +
    +    To set up:
    +        1. Go to Discord Developer Portal (https://discord.com/developers)
    +        2. Create a new application
    +        3. Create an invite link for the bot, visit the link to invite it to your server
    +            - Your Application --> OAuth2 --> URL Generator
+                - For Scopes, select "bot"
    +                - For Bot Permissions, select:
    +                    - Read Messages/View Channels
    +                    - Send Messages
    +        4. Turn on "Message Content Intent"
    +            - Your Application --> Bot --> Privileged Gateway Intents --> Message Content Intent
+        5. Copy your Discord Bot Token and put it at the top of this file
    +            - Your Application --> Bot --> Reset Token
    +        6. Run this script
    +
    +    To scan evilcorp.com, you would type:
    +
    +        /scan evilcorp.com
    +
    +    Results will be output to the same channel.
    +    """
    +
    +    def __init__(self):
    +        self.current_scan = None
    +
    +    @commands.command(name="scan", description="Scan a target with BBOT.")
    +    async def scan(self, ctx, target: str):
    +        if self.current_scan is not None:
    +            self.current_scan.stop()
    +        await ctx.send(f"Starting scan against {target}.")
    +
    +        # creates scan instance
    +        self.current_scan = Scanner(target, flags="subdomain-enum")
    +        discord_module = Discord(self.current_scan)
    +
    +        seen = set()
    +        num_events = 0
    +        # start scan and iterate through results
    +        async for event in self.current_scan.async_start():
    +            if hash(event) in seen:
    +                continue
    +            seen.add(hash(event))
    +            await ctx.send(discord_module.format_message(event))
    +            num_events += 1
    +
    +        await ctx.send(f"Finished scan against {target}. {num_events:,} results.")
    +        self.current_scan = None
    +
    +
    +if __name__ == "__main__":
    +    intents = discord.Intents.default()
    +    intents.message_content = True
    +    bot = commands.Bot(command_prefix="/", intents=intents)
    +
    +    @bot.event
    +    async def on_ready():
    +        print(f"We have logged in as {bot.user}")
    +        await bot.add_cog(BBOTDiscordBot())
    +
    +    bot.run("DISCORD_BOT_TOKEN_HERE")
    +
    +
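To try it locally, a minimal sketch (assumes the discord.py package is not already present in your environment and that you have pasted your bot token into the script):

pip install discord.py
python examples/discord_bot.py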
    +
    + +
    +
    + +
    +
    +
    +
\ No newline at end of file
diff --git a/Stable/dev/engine/index.html b/Stable/dev/engine/index.html
new file mode 100644
index 0000000000..bb01b85c76
--- /dev/null
+++ b/Stable/dev/engine/index.html
@@ -0,0 +1,1888 @@
+Engine - BBOT Docs
    +
    + +
    + +
    +
    + +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    +

    +EngineBase +

    +
    +

    Base Engine class for Server and Client.

    +

An Engine is a simple and lightweight RPC implementation that allows offloading async tasks to a separate process. It leverages ZeroMQ in a ROUTER-DEALER configuration.

    +

BBOT makes use of this by spawning a dedicated engine for DNS and HTTP tasks. This offloads I/O and helps free up the main event loop for other tasks.

    +

    To use Engine, you must subclass both EngineClient and EngineServer.

    +

    See the respective EngineClient and EngineServer classes for usage examples.

    +
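Putting the two halves together, a hypothetical wiring sketch (MyClient and MyServer are the example subclasses shown in the EngineClient and EngineServer sections below):

    async def main():
        client = MyClient()                              # lazily spawns the server process on first use
        result = await client.my_function(arg1="hello")  # executed inside the EngineServer process
        async for item in client.my_generator():         # results streamed back over ZeroMQ
            print(item)
        await client.shutdown()                          # sends the shutdown signal and removes the socket file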
    +Source code in bbot/core/engine.py +
    26
    +27
    +28
    +29
    +30
    +31
    +32
    +33
    +34
    +35
    +36
    +37
    +38
    +39
    +40
    +41
    +42
    +43
    +44
    +45
    +46
    +47
    +48
    +49
    +50
    +51
    +52
    +53
    +54
    +55
    +56
    +57
    +58
    +59
    +60
    +61
    +62
    +63
    +64
    +65
    +66
    +67
    +68
    +69
    +70
    +71
    +72
    +73
    +74
    +75
    +76
    +77
    +78
    +79
    class EngineBase:
    +    """
    +    Base Engine class for Server and Client.
    +
    +    An Engine is a simple and lightweight RPC implementation that allows offloading async tasks
    +    to a separate process. It leverages ZeroMQ in a ROUTER-DEALER configuration.
    +
    +    BBOT makes use of this by spawning a dedicated engine for DNS and HTTP tasks.
    +    This offloads I/O and helps free up the main event loop for other tasks.
    +
    +    To use Engine, you must subclass both EngineClient and EngineServer.
    +
    +    See the respective EngineClient and EngineServer classes for usage examples.
    +    """
    +
    +    ERROR_CLASS = BBOTEngineError
    +
    +    def __init__(self):
    +        self._shutdown_status = False
    +        self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}")
    +
    +    def pickle(self, obj):
    +        try:
    +            return pickle.dumps(obj)
    +        except Exception as e:
    +            self.log.error(f"Error serializing object: {obj}: {e}")
    +            self.log.trace(traceback.format_exc())
    +        return error_sentinel
    +
    +    def unpickle(self, binary):
    +        try:
    +            return pickle.loads(binary)
    +        except Exception as e:
    +            self.log.error(f"Error deserializing binary: {e}")
    +            self.log.trace(f"Offending binary: {binary}")
    +            self.log.trace(traceback.format_exc())
    +        return error_sentinel
    +
    +    async def _infinite_retry(self, callback, *args, **kwargs):
    +        interval = kwargs.pop("_interval", 15)
    +        context = kwargs.pop("_context", "")
    +        # default overall timeout of 5 minutes (15 second interval * 20 iterations)
    +        max_retries = kwargs.pop("_max_retries", 4 * 5)
    +        if not context:
    +            context = f"{callback.__name__}({args}, {kwargs})"
    +        retries = 0
    +        while not self._shutdown_status:
    +            try:
    +                return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval)
    +            except (TimeoutError, asyncio.exceptions.TimeoutError):
    +                self.log.debug(f"{self.name}: Timeout after {interval:,} seconds{context}, retrying...")
    +                retries += 1
    +                if max_retries is not None and retries > max_retries:
    +                    raise TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}")
    +
    +
    +
    +
    +
    +
    +
    +

    +EngineClient +

    +
    +

    + Bases: EngineBase

    +

    The client portion of BBOT's RPC Engine.

    +

To create an engine, you must create a subclass of this class and also define methods for each of your desired functions.

    +

    Note that this only supports async functions. If you need to offload a synchronous function to another CPU, use BBOT's multiprocessing pool instead.

    +

    Any CPU or I/O intense logic should be implemented in the EngineServer.

    +

    These functions are typically stubs whose only job is to forward the arguments to the server.

    +

    Functions with the same names should be defined on the EngineServer.

    +

    The EngineClient must specify its associated server class via the SERVER_CLASS variable.

    +

Depending on whether your function is a generator, you will use either run_and_return() or run_and_yield().

    +

    Examples:

    +
    >>> from bbot.core.engine import EngineClient
    +>>>
    +>>> class MyClient(EngineClient):
    +>>>     SERVER_CLASS = MyServer
    +>>>
+>>>     async def my_function(self, **kwargs):
    +>>>         return await self.run_and_return("my_function", **kwargs)
    +>>>
    +>>>     async def my_generator(self, **kwargs):
    +>>>         async for _ in self.run_and_yield("my_generator", **kwargs):
    +>>>             yield _
    +
    +
    +Source code in bbot/core/engine.py +
     82
    + 83
    + 84
    + 85
    + 86
    + 87
    + 88
    + 89
    + 90
    + 91
    + 92
    + 93
    + 94
    + 95
    + 96
    + 97
    + 98
    + 99
    +100
    +101
    +102
    +103
    +104
    +105
    +106
    +107
    +108
    +109
    +110
    +111
    +112
    +113
    +114
    +115
    +116
    +117
    +118
    +119
    +120
    +121
    +122
    +123
    +124
    +125
    +126
    +127
    +128
    +129
    +130
    +131
    +132
    +133
    +134
    +135
    +136
    +137
    +138
    +139
    +140
    +141
    +142
    +143
    +144
    +145
    +146
    +147
    +148
    +149
    +150
    +151
    +152
    +153
    +154
    +155
    +156
    +157
    +158
    +159
    +160
    +161
    +162
    +163
    +164
    +165
    +166
    +167
    +168
    +169
    +170
    +171
    +172
    +173
    +174
    +175
    +176
    +177
    +178
    +179
    +180
    +181
    +182
    +183
    +184
    +185
    +186
    +187
    +188
    +189
    +190
    +191
    +192
    +193
    +194
    +195
    +196
    +197
    +198
    +199
    +200
    +201
    +202
    +203
    +204
    +205
    +206
    +207
    +208
    +209
    +210
    +211
    +212
    +213
    +214
    +215
    +216
    +217
    +218
    +219
    +220
    +221
    +222
    +223
    +224
    +225
    +226
    +227
    +228
    +229
    +230
    +231
    +232
    +233
    +234
    +235
    +236
    +237
    +238
    +239
    +240
    +241
    +242
    +243
    +244
    +245
    +246
    +247
    +248
    +249
    +250
    +251
    +252
    +253
    +254
    +255
    +256
    +257
    +258
    +259
    +260
    +261
    +262
    +263
    +264
    +265
    +266
    +267
    +268
    +269
    +270
    +271
    +272
    +273
    +274
    +275
    +276
    +277
    +278
    +279
    +280
    +281
    +282
    +283
    +284
    +285
    +286
    +287
    +288
    +289
    +290
    +291
    +292
    +293
    +294
    +295
    +296
    +297
    +298
    +299
    +300
    +301
    +302
    +303
    +304
    +305
    +306
    +307
    +308
    +309
    +310
    +311
    +312
    +313
    +314
    +315
    +316
    +317
    +318
    +319
    +320
    +321
    +322
    +323
    +324
    +325
    +326
    +327
    +328
    +329
    +330
    +331
    +332
    +333
    +334
    +335
    +336
    +337
    class EngineClient(EngineBase):
    +    """
    +    The client portion of BBOT's RPC Engine.
    +
    +    To create an engine, you must create a subclass of this class and also
    +    define methods for each of your desired functions.
    +
    +    Note that this only supports async functions. If you need to offload a synchronous function to another CPU, use BBOT's multiprocessing pool instead.
    +
+    Any CPU- or I/O-intensive logic should be implemented in the EngineServer.
    +
    +    These functions are typically stubs whose only job is to forward the arguments to the server.
    +
    +    Functions with the same names should be defined on the EngineServer.
    +
    +    The EngineClient must specify its associated server class via the `SERVER_CLASS` variable.
    +
+    Depending on whether your function is a generator, you will use either `run_and_return()` or `run_and_yield()`.
    +
    +    Examples:
    +        >>> from bbot.core.engine import EngineClient
    +        >>>
    +        >>> class MyClient(EngineClient):
    +        >>>     SERVER_CLASS = MyServer
    +        >>>
+        >>>     async def my_function(self, **kwargs):
    +        >>>         return await self.run_and_return("my_function", **kwargs)
    +        >>>
    +        >>>     async def my_generator(self, **kwargs):
    +        >>>         async for _ in self.run_and_yield("my_generator", **kwargs):
    +        >>>             yield _
    +    """
    +
    +    SERVER_CLASS = None
    +
    +    def __init__(self, **kwargs):
    +        super().__init__()
    +        self.name = f"EngineClient {self.__class__.__name__}"
    +        self.process = None
    +        if self.SERVER_CLASS is None:
    +            raise ValueError(f"Must set EngineClient SERVER_CLASS, {self.SERVER_CLASS}")
    +        self.CMDS = dict(self.SERVER_CLASS.CMDS)
    +        for k, v in list(self.CMDS.items()):
    +            self.CMDS[v] = k
    +        self.socket_address = f"zmq_{rand_string(8)}.sock"
    +        self.socket_path = Path(tempfile.gettempdir()) / self.socket_address
    +        self.server_kwargs = kwargs.pop("server_kwargs", {})
    +        self._server_process = None
    +        self.context = zmq.asyncio.Context()
    +        self.context.setsockopt(zmq.LINGER, 0)
    +        self.sockets = set()
    +
    +    def check_error(self, message):
    +        if isinstance(message, dict) and len(message) == 1 and "_e" in message:
    +            error, trace = message["_e"]
    +            error = self.ERROR_CLASS(error)
    +            error.engine_traceback = trace
    +            raise error
    +        return False
    +
    +    async def run_and_return(self, command, *args, **kwargs):
    +        fn_str = f"{command}({args}, {kwargs})"
    +        self.log.debug(f"{self.name}: executing run-and-return {fn_str}")
    +        if self._shutdown_status and not command == "_shutdown":
    +            self.log.verbose(f"{self.name} has been shut down and is not accepting new tasks")
    +            return
    +        async with self.new_socket() as socket:
    +            try:
    +                message = self.make_message(command, args=args, kwargs=kwargs)
    +                if message is error_sentinel:
    +                    return
    +                await self._infinite_retry(socket.send, message)
    +                binary = await self._infinite_retry(socket.recv, _context=f"waiting for return value from {fn_str}")
    +            except BaseException:
    +                try:
    +                    await self.send_cancel_message(socket, fn_str)
    +                except Exception:
    +                    self.log.debug(f"{self.name}: {fn_str} failed to send cancel message after exception")
    +                    self.log.trace(traceback.format_exc())
    +                raise
    +        # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}")
    +        message = self.unpickle(binary)
    +        self.log.debug(f"{self.name}: {fn_str} got return value: {message}")
    +        # error handling
    +        if self.check_error(message):
    +            return
    +        return message
    +
    +    async def run_and_yield(self, command, *args, **kwargs):
    +        fn_str = f"{command}({args}, {kwargs})"
    +        self.log.debug(f"{self.name}: executing run-and-yield {fn_str}")
    +        if self._shutdown_status:
    +            self.log.verbose("Engine has been shut down and is not accepting new tasks")
    +            return
    +        message = self.make_message(command, args=args, kwargs=kwargs)
    +        if message is error_sentinel:
    +            return
    +        async with self.new_socket() as socket:
    +            # TODO: synchronize server-side generator by limiting qsize
    +            # socket.setsockopt(zmq.RCVHWM, 1)
    +            # socket.setsockopt(zmq.SNDHWM, 1)
    +            await socket.send(message)
    +            while 1:
    +                try:
    +                    binary = await self._infinite_retry(
    +                        socket.recv, _context=f"waiting for new iteration from {fn_str}"
    +                    )
    +                    # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}")
    +                    message = self.unpickle(binary)
    +                    self.log.debug(f"{self.name} {command} got iteration: {message}")
    +                    # error handling
    +                    if self.check_error(message) or self.check_stop(message):
    +                        break
    +                    yield message
    +                except (StopAsyncIteration, GeneratorExit) as e:
    +                    exc_name = e.__class__.__name__
    +                    self.log.debug(f"{self.name}.{command} got {exc_name}")
    +                    try:
    +                        await self.send_cancel_message(socket, fn_str)
    +                    except Exception:
    +                        self.log.debug(f"{self.name}.{command} failed to send cancel message after {exc_name}")
    +                        self.log.trace(traceback.format_exc())
    +                    break
    +
    +    async def send_cancel_message(self, socket, context):
    +        """
    +        Send a cancel message and wait for confirmation from the server
    +        """
    +        # -1 == special "cancel" signal
    +        message = pickle.dumps({"c": -1})
    +        await self._infinite_retry(socket.send, message)
    +        while 1:
    +            response = await self._infinite_retry(
    +                socket.recv, _context=f"waiting for CANCEL_OK from {context}", _max_retries=4
    +            )
    +            response = pickle.loads(response)
    +            if isinstance(response, dict):
    +                response = response.get("m", "")
    +                if response == "CANCEL_OK":
    +                    break
    +
    +    async def send_shutdown_message(self):
    +        async with self.new_socket() as socket:
    +            # -99 == special shutdown message
    +            message = pickle.dumps({"c": -99})
    +            with suppress(TimeoutError, asyncio.exceptions.TimeoutError):
    +                await asyncio.wait_for(socket.send(message), 0.5)
    +            with suppress(TimeoutError, asyncio.exceptions.TimeoutError):
    +                while 1:
    +                    response = await asyncio.wait_for(socket.recv(), 0.5)
    +                    response = pickle.loads(response)
    +                    if isinstance(response, dict):
    +                        response = response.get("m", "")
    +                        if response == "SHUTDOWN_OK":
    +                            break
    +
    +    def check_stop(self, message):
    +        if isinstance(message, dict) and len(message) == 1 and "_s" in message:
    +            return True
    +        return False
    +
    +    def make_message(self, command, args=None, kwargs=None):
    +        try:
    +            cmd_id = self.CMDS[command]
    +        except KeyError:
    +            raise KeyError(f'Command "{command}" not found. Available commands: {",".join(self.available_commands)}')
    +        message = {"c": cmd_id}
    +        if args:
    +            message["a"] = args
    +        if kwargs:
    +            message["k"] = kwargs
    +        return pickle.dumps(message)
    +
    +    @property
    +    def available_commands(self):
    +        return [s for s in self.CMDS if isinstance(s, str)]
    +
    +    def start_server(self):
    +        import multiprocessing
    +
    +        process_name = multiprocessing.current_process().name
    +        if process_name == "MainProcess":
    +            kwargs = dict(self.server_kwargs)
    +            # if we're in tests, we use a single event loop to avoid weird race conditions
    +            # this allows us to more easily mock http, etc.
    +            if os.environ.get("BBOT_TESTING", "") == "True":
    +                kwargs["_loop"] = get_event_loop()
    +            self.process = CORE.create_process(
    +                target=self.server_process,
    +                args=(
    +                    self.SERVER_CLASS,
    +                    self.socket_path,
    +                ),
    +                kwargs=kwargs,
    +                custom_name=f"BBOT {self.__class__.__name__}",
    +            )
    +            self.process.start()
    +            return self.process
    +        else:
    +            raise BBOTEngineError(
    +                f"Tried to start server from process {process_name}. Did you forget \"if __name__ == '__main__'?\""
    +            )
    +
    +    @staticmethod
    +    def server_process(server_class, socket_path, **kwargs):
    +        try:
    +            loop = kwargs.pop("_loop", None)
    +            engine_server = server_class(socket_path, **kwargs)
    +            if loop is not None:
    +                future = asyncio.run_coroutine_threadsafe(engine_server.worker(), loop)
    +                future.result()
    +            else:
    +                asyncio.run(engine_server.worker())
    +        except (asyncio.CancelledError, KeyboardInterrupt, CancelledError):
    +            return
    +        except Exception:
    +            import traceback
    +
    +            log = logging.getLogger("bbot.core.engine.server")
    +            log.critical(f"Unhandled error in {server_class.__name__} server process: {traceback.format_exc()}")
    +
    +    @asynccontextmanager
    +    async def new_socket(self):
    +        if self._server_process is None:
    +            self._server_process = self.start_server()
    +            while not self.socket_path.exists():
    +                self.log.debug(f"{self.name}: waiting for server process to start...")
    +                await asyncio.sleep(0.1)
    +        socket = self.context.socket(zmq.DEALER)
    +        socket.setsockopt(zmq.LINGER, 0)
    +        socket.connect(f"ipc://{self.socket_path}")
    +        self.sockets.add(socket)
    +        try:
    +            yield socket
    +        finally:
    +            self.sockets.remove(socket)
    +            with suppress(Exception):
    +                socket.close()
    +
    +    async def shutdown(self):
    +        if not self._shutdown_status:
    +            self._shutdown_status = True
    +            self.log.verbose(f"{self.name}: shutting down...")
    +            # send shutdown signal
    +            await self.send_shutdown_message()
    +            # then terminate context
    +            try:
    +                self.context.destroy(linger=0)
    +            except Exception:
    +                print(traceback.format_exc(), file=sys.stderr)
    +            try:
    +                self.context.term()
    +            except Exception:
    +                print(traceback.format_exc(), file=sys.stderr)
    +            # delete socket file on exit
    +            self.socket_path.unlink(missing_ok=True)
    +
    +
    +
    +
    +

    +send_cancel_message + +async + +

    +
    send_cancel_message(socket, context)
    +
    +
    +

    Send a cancel message and wait for confirmation from the server

    +
    +Source code in bbot/core/engine.py +
    206
    +207
    +208
    +209
    +210
    +211
    +212
    +213
    +214
    +215
    +216
    +217
    +218
    +219
    +220
    +221
    async def send_cancel_message(self, socket, context):
    +    """
    +    Send a cancel message and wait for confirmation from the server
    +    """
    +    # -1 == special "cancel" signal
    +    message = pickle.dumps({"c": -1})
    +    await self._infinite_retry(socket.send, message)
    +    while 1:
    +        response = await self._infinite_retry(
    +            socket.recv, _context=f"waiting for CANCEL_OK from {context}", _max_retries=4
    +        )
    +        response = pickle.loads(response)
    +        if isinstance(response, dict):
    +            response = response.get("m", "")
    +            if response == "CANCEL_OK":
    +                break
    +
    +
    +
    +
    +
    +
    +
    +
    +

    +EngineServer +

    +
    +

    + Bases: EngineBase

    +

    The server portion of BBOT's RPC Engine.

    +

    Methods defined here must match the methods in your EngineClient.

    +

    To use the functions, you must create mappings for them in the CMDS attribute, as shown below.

    +

    Examples:

    +
    >>> from bbot.core.engine import EngineServer
    +>>>
    +>>> class MyServer(EngineServer):
    +>>>     CMDS = {
    +>>>         0: "my_function",
    +>>>         1: "my_generator",
    +>>>     }
    +>>>
+>>>     async def my_function(self, arg1=None):
    +>>>         await asyncio.sleep(1)
    +>>>         return str(arg1)
    +>>>
+>>>     async def my_generator(self):
    +>>>         for i in range(10):
    +>>>             await asyncio.sleep(1)
    +>>>             yield i
    +
    +
    +Source code in bbot/core/engine.py +
    340
    +341
    +342
    +343
    +344
    +345
    +346
    +347
    +348
    +349
    +350
    +351
    +352
    +353
    +354
    +355
    +356
    +357
    +358
    +359
    +360
    +361
    +362
    +363
    +364
    +365
    +366
    +367
    +368
    +369
    +370
    +371
    +372
    +373
    +374
    +375
    +376
    +377
    +378
    +379
    +380
    +381
    +382
    +383
    +384
    +385
    +386
    +387
    +388
    +389
    +390
    +391
    +392
    +393
    +394
    +395
    +396
    +397
    +398
    +399
    +400
    +401
    +402
    +403
    +404
    +405
    +406
    +407
    +408
    +409
    +410
    +411
    +412
    +413
    +414
    +415
    +416
    +417
    +418
    +419
    +420
    +421
    +422
    +423
    +424
    +425
    +426
    +427
    +428
    +429
    +430
    +431
    +432
    +433
    +434
    +435
    +436
    +437
    +438
    +439
    +440
    +441
    +442
    +443
    +444
    +445
    +446
    +447
    +448
    +449
    +450
    +451
    +452
    +453
    +454
    +455
    +456
    +457
    +458
    +459
    +460
    +461
    +462
    +463
    +464
    +465
    +466
    +467
    +468
    +469
    +470
    +471
    +472
    +473
    +474
    +475
    +476
    +477
    +478
    +479
    +480
    +481
    +482
    +483
    +484
    +485
    +486
    +487
    +488
    +489
    +490
    +491
    +492
    +493
    +494
    +495
    +496
    +497
    +498
    +499
    +500
    +501
    +502
    +503
    +504
    +505
    +506
    +507
    +508
    +509
    +510
    +511
    +512
    +513
    +514
    +515
    +516
    +517
    +518
    +519
    +520
    +521
    +522
    +523
    +524
    +525
    +526
    +527
    +528
    +529
    +530
    +531
    +532
    +533
    +534
    +535
    +536
    +537
    +538
    +539
    +540
    +541
    +542
    +543
    +544
    +545
    +546
    +547
    +548
    +549
    +550
    +551
    +552
    +553
    +554
    +555
    +556
    +557
    +558
    +559
    +560
    +561
    +562
    +563
    +564
    +565
    +566
    +567
    +568
    +569
    +570
    +571
    +572
    +573
    +574
    +575
    +576
    +577
    +578
    +579
    +580
    +581
    +582
    +583
    +584
    +585
    +586
    +587
    +588
    +589
    +590
    +591
    +592
    +593
    +594
    +595
    +596
    +597
    +598
    +599
    +600
    +601
    +602
    +603
    +604
    class EngineServer(EngineBase):
    +    """
    +    The server portion of BBOT's RPC Engine.
    +
    +    Methods defined here must match the methods in your EngineClient.
    +
    +    To use the functions, you must create mappings for them in the CMDS attribute, as shown below.
    +
    +    Examples:
    +        >>> from bbot.core.engine import EngineServer
    +        >>>
    +        >>> class MyServer(EngineServer):
    +        >>>     CMDS = {
    +        >>>         0: "my_function",
    +        >>>         1: "my_generator",
    +        >>>     }
    +        >>>
+        >>>     async def my_function(self, arg1=None):
    +        >>>         await asyncio.sleep(1)
    +        >>>         return str(arg1)
    +        >>>
+        >>>     async def my_generator(self):
    +        >>>         for i in range(10):
    +        >>>             await asyncio.sleep(1)
    +        >>>             yield i
    +    """
    +
    +    CMDS = {}
    +
    +    def __init__(self, socket_path):
    +        super().__init__()
    +        self.name = f"EngineServer {self.__class__.__name__}"
    +        self.socket_path = socket_path
    +        self.client_id_var = contextvars.ContextVar("client_id", default=None)
    +        # task <--> client id mapping
    +        self.tasks = {}
    +        # child tasks spawned by main tasks
    +        self.child_tasks = {}
    +        if self.socket_path is not None:
    +            # create ZeroMQ context
    +            self.context = zmq.asyncio.Context()
    +            self.context.setsockopt(zmq.LINGER, 0)
    +            # ROUTER socket can handle multiple concurrent requests
    +            self.socket = self.context.socket(zmq.ROUTER)
    +            self.socket.setsockopt(zmq.LINGER, 0)
    +            # create socket file
    +            self.socket.bind(f"ipc://{self.socket_path}")
    +
    +    @contextlib.contextmanager
    +    def client_id_context(self, value):
    +        token = self.client_id_var.set(value)
    +        try:
    +            yield
    +        finally:
    +            self.client_id_var.reset(token)
    +
    +    async def run_and_return(self, client_id, command_fn, *args, **kwargs):
    +        fn_str = f"{command_fn.__name__}({args}, {kwargs})"
    +        with self.client_id_context(client_id):
    +            try:
    +                self.log.debug(f"{self.name} run-and-return {fn_str}")
    +                result = error_sentinel
    +                try:
    +                    result = await command_fn(*args, **kwargs)
    +                except BaseException as e:
    +                    if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
    +                        error = f"Error in {self.name}.{fn_str}: {e}"
    +                        self.log.debug(error)
    +                        trace = traceback.format_exc()
    +                        self.log.debug(trace)
    +                        result = {"_e": (error, trace)}
    +                finally:
    +                    self.tasks.pop(client_id, None)
    +                    if result is not error_sentinel:
    +                        self.log.debug(f"{self.name}: Sending response to {fn_str}: {result}")
    +                        await self.send_socket_multipart(client_id, result)
    +            except BaseException as e:
    +                self.log.critical(
    +                    f"Unhandled exception in {self.name}.run_and_return({client_id}, {command_fn}, {args}, {kwargs}): {e}"
    +                )
    +                self.log.critical(traceback.format_exc())
    +            finally:
    +                self.log.debug(f"{self.name} finished run-and-return {command_fn.__name__}({args}, {kwargs})")
    +
    +    async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
    +        fn_str = f"{command_fn.__name__}({args}, {kwargs})"
    +        with self.client_id_context(client_id):
    +            try:
    +                self.log.debug(f"{self.name} run-and-yield {fn_str}")
    +                try:
    +                    async for _ in command_fn(*args, **kwargs):
    +                        self.log.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}")
    +                        await self.send_socket_multipart(client_id, _)
    +                except BaseException as e:
    +                    if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
    +                        error = f"Error in {self.name}.{fn_str}: {e}"
    +                        trace = traceback.format_exc()
    +                        self.log.debug(error)
    +                        self.log.debug(trace)
    +                        result = {"_e": (error, trace)}
    +                        await self.send_socket_multipart(client_id, result)
    +                finally:
    +                    self.log.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()")
    +                    # _s == special signal that means StopIteration
    +                    await self.send_socket_multipart(client_id, {"_s": None})
    +                    self.tasks.pop(client_id, None)
    +            except BaseException as e:
    +                self.log.critical(
    +                    f"Unhandled exception in {self.name}.run_and_yield({client_id}, {command_fn}, {args}, {kwargs}): {e}"
    +                )
    +                self.log.critical(traceback.format_exc())
    +            finally:
    +                self.log.debug(f"{self.name} finished run-and-yield {command_fn.__name__}()")
    +
    +    async def send_socket_multipart(self, client_id, message):
    +        try:
    +            message = pickle.dumps(message)
    +            await self._infinite_retry(self.socket.send_multipart, [client_id, message])
    +        except Exception as e:
    +            self.log.verbose(f"Error sending ZMQ message: {e}")
    +            self.log.trace(traceback.format_exc())
    +
    +    def check_error(self, message):
    +        if message is error_sentinel:
    +            return True
    +
    +    async def worker(self):
    +        self.log.debug(f"{self.name}: starting worker")
    +        try:
    +            while 1:
    +                client_id, binary = await self.socket.recv_multipart()
    +                message = self.unpickle(binary)
    +                # self.log.debug(f"{self.name} got message: {message}")
    +                if self.check_error(message):
    +                    continue
    +
    +                cmd = message.get("c", None)
    +                if not isinstance(cmd, int):
    +                    self.log.warning(f"{self.name}: no command sent in message: {message}")
    +                    continue
    +
    +                # -1 == cancel task
    +                if cmd == -1:
    +                    self.log.debug(f"{self.name} got cancel signal")
    +                    await self.send_socket_multipart(client_id, {"m": "CANCEL_OK"})
    +                    await self.cancel_task(client_id)
    +                    continue
    +
    +                # -99 == shutdown task
    +                if cmd == -99:
    +                    self.log.debug(f"{self.name} got shutdown signal")
    +                    await self.send_socket_multipart(client_id, {"m": "SHUTDOWN_OK"})
    +                    await self._shutdown()
    +                    return
    +
    +                args = message.get("a", ())
    +                if not isinstance(args, tuple):
    +                    self.log.warning(f"{self.name}: received invalid args of type {type(args)}, should be tuple")
    +                    continue
    +                kwargs = message.get("k", {})
    +                if not isinstance(kwargs, dict):
    +                    self.log.warning(f"{self.name}: received invalid kwargs of type {type(kwargs)}, should be dict")
    +                    continue
    +
    +                command_name = self.CMDS[cmd]
    +                command_fn = getattr(self, command_name, None)
    +
    +                if command_fn is None:
    +                    self.log.warning(f'{self.name} has no function named "{command_fn}"')
    +                    continue
    +
    +                if inspect.isasyncgenfunction(command_fn):
    +                    # self.log.debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()")
    +                    coroutine = self.run_and_yield(client_id, command_fn, *args, **kwargs)
    +                else:
    +                    # self.log.debug(f"{self.name}: creating run-and-return coroutine for {command_name}()")
    +                    coroutine = self.run_and_return(client_id, command_fn, *args, **kwargs)
    +
    +                # self.log.debug(f"{self.name}: creating task for {command_name}() coroutine")
    +                task = asyncio.create_task(coroutine)
    +                self.tasks[client_id] = task, command_fn, args, kwargs
    +                # self.log.debug(f"{self.name}: finished creating task for {command_name}() coroutine")
    +        except BaseException as e:
    +            await self._shutdown()
    +            if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
    +                self.log.error(f"{self.name}: error in EngineServer worker: {e}")
    +                self.log.trace(traceback.format_exc())
    +        finally:
    +            self.log.debug(f"{self.name}: finished worker()")
    +
    +    async def _shutdown(self):
    +        if not self._shutdown_status:
    +            self.log.verbose(f"{self.name}: shutting down...")
    +            self._shutdown_status = True
    +            await self.cancel_all_tasks()
    +            try:
    +                self.context.destroy(linger=0)
    +            except Exception:
    +                self.log.trace(traceback.format_exc())
    +            try:
    +                self.context.term()
    +            except Exception:
    +                self.log.trace(traceback.format_exc())
    +            self.log.debug(f"{self.name}: finished shutting down")
    +
    +    def new_child_task(self, client_id, coro):
    +        task = asyncio.create_task(coro)
    +        try:
    +            self.child_tasks[client_id].add(task)
    +        except KeyError:
    +            self.child_tasks[client_id] = {task}
    +        return task
    +
    +    async def finished_tasks(self, client_id, timeout=None):
    +        child_tasks = self.child_tasks.get(client_id, set())
    +        try:
    +            done, pending = await asyncio.wait(child_tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
    +        except BaseException as e:
    +            if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
    +                done = set()
    +                self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({child_tasks})")
    +                for task in child_tasks:
    +                    task.cancel()
    +            else:
    +                if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
    +                    self.log.error(f"{self.name}: Unhandled exception in finished_tasks({child_tasks}): {e}")
    +                    self.log.trace(traceback.format_exc())
    +                raise
    +        self.child_tasks[client_id] = pending
    +        return done
    +
    +    async def cancel_task(self, client_id):
    +        parent_task = self.tasks.pop(client_id, None)
    +        if parent_task is None:
    +            return
    +        parent_task, _cmd, _args, _kwargs = parent_task
    +        self.log.debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})")
    +        parent_task.cancel()
    +        child_tasks = self.child_tasks.pop(client_id, set())
    +        if child_tasks:
    +            self.log.debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}")
    +            for child_task in child_tasks:
    +                child_task.cancel()
    +
    +        for task in [parent_task] + list(child_tasks):
    +            await self._cancel_task(task)
    +
    +    async def _cancel_task(self, task):
    +        try:
    +            await asyncio.wait_for(task, timeout=10)
    +        except (TimeoutError, asyncio.exceptions.TimeoutError):
    +            self.log.debug(f"{self.name}: Timeout cancelling task")
    +            return
    +        except (KeyboardInterrupt, asyncio.CancelledError):
    +            return
    +        except BaseException as e:
    +            self.log.error(f"Unhandled error in {task.get_coro().__name__}(): {e}")
    +            self.log.trace(traceback.format_exc())
    +
    +    async def cancel_all_tasks(self):
    +        for client_id in list(self.tasks):
    +            await self.cancel_task(client_id)
    +        for client_id, tasks in self.child_tasks.items():
    +            for task in tasks:
    +                await self._cancel_task(task)
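
    A minimal sketch (not from the BBOT source) of how a concrete server could plug into the worker() loop above. It assumes only what the code shows: command names come from the CMDS mapping, positional and keyword arguments arrive in the message's "a" and "k" fields, and async-generator commands are dispatched through run_and_yield() while plain coroutines go through run_and_return(). The class name, command indices, and command bodies are purely illustrative.

        class ExampleEngineServer(EngineServer):  # EngineServer as defined above
            CMDS = {
                0: "resolve",        # plain coroutine -> dispatched via run_and_return()
                1: "resolve_batch",  # async generator -> dispatched via run_and_yield()
            }

            async def resolve(self, hostname):
                # worker() invokes this as command_fn(*args, **kwargs), taken from the message's "a"/"k" fields
                return {"host": hostname, "ips": ["192.0.2.1"]}

            async def resolve_batch(self, *hostnames):
                # inspect.isasyncgenfunction() is True here, so results are yielded back one at a time
                for hostname in hostnames:
                    yield {"host": hostname, "ips": ["192.0.2.1"]}

    The distinction matters because an async-generator command can stream any number of results for a single request, whereas a plain coroutine command produces exactly one reply.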
    +
    +
    +
    +
    +
    +
    +
    +
    + +
    +
    + +
    +
    +
    +
    + \ No newline at end of file
diff --git a/Stable/dev/event/index.html b/Stable/dev/event/index.html
index 620f7ddf5d..2840173720 100644
--- a/Stable/dev/event/index.html
+++ b/Stable/dev/event/index.html
  [page <head> and "Developer Manual" navigation hunks (@@ -8,7 +8,7 @@ through @@ -307,6 +338,13 @@) omitted: only stripped HTML markup survived extraction]
  • @@ -464,18 +568,7 @@

    make_event

    -
    make_event(
    -    data,
    -    event_type=None,
    -    source=None,
    -    module=None,
    -    scan=None,
    -    scans=None,
    -    tags=None,
    -    confidence=5,
    -    dummy=False,
    -    internal=None,
    -)
    +
    make_event(data, event_type=None, parent=None, context=None, module=None, scan=None, scans=None, tags=None, confidence=5, dummy=False, internal=None)
     

    Creates and returns a new event object or modifies an existing one.

    @@ -503,13 +596,23 @@

  • -source +parent (BaseEvent, default: None ) –
    -

    Source event leading to this event's discovery.

    +

    Parent event leading to this event's discovery.

    +
    +
  • +
  • +context + (str, default: + None +) + – +
    +

    Description of circumstances leading to event's discovery.

  • @@ -595,7 +698,7 @@

    Raises:

    • -ValidationError +ValidationError

      Raised when there's an error in event data or type sanitization.

      @@ -604,11 +707,11 @@

    Examples:

    If inside a module, e.g. from within its handle_event():

    -
    >>> self.make_event("1.2.3.4", source=event)
    -IP_ADDRESS("1.2.3.4", module=nmap, tags={'ipv4', 'distance-1'})
    +
    >>> self.make_event("1.2.3.4", parent=event)
    +IP_ADDRESS("1.2.3.4", module=portscan, tags={'ipv4', 'distance-1'})
     

    If you're outside a module but you have a scan object:

    -
    >>> scan.make_event("1.2.3.4", source=scan.root_event)
    +
    >>> scan.make_event("1.2.3.4", parent=scan.root_event)
     IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'})
     

    If you're outside a scan and just messing around:

    @@ -623,132 +726,138 @@
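
    An additional, illustrative example showing the new context argument alongside parent (the context string is invented; the call shape and output mirror the first example above):

    >>> self.make_event("1.2.3.4", parent=event, context="portscan found an open host at 1.2.3.4")
    IP_ADDRESS("1.2.3.4", module=portscan, tags={'ipv4', 'distance-1'})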

    Source code in bbot/core/event/base.py
    [rendered line-number gutter omitted: the removed listing covered source lines 1206-1328; the updated listing covers source lines 1458-1585]
    def make_event(
         data,
         event_type=None,
    -    source=None,
    +    parent=None,
    +    context=None,
         module=None,
         scan=None,
         scans=None,
    @@ -767,7 +876,8 @@ 

    Parameters: data (Union[str, dict, BaseEvent]): The primary data for the event or an existing event object. event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. Auto-detected if not provided. - source (BaseEvent, optional): Source event leading to this event's discovery. + parent (BaseEvent, optional): Parent event leading to this event's discovery. + context (str, optional): Description of circumstances leading to event's discovery. module (str, optional): Module that discovered the event. scan (Scan, optional): BBOT Scan object associated with the event. scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization. @@ -784,11 +894,11 @@

    Examples: If inside a module, e.g. from within its `handle_event()`: - >>> self.make_event("1.2.3.4", source=event) - IP_ADDRESS("1.2.3.4", module=nmap, tags={'ipv4', 'distance-1'}) + >>> self.make_event("1.2.3.4", parent=event) + IP_ADDRESS("1.2.3.4", module=portscan, tags={'ipv4', 'distance-1'}) If you're outside a module but you have a scan object: - >>> scan.make_event("1.2.3.4", source=scan.root_event) + >>> scan.make_event("1.2.3.4", parent=scan.root_event) IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'}) If you're outside a scan and just messing around: @@ -816,8 +926,10 @@

    data.scans = scans if module is not None: data.module = module - if source is not None: - data.source = source + if parent is not None: + data.parent = parent + if context is not None: + data.discovery_context = context if internal == True: data.internal = True if tags: @@ -859,7 +971,8 @@

    return event_class( data, event_type=event_type, - source=source, + parent=parent, + context=context, module=module, scan=scan, scans=scans, @@ -907,7 +1020,7 @@

    Raises: