From 2fb24fb3c36454eb358cacd1da15f145a760db01 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 14 Aug 2023 14:10:35 -0400 Subject: [PATCH 001/123] webhook output modules --- bbot/core/event/base.py | 7 ++ bbot/modules/output/discord.py | 73 +++++++++++++++++++ bbot/modules/output/slack.py | 25 +++++++ bbot/modules/output/teams.py | 15 ++++ .../module_tests/test_module_discord.py | 39 ++++++++++ .../module_tests/test_module_slack.py | 9 +++ .../module_tests/test_module_teams.py | 29 ++++++++ 7 files changed, 197 insertions(+) create mode 100644 bbot/modules/output/discord.py create mode 100644 bbot/modules/output/slack.py create mode 100644 bbot/modules/output/teams.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_discord.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_slack.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_teams.py diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index af3867518b..74058b3788 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -845,6 +845,13 @@ def _pretty_string(self): class VULNERABILITY(DictHostEvent): _always_emit = True + severity_colors = { + "CRITICAL": "🟪", + "HIGH": "🟥", + "MEDIUM": "🟧", + "LOW": "🟨", + "UNKNOWN": "⬜", + } def sanitize_data(self, data): self.add_tag(data["severity"].lower()) diff --git a/bbot/modules/output/discord.py b/bbot/modules/output/discord.py new file mode 100644 index 0000000000..4aa63de74d --- /dev/null +++ b/bbot/modules/output/discord.py @@ -0,0 +1,73 @@ +import yaml + +from bbot.modules.output.base import BaseOutputModule + + +class Discord(BaseOutputModule): + watched_events = ["*"] + meta = {"description": "Message a Discord channel when certain events are encountered"} + options = {"webhook_url": "", "event_types": ["VULNERABILITY"]} + options_desc = {"webhook_url": "Discord webhook URL", "event_types": "Types of events to send"} + accept_dupes = False + good_status_code = 204 + content_key = "content" + + async def setup(self): + self.webhook_url = self.config.get("webhook_url", "") + if not self.webhook_url: + self.warning("Must set Webhook URL") + return False + return True + + async def handle_event(self, event): + while 1: + data = {self.content_key: self.format_message(event)} + response = await self.helpers.request( + url=self.webhook_url, + method="POST", + json=data, + ) + status_code = getattr(response, "status_code", 0) + if self.evaluate_response(response): + break + else: + response_data = getattr(response, "text", "") + try: + retry_after = response.json().get("retry_after", 1) + except Exception: + retry_after = 1 + self.verbose( + f"Error sending {event}: status code {status_code}, response: {response_data}, retrying in {retry_after} seconds" + ) + await self.helpers.sleep(retry_after) + + def get_watched_events(self): + if self._watched_events is None: + event_types = self.config.get("event_types", ["VULNERABILITY"]) + if isinstance(event_types, str): + event_types = [event_types] + self._watched_events = set(event_types) + return self._watched_events + + def format_message_str(self, event): + event_tags = ",".join(event.tags) + return f"`[{event.type}]`\t**`{event.data}`**\ttags:{event_tags}" + + def format_message_other(self, event): + event_yaml = yaml.dump(event.data) + event_type = f"**`[{event.type}]`**" + if event.type == "VULNERABILITY": + severity = event.data.get("severity", "UNKNOWN") + severity_color = event.severity_colors[severity] + event_type = f"{severity_color} {event.type} 
({severity}) {severity_color}" + return f"""**`{event_type}`**\n```yaml\n{event_yaml}\n```""" + + def format_message(self, event): + if isinstance(event.data, str): + return self.format_message_str(event) + else: + return self.format_message_other(event) + + def evaluate_response(self, response): + status_code = getattr(response, "status_code", 0) + return status_code == self.good_status_code diff --git a/bbot/modules/output/slack.py b/bbot/modules/output/slack.py new file mode 100644 index 0000000000..03671e2571 --- /dev/null +++ b/bbot/modules/output/slack.py @@ -0,0 +1,25 @@ +import yaml + +from .discord import Discord + + +class Slack(Discord): + watched_events = ["*"] + meta = {"description": "Message a Slack channel when certain events are encountered"} + options = {"webhook_url": "", "event_types": ["VULNERABILITY"]} + options_desc = {"webhook_url": "Discord webhook URL", "event_types": "Types of events to send"} + good_status_code = 200 + content_key = "text" + + def format_message_str(self, event): + event_tags = ",".join(sorted(event.tags)) + return f"`[{event.type}]`\t*`{event.data}`*\t`{event_tags}`" + + def format_message_other(self, event): + event_yaml = yaml.dump(event.data) + event_type = f"*`[{event.type}]`*" + if event.type == "VULNERABILITY": + severity = event.data.get("severity", "UNKNOWN") + severity_color = event.severity_colors[severity] + event_type = f"{severity_color} `{event.type} ({severity})` {severity_color}" + return f"""*{event_type}*\n```\n{event_yaml}\n```""" diff --git a/bbot/modules/output/teams.py b/bbot/modules/output/teams.py new file mode 100644 index 0000000000..f3d8dbfc1c --- /dev/null +++ b/bbot/modules/output/teams.py @@ -0,0 +1,15 @@ +from .discord import Discord + + +class Teams(Discord): + watched_events = ["*"] + meta = {"description": "Message a Slack channel when certain events are encountered"} + options = {"webhook_url": "", "event_types": ["VULNERABILITY"]} + options_desc = {"webhook_url": "Discord webhook URL", "event_types": "Types of events to send"} + max_event_handlers = 5 + good_status_code = 200 + content_key = "text" + + def evaluate_response(self, response): + text = getattr(response, "text", "") + return text == "1" diff --git a/bbot/test/test_step_2/module_tests/test_module_discord.py b/bbot/test/test_step_2/module_tests/test_module_discord.py new file mode 100644 index 0000000000..0e374d8339 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_discord.py @@ -0,0 +1,39 @@ +import httpx + +from .base import ModuleTestBase + + +class TestDiscord(ModuleTestBase): + targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx"] + modules_overrides = ["discord", "badsecrets", "httpx"] + + webhook_url = "output_modules.discord.webhook_url=https://discord.com/api/webhooks/1234/deadbeef-P-uF-asdf" + config_overrides = {"output_modules": {"discord": {"webhook_url": webhook_url}}} + + def custom_setup(self, module_test): + respond_args = { + "response_data": "
<html><body><p>Express Cookie Test</p></body></html>
", + "headers": { + "set-cookie": "connect.sid=s%3A8FnPwdeM9kdGTZlWvdaVtQ0S1BCOhY5G.qys7H2oGSLLdRsEq7sqh7btOohHsaRKqyjV4LiVnBvc; Path=/; Expires=Wed, 05 Apr 2023 04:47:29 GMT; HttpOnly" + }, + } + module_test.set_expect_requests(expect_args={"uri": "/cookie.aspx"}, respond_args=respond_args) + module_test.set_expect_requests(expect_args={"uri": "/cookie2.aspx"}, respond_args=respond_args) + module_test.request_count = 0 + + async def setup_after_prep(self, module_test): + self.custom_setup(module_test) + + def custom_response(request: httpx.Request): + module_test.request_count += 1 + if module_test.request_count == 2: + return httpx.Response(status_code=429, json={"retry_after": 0.01}) + else: + return httpx.Response(status_code=module_test.module.good_status_code) + + module_test.httpx_mock.add_callback(custom_response) + + def check(self, module_test, events): + vulns = [e for e in events if e.type == "VULNERABILITY"] + assert len(vulns) == 2 + assert module_test.request_count == 3 diff --git a/bbot/test/test_step_2/module_tests/test_module_slack.py b/bbot/test/test_step_2/module_tests/test_module_slack.py new file mode 100644 index 0000000000..e94d091779 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_slack.py @@ -0,0 +1,9 @@ +from .test_module_discord import TestDiscord as DiscordBase + + +class TestSlack(DiscordBase): + targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx"] + modules_overrides = ["slack", "badsecrets", "httpx"] + + webhook_url = "output_modules.slack.webhook_url=https://hooks.slack.com/services/deadbeef/deadbeef/deadbeef" + config_overrides = {"output_modules": {"slack": {"webhook_url": webhook_url}}} diff --git a/bbot/test/test_step_2/module_tests/test_module_teams.py b/bbot/test/test_step_2/module_tests/test_module_teams.py new file mode 100644 index 0000000000..6d49d6c9fd --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_teams.py @@ -0,0 +1,29 @@ +import httpx + +from .test_module_discord import TestDiscord as DiscordBase + + +class TestTeams(DiscordBase): + targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx"] + modules_overrides = ["teams", "badsecrets", "httpx"] + + webhook_url = "https://evilcorp.webhook.office.com/webhookb2/deadbeef@deadbeef/IncomingWebhook/deadbeef/deadbeef" + config_overrides = {"output_modules": {"teams": {"webhook_url": webhook_url}}} + + async def setup_after_prep(self, module_test): + self.custom_setup(module_test) + + def custom_response(request: httpx.Request): + module_test.request_count += 1 + if module_test.request_count == 2: + return httpx.Response( + status_code=200, + text="Webhook message delivery failed with error: Microsoft Teams endpoint returned HTTP error 429 with ContextId tcid=0,server=msgapi-production-eus-azsc2-4-170,cv=deadbeef=2..", + ) + else: + return httpx.Response( + status_code=200, + text="1", + ) + + module_test.httpx_mock.add_callback(custom_response) From 25fa2db7118cf3ac76657f0b8d9fd0d3487ce59a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 14 Aug 2023 15:42:22 -0400 Subject: [PATCH 002/123] updated docs, fixed docs publishing pipeline --- .github/workflows/tests.yml | 5 +---- README.md | 26 +++++++++++++++++++++++++- docs/scanning/output.md | 33 +++++++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+), 5 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 660ec4329b..088cc7ee38 100644 --- a/.github/workflows/tests.yml +++ 
b/.github/workflows/tests.yml @@ -5,9 +5,6 @@ on: - stable - dev pull_request: - pull_request_target: - types: - - closed concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }} @@ -100,7 +97,7 @@ jobs: publish_code: needs: update_docs runs-on: ubuntu-latest - if: github.event_name == 'pull_request_target' && github.event.pull_request.merged == true && (github.event.pull_request.base.ref == 'dev' || github.event.pull_request.base.ref == 'stable') + if: github.event_name == 'push' && (github.event.pull_request.base.ref == 'dev' || github.event.pull_request.base.ref == 'stable') continue-on-error: true steps: - uses: actions/checkout@v3 diff --git a/README.md b/README.md index 77ed65a577..87fef8fa94 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,7 @@ bbot -t evilcorp.com -f subdomain-enum email-enum cloud-enum web-basic -m nmap g ## Targets -BBOT accepts an unlimited number of targets. You can specify targets either directly on the command line or in files (or both!). Targets can be any of the following: +BBOT accepts an unlimited number of targets via `-t`. You can specify targets either directly on the command line or in files (or both!). Targets can be any of the following: - `DNS_NAME` (`evilcorp.com`) - `IP_ADDRESS` (`1.2.3.4`) @@ -94,6 +94,30 @@ BBOT accepts an unlimited number of targets. You can specify targets either dire For more information, see [Targets](https://www.blacklanternsecurity.com/bbot/scanning/#targets-t). To learn how BBOT handles scope, see [Scope](https://www.blacklanternsecurity.com/bbot/scanning/#scope). +## API Keys + +Similar to Amass or Subfinder, BBOT supports API keys for various third-party services such as SecurityTrails, etc. + +The standard way to do this is to enter your API keys in **`~/.config/bbot/secrets.yml`**: +```yaml +modules: + shodan_dns: + api_key: 4f41243847da693a4f356c0486114bc6 + c99: + api_key: 21a270d5f59c9b05813a72bb41707266 + virustotal: + api_key: dd5f0eee2e4a99b71a939bded450b246 + securitytrails: + api_key: d9a05c3fd9a514497713c54b4455d0b0 +``` + +If you like, you can also specify them on the command line: +```bash +bbot -c modules.virustotal.api_key=dd5f0eee2e4a99b71a939bded450b246 +``` + +For details, see [Configuration](https://www.blacklanternsecurity.com/bbot/scanning/configuration/) + ## BBOT as a Python library **Synchronous** diff --git a/docs/scanning/output.md b/docs/scanning/output.md index edfb0c9926..0edf7d55ec 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -70,6 +70,39 @@ www.evilcorp.com mail.evilcorp.com ``` +### Discord / Slack / Teams + +![bbot-discord](https://github.com/blacklanternsecurity/bbot/assets/20261699/6d88045c-8eac-43b6-8de9-c621ecf60c2d) + +BBOT supports output via webhooks to `discord`, `slack`, and `teams`. To use them, you must specify a webhook URL either in the config: + +```yaml title="~/.bbot/config/bbot.yml" +output_modules: + discord: + webhook_url: output_modules.discord.webhook_url=https://discord.com/api/webhooks/1234/deadbeef +``` + +...or on the command line: +```bash +bbot -t evilcorp.com -om discord -c output_modules.discord.webhook_url=https://discord.com/api/webhooks/1234/deadbeef +``` + +By default, only `VULNERABILITY` events are sent. 
You can customize this by setting `event_types`: + +```yaml title="~/.bbot/config/bbot.yml" +output_modules: + discord: + event_types: + - STORAGE_BUCKET + - FINDING + - VULNERABILITY +``` + +...or on the command line: +```bash +bbot -t evilcorp.com -om discord -c output_modules.discord.event_types=["STORAGE_BUCKET","FINDING","VULNERABILITY"] +``` + ### HTTP The `http` output module sends [events](./events) in JSON format to a desired HTTP endpoint. From 78837f131da79290ce963b0c265617374610375e Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Mon, 14 Aug 2023 19:50:31 +0000 Subject: [PATCH 003/123] Refresh module docs --- docs/modules/list_of_modules.md | 3 +++ docs/scanning/advanced.md | 2 +- docs/scanning/configuration.md | 6 ++++++ docs/scanning/events.md | 2 +- 4 files changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 79cefba118..76cccfe326 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -87,12 +87,15 @@ | zoomeye | scan | Yes | Query ZoomEye's API for subdomains | affiliates, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | asset_inventory | output | No | Output to an asset inventory style flattened CSV file | | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT, TECHNOLOGY, URL, VULNERABILITY | IP_ADDRESS, OPEN_TCP_PORT | | csv | output | No | Output to CSV | | * | | +| discord | output | No | Message a Discord channel when certain events are encountered | | * | | | http | output | No | Send every event to a custom URL via a web request | | * | | | human | output | No | Output to text | | * | | | json | output | No | Output to JSON | | * | | | neo4j | output | No | Output to Neo4j | | * | | | python | output | No | Output via Python API | | * | | +| slack | output | No | Message a Slack channel when certain events are encountered | | * | | | subdomains | output | No | Output only resolved, in-scope subdomains | subdomain-enum | DNS_NAME, DNS_NAME_UNRESOLVED | | +| teams | output | No | Message a Slack channel when certain events are encountered | | * | | | web_report | output | No | Create a markdown report with web assets | | FINDING, TECHNOLOGY, URL, VHOST, VULNERABILITY | | | websocket | output | No | Output to websockets | | * | | | aggregate | internal | No | Summarize statistics at the end of a scan | passive, safe | | | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 0afe70bbd8..1617378369 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -73,7 +73,7 @@ Modules: -ef FLAG [FLAG ...], --exclude-flags FLAG [FLAG ...] Disable modules with these flags. (e.g. -ef aggressive) -om MODULE [MODULE ...], --output-modules MODULE [MODULE ...] - Output module(s). Choices: asset_inventory,csv,http,human,json,neo4j,python,subdomains,web_report,websocket + Output module(s). Choices: asset_inventory,csv,discord,http,human,json,neo4j,python,slack,subdomains,teams,web_report,websocket --allow-deadly Enable the use of highly aggressive modules Scan: diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 8a1a46508c..95fef65fb8 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -317,6 +317,8 @@ Many modules accept their own configuration options. 
These options have the abil | output_modules.asset_inventory.summary_netmask | int | Subnet mask to use when summarizing IP addresses at end of scan | 16 | | output_modules.asset_inventory.use_previous | bool |` Emit previous asset inventory as new events (use in conjunction with -n ) `| False | | output_modules.csv.output_file | str | Output to CSV file | | +| output_modules.discord.event_types | list | Types of events to send | ['VULNERABILITY'] | +| output_modules.discord.webhook_url | str | Discord webhook URL | | | output_modules.http.bearer | str | Authorization Bearer token | | | output_modules.http.method | str | HTTP method | POST | | output_modules.http.password | str | Password (basic auth) | | @@ -330,8 +332,12 @@ Many modules accept their own configuration options. These options have the abil | output_modules.neo4j.password | str | Neo4j password | bbotislife | | output_modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 | | output_modules.neo4j.username | str | Neo4j username | neo4j | +| output_modules.slack.event_types | list | Types of events to send | ['VULNERABILITY'] | +| output_modules.slack.webhook_url | str | Discord webhook URL | | | output_modules.subdomains.include_unresolved | bool | Include unresolved subdomains in output | False | | output_modules.subdomains.output_file | str | Output to file | | +| output_modules.teams.event_types | list | Types of events to send | ['VULNERABILITY'] | +| output_modules.teams.webhook_url | str | Discord webhook URL | | | output_modules.web_report.css_theme_file | str | CSS theme URL for HTML output | https://cdnjs.cloudflare.com/ajax/libs/github-markdown-css/5.1.0/github-markdown.min.css | | output_modules.web_report.output_file | str | Output to file | | | output_modules.websocket.token | str | Authorization Bearer token | | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 3871d98390..274f6a8be5 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -49,7 +49,7 @@ Below is a full list of event types along with which modules produce/consume the | Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | |---------------------|-----------------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 8 | 0 | affiliates, csv, http, human, json, neo4j, python, websocket | | +| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | | ASN | 0 | 1 | | asn | | DNS_NAME | 53 | 43 | anubisdb, 
asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github, hackertarget, hunterio, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | | DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | From 6a05ccbf625bd3560f941d81308f607ea97ac9ab Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 14 Aug 2023 15:53:26 -0400 Subject: [PATCH 004/123] increment version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f7ed520ff3..6d0b54416f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ line-length = 119 [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v1.1.0.{{ distance }}{% if branch == "dev" %}rc{% endif %}' +format-jinja = 'v1.1.1.{{ distance }}{% if branch == "dev" %}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From 331d5814e33af7bf7e32c1a6b937f8af2304a6f3 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 14 Aug 2023 16:20:21 -0400 Subject: [PATCH 005/123] updated versioning template --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6d0b54416f..57d398133d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ line-length = 119 [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v1.1.1.{{ distance }}{% if branch == "dev" %}rc{% endif %}' +format-jinja = 'v1.1.0a{% if branch == "dev" %}.{{ distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From 66a52e4ba0514b46572adf15235b0b8e886b0ca3 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 14 Aug 2023 16:42:57 -0400 Subject: [PATCH 006/123] update for PEP 440 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 57d398133d..5ccb595916 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ line-length = 119 [tool.poetry-dynamic-versioning] enable = true metadata = false -format-jinja = 'v1.1.0a{% if branch == "dev" %}.{{ distance }}rc{% endif %}' +format-jinja = 'v1.1.0.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}' [tool.poetry-dynamic-versioning.substitution] files = ["*/__init__.py"] From 2c1195aa002a75c84d6569054d14a342fda2996d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 14 Aug 2023 17:45:55 -0400 Subject: [PATCH 007/123] added chaos module --- bbot/modules/chaos.py | 37 +++++++++++++++++++ .../module_tests/test_module_chaos.py | 25 +++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 
bbot/modules/chaos.py create mode 100644 bbot/test/test_step_2/module_tests/test_module_chaos.py diff --git a/bbot/modules/chaos.py b/bbot/modules/chaos.py new file mode 100644 index 0000000000..f247ce18db --- /dev/null +++ b/bbot/modules/chaos.py @@ -0,0 +1,37 @@ +from .shodan_dns import shodan_dns + + +class chaos(shodan_dns): + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = {"description": "Query ProjectDiscovery's Chaos API for subdomains", "auth_required": True} + options = {"api_key": ""} + options_desc = {"api_key": "Chaos API key"} + + base_url = "https://dns.projectdiscovery.io/dns" + + async def ping(self): + url = f"{self.base_url}/example.com" + response = await self.request_with_fail_count(url, headers={"Authorization": self.api_key}) + assert response.json()["domain"] == "example.com" + + async def request_url(self, query): + _, domain = self.helpers.split_domain(query) + url = f"{self.base_url}/{domain}/subdomains" + return await self.request_with_fail_count(url, headers={"Authorization": self.api_key}) + + def parse_results(self, r, query): + j = r.json() + subdomains_set = set() + if isinstance(j, dict): + domain = j.get("domain", "") + if domain: + subdomains = j.get("subdomains", []) + for s in subdomains: + s = s.lower().strip(".*") + subdomains_set.add(s) + for s in subdomains_set: + full_subdomain = f"{s}.{domain}" + if full_subdomain and full_subdomain.endswith(f".{query}"): + yield full_subdomain diff --git a/bbot/test/test_step_2/module_tests/test_module_chaos.py b/bbot/test/test_step_2/module_tests/test_module_chaos.py new file mode 100644 index 0000000000..193bded584 --- /dev/null +++ b/bbot/test/test_step_2/module_tests/test_module_chaos.py @@ -0,0 +1,25 @@ +from .base import ModuleTestBase + + +class TestChaos(ModuleTestBase): + config_overrides = {"modules": {"chaos": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="https://dns.projectdiscovery.io/dns/example.com", + match_headers={"Authorization": "asdf"}, + json={"domain": "example.com", "subdomains": 65}, + ) + module_test.httpx_mock.add_response( + url="https://dns.projectdiscovery.io/dns/blacklanternsecurity.com/subdomains", + match_headers={"Authorization": "asdf"}, + json={ + "domain": "blacklanternsecurity.com", + "subdomains": [ + "*.asdf.cloud", + ], + }, + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.cloud.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" From 33c15f0dd02aeb47217d29725651494763109a8b Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Tue, 15 Aug 2023 13:15:44 +0000 Subject: [PATCH 008/123] Refresh module docs --- docs/modules/list_of_modules.md | 1 + docs/scanning/advanced.md | 2 +- docs/scanning/configuration.md | 1 + docs/scanning/events.md | 50 ++++++++++++++++----------------- docs/scanning/index.md | 44 ++++++++++++++--------------- 5 files changed, 50 insertions(+), 48 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index 76cccfe326..cb1b304cee 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -51,6 +51,7 @@ | c99 | scan | Yes | Query the C99 API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | censys | scan | Yes | Query the Censys API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | certspotter | scan | No | Query Certspotter's API for subdomains | passive, safe, 
subdomain-enum | DNS_NAME | DNS_NAME | +| chaos | scan | Yes | Query ProjectDiscovery's Chaos API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | columbus | scan | No | Query the Columbus Project API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | crobat | scan | No | Query Project Crobat for subdomains | passive, safe | DNS_NAME | DNS_NAME | | crt | scan | No | Query crt.sh (certificate transparency) for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 1617378369..852aae1228 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -61,7 +61,7 @@ Target: Modules: -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. Choices: affiliates,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_aws,bucket_azure,bucket_digitalocean,bucket_firebase,bucket_gcp,builtwith,bypass403,c99,censys,certspotter,columbus,crobat,crt,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,fingerprintx,fullhunt,generic_ssrf,git,github,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,ipneighbor,ipstack,leakix,masscan,massdns,myssl,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye + Modules to enable. Choices: affiliates,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_aws,bucket_azure,bucket_digitalocean,bucket_firebase,bucket_gcp,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,crobat,crt,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,fingerprintx,fullhunt,generic_ssrf,git,github,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,ipneighbor,ipstack,leakix,masscan,massdns,myssl,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye -l, --list-modules List available modules. -em MODULE [MODULE ...], --exclude-modules MODULE [MODULE ...] Exclude these modules. diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 95fef65fb8..2ceb761ed8 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -292,6 +292,7 @@ Many modules accept their own configuration options. 
These options have the abil | modules.censys.api_id | str | Censys.io API ID | | | modules.censys.api_secret | str | Censys.io API Secret | | | modules.censys.max_pages | int | Maximum number of pages to fetch (100 results per page) | 5 | +| modules.chaos.api_key | str | Chaos API key | | | modules.fullhunt.api_key | str | FullHunt API Key | | | modules.github.api_key | str | Github token | | | modules.hunterio.api_key | str | Hunter.IO API key | | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 274f6a8be5..4567b784fc 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -47,31 +47,31 @@ Below is a full list of event types along with which modules produce/consume the ## List of Event Types -| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | -|---------------------|-----------------------|-----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | -| ASN | 0 | 1 | | asn | -| DNS_NAME | 53 | 43 | anubisdb, asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github, hackertarget, hunterio, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | -| DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | -| EMAIL_ADDRESS | 0 | 5 | | emailformat, hunterio, pgp, skymem, sslcert | -| FINDING | 2 | 21 | asset_inventory, web_report | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, git, host_header, hunt, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, 
speculate, subdomain_hijack, telerik, url_manipulation | -| GEOLOCATION | 0 | 1 | | ipstack | -| HTTP_RESPONSE | 11 | 1 | badsecrets, excavate, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, wappalyzer | httpx | -| IP_ADDRESS | 6 | 3 | asn, asset_inventory, ipneighbor, ipstack, nmap, speculate | asset_inventory, ipneighbor, speculate | -| IP_RANGE | 1 | 0 | speculate | | -| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, masscan, nmap, speculate | -| PROTOCOL | 0 | 1 | | fingerprintx | -| SCAN | 1 | 0 | masscan | | -| SOCIAL | 0 | 1 | | social | -| STORAGE_BUCKET | 6 | 5 | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, speculate | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp | -| TECHNOLOGY | 2 | 2 | asset_inventory, web_report | gowitness, wappalyzer | -| URL | 18 | 2 | asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | -| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | -| URL_UNVERIFIED | 4 | 10 | httpx, oauth, social, speculate | bevigil, excavate, ffuf, ffuf_shortnames, github, gowitness, hunterio, robots, urlscan, wayback | -| VHOST | 1 | 1 | web_report | vhost | -| VULNERABILITY | 2 | 4 | asset_inventory, web_report | badsecrets, generic_ssrf, nuclei, telerik | -| WAF | 0 | 1 | | wafw00f | -| WEBSCREENSHOT | 0 | 1 | | gowitness | +| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | +|---------------------|-----------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | +| ASN | 0 | 1 | | asn | +| DNS_NAME | 54 | 44 | anubisdb, asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github, hackertarget, hunterio, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, 
azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | +| DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | +| EMAIL_ADDRESS | 0 | 5 | | emailformat, hunterio, pgp, skymem, sslcert | +| FINDING | 2 | 21 | asset_inventory, web_report | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, git, host_header, hunt, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | +| GEOLOCATION | 0 | 1 | | ipstack | +| HTTP_RESPONSE | 11 | 1 | badsecrets, excavate, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, wappalyzer | httpx | +| IP_ADDRESS | 6 | 3 | asn, asset_inventory, ipneighbor, ipstack, nmap, speculate | asset_inventory, ipneighbor, speculate | +| IP_RANGE | 1 | 0 | speculate | | +| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, masscan, nmap, speculate | +| PROTOCOL | 0 | 1 | | fingerprintx | +| SCAN | 1 | 0 | masscan | | +| SOCIAL | 0 | 1 | | social | +| STORAGE_BUCKET | 6 | 5 | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, speculate | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp | +| TECHNOLOGY | 2 | 2 | asset_inventory, web_report | gowitness, wappalyzer | +| URL | 18 | 2 | asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | +| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | +| URL_UNVERIFIED | 4 | 10 | httpx, oauth, social, speculate | bevigil, excavate, ffuf, ffuf_shortnames, github, gowitness, hunterio, robots, urlscan, wayback | +| VHOST | 1 | 1 | web_report | vhost | +| VULNERABILITY | 2 | 4 | asset_inventory, web_report | badsecrets, generic_ssrf, nuclei, telerik | +| WAF | 0 | 1 | | wafw00f | +| WEBSCREENSHOT | 0 | 1 | | gowitness | ## Findings Vs. Vulnerabilties diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 06b689f3c0..3051888a1f 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -107,28 +107,28 @@ A single module can have multiple flags. 
For example, the `securitytrails` modul ### List of Flags -| Flag | # Modules | Description | Modules | -|------------------|-------------|-----------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 65 | Non-intrusive, safe to run | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fingerprintx, fullhunt, git, github, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 48 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github, hackertarget, hunterio, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 43 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github, hackertarget, httpx, hunterio, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 37 | Makes active connections to target systems | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, dnszonetransfer, ffuf, ffuf_shortnames, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, social, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 24 | More advanced web scanning functionality | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | -| aggressive | 18 | 
Generates a large amount of network traffic | bypass403, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_aws, bucket_azure, bucket_firebase, bucket_gcp, git, httpx, hunt, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | -| cloud-enum | 10 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, httpx, oauth, subdomain_hijack | -| slow | 9 | May take a long time to complete | bucket_digitalocean, fingerprintx, massdns, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, vhost | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| email-enum | 5 | Enumerates email addresses | emailformat, hunterio, pgp, skymem, sslcert | -| deadly | 3 | Highly aggressive | ffuf, nuclei, vhost | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| portscan | 2 | Discovers open ports | masscan, nmap | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | Modules | +|------------------|-------------|-----------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 66 | Non-intrusive, safe to run | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fingerprintx, fullhunt, git, github, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 49 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, 
binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github, hackertarget, hunterio, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 44 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github, hackertarget, httpx, hunterio, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 37 | Makes active connections to target systems | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, dnszonetransfer, ffuf, ffuf_shortnames, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, social, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 24 | More advanced web scanning functionality | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| aggressive | 18 | Generates a large amount of network traffic | bypass403, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_aws, bucket_azure, bucket_firebase, bucket_gcp, git, httpx, hunt, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | +| cloud-enum | 10 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, httpx, oauth, subdomain_hijack | +| slow | 9 | May take a long time to complete | bucket_digitalocean, fingerprintx, massdns, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, vhost | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| email-enum | 5 | Enumerates email addresses | emailformat, hunterio, pgp, skymem, sslcert | +| deadly | 3 | Highly aggressive | ffuf, nuclei, vhost | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| portscan | 2 | Discovers open ports | masscan, nmap | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects 
hijackable subdomains                 | subdomain_hijack                            |
| web-screenshots  | 1           | Takes screenshots of web pages                | gowitness                                   |

From cd41be4e139c7ffae144475eb6a428e83efdb41a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 15 Aug 2023 14:14:09 -0400 Subject: [PATCH 009/123] include findings in webhook output modules --- bbot/core/helpers/names_generator.py | 22 ++++++++++--- bbot/modules/output/discord.py | 33 ++++++++++++++++--- bbot/modules/output/slack.py | 17 ++++++---- bbot/modules/output/teams.py | 8 +++-- .../module_tests/test_module_discord.py | 8 +++-- .../module_tests/test_module_slack.py | 2 +- .../module_tests/test_module_teams.py | 2 +- 7 files changed, 68 insertions(+), 24 deletions(-) diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 0098740374..831cdbff2f 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -236,6 +236,7 @@
     "twitchy",
     "ugly",
     "unabated",
+    "unchained",
     "unexplained",
     "unhinged",
     "unholy",
@@ -321,7 +322,7 @@
     "brittany",
     "bruce",
     "bryan",
-    "caleb",
+    "caitlyn" "caleb",
     "cameron",
     "carl",
     "carlos",
@@ -452,6 +453,7 @@
     "jaskier",
     "jasmine",
     "jason",
+    "jayce",
     "jean",
     "jean-luc",
     "jeffrey",
@@ -461,6 +463,7 @@
     "jesse",
     "jessica",
     "jimmy",
+    "jinx",
     "joan",
     "joe",
     "joel",
@@ -564,6 +567,7 @@
     "phillip",
     "phyllis",
     "pippin",
+    "powder",
     "rachel",
     "radagast",
     "ralph",
@@ -601,6 +605,7 @@
     "shawn",
     "shelob",
     "shirley",
+    "silco",
     "sirius",
     "skywalker",
     "snape",
@@ -630,9 +635,12 @@
     "tyrell",
     "vader",
     "valerie",
+    "vander",
     "vanessa",
+    "vi",
     "victor",
     "victoria",
+    "viktor",
     "vincent",
     "virginia",
     "voldemort",
@@ -648,12 +656,16 @@
     "xavier",
     "yennefer",
     "yoda",
+    "zach",
     "zachary",
 ]


 def random_name():
-    name = f"{random.choice(adjectives)}_{random.choice(names)}"
-    if name == "white_lantern":
-        name = "black_lantern"
-    return name
+    name = random.choice(names)
+    adjective = random.choice(adjectives)
+    if adjective == "unchained":
+        scan_name = f"{name}_{adjective}"
+    else:
+        scan_name = f"{adjective}_{name}"
+    return scan_name
diff --git a/bbot/modules/output/discord.py b/bbot/modules/output/discord.py index 4aa63de74d..3acb68f0ba 100644 --- a/bbot/modules/output/discord.py +++ b/bbot/modules/output/discord.py @@ -6,14 +6,24 @@
 class Discord(BaseOutputModule):
     watched_events = ["*"]
     meta = {"description": "Message a Discord channel when certain events are encountered"}
-    options = {"webhook_url": "", "event_types": ["VULNERABILITY"]}
-    options_desc = {"webhook_url": "Discord webhook URL", "event_types": "Types of events to send"}
+    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"}
+    options_desc = {
+        "webhook_url": "Discord webhook URL",
+        "event_types": "Types of events to send",
+        "min_severity": "Only allow VULNERABILITY events of this severity or higher",
+    }
     accept_dupes = False
     good_status_code = 204
     content_key = "content"
+    vuln_severities = ["UNKNOWN", "LOW", "MEDIUM", "HIGH", "CRITICAL"]

     async def setup(self):
         self.webhook_url = self.config.get("webhook_url", "")
+        self.min_severity = self.config.get("min_severity", "LOW").strip().upper()
+        assert (
+            self.min_severity in self.vuln_severities
+        ), f"min_severity must be one of the following: {','.join(self.vuln_severities)}"
+        self.allowed_severities = self.vuln_severities[self.vuln_severities.index(self.min_severity) :]
         if not self.webhook_url:
             self.warning("Must set Webhook URL")
             return False
         return True
@@ -49,6 +59,13 @@ def get_watched_events(self):
         self._watched_events = set(event_types)
         return self._watched_events

+    async def filter_event(self, event):
+        if event.type == "VULNERABILITY":
+            severity = event.data.get("severity", "UNKNOWN")
+            if not severity in self.allowed_severities:
+                return False, f"{severity} is below min_severity threshold"
+        return True
+
     def format_message_str(self, event):
         event_tags = ",".join(event.tags)
         return f"`[{event.type}]`\t**`{event.data}`**\ttags:{event_tags}"
@@ -56,11 +73,17 @@ def format_message_str(self, event):
     def format_message_other(self, event):
         event_yaml = yaml.dump(event.data)
         event_type = f"**`[{event.type}]`**"
+        if event.type in ("VULNERABILITY", "FINDING"):
+            event_str, color = self.get_severity_color(event)
+            event_type = f"{color} {event_str} {color}"
+        return f"""**`{event_type}`**\n```yaml\n{event_yaml}```"""
+
+    def get_severity_color(self, event):
         if event.type == "VULNERABILITY":
             severity = event.data.get("severity", "UNKNOWN")
-            severity_color = event.severity_colors[severity]
-            event_type = f"{severity_color} {event.type} ({severity}) {severity_color}"
-        return f"""**`{event_type}`**\n```yaml\n{event_yaml}\n```"""
+            return f"{event.type} ({severity})", event.severity_colors[severity]
+        else:
+            return event.type, "🟦"

     def format_message(self, event):
         if isinstance(event.data, str):
diff --git a/bbot/modules/output/slack.py b/bbot/modules/output/slack.py index 03671e2571..e7151ce0fb 100644 --- a/bbot/modules/output/slack.py +++ b/bbot/modules/output/slack.py @@ -6,8 +6,12 @@
 class Slack(Discord):
     watched_events = ["*"]
     meta = {"description": "Message a Slack channel when certain events are encountered"}
-    options = {"webhook_url": "", "event_types": ["VULNERABILITY"]}
-    options_desc = {"webhook_url": "Discord webhook URL", "event_types": "Types of events to send"}
+    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"}
+    options_desc = {
+        "webhook_url": "Slack webhook URL",
+        "event_types": "Types of events to send",
+        "min_severity": "Only allow VULNERABILITY events of this severity or higher",
+    }
     good_status_code = 200
     content_key = "text"

@@ -18,8 +22,7 @@ def format_message_str(self, event):
     def format_message_other(self, event):
         event_yaml = yaml.dump(event.data)
         event_type = f"*`[{event.type}]`*"
-        if event.type == "VULNERABILITY":
-            severity = event.data.get("severity", "UNKNOWN")
-            severity_color = event.severity_colors[severity]
-            event_type = f"{severity_color} `{event.type} ({severity})` {severity_color}"
-        return f"""*{event_type}*\n```\n{event_yaml}\n```"""
+        if event.type in ("VULNERABILITY", "FINDING"):
+            event_str, color = self.get_severity_color(event)
+            event_type = f"{color} `{event_str}` {color}"
+        return f"""*{event_type}*\n```\n{event_yaml}```"""
diff --git a/bbot/modules/output/teams.py b/bbot/modules/output/teams.py index f3d8dbfc1c..0953fef5fc 100644 --- a/bbot/modules/output/teams.py +++ b/bbot/modules/output/teams.py @@ -4,8 +4,12 @@
 class Teams(Discord):
     watched_events = ["*"]
     meta = {"description": "Message a Slack channel when certain events are encountered"}
-    options = {"webhook_url": "", "event_types": ["VULNERABILITY"]}
-    options_desc = {"webhook_url": "Discord webhook URL", "event_types": "Types of events to send"}
+    options = {"webhook_url": "", "event_types": ["VULNERABILITY", "FINDING"], "min_severity": "LOW"}
+    options_desc = {
+        "webhook_url": "Teams webhook URL",
+        "event_types": "Types of events to send",
+        "min_severity": "Only allow VULNERABILITY events of this severity or higher",
} max_event_handlers = 5 good_status_code = 200 content_key = "text" diff --git a/bbot/test/test_step_2/module_tests/test_module_discord.py b/bbot/test/test_step_2/module_tests/test_module_discord.py index 0e374d8339..35f6250c47 100644 --- a/bbot/test/test_step_2/module_tests/test_module_discord.py +++ b/bbot/test/test_step_2/module_tests/test_module_discord.py @@ -5,14 +5,14 @@ class TestDiscord(ModuleTestBase): targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx"] - modules_overrides = ["discord", "badsecrets", "httpx"] + modules_overrides = ["discord", "excavate", "badsecrets", "httpx"] webhook_url = "output_modules.discord.webhook_url=https://discord.com/api/webhooks/1234/deadbeef-P-uF-asdf" config_overrides = {"output_modules": {"discord": {"webhook_url": webhook_url}}} def custom_setup(self, module_test): respond_args = { - "response_data": "
<html><body>Express Cookie Test</body></html>",
+            "response_data": '<html><body>Express Cookie Test</body></html>
', "headers": { "set-cookie": "connect.sid=s%3A8FnPwdeM9kdGTZlWvdaVtQ0S1BCOhY5G.qys7H2oGSLLdRsEq7sqh7btOohHsaRKqyjV4LiVnBvc; Path=/; Expires=Wed, 05 Apr 2023 04:47:29 GMT; HttpOnly" }, @@ -35,5 +35,7 @@ def custom_response(request: httpx.Request): def check(self, module_test, events): vulns = [e for e in events if e.type == "VULNERABILITY"] + findings = [e for e in events if e.type == "FINDING"] + assert len(findings) == 2 assert len(vulns) == 2 - assert module_test.request_count == 3 + assert module_test.request_count == 5 diff --git a/bbot/test/test_step_2/module_tests/test_module_slack.py b/bbot/test/test_step_2/module_tests/test_module_slack.py index e94d091779..33a3e74a01 100644 --- a/bbot/test/test_step_2/module_tests/test_module_slack.py +++ b/bbot/test/test_step_2/module_tests/test_module_slack.py @@ -3,7 +3,7 @@ class TestSlack(DiscordBase): targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx"] - modules_overrides = ["slack", "badsecrets", "httpx"] + modules_overrides = ["slack", "excavate", "badsecrets", "httpx"] webhook_url = "output_modules.slack.webhook_url=https://hooks.slack.com/services/deadbeef/deadbeef/deadbeef" config_overrides = {"output_modules": {"slack": {"webhook_url": webhook_url}}} diff --git a/bbot/test/test_step_2/module_tests/test_module_teams.py b/bbot/test/test_step_2/module_tests/test_module_teams.py index 6d49d6c9fd..ead4caf243 100644 --- a/bbot/test/test_step_2/module_tests/test_module_teams.py +++ b/bbot/test/test_step_2/module_tests/test_module_teams.py @@ -5,7 +5,7 @@ class TestTeams(DiscordBase): targets = ["http://127.0.0.1:8888/cookie.aspx", "http://127.0.0.1:8888/cookie2.aspx"] - modules_overrides = ["teams", "badsecrets", "httpx"] + modules_overrides = ["teams", "excavate", "badsecrets", "httpx"] webhook_url = "https://evilcorp.webhook.office.com/webhookb2/deadbeef@deadbeef/IncomingWebhook/deadbeef/deadbeef" config_overrides = {"output_modules": {"teams": {"webhook_url": webhook_url}}} From 0ef1f1abda111acb1859f663de3f154cc2d5e310 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 15 Aug 2023 14:44:03 -0400 Subject: [PATCH 010/123] fixed names --- bbot/core/helpers/names_generator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bbot/core/helpers/names_generator.py b/bbot/core/helpers/names_generator.py index 831cdbff2f..49ed866d67 100644 --- a/bbot/core/helpers/names_generator.py +++ b/bbot/core/helpers/names_generator.py @@ -322,7 +322,8 @@ "brittany", "bruce", "bryan", - "caitlyn" "caleb", + "caitlyn", + "caleb", "cameron", "carl", "carlos", From e07b91fe8d1b0e2002533d4514d272dabfce45ba Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 15 Aug 2023 15:41:53 -0400 Subject: [PATCH 011/123] fix publish actions, update docs for webhook modules --- .github/workflows/tests.yml | 10 +++++----- docs/scanning/output.md | 15 ++++++++++++--- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 088cc7ee38..7d53eef79c 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -79,7 +79,7 @@ jobs: publish_docs: needs: update_docs runs-on: ubuntu-latest - if: github.event_name == 'push' && github.event.pull_request.base.ref == 'dev' + if: github.event_name == 'push' && github.ref == 'refs/heads/dev' steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 @@ -97,7 +97,7 @@ jobs: publish_code: needs: update_docs runs-on: ubuntu-latest - if: github.event_name == 'push' && 
(github.event.pull_request.base.ref == 'dev' || github.event.pull_request.base.ref == 'stable') + if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable') continue-on-error: true steps: - uses: actions/checkout@v3 @@ -125,7 +125,7 @@ jobs: run: | echo "::set-output name=BBOT_VERSION::$(poetry version | cut -d' ' -f2 | tr -d v)" - name: Publish to Docker Hub (dev) - if: github.ref == 'refs/heads/dev' + if: github.event_name == 'push' && github.ref == 'refs/heads/dev' uses: elgohr/Publish-Docker-Github-Action@v5 with: name: blacklanternsecurity/bbot @@ -133,7 +133,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} tags: "latest,dev,${{ steps.version.outputs.BBOT_VERSION }}" - name: Publish to Docker Hub (stable) - if: github.ref == 'refs/heads/stable' + if: github.event_name == 'push' && github.ref == 'refs/heads/stable' uses: elgohr/Publish-Docker-Github-Action@v5 with: name: blacklanternsecurity/bbot @@ -141,7 +141,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} tags: "stable,${{ steps.version.outputs.BBOT_VERSION }}" - name: Docker Hub Description - if: github.ref == 'refs/heads/dev' + if: github.event_name == 'push' && github.ref == 'refs/heads/dev' uses: peter-evans/dockerhub-description@v3 with: username: ${{ secrets.DOCKER_USERNAME }} diff --git a/docs/scanning/output.md b/docs/scanning/output.md index 0edf7d55ec..e1c042a274 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -87,15 +87,15 @@ output_modules: bbot -t evilcorp.com -om discord -c output_modules.discord.webhook_url=https://discord.com/api/webhooks/1234/deadbeef ``` -By default, only `VULNERABILITY` events are sent. You can customize this by setting `event_types`: +By default, only `VULNERABILITY` and `FINDING` events are sent, but this can be customized by setting `event_types` in the config like so: ```yaml title="~/.bbot/config/bbot.yml" output_modules: discord: event_types: - - STORAGE_BUCKET - - FINDING - VULNERABILITY + - FINDING + - STORAGE_BUCKET ``` ...or on the command line: @@ -103,6 +103,15 @@ output_modules: bbot -t evilcorp.com -om discord -c output_modules.discord.event_types=["STORAGE_BUCKET","FINDING","VULNERABILITY"] ``` +You can also filter on the severity of `VULNERABILITY` events by setting `min_severity`: + + +```yaml title="~/.bbot/config/bbot.yml" +output_modules: + discord: + min_severity: HIGH +``` + ### HTTP The `http` output module sends [events](./events) in JSON format to a desired HTTP endpoint. From c6bf1f912d8c0dd223223ab5920f358946550d88 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Tue, 15 Aug 2023 19:50:57 +0000 Subject: [PATCH 012/123] Refresh module docs --- docs/scanning/configuration.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 2ceb761ed8..0c5037b152 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -318,7 +318,8 @@ Many modules accept their own configuration options. 
These options have the abil
 | output_modules.asset_inventory.summary_netmask | int | Subnet mask to use when summarizing IP addresses at end of scan | 16 |
 | output_modules.asset_inventory.use_previous | bool | Emit previous asset inventory as new events (use in conjunction with `-n`) | False |
 | output_modules.csv.output_file | str | Output to CSV file | |
-| output_modules.discord.event_types | list | Types of events to send | ['VULNERABILITY'] |
+| output_modules.discord.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] |
+| output_modules.discord.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW |
 | output_modules.discord.webhook_url | str | Discord webhook URL | |
 | output_modules.http.bearer | str | Authorization Bearer token | |
 | output_modules.http.method | str | HTTP method | POST |
@@ -333,11 +334,13 @@
 | output_modules.neo4j.password | str | Neo4j password | bbotislife |
 | output_modules.neo4j.uri | str | Neo4j server + port | bolt://localhost:7687 |
 | output_modules.neo4j.username | str | Neo4j username | neo4j |
-| output_modules.slack.event_types | list | Types of events to send | ['VULNERABILITY'] |
+| output_modules.slack.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] |
+| output_modules.slack.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW |
 | output_modules.slack.webhook_url | str | Discord webhook URL | |
 | output_modules.subdomains.include_unresolved | bool | Include unresolved subdomains in output | False |
 | output_modules.subdomains.output_file | str | Output to file | |
-| output_modules.teams.event_types | list | Types of events to send | ['VULNERABILITY'] |
+| output_modules.teams.event_types | list | Types of events to send | ['VULNERABILITY', 'FINDING'] |
+| output_modules.teams.min_severity | str | Only allow VULNERABILITY events of this severity or higher | LOW |
 | output_modules.teams.webhook_url | str | Discord webhook URL | |
 | output_modules.web_report.css_theme_file | str | CSS theme URL for HTML output | https://cdnjs.cloudflare.com/ajax/libs/github-markdown-css/5.1.0/github-markdown.min.css |
 | output_modules.web_report.output_file | str | Output to file | |

From 7ec5d191d6dc89deba43c351580668e8a418fb62 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Tue, 15 Aug 2023 16:07:01 -0400
Subject: [PATCH 013/123] fix version

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 5ccb595916..d36a8e9be3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,7 +76,7 @@ line-length = 119
 [tool.poetry-dynamic-versioning]
 enable = true
 metadata = false
-format-jinja = 'v1.1.0.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}'
+format-jinja = 'v1.1.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}'
 
 [tool.poetry-dynamic-versioning.substitution]
 files = ["*/__init__.py"]

From e416fcb67ef30fa304b9bd1677abdbeee3bc91b8 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 16 Aug 2023 15:35:43 -0400
Subject: [PATCH 014/123] more aggressive task cancelling

---
 bbot/core/helpers/misc.py                  | 13 +++++++------
 bbot/test/test_step_2/module_tests/base.py |  6 +++++-
 pyproject.toml                             |  3 ++-
 3 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py
index 8ef44d8dc5..9d7d06e46c 100644
--- a/bbot/core/helpers/misc.py
+++ 
b/bbot/core/helpers/misc.py @@ -1201,17 +1201,18 @@ def get_traceback_details(e): return filename, lineno, funcname -async def cancel_tasks(tasks): +async def cancel_tasks(tasks, ignore_errors=True): current_task = asyncio.current_task() tasks = [t for t in tasks if t != current_task] for task in tasks: log.debug(f"Cancelling task: {task}") task.cancel() - for task in tasks: - try: - await task - except asyncio.CancelledError: - log.trace(traceback.format_exc()) + if ignore_errors: + for task in tasks: + try: + await task + except BaseException: + log.trace(traceback.format_exc()) def cancel_tasks_sync(tasks): diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index d2fcd78261..69f2d642cf 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -110,9 +110,13 @@ async def module_test(self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, mo yield module_test @pytest.mark.asyncio - async def test_module_run(self, module_test): + async def test_module_run(self, module_test, x): self.check(module_test, module_test.events) module_test.log.info(f"Finished {self.name} module test") + current_task = asyncio.current_task() + tasks = [t for t in asyncio.all_tasks() if t != current_task] + if len(tasks) > 0: + module_test.log.info(f"Unfinished tasks detected: {tasks}") def check(self, module_test, events): assert False, f"Must override {self.name}.check()" diff --git a/pyproject.toml b/pyproject.toml index d36a8e9be3..8739eb661c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,8 @@ pytest-httpx = "^0.22.0" [tool.pytest.ini_options] env = [ - "BBOT_TESTING = True" + "BBOT_TESTING = True", + "PYTHONASYNCIODEBUG = 1" ] [build-system] From d8955a843fdc4382b40782981d257437a2409706 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 16 Aug 2023 15:36:24 -0400 Subject: [PATCH 015/123] fix missing import --- bbot/test/test_step_2/module_tests/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index 69f2d642cf..dbbacb7b65 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -1,4 +1,5 @@ import pytest +import asyncio import logging import pytest_asyncio from omegaconf import OmegaConf From 39e02ec0336dd12a32912f3c27da051c048afca1 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 16 Aug 2023 15:57:24 -0400 Subject: [PATCH 016/123] fix tests --- bbot/test/test_step_2/module_tests/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index dbbacb7b65..1392e557a6 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -111,7 +111,7 @@ async def module_test(self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, mo yield module_test @pytest.mark.asyncio - async def test_module_run(self, module_test, x): + async def test_module_run(self, module_test): self.check(module_test, module_test.events) module_test.log.info(f"Finished {self.name} module test") current_task = asyncio.current_task() From 11e714c31fcc67b416a79b92ab5d089a5e541950 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 17 Aug 2023 11:55:13 -0400 Subject: [PATCH 017/123] pytest timeouts --- .github/workflows/tests.yml | 2 +- bbot/cli.py | 1 + bbot/core/configurator/environ.py | 2 -- bbot/scanner/scanner.py | 6 
+++--- bbot/test/test_step_1/test_cli.py | 34 ++++++++----------------------- poetry.lock | 16 ++++++++++++++- pyproject.toml | 3 ++- 7 files changed, 31 insertions(+), 33 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7d53eef79c..5f6df72feb 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -43,7 +43,7 @@ jobs: poetry install - name: Run tests run: | - poetry run pytest --exitfirst --reruns 2 --disable-warnings --log-cli-level=DEBUG --cov-report xml:cov.xml --cov=bbot . + poetry run pytest --exitfirst --reruns 2 -o timeout_func_only=true --timeout 600 --disable-warnings --log-cli-level=DEBUG --cov-report xml:cov.xml --cov=bbot . - name: Upload Code Coverage uses: codecov/codecov-action@v3 with: diff --git a/bbot/cli.py b/bbot/cli.py index df8cfb9faf..4069867158 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -138,6 +138,7 @@ async def _main(): *options.targets, modules=list(modules), output_modules=list(output_modules), + output_dir=options.output_dir, config=config, name=options.name, whitelist=options.whitelist, diff --git a/bbot/core/configurator/environ.py b/bbot/core/configurator/environ.py index 95f3b05cc0..4358bb78db 100644 --- a/bbot/core/configurator/environ.py +++ b/bbot/core/configurator/environ.py @@ -107,8 +107,6 @@ def prepare_environment(bbot_config): # debug bbot_config["debug"] = args.cli_options.debug bbot_config["silent"] = args.cli_options.silent - if args.cli_options.output_dir: - bbot_config["output_dir"] = args.cli_options.output_dir import logging diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index af936eff0c..552f0cd85c 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -55,6 +55,7 @@ def __init__( name=None, modules=None, output_modules=None, + output_dir=None, config=None, dispatcher=None, strict_scope=False, @@ -91,7 +92,6 @@ def __init__( self.max_workers = max(1, self.config.get("max_threads", 25)) self.helpers = ConfigAwareHelper(config=self.config, scan=self) - output_dir = self.config.get("output_dir", "") if name is None: tries = 0 @@ -103,7 +103,7 @@ def __init__( self.name = random_name() - if output_dir: + if output_dir is not None: home_path = Path(output_dir).resolve() / self.name else: home_path = self.helpers.bbot_home / "scans" / self.name @@ -114,7 +114,7 @@ def __init__( else: self.name = str(name) - if output_dir: + if output_dir is not None: self.home = Path(output_dir).resolve() / self.name else: self.home = self.helpers.bbot_home / "scans" / self.name diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index a0e821a12e..cef678231a 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -18,35 +18,19 @@ async def test_cli(monkeypatch, bbot_config): monkeypatch.setattr( sys, "argv", - [ - "bbot", - "-y", - "-t", - "127.0.0.1", - "www.example.com", - "-om", - "human", - "csv", - "json", - "-n", - "test_cli_scan", - "-c", - "dns_resolution=False", - "-o", - "/tmp", - ], + ["bbot", "-y", "-t", "127.0.0.1", "www.example.com", "-n", "test_cli_scan", "-c", "dns_resolution=False"], ) await cli._main() scan_home = scans_home / "test_cli_scan" - assert (scan_home / "wordcloud.tsv").is_file() - assert (scan_home / "output.txt").is_file() - assert (scan_home / "output.csv").is_file() - assert (scan_home / "output.json").is_file() + assert (scan_home / "wordcloud.tsv").is_file(), "wordcloud.tsv not found" + assert (scan_home / "output.txt").is_file(), "output.txt not found" + assert 
(scan_home / "output.csv").is_file(), "output.csv not found" + assert (scan_home / "output.json").is_file(), "output.json not found" with open(scan_home / "output.csv") as f: lines = f.readlines() assert lines[0] == "Event type,Event data,IP Address,Source Module,Scope Distance,Event Tags\n" - assert len(lines) > 1 + assert len(lines) > 1, "output.csv is not long enough" ip_success = False dns_success = False @@ -58,7 +42,7 @@ async def test_cli(monkeypatch, bbot_config): ip_success = True if "[DNS_NAME] \twww.example.com\tTARGET" in line: dns_success = True - assert ip_success and dns_success + assert ip_success and dns_success, "IP_ADDRESS and/or DNS_NAME are not present in output.txt" # show version monkeypatch.setattr("sys.argv", ["bbot", "--version"]) @@ -111,12 +95,12 @@ async def test_cli(monkeypatch, bbot_config): # deadly modules monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei"]) result = await cli._main() - assert result == False + assert result == False, "-m nuclei ran without --allow-deadly" # --allow-deadly monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei", "--allow-deadly"]) result = await cli._main() - assert result != False + assert result != False, "-m nuclei failed to run with --allow-deadly" # show current config monkeypatch.setattr("sys.argv", ["bbot", "-y", "--current-config"]) diff --git a/poetry.lock b/poetry.lock index 964085f21c..87ccbb7f5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1349,6 +1349,20 @@ files = [ packaging = ">=17.1" pytest = ">=5.3" +[[package]] +name = "pytest-timeout" +version = "2.1.0" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, + {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, +] + +[package.dependencies] +pytest = ">=5.0.0" + [[package]] name = "python-daemon" version = "3.0.1" @@ -1779,4 +1793,4 @@ xmltodict = ">=0.12.0,<0.13.0" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f466d106f02b1d01755bc0caa87eaeb20c3ccb9ef598bc0cb15a90c2738340c0" +content-hash = "a3a06c8e09c28f7c058f58c92ec6006b3b59ea38b34e1f36e02034879458e246" diff --git a/pyproject.toml b/pyproject.toml index 8739eb661c..545baac345 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,6 @@ anyio = "==4.0.0rc1" dnspython = "^2.4.2" [tool.poetry.group.dev.dependencies] -pytest = "^7.2.2" flake8 = "^6.0.0" black = "^23.1.0" pytest-cov = "^4.0.0" @@ -60,6 +59,8 @@ werkzeug = "^2.3.4" pytest-httpserver = "^1.0.8" pytest-env = "^0.8.2" pytest-httpx = "^0.22.0" +pytest-timeout = "^2.1.0" +pytest = "^7.4.0" [tool.pytest.ini_options] env = [ From 625fdb232dddb3fea280aee2fcf90bd43bdf2eda Mon Sep 17 00:00:00 2001 From: liquidsec Date: Thu, 17 Aug 2023 13:32:08 -0400 Subject: [PATCH 018/123] better error handling for canary check --- bbot/modules/url_manipulation.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bbot/modules/url_manipulation.py b/bbot/modules/url_manipulation.py index c41c68cc2a..f4d598c636 100644 --- a/bbot/modules/url_manipulation.py +++ b/bbot/modules/url_manipulation.py @@ -47,7 +47,10 @@ async def handle_event(self, event): self.debug(e) return - if await compare_helper.canary_check(event.data, mode="getparam") == False: + try: + if not await compare_helper.canary_check(event.data, mode="getparam"): + raise HttpCompareError() + 
except HttpCompareError: self.verbose(f'Aborting "{event.data}" due to failed canary check') return From 1f81ad2b39abf5022c093d345f09f6dfe8e8324b Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 17 Aug 2023 16:57:57 -0400 Subject: [PATCH 019/123] fix oauth module, suppress cancellederrors --- bbot/core/helpers/misc.py | 5 +++-- bbot/modules/oauth.py | 8 +++++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 9d7d06e46c..d7be455118 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -1211,8 +1211,9 @@ async def cancel_tasks(tasks, ignore_errors=True): for task in tasks: try: await task - except BaseException: - log.trace(traceback.format_exc()) + except BaseException as e: + if not isinstance(e, asyncio.CancelledError): + log.trace(traceback.format_exc()) def cancel_tasks_sync(tasks): diff --git a/bbot/modules/oauth.py b/bbot/modules/oauth.py index 4ead86e21a..0bf6457c21 100644 --- a/bbot/modules/oauth.py +++ b/bbot/modules/oauth.py @@ -29,8 +29,12 @@ async def filter_event(self, event): return False async def handle_event(self, event): - oidc_tasks = [] _, domain = self.helpers.split_domain(event.data) + source_domain = getattr(event, "source_domain", domain) + if not self.scan.in_scope(source_domain): + return + + oidc_tasks = [] if event.scope_distance == 0: domain_hash = hash(domain) if domain_hash not in self.processed: @@ -49,8 +53,6 @@ async def handle_event(self, event): for u in self.url_and_base(url): oidc_tasks.append(self.helpers.create_task(self.getoidc(u))) - source_domain = getattr(event, "source_domain", domain) - for oidc_task in oidc_tasks: url, token_endpoint, oidc_results = await oidc_task if token_endpoint: From 4d60184cb2622ce6c2804553fbdcedc885d17641 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 18 Aug 2023 17:35:10 -0400 Subject: [PATCH 020/123] httpx race condition temporary fix --- README.md | 2 +- bbot/core/helpers/web.py | 6 ++++++ bbot/modules/nmap.py | 1 + 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 87fef8fa94..02f6f27cd1 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ### OSINT automation for hackers. 
-[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Pypi Downloads](https://img.shields.io/pypi/dm/bbot)](https://pypi.org/project/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) +[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Demo Labs 2023](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://forum.defcon.org/node/246338) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Pypi Downloads](https://img.shields.io/pypi/dm/bbot)](https://pypistats.org/packages/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA) BBOT (Bighuge BLS OSINT Tool) is a modular, recursive OSINT framework that can execute the entire OSINT workflow in a single command. 
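The `bbot/core/helpers/web.py` hunk below works around an upstream connection-pool race (see https://github.com/encode/httpcore/discussions/783) by discarding the wedged client and re-issuing the request whenever a `PoolTimeout` surfaces. A minimal standalone sketch of the same retry-with-fresh-client pattern, using plain `httpx` (the helper name and the retry cap are illustrative, not part of BBOT's API):

```python
import httpx


async def get_with_pool_retry(url, retries=1):
    # Illustrative only: if the connection pool wedges and raises
    # PoolTimeout, throw the client away and retry with a fresh one.
    client = httpx.AsyncClient()
    try:
        return await client.get(url)
    except httpx.PoolTimeout:
        if retries <= 0:
            raise
        return await get_with_pool_retry(url, retries=retries - 1)
    finally:
        await client.aclose()
```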
diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index 63edebcc54..7d801ca5af 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -145,6 +145,12 @@ async def request(self, *args, **kwargs): f"Web response from {url}: {response} (Length: {len(response.content)}) headers: {response.headers}" ) return response + except httpx.PoolTimeout: + # this block exists because of this: + # https://github.com/encode/httpcore/discussions/783 + log.verbose(f"PoolTimeout to URL: {url}") + self.web_client = self.AsyncClient(persist_cookies=False) + return await self.request(*args, **kwargs) except httpx.TimeoutException: log.verbose(f"HTTP timeout to URL: {url}") if raise_error: diff --git a/bbot/modules/nmap.py b/bbot/modules/nmap.py index 96a66d3942..d0671d16b2 100644 --- a/bbot/modules/nmap.py +++ b/bbot/modules/nmap.py @@ -66,6 +66,7 @@ def construct_command(self, targets): command = [ "nmap", "-n", + "--resolve-all", f"-{self.timing}", "-oX", temp_filename, From a64420586defa0c505b2241c288306297832e385 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 21 Aug 2023 10:14:22 -0400 Subject: [PATCH 021/123] temporarily disabled ntlm cancellations --- bbot/modules/ntlm.py | 8 ++++++-- bbot/modules/telerik.py | 2 +- poetry.lock | 16 ++++++++++------ pyproject.toml | 3 ++- 4 files changed, 19 insertions(+), 10 deletions(-) diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 91101374ab..3411d17084 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -132,13 +132,17 @@ async def handle_url(self, event): try: result, url = await task if result: - await self.helpers.cancel_tasks(tasks) + # disabled until this is resolved + # https://github.com/encode/httpcore/discussions/783 + # await self.helpers.cancel_tasks(tasks) await gen.aclose() except HTTPError as e: if str(e): self.warning(str(e)) # cancel all the tasks if there's an error - await self.helpers.cancel_tasks(tasks) + # disabled until this is resolved + # https://github.com/encode/httpcore/discussions/783 + # await self.helpers.cancel_tasks(tasks) await gen.aclose() return result, url diff --git a/bbot/modules/telerik.py b/bbot/modules/telerik.py index cf5d701041..f65ad9df2e 100644 --- a/bbot/modules/telerik.py +++ b/bbot/modules/telerik.py @@ -229,7 +229,7 @@ async def handle_event(self, event): continue self.debug(f"Cancelling run against {event.data} due to failed request") await self.helpers.cancel_tasks(tasks) - break + await gen.aclose() else: if "Cannot deserialize dialog parameters" in result.text: await self.helpers.cancel_tasks(tasks) diff --git a/poetry.lock b/poetry.lock index 87ccbb7f5b..f6cfb54a78 100644 --- a/poetry.lock +++ b/poetry.lock @@ -659,11 +659,9 @@ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, - {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, -] +python-versions = ">=3.8" +files = [] +develop = false [package.dependencies] anyio = ">=3.0,<5.0" @@ -675,6 +673,12 @@ sniffio = "==1.*" http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +[package.source] +type = "git" +url = "https://github.com/encode/httpcore" +reference = "HEAD" +resolved_reference = "b649bb0a9c3e176189b17bd10b1c929752454efe" + [[package]] name = "httpx" version = "0.24.1" @@ -1793,4 +1797,4 @@ xmltodict = ">=0.12.0,<0.13.0" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "a3a06c8e09c28f7c058f58c92ec6006b3b59ea38b34e1f36e02034879458e246" +content-hash = "8e6022e220500621d7255464e89ab93b59e6856bd9d29be80ad054012f4d5b57" diff --git a/pyproject.toml b/pyproject.toml index 545baac345..93242e4189 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,8 +44,9 @@ pyjwt = "^2.7.0" beautifulsoup4 = "^4.12.2" lxml = "^4.9.2" httpx = {extras = ["http2"], version = "^0.24.1"} -anyio = "==4.0.0rc1" dnspython = "^2.4.2" +httpcore = {git = "https://github.com/encode/httpcore"} +anyio = "4.0.0rc1" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" From f4bc30edc17cf8cc3a957f81721e4c0f838b33f1 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 11:43:36 -0400 Subject: [PATCH 022/123] fixed URL parsing bug --- bbot/core/helpers/__init__.py | 1 + bbot/core/helpers/regexes.py | 5 +- bbot/core/helpers/url.py | 72 +-------------------------- bbot/core/helpers/validators.py | 72 ++++++++++++++++++++++++++- bbot/modules/bevigil.py | 2 +- bbot/modules/wayback.py | 2 +- bbot/test/test_step_1/test_events.py | 2 + bbot/test/test_step_1/test_helpers.py | 13 ++--- 8 files changed, 87 insertions(+), 82 deletions(-) diff --git a/bbot/core/helpers/__init__.py b/bbot/core/helpers/__init__.py index 3b00b5e4c5..294ec82d3f 100644 --- a/bbot/core/helpers/__init__.py +++ b/bbot/core/helpers/__init__.py @@ -1,3 +1,4 @@ from .url import * from .misc import * from . import regexes +from . import validators diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 9eb01d9fa8..5d22384ad9 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -22,7 +22,7 @@ _ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*" ipv6_regex = re.compile(_ipv6_regex, re.I) # dns names with periods -_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+[^\W_]{1,63}" +_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+[^\W_]{1,63}\.?" # dns names without periods _hostname_regex = r"(?!\w*\.\w+)\w(?:[\w-]{0,100}\w)?" 
_email_regex = r"(?:[^\W_][\w\-\.\+]{,100})@" + _dns_name_regex @@ -44,6 +44,9 @@ ) url_regexes = list(re.compile(r, re.I) for r in _url_regexes) +_double_slash_regex = r"/{2,}" +double_slash_regex = re.compile(_double_slash_regex) + event_type_regexes = OrderedDict( ( (k, tuple(re.compile(r, re.I) for r in regexes)) diff --git a/bbot/core/helpers/url.py b/bbot/core/helpers/url.py index 4d14262bd4..e595a99bba 100644 --- a/bbot/core/helpers/url.py +++ b/bbot/core/helpers/url.py @@ -1,10 +1,9 @@ -import re import uuid import logging from contextlib import suppress from urllib.parse import urlparse, parse_qs, urlencode, ParseResult -from .punycode import smart_decode_punycode +from .regexes import double_slash_regex log = logging.getLogger("bbot.core.helpers.url") @@ -59,44 +58,6 @@ def param_type(p): return 3 -double_slash_regex = re.compile(r"/{2,}") - - -def clean_url(url): - """ - Remove query string and fragment, lowercase netloc, remove redundant port - - http://evilcorp.com:80 --> http://evilcorp.com/ - http://eViLcORp.com/ --> http://evilcorp.com/ - http://evilcorp.com/api?user=bob#place --> http://evilcorp.com/api - """ - parsed = parse_url(url) - parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="", query="") - try: - scheme = parsed.scheme - except ValueError: - scheme = "https" - try: - port = parsed.port - except ValueError: - port = 80 if scheme == "http" else 443 - # remove ports if they're redundant - if (scheme == "http" and port == 80) or (scheme == "https" and port == 443): - hostname = parsed.hostname - # special case for IPv6 URLs - if parsed.netloc.startswith("["): - hostname = f"[{hostname}]" - # punycode - hostname = smart_decode_punycode(hostname) - parsed = parsed._replace(netloc=hostname) - # normalize double slashes - parsed = parsed._replace(path=double_slash_regex.sub("/", parsed.path)) - # append / if path is empty - if parsed.path == "": - parsed = parsed._replace(path="/") - return parsed - - def hash_url(url): parsed = parse_url(url) parsed = parsed._replace(fragment="", query="") @@ -114,37 +75,6 @@ def hash_url(url): return hash(tuple(to_hash)) -def collapse_urls(urls, threshold=10): - """ - Smartly dedupe suspiciously-similar URLs like these: - - http://evilcorp.com/user/11111/info - - http://evilcorp.com/user/2222/info - - http://evilcorp.com/user/333/info - - http://evilcorp.com/user/44/info - - http://evilcorp.com/user/5/info - - Useful for cleaning large lists of garbage-riddled URLs from sources like wayback - """ - url_hashes = {} - for url in urls: - new_url = clean_url(url) - url_hash = hash_url(new_url) - try: - url_hashes[url_hash].add(new_url) - except KeyError: - url_hashes[url_hash] = { - new_url, - } - - for url_hash, new_urls in url_hashes.items(): - # if the number of URLs exceeds the threshold - if len(new_urls) > threshold: - # yield only one - yield next(iter(new_urls)) - else: - yield from new_urls - - def url_depth(url): parsed = parse_url(url) parsed = parsed._replace(path=double_slash_regex.sub("/", parsed.path)) diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index 33e41fe821..3fa759b95e 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -1,10 +1,11 @@ import logging import ipaddress +from contextlib import suppress from bbot.core.helpers import regexes -from bbot.core.helpers.url import clean_url +from bbot.core.helpers.url import parse_url, hash_url from bbot.core.helpers.punycode import smart_decode_punycode -from bbot.core.helpers.misc import 
split_host_port, make_netloc
+from bbot.core.helpers.misc import split_host_port, make_netloc, is_ip
 
 log = logging.getLogger("bbot.core.helpers.validators")
 
@@ -94,6 +95,74 @@ def validate_email(email):
         assert False, f'Invalid email: "{email}"'
 
 
+def clean_url(url):
+    """
+    Remove query string and fragment, lowercase netloc, remove redundant port
+
+    http://evilcorp.com:80 --> http://evilcorp.com/
+    http://eViLcORp.com/ --> http://evilcorp.com/
+    http://evilcorp.com/api?user=bob#place --> http://evilcorp.com/api
+    """
+    parsed = parse_url(url)
+    parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="", query="")
+    try:
+        scheme = parsed.scheme
+    except ValueError:
+        scheme = "https"
+    # default to None so a failed port lookup doesn't leave the variable unbound
+    port = None
+    with suppress(Exception):
+        port = parsed.port
+    if port is None:
+        port = 80 if scheme == "http" else 443
+    hostname = validate_host(parsed.hostname)
+    # remove ports if they're redundant
+    if (scheme == "http" and port == 80) or (scheme == "https" and port == 443):
+        port = None
+    # special case for IPv6 URLs
+    netloc = make_netloc(hostname, port)
+    # urlparse is special - it needs square brackets even if there's no port
+    if is_ip(netloc, version=6):
+        netloc = f"[{netloc}]"
+    parsed = parsed._replace(netloc=netloc)
+    # normalize double slashes
+    parsed = parsed._replace(path=regexes.double_slash_regex.sub("/", parsed.path))
+    # append / if path is empty
+    if parsed.path == "":
+        parsed = parsed._replace(path="/")
+    return parsed
+
+
+def collapse_urls(urls, threshold=10):
+    """
+    Smartly dedupe suspiciously-similar URLs like these:
+    - http://evilcorp.com/user/11111/info
+    - http://evilcorp.com/user/2222/info
+    - http://evilcorp.com/user/333/info
+    - http://evilcorp.com/user/44/info
+    - http://evilcorp.com/user/5/info
+
+    Useful for cleaning large lists of garbage-riddled URLs from sources like wayback
+    """
+    url_hashes = {}
+    for url in urls:
+        new_url = clean_url(url)
+        url_hash = hash_url(new_url)
+        try:
+            url_hashes[url_hash].add(new_url)
+        except KeyError:
+            url_hashes[url_hash] = {
+                new_url,
+            }
+
+    for url_hash, new_urls in url_hashes.items():
+        # if the number of URLs exceeds the threshold
+        if len(new_urls) > threshold:
+            # yield only one
+            yield next(iter(new_urls))
+        else:
+            yield from new_urls
+
+
 def soft_validate(s, t):
     """
     Friendly validation wrapper that returns True/False instead of raising an error
diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py
index 3bf821e759..87d81d838c 100644
--- a/bbot/modules/bevigil.py
+++ b/bbot/modules/bevigil.py
@@ -34,7 +34,7 @@ async def handle_event(self, event):
         if self.urls:
             urls = await self.query(query, request_fn=self.request_urls, parse_fn=self.parse_urls)
             if urls:
-                for parsed_url in self.helpers.collapse_urls(urls):
+                for parsed_url in self.helpers.validators.collapse_urls(urls):
                     self.emit_event(parsed_url.geturl(), "URL_UNVERIFIED", source=event)
 
     async def request_subdomains(self, query):
diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py
index 40bbb9f5cb..08e32926a4 100644
--- a/bbot/modules/wayback.py
+++ b/bbot/modules/wayback.py
@@ -50,7 +50,7 @@ async def query(self, query):
                 continue
 
         dns_names = set()
-        for parsed_url in self.helpers.collapse_urls(urls, threshold=self.garbage_threshold):
+        for parsed_url in self.helpers.validators.collapse_urls(urls, threshold=self.garbage_threshold):
             if not self.urls:
                 dns_name = parsed_url.hostname
                 h = hash(dns_name)
diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py
index 124a6900f3..7c7563c1ae 100644
--- 
a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -62,6 +62,7 @@ async def test_events(events, scan, helpers, bbot_config): assert events.emoji not in events.domain assert events.domain not in events.emoji assert "evilcorp.com" == scan.make_event(" eViLcorp.COM.:88", "DNS_NAME", dummy=True) + assert "evilcorp.com" == scan.make_event("evilcorp.com.", "DNS_NAME", dummy=True) # url tests assert scan.make_event("http://evilcorp.com", dummy=True) == scan.make_event("http://evilcorp.com/", dummy=True) @@ -87,6 +88,7 @@ async def test_events(events, scan, helpers, bbot_config): assert "http://evilcorp.com:443" == scan.make_event("http://evilcorp.com:443", dummy=True) assert scan.make_event("https://evilcorp.com", dummy=True).with_port().geturl() == "https://evilcorp.com:443/" assert scan.make_event("https://evilcorp.com:666", dummy=True).with_port().geturl() == "https://evilcorp.com:666/" + assert scan.make_event("https://evilcorp.com.:666", dummy=True) == "https://evilcorp.com:666/" assert scan.make_event("https://[bad::c0de]", dummy=True).with_port().geturl() == "https://[bad::c0de]:443/" assert scan.make_event("https://[bad::c0de]:666", dummy=True).with_port().geturl() == "https://[bad::c0de]:666/" assert "status-200" in scan.make_event("https://evilcorp.com", "URL", events.ipv4_url, tags=["status-200"]).tags diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index c3e61fb995..016e6d79f9 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -16,9 +16,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "http://e.co/u/4444/info", "http://e.co/u/5555/info", ) - new_urls = tuple(helpers.collapse_urls(bad_urls, threshold=4)) + new_urls = tuple(helpers.validators.collapse_urls(bad_urls, threshold=4)) assert len(new_urls) == 2 - new_urls = tuple(sorted([u.geturl() for u in helpers.collapse_urls(bad_urls, threshold=5)])) + new_urls = tuple(sorted([u.geturl() for u in helpers.validators.collapse_urls(bad_urls, threshold=5)])) assert new_urls == bad_urls new_url = helpers.add_get_params("http://evilcorp.com/a?p=1&q=2", {"r": 3, "s": "asdf"}).geturl() @@ -35,9 +35,10 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https ("q", ["2"]), ) - assert helpers.clean_url("http://evilcorp.com:80").geturl() == "http://evilcorp.com/" - assert helpers.clean_url("http://evilcorp.com/asdf?a=asdf#frag").geturl() == "http://evilcorp.com/asdf" - assert helpers.clean_url("http://evilcorp.com//asdf").geturl() == "http://evilcorp.com/asdf" + assert helpers.validators.clean_url("http://evilcorp.com:80").geturl() == "http://evilcorp.com/" + assert helpers.validators.clean_url("http://evilcorp.com/asdf?a=asdf#frag").geturl() == "http://evilcorp.com/asdf" + assert helpers.validators.clean_url("http://evilcorp.com//asdf").geturl() == "http://evilcorp.com/asdf" + assert helpers.validators.clean_url("http://evilcorp.com.").geturl() == "http://evilcorp.com/" assert helpers.url_depth("http://evilcorp.com/asdf/user/") == 2 assert helpers.url_depth("http://evilcorp.com/asdf/user") == 2 @@ -296,7 +297,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https ### VALIDATORS ### # hosts - assert helpers.validators.validate_host(" evilCorp.COM") == "evilcorp.com" + assert helpers.validators.validate_host(" evilCorp.COM.") == "evilcorp.com" assert helpers.validators.validate_host("LOCALHOST ") == "localhost" assert 
helpers.validators.validate_host(" 192.168.1.1") == "192.168.1.1" assert helpers.validators.validate_host(" Dead::c0dE ") == "dead::c0de" From b3fff6c5d416ce57713e0351e0148c3ca84778ae Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 11:59:00 -0400 Subject: [PATCH 023/123] fix dns tests --- bbot/test/test_step_1/test_regexes.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bbot/test/test_step_1/test_regexes.py b/bbot/test/test_step_1/test_regexes.py index a371f62f0a..db889ec9c7 100644 --- a/bbot/test/test_step_1/test_regexes.py +++ b/bbot/test/test_step_1/test_regexes.py @@ -11,7 +11,6 @@ def test_dns_name_regexes(): "evilcorp-.com", # DNS names cannot end with a dash "evilcorp..com", # DNS names cannot have two consecutive dots ".evilcorp.com", # DNS names cannot begin with a dot - "evilcorp.com.", # DNS names cannot end with a dot (in most cases) "ev*lcorp.com", # DNS names cannot have special characters (other than dash and dot) "evilcorp/.com", # DNS names cannot have slashes "evilcorp..", # DNS names cannot end with a dot From 437367cfcb7462040a0f1175851dc58bd4bfce30 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 12:25:03 -0400 Subject: [PATCH 024/123] reverted httpcore version --- poetry.lock | 16 ++++++---------- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/poetry.lock b/poetry.lock index f6cfb54a78..358dac3899 100644 --- a/poetry.lock +++ b/poetry.lock @@ -659,9 +659,11 @@ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." optional = false -python-versions = ">=3.8" -files = [] -develop = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, +] [package.dependencies] anyio = ">=3.0,<5.0" @@ -673,12 +675,6 @@ sniffio = "==1.*" http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -[package.source] -type = "git" -url = "https://github.com/encode/httpcore" -reference = "HEAD" -resolved_reference = "b649bb0a9c3e176189b17bd10b1c929752454efe" - [[package]] name = "httpx" version = "0.24.1" @@ -1797,4 +1793,4 @@ xmltodict = ">=0.12.0,<0.13.0" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "8e6022e220500621d7255464e89ab93b59e6856bd9d29be80ad054012f4d5b57" +content-hash = "22dc78b01007c5fb6a7d8729277e75b0382e5571ba5ec23bd1d53a8467f368bc" diff --git a/pyproject.toml b/pyproject.toml index 93242e4189..70e6b185ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,8 +45,8 @@ beautifulsoup4 = "^4.12.2" lxml = "^4.9.2" httpx = {extras = ["http2"], version = "^0.24.1"} dnspython = "^2.4.2" -httpcore = {git = "https://github.com/encode/httpcore"} anyio = "4.0.0rc1" +httpcore = "^0.17.3" [tool.poetry.group.dev.dependencies] flake8 = "^6.0.0" From fca2ff832cd4002b7e05b66a39c46d8f8c90c0ef Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 15:34:13 -0400 Subject: [PATCH 025/123] log to scan folders --- bbot/core/__init__.py | 2 +- bbot/core/helpers/async_helpers.py | 8 +- bbot/core/logger/__init__.py | 2 +- bbot/core/logger/logger.py | 183 ++++++++++++++--------------- bbot/modules/base.py | 1 - bbot/scanner/scanner.py | 23 +++- 6 files changed, 115 insertions(+), 104 deletions(-) diff --git a/bbot/core/__init__.py b/bbot/core/__init__.py index b8b0c378ae..52cf06cc5a 100644 --- a/bbot/core/__init__.py 
+++ b/bbot/core/__init__.py @@ -1,4 +1,4 @@ # logging from .logger import init_logging -logging_queue, logging_handlers = init_logging() +init_logging() diff --git a/bbot/core/helpers/async_helpers.py b/bbot/core/helpers/async_helpers.py index df69ff6c78..f2a0548926 100644 --- a/bbot/core/helpers/async_helpers.py +++ b/bbot/core/helpers/async_helpers.py @@ -64,8 +64,8 @@ def __init__(self, manager, task_name, _log=True): async def __aenter__(self): self.task_id = uuid.uuid4() # generate a unique ID for the task - if self.log: - log.trace(f"Starting task {self.task_name} ({self.task_id})") + # if self.log: + # log.trace(f"Starting task {self.task_name} ({self.task_id})") async with self.manager.lock: # acquire the lock self.start_time = datetime.now() self.manager.tasks[self.task_id] = self @@ -74,8 +74,8 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc_val, exc_tb): async with self.manager.lock: # acquire the lock self.manager.tasks.pop(self.task_id, None) # remove only current task - if self.log: - log.trace(f"Finished task {self.task_name} ({self.task_id})") + # if self.log: + # log.trace(f"Finished task {self.task_name} ({self.task_id})") def __str__(self): running_for = human_timedelta(datetime.now() - self.start_time) diff --git a/bbot/core/logger/__init__.py b/bbot/core/logger/__init__.py index 83f2f048ae..77752a7f9d 100644 --- a/bbot/core/logger/__init__.py +++ b/bbot/core/logger/__init__.py @@ -1 +1 @@ -from .logger import init_logging, get_log_level, set_log_level, ColoredFormatter, toggle_log_level +from .logger import init_logging, get_log_level, set_log_level, ColoredFormatter, toggle_log_level, add_log_handler diff --git a/bbot/core/logger/logger.py b/bbot/core/logger/logger.py index af534ff07c..b7892e145a 100644 --- a/bbot/core/logger/logger.py +++ b/bbot/core/logger/logger.py @@ -1,12 +1,9 @@ import os import sys -import atexit import logging from copy import copy +import logging.handlers from pathlib import Path -from queue import SimpleQueue -from contextlib import suppress -from logging.handlers import QueueHandler, QueueListener from ..configurator import config from ..helpers.misc import mkdir, error_and_exit @@ -15,14 +12,10 @@ _log_level_override = None +bbot_loggers = None +bbot_log_handlers = None -# Log to stderr -stderr_handler = logging.StreamHandler(sys.stderr) - -# Log to stdout -stdout_handler = logging.StreamHandler(sys.stdout) - -log_listener = None +debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)s %(message)s") class ColoredFormatter(logging.Formatter): @@ -76,11 +69,11 @@ def addLoggingLevel(levelName, levelNum, methodName=None): methodName = levelName.lower() if hasattr(logging, levelName): - raise AttributeError("{} already defined in logging module".format(levelName)) + raise AttributeError(f"{levelName} already defined in logging module") if hasattr(logging, methodName): - raise AttributeError("{} already defined in logging module".format(methodName)) + raise AttributeError(f"{methodName} already defined in logging module") if hasattr(logging.getLoggerClass(), methodName): - raise AttributeError("{} already defined in logger class".format(methodName)) + raise AttributeError(f"{methodName} already defined in logger class") # This method was inspired by the answers to Stack Overflow post # http://stackoverflow.com/q/2183233/2988730, especially @@ -112,92 +105,90 @@ def logToRoot(message, *args, **kwargs): verbosity_levels_toggle = [logging.INFO, logging.VERBOSE, logging.DEBUG] -def 
stop_listener(listener): - with suppress(Exception): - listener.stop() +def get_bbot_loggers(): + global bbot_loggers + if bbot_loggers is None: + bbot_loggers = [ + logging.getLogger("bbot"), + logging.getLogger("asyncio"), + ] + return bbot_loggers -def log_worker_setup(logging_queue): - """ - This needs to be run whenever a new multiprocessing.Process() is spawned - """ - log_level = get_log_level() - bbot_log = logging.getLogger("bbot") - asyncio_log = logging.getLogger("asyncio") - # Don't do this more than once - if len(bbot_log.handlers) == 0: - queue_handler = QueueHandler(logging_queue) - for log in (bbot_log, asyncio_log): - log.setLevel(log_level) - log.addHandler(queue_handler) - return bbot_log - - -def log_listener_setup(logging_queue): - log_dir = Path(config["home"]) / "logs" - if not mkdir(log_dir, raise_error=False): - error_and_exit(f"Failure creating or error writing to BBOT logs directory ({log_dir})") - - # Main log file - main_handler = logging.handlers.TimedRotatingFileHandler( - f"{log_dir}/bbot.log", when="d", interval=1, backupCount=14 - ) - - # Separate log file for debugging - debug_handler = logging.handlers.TimedRotatingFileHandler( - f"{log_dir}/bbot.debug.log", when="d", interval=1, backupCount=14 - ) - - def stderr_filter(record): - log_level = get_log_level() - if record.levelno == logging.STDOUT or (record.levelno == logging.TRACE and log_level > logging.DEBUG): - return False - if record.levelno < log_level: - return False - return True - - stderr_handler.addFilter(stderr_filter) - stdout_handler.addFilter(lambda x: x.levelno == logging.STDOUT) - debug_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.DEBUG) - main_handler.addFilter(lambda x: x.levelno not in (logging.STDOUT, logging.TRACE) and x.levelno >= logging.VERBOSE) - - # Set log format - debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)s %(message)s") - debug_handler.setFormatter(debug_format) - main_handler.setFormatter(debug_format) - stderr_handler.setFormatter(ColoredFormatter("%(levelname)s %(name)s: %(message)s")) - stdout_handler.setFormatter(logging.Formatter("%(message)s")) - - handlers = [stdout_handler, stderr_handler, main_handler, debug_handler] - - global log_listener - log_listener = QueueListener(logging_queue, *handlers) - log_listener.start() - atexit.register(stop_listener, log_listener) - return { - "stderr": stderr_handler, - "stdout": stdout_handler, - "file_debug": debug_handler, - "file_main": main_handler, - } +def add_log_handler(handler, formatter=None): + if handler.formatter is None: + handler.setFormatter(debug_format) + for logger in get_bbot_loggers(): + if handler not in logger.handlers: + logger.addHandler(handler) def init_logging(): - """ - Initializes logging, returns logging queue and dictionary containing log handlers - """ - - handlers = {} - logging_queue = None - - log = logging.getLogger("bbot") # Don't do this more than once - if len(log.handlers) == 0: - logging_queue = SimpleQueue() - handlers = log_listener_setup(logging_queue) - log_worker_setup(logging_queue) - - return logging_queue, handlers + if len(logging.getLogger("bbot").handlers) == 0: + for logger in get_bbot_loggers(): + include_logger(logger) + + +def include_logger(logger): + bbot_loggers = get_bbot_loggers() + if logger not in bbot_loggers: + bbot_loggers.append(logger) + logger.setLevel(get_log_level()) + for handler in get_log_handlers().values(): + logger.addHandler(handler) + + +def get_log_handlers(): + 
global bbot_log_handlers + + if bbot_log_handlers is None: + log_dir = Path(config["home"]) / "logs" + if not mkdir(log_dir, raise_error=False): + error_and_exit(f"Failure creating or error writing to BBOT logs directory ({log_dir})") + + # Main log file + main_handler = logging.handlers.TimedRotatingFileHandler( + f"{log_dir}/bbot.log", when="d", interval=1, backupCount=14 + ) + + # Separate log file for debugging + debug_handler = logging.handlers.TimedRotatingFileHandler( + f"{log_dir}/bbot.debug.log", when="d", interval=1, backupCount=14 + ) + + def stderr_filter(record): + log_level = get_log_level() + if record.levelno == logging.STDOUT or (record.levelno == logging.TRACE and log_level > logging.DEBUG): + return False + if record.levelno < log_level: + return False + return True + + # Log to stderr + stderr_handler = logging.StreamHandler(sys.stderr) + stderr_handler.addFilter(stderr_filter) + # Log to stdout + stdout_handler = logging.StreamHandler(sys.stdout) + stdout_handler.addFilter(lambda x: x.levelno == logging.STDOUT) + debug_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.DEBUG) + main_handler.addFilter( + lambda x: x.levelno not in (logging.STDOUT, logging.TRACE) and x.levelno >= logging.VERBOSE + ) + + # Set log format + debug_handler.setFormatter(debug_format) + main_handler.setFormatter(debug_format) + stderr_handler.setFormatter(ColoredFormatter("%(levelname)s %(name)s: %(message)s")) + stdout_handler.setFormatter(logging.Formatter("%(message)s")) + + bbot_log_handlers = { + "stderr": stderr_handler, + "stdout": stdout_handler, + "file_debug": debug_handler, + "file_main": main_handler, + } + return bbot_log_handlers def get_log_level(): @@ -224,8 +215,8 @@ def set_log_level(level, logger=None): logger.hugeinfo(f"Setting log level to {logging.getLevelName(level)}") config["silent"] = False _log_level_override = level - for logname in ("bbot", "asyncio"): - logging.getLogger(logname).setLevel(level) + for logger in bbot_loggers: + logger.setLevel(level) def toggle_log_level(logger=None): diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 6f5b5445c7..354d83b3f3 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -364,7 +364,6 @@ async def _worker(self): self.debug(f"Finished handling {event}") except asyncio.CancelledError: self.log.trace("Worker cancelled") - self.trace() raise self.log.trace(f"Worker stopped") diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index af936eff0c..f6c9b0451f 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -26,7 +26,7 @@ from bbot.core.helpers.async_helpers import async_to_sync_gen from bbot.core.configurator.environ import prepare_environment from bbot.core.errors import BBOTError, ScanError, ValidationError -from bbot.core.logger import init_logging, get_log_level, set_log_level +from bbot.core.logger import init_logging, get_log_level, set_log_level, add_log_handler log = logging.getLogger("bbot.scanner") @@ -181,6 +181,7 @@ def __init__( self._stopping = False self._dns_regexes = None + self._log_handlers = None def _on_keyboard_interrupt(self, loop, event): self.stop() @@ -227,6 +228,10 @@ async def async_start(self): try: await self.prep() + # add log handlers + for handler in self.log_handlers: + add_log_handler(handler) + if not self.target: self.warning(f"No scan targets specified") @@ -598,6 +603,22 @@ def critical(self, *args, trace=True, **kwargs): if trace: self.trace() + @property + def log_handlers(self): + if self._log_handlers is 
None: + main_handler = logging.handlers.TimedRotatingFileHandler( + str(self.home / "scan.log"), when="d", interval=1, backupCount=14 + ) + main_handler.addFilter( + lambda x: x.levelno not in (logging.STDOUT, logging.TRACE) and x.levelno >= logging.VERBOSE + ) + debug_handler = logging.handlers.TimedRotatingFileHandler( + str(self.home / "debug.log"), when="d", interval=1, backupCount=14 + ) + debug_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.DEBUG) + self._log_handlers = [main_handler, debug_handler] + return self._log_handlers + def _internal_modules(self): for modname in module_loader.preloaded(type="internal"): if self.config.get(modname, True): From ab09a9dd0d26809090724ea9ea2fdb921952e157 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 15:41:11 -0400 Subject: [PATCH 026/123] fix logging in tests --- bbot/test/bbot_fixtures.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/bbot/test/bbot_fixtures.py b/bbot/test/bbot_fixtures.py index d75cb0585c..2c74fe1906 100644 --- a/bbot/test/bbot_fixtures.py +++ b/bbot/test/bbot_fixtures.py @@ -5,9 +5,9 @@ import logging import subprocess import tldextract +import pytest_httpserver from pathlib import Path from omegaconf import OmegaConf -import pytest_httpserver from werkzeug.wrappers import Request @@ -26,11 +26,6 @@ def match_data(self, request: Request) -> bool: if test_config.get("debug", False): os.environ["BBOT_DEBUG"] = "True" -# clear log handlers (pytest takes care of this) -from bbot.core.logger import logger - -logger.log_listener.handlers = [] - from .bbot_fixtures import * # noqa: F401 import bbot.core.logger # noqa: F401 from bbot.core.errors import * # noqa: F401 From 71a31619e0d161f7f8741094a75ecad1194ab97f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 16:11:06 -0400 Subject: [PATCH 027/123] improve logging tests --- bbot/core/logger/__init__.py | 10 ++++++++- bbot/core/logger/logger.py | 6 ++++++ bbot/scanner/scanner.py | 6 +++++- bbot/test/test.conf | 2 +- bbot/test/test_step_1/test_python_api.py | 26 +++++++++++++++++++++++- 5 files changed, 46 insertions(+), 4 deletions(-) diff --git a/bbot/core/logger/__init__.py b/bbot/core/logger/__init__.py index 77752a7f9d..25327390bf 100644 --- a/bbot/core/logger/__init__.py +++ b/bbot/core/logger/__init__.py @@ -1 +1,9 @@ -from .logger import init_logging, get_log_level, set_log_level, ColoredFormatter, toggle_log_level, add_log_handler +from .logger import ( + init_logging, + get_log_level, + set_log_level, + ColoredFormatter, + toggle_log_level, + add_log_handler, + remove_log_handler, +) diff --git a/bbot/core/logger/logger.py b/bbot/core/logger/logger.py index b7892e145a..a2c12b1895 100644 --- a/bbot/core/logger/logger.py +++ b/bbot/core/logger/logger.py @@ -123,6 +123,12 @@ def add_log_handler(handler, formatter=None): logger.addHandler(handler) +def remove_log_handler(handler): + for logger in get_bbot_loggers(): + if handler in logger.handlers: + logger.removeHandler(handler) + + def init_logging(): # Don't do this more than once if len(logging.getLogger("bbot").handlers) == 0: diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index f6c9b0451f..d3b81666b4 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -26,7 +26,7 @@ from bbot.core.helpers.async_helpers import async_to_sync_gen from bbot.core.configurator.environ import prepare_environment from bbot.core.errors import BBOTError, ScanError, ValidationError -from bbot.core.logger import 
init_logging, get_log_level, set_log_level, add_log_handler +from bbot.core.logger import init_logging, get_log_level, set_log_level, add_log_handler, remove_log_handler log = logging.getLogger("bbot.scanner") @@ -321,6 +321,10 @@ async def async_start(self): await self.dispatcher.on_finish(self) + # remove log handlers + for handler in self.log_handlers: + remove_log_handler(handler) + def start_modules(self): self.verbose(f"Starting module worker loops") for module_name, module in self.modules.items(): diff --git a/bbot/test/test.conf b/bbot/test/test.conf index 194e7890d8..8b4d3e1c9e 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -32,7 +32,7 @@ plumbus: asdf dns_debug: true user_agent: "BBOT Test User-Agent" http_debug: false -keep_scans: 1 +keep_scans: 2 agent_url: ws://127.0.0.1:8765 agent_token: test dns_resolution: false diff --git a/bbot/test/test_step_1/test_python_api.py b/bbot/test/test_step_1/test_python_api.py index 7a632ed3d3..5491145704 100644 --- a/bbot/test/test_step_1/test_python_api.py +++ b/bbot/test/test_step_1/test_python_api.py @@ -14,8 +14,32 @@ async def test_python_api(bbot_config): # make sure output files work scan2 = Scanner("127.0.0.1", config=bbot_config, output_modules=["json"], name="python_api_test") await scan2.async_start_without_generator() - out_file = scan2.helpers.scans_dir / "python_api_test" / "output.json" + scan_home = scan2.helpers.scans_dir / "python_api_test" + out_file = scan_home / "output.json" assert list(scan2.helpers.read_file(out_file)) + scan_log = scan_home / "scan.log" + debug_log = scan_home / "debug.log" + assert scan_log.is_file() + assert "python_api_test" in open(scan_log).read() + assert debug_log.is_file() + assert "python_api_test" in open(debug_log).read() + + scan3 = Scanner("127.0.0.1", config=bbot_config, output_modules=["json"], name="scan_logging_test") + await scan3.async_start_without_generator() + + assert "scan_logging_test" not in open(scan_log).read() + assert "scan_logging_test" not in open(debug_log).read() + + scan_home = scan3.helpers.scans_dir / "scan_logging_test" + out_file = scan_home / "output.json" + assert list(scan3.helpers.read_file(out_file)) + scan_log = scan_home / "scan.log" + debug_log = scan_home / "debug.log" + assert scan_log.is_file() + assert debug_log.is_file() + assert "scan_logging_test" in open(scan_log).read() + assert "scan_logging_test" in open(debug_log).read() + # make sure config loads properly bbot_home = "/tmp/.bbot_python_api_test" Scanner("127.0.0.1", config={"home": bbot_home}) From bb8f4407e3b24a97e19270df5454ab9756ab07aa Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 16:33:09 -0400 Subject: [PATCH 028/123] make double sure scan homedir is created --- bbot/scanner/scanner.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index d3b81666b4..c46ec9c344 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -610,6 +610,7 @@ def critical(self, *args, trace=True, **kwargs): @property def log_handlers(self): if self._log_handlers is None: + self.helpers.mkdir(self.home) main_handler = logging.handlers.TimedRotatingFileHandler( str(self.home / "scan.log"), when="d", interval=1, backupCount=14 ) From 084d753a1660efa787acbd6eb96de95638d732b4 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 16:45:14 -0400 Subject: [PATCH 029/123] keep more scans during tests --- bbot/test/test.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bbot/test/test.conf b/bbot/test/test.conf index 8b4d3e1c9e..6f16c6ba61 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -32,7 +32,7 @@ plumbus: asdf dns_debug: true user_agent: "BBOT Test User-Agent" http_debug: false -keep_scans: 2 +keep_scans: 5 agent_url: ws://127.0.0.1:8765 agent_token: test dns_resolution: false From 2080895bab74e4a7c7546063d4173fbebf6473e0 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 22 Aug 2023 16:50:20 -0400 Subject: [PATCH 030/123] don't clean up older scans during tests --- bbot/test/test.conf | 1 - 1 file changed, 1 deletion(-) diff --git a/bbot/test/test.conf b/bbot/test/test.conf index 6f16c6ba61..fe360effc0 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -32,7 +32,6 @@ plumbus: asdf dns_debug: true user_agent: "BBOT Test User-Agent" http_debug: false -keep_scans: 5 agent_url: ws://127.0.0.1:8765 agent_token: test dns_resolution: false From 43b4dab4fb45584ef9f2b0cb0bf2ab761f2233e2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 23 Aug 2023 09:43:54 -0400 Subject: [PATCH 031/123] improved scan logging --- bbot/core/logger/__init__.py | 3 ++- bbot/scanner/scanner.py | 37 +++++++++++++++++++++++++++++------- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/bbot/core/logger/__init__.py b/bbot/core/logger/__init__.py index 25327390bf..39f447d6a7 100644 --- a/bbot/core/logger/__init__.py +++ b/bbot/core/logger/__init__.py @@ -2,8 +2,9 @@ init_logging, get_log_level, set_log_level, + add_log_handler, ColoredFormatter, + get_log_handlers, toggle_log_level, - add_log_handler, remove_log_handler, ) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index c46ec9c344..058eb17aa7 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -26,7 +26,14 @@ from bbot.core.helpers.async_helpers import async_to_sync_gen from bbot.core.configurator.environ import prepare_environment from bbot.core.errors import BBOTError, ScanError, ValidationError -from bbot.core.logger import init_logging, get_log_level, set_log_level, add_log_handler, remove_log_handler +from bbot.core.logger import ( + init_logging, + get_log_level, + set_log_level, + add_log_handler, + get_log_handlers, + remove_log_handler, +) log = logging.getLogger("bbot.scanner") @@ -182,6 +189,7 @@ def __init__( self._dns_regexes = None self._log_handlers = None + self._log_handler_backup = [] def _on_keyboard_interrupt(self, loop, event): self.stop() @@ -228,9 +236,7 @@ async def async_start(self): try: await self.prep() - # add log handlers - for handler in self.log_handlers: - add_log_handler(handler) + self.start_log_handlers() if not self.target: self.warning(f"No scan targets specified") @@ -321,9 +327,7 @@ async def async_start(self): await self.dispatcher.on_finish(self) - # remove log handlers - for handler in self.log_handlers: - remove_log_handler(handler) + self.stop_log_handlers() def start_modules(self): self.verbose(f"Starting module worker loops") @@ -624,6 +628,25 @@ def log_handlers(self): self._log_handlers = [main_handler, debug_handler] return self._log_handlers + def start_log_handlers(self): + # add log handlers + for handler in self.log_handlers: + add_log_handler(handler) + # temporarily disable main ones + for handler_name in ("file_main", "file_debug"): + handler = get_log_handlers().get(handler_name, None) + if handler is not None and handler not in self._log_handler_backup: + self._log_handler_backup.append(handler) + remove_log_handler(handler) + + def stop_log_handlers(self): + # remove log handlers + 
for handler in self.log_handlers: + remove_log_handler(handler) + # restore main ones + for handler in self._log_handler_backup: + add_log_handler(handler) + def _internal_modules(self): for modname in module_loader.preloaded(type="internal"): if self.config.get(modname, True): From 1f36524c1f51dd689149253878ade4d46d9d6357 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 24 Aug 2023 10:26:11 -0400 Subject: [PATCH 032/123] fix inconsistency with human output --- bbot/modules/base.py | 4 ++-- bbot/modules/output/csv.py | 1 + bbot/scanner/manager.py | 4 +--- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 354d83b3f3..2057212b63 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -348,8 +348,6 @@ async def _worker(self): self.debug(f"Got {event} from {getattr(event, 'module', 'unknown_module')}") async with self._task_counter.count(f"event_postcheck({event})"): acceptable, reason = await self._event_postcheck(event) - if not acceptable: - self.debug(f"Not accepting {event} because {reason}") if acceptable: if event.type == "FINISHED": context = f"{self.name}.finish()" @@ -362,6 +360,8 @@ async def _worker(self): async with self.scan.acatch(context), self._task_counter.count(context): await self.handle_event(event) self.debug(f"Finished handling {event}") + else: + self.debug(f"Not accepting {event} because {reason}") except asyncio.CancelledError: self.log.trace("Worker cancelled") raise diff --git a/bbot/modules/output/csv.py b/bbot/modules/output/csv.py index feed0491db..fe8af7e89e 100644 --- a/bbot/modules/output/csv.py +++ b/bbot/modules/output/csv.py @@ -12,6 +12,7 @@ class CSV(BaseOutputModule): header_row = ["Event type", "Event data", "IP Address", "Source Module", "Scope Distance", "Event Tags"] filename = "output.csv" + accept_dupes = False async def setup(self): self.custom_headers = [] diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 3bae7990b6..d085a60b0d 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -158,8 +158,6 @@ async def _emit_event(self, event, *args, **kwargs): for provider in self.scan.helpers.cloud.providers.values(): provider.tag_event(event) - event_is_duplicate = self.is_duplicate_event(event) - # Scope shepherding # here, we buff or nerf the scope distance of an event based on its attributes and certain scan settings event_is_duplicate = self.is_duplicate_event(event) @@ -332,7 +330,7 @@ async def distribute_event(self, *args, **kwargs): if not dup and -1 < event.scope_distance < 1: self.scan.word_cloud.absorb_event(event) for mod in self.scan.modules.values(): - if not dup or mod.accept_dupes: + if not dup or mod.accept_dupes or (mod._type == "output" and event._force_output): await mod.queue_event(event) async def _worker_loop(self): From c0f63a5fa11feffb849ca248fd3665735a141a1b Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 28 Aug 2023 14:15:18 -0400 Subject: [PATCH 033/123] introduce AZURE_TENANT event type --- bbot/core/event/base.py | 4 ++ bbot/core/helpers/regexes.py | 3 + bbot/modules/azure_tenant.py | 46 +++++++++++--- .../module_tests/test_module_azure_tenant.py | 63 ++++++++++++++++++- 4 files changed, 105 insertions(+), 11 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 74058b3788..c4fca0d83d 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -943,6 +943,10 @@ class WEBSCREENSHOT(DictHostEvent): _always_emit = True +class AZURE_TENANT(DictEvent): + _always_emit = 
True
+
+
 def make_event(
     data,
     event_type=None,
diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py
index 5d22384ad9..5ed1693455 100644
--- a/bbot/core/helpers/regexes.py
+++ b/bbot/core/helpers/regexes.py
@@ -29,6 +29,9 @@
 email_regex = re.compile(_email_regex, re.I)
 _ptr_regex = r"(?:[0-9]{1,3}[-_\.]){3}[0-9]{1,3}"
 ptr_regex = re.compile(_ptr_regex)
+# uuid regex
+_uuid_regex = r"[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}"
+uuid_regex = re.compile(_uuid_regex, re.I)
 
 _open_port_regexes = (
     _dns_name_regex + r":[0-9]{1,5}",
diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py
index 0ed5f83841..1ea068e8f1 100644
--- a/bbot/modules/azure_tenant.py
+++ b/bbot/modules/azure_tenant.py
@@ -1,4 +1,5 @@
 import re
+from contextlib import suppress
 
 from .viewdns import viewdns
 
@@ -19,13 +20,30 @@ async def setup(self):
 
     async def handle_event(self, event):
         _, query = self.helpers.split_domain(event.data)
-        domains, _ = await self.query(query)
+        domains, openid_config = await self.query(query)
+
+        tenant_id = None
+        authorization_endpoint = openid_config.get("authorization_endpoint", "")
+        matches = self.helpers.regexes.uuid_regex.findall(authorization_endpoint)
+        if matches:
+            tenant_id = matches[0]
+
+        tenant_names = set()
         if domains:
-            self.success(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}')
+            self.verbose(f'Found {len(domains):,} domains under tenant for "{query}": {", ".join(sorted(domains))}')
             for domain in domains:
                 if domain != query:
-                    self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"])
-        # todo: tenants?
+                    self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate", "azure-tenant"])
+                # tenant names
+                if domain.lower().endswith(".onmicrosoft.com"):
+                    tenantname = domain.split(".")[0].lower()
+                    if tenantname:
+                        tenant_names.add(tenantname)
+
+        event_data = {"tenant-names": sorted(tenant_names), "domains": sorted(domains)}
+        if tenant_id is not None:
+            event_data["tenant-id"] = tenant_id
+        self.emit_event(event_data, "AZURE_TENANT", source=event)
 
     async def query(self, domain):
         url = f"{self.base_url}/autodiscover/autodiscover.svc"
@@ -56,19 +74,21 @@ async def query(self, domain):
 
         self.debug(f"Retrieving tenant domains at {url}")
 
-        r = await self.helpers.request(url, method="POST", headers=headers, data=data)
+        autodiscover_task = self.helpers.create_task(
+            self.helpers.request(url, method="POST", headers=headers, data=data)
+        )
+        openid_url = f"https://login.windows.net/{domain}/.well-known/openid-configuration"
+        openid_task = self.helpers.create_task(self.helpers.request(openid_url))
+
+        r = await autodiscover_task
         status_code = getattr(r, "status_code", 0)
         if status_code not in (200, 421):
            self.verbose(f'Error retrieving azure_tenant domains for "{domain}" (status code: {status_code})')
            return set(), set()
         found_domains = list(set(self.d_xml_regex.findall(r.text)))
         domains = set()
-        tenantnames = set()
         for d in found_domains:
-            # tenant names
-            if d.lower().endswith(".onmicrosoft.com"):
-                tenantnames.add(d.split(".")[0].lower())
             # make sure we don't make any unnecessary api calls
             d = str(d).lower()
             _, query = self.helpers.split_domain(d)
@@ -77,4 +97,10 @@
             # absorb into word cloud
             self.scan.word_cloud.absorb_word(d)
 
-        return domains, tenantnames
+        r = await openid_task
+        openid_config = dict()
+        with suppress(Exception):
+            openid_config = r.json()
+
+        domains = sorted(domains)
+        return domains, openid_config
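For illustration, an AZURE_TENANT event emitted by the new handle_event() carries a dict shaped like the sketch below; the field names come from the code above, while the values are illustrative, borrowed from the test fixture that follows (the tenant id is the UUID scraped out of authorization_endpoint):

    {
        "tenant-names": ["blacklanternsecurity"],
        "domains": ["blacklanternsecurity.com", "blacklanternsecurity.onmicrosoft.com"],
        "tenant-id": "cc74fc12-4142-400e-a653-f98bdeadbeef"
    }
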
diff --git a/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py b/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py
index 28f2ab4c2a..b7986d3a11 100644
--- a/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py
+++ b/bbot/test/test_step_2/module_tests/test_module_azure_tenant.py
@@ -35,12 +35,73 @@ class TestAzure_Tenant(ModuleTestBase):
     """
 
+    openid_config_azure = {
+        "token_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/token",
+        "token_endpoint_auth_methods_supported": ["client_secret_post", "private_key_jwt", "client_secret_basic"],
+        "jwks_uri": "https://login.windows.net/common/discovery/keys",
+        "response_modes_supported": ["query", "fragment", "form_post"],
+        "subject_types_supported": ["pairwise"],
+        "id_token_signing_alg_values_supported": ["RS256"],
+        "response_types_supported": ["code", "id_token", "code id_token", "token id_token", "token"],
+        "scopes_supported": ["openid"],
+        "issuer": "https://sts.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/",
+        "microsoft_multi_refresh_token": True,
+        "authorization_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/authorize",
+        "device_authorization_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/devicecode",
+        "http_logout_supported": True,
+        "frontchannel_logout_supported": True,
+        "end_session_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/logout",
+        "claims_supported": [
+            "sub",
+            "iss",
+            "cloud_instance_name",
+            "cloud_instance_host_name",
+            "cloud_graph_host_name",
+            "msgraph_host",
+            "aud",
+            "exp",
+            "iat",
+            "auth_time",
+            "acr",
+            "amr",
+            "nonce",
+            "email",
+            "given_name",
+            "family_name",
+            "nickname",
+        ],
+        "check_session_iframe": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/oauth2/checksession",
+        "userinfo_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/openid/userinfo",
+        "kerberos_endpoint": "https://login.windows.net/cc74fc12-4142-400e-a653-f98bdeadbeef/kerberos",
+        "tenant_region_scope": "NA",
+        "cloud_instance_name": "microsoftonline.com",
+        "cloud_graph_host_name": "graph.windows.net",
+        "msgraph_host": "graph.microsoft.com",
+        "rbac_url": "https://pas.windows.net",
+    }
+
     async def setup_after_prep(self, module_test):
         module_test.httpx_mock.add_response(
             method="POST",
             url="https://autodiscover-s.outlook.com/autodiscover/autodiscover.svc",
             text=self.tenant_response,
         )
+        module_test.httpx_mock.add_response(
+            url="https://login.windows.net/blacklanternsecurity.com/.well-known/openid-configuration",
+            json=self.openid_config_azure,
+        )
 
     def check(self, module_test, events):
-        assert any(e.data == "blacklanternsecurity.onmicrosoft.com" and "affiliate" in e.tags for e in events)
+        assert any(
+            e.type.startswith("DNS_NAME")
+            and e.data == "blacklanternsecurity.onmicrosoft.com"
+            and "affiliate" in e.tags
+            for e in events
+        )
+        assert any(
+            e.type == "AZURE_TENANT"
+            and e.data["tenant-id"] == "cc74fc12-4142-400e-a653-f98bdeadbeef"
+            and "blacklanternsecurity.onmicrosoft.com" in e.data["domains"]
+            and "blacklanternsecurity" in e.data["tenant-names"]
+            for e in events
+        )

From ddc3a9aa7a41052466f1fec2b2eb875f43463ca4 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 30 Aug 2023 12:23:01 -0400
Subject: [PATCH 034/123] temporary fix - ntlm tests

---
 bbot/modules/internal/excavate.py | 9 +++++----
 bbot/modules/ntlm.py              | 6 ++++--
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git 
a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 1e60ce5d75..3f3da12933 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -112,8 +112,6 @@ def report(self, result, name, event, **kwargs): web_spider_distance = getattr(event, "web_spider_distance", 0) exceeded_max_links = kwargs.get("exceeded_max_links", False) - tags = [] - parsed_uri = self.excavate.helpers.urlparse(result) host, port = self.excavate.helpers.split_host_port(parsed_uri.netloc) # Handle non-HTTP URIs (ftp, s3, etc.) @@ -134,9 +132,12 @@ def report(self, result, name, event, **kwargs): ) return + url_event = self.excavate.make_event(result, "URL_UNVERIFIED", source=event) + url_in_scope = self.excavate.scan.in_scope(url_event) + is_spider_danger = self.excavate.is_spider_danger(event, result) if ( - exceeded_max_links # if we exceeded the max number of links + (exceeded_max_links and url_in_scope) # if we exceeded the max number of links or (consider_spider_danger and is_spider_danger) # or if there's spider danger or ( (not consider_spider_danger) and (web_spider_distance > self.excavate.max_redirects) @@ -145,7 +146,7 @@ def report(self, result, name, event, **kwargs): tags.append("spider-danger") self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") - self.excavate.emit_event(result, "URL_UNVERIFIED", source=event, tags=tags) + self.excavate.emit_event(url_event) class EmailExtractor(BaseExtractor): diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 3411d17084..dfe53a21f0 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -135,7 +135,8 @@ async def handle_url(self, event): # disabled until this is resolved # https://github.com/encode/httpcore/discussions/783 # await self.helpers.cancel_tasks(tasks) - await gen.aclose() + # await gen.aclose() + break except HTTPError as e: if str(e): self.warning(str(e)) @@ -143,7 +144,8 @@ async def handle_url(self, event): # disabled until this is resolved # https://github.com/encode/httpcore/discussions/783 # await self.helpers.cancel_tasks(tasks) - await gen.aclose() + # await gen.aclose() + break return result, url From 26db909f88966a9a782f7f8fd19757bee836f881 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 30 Aug 2023 13:00:06 -0400 Subject: [PATCH 035/123] fix bug with tags --- bbot/modules/internal/excavate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 3f3da12933..61222f5e1a 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -143,7 +143,7 @@ def report(self, result, name, event, **kwargs): (not consider_spider_danger) and (web_spider_distance > self.excavate.max_redirects) ) # or if the spider distance is way out of control (greater than max_redirects) ): - tags.append("spider-danger") + url_event.add_tag("spider-danger") self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") self.excavate.emit_event(url_event) From 7e4891fa2cbe3ebb699c2d1fc2ca7eb27b572a8d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 30 Aug 2023 13:07:45 -0400 Subject: [PATCH 036/123] don't update docs except on push --- .github/workflows/tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5f6df72feb..cafdad9824 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -53,6 
+53,7 @@ jobs: update_docs: needs: test runs-on: ubuntu-latest + if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable') steps: - uses: actions/checkout@v3 with: From d533ea50c11703f953c191f221c6e64eceda00a6 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 30 Aug 2023 13:18:53 -0400 Subject: [PATCH 037/123] fixed event loop error with agent tests --- bbot/test/test_step_1/test_agent.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/bbot/test/test_step_1/test_agent.py b/bbot/test/test_step_1/test_agent.py index 2497791ef5..e9da3ba455 100644 --- a/bbot/test/test_step_1/test_agent.py +++ b/bbot/test/test_step_1/test_agent.py @@ -1,15 +1,15 @@ import json import websockets +from functools import partial from ..bbot_fixtures import * # noqa: F401 _first_run = True success = False -scan_done = asyncio.Event() -async def websocket_handler(websocket, path): +async def websocket_handler(websocket, path, scan_done=None): # whether this is the first run global _first_run first_run = int(_first_run) @@ -132,11 +132,14 @@ async def websocket_handler(websocket, path): @pytest.mark.asyncio async def test_agent(agent): + scan_done = asyncio.Event() scan_status = await agent.scan_status() assert scan_status["error"] == "Scan not in progress" + _websocket_handler = partial(websocket_handler, scan_done=scan_done) + global success - async with websockets.serve(websocket_handler, "127.0.0.1", 8765): + async with websockets.serve(_websocket_handler, "127.0.0.1", 8765): asyncio.create_task(agent.start()) # wait for 30 seconds await asyncio.wait_for(scan_done.wait(), 10) From b57fdbcf15e4b455f9b8128b6fb7df710147d998 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 30 Aug 2023 14:45:24 -0400 Subject: [PATCH 038/123] temporarily squash ntlm bug --- bbot/core/helpers/misc.py | 2 +- bbot/modules/ntlm.py | 66 ++++++++++++++------------------------- 2 files changed, 25 insertions(+), 43 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index d7be455118..3f9e864257 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -29,7 +29,7 @@ import xml.etree.ElementTree as ET from collections.abc import Mapping from hashlib import sha1 as hashlib_sha1 -from asyncio import create_task, sleep, wait_for # noqa +from asyncio import create_task, gather, sleep, wait_for # noqa from urllib.parse import urlparse, quote, unquote, urlunparse # noqa F401 from .url import * # noqa F401 diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index dfe53a21f0..63f6eba415 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -1,5 +1,5 @@ +from bbot.core.errors import NTLMError from bbot.modules.base import BaseModule -from bbot.core.errors import NTLMError, HTTPError ntlm_discovery_endpoints = [ "", @@ -60,6 +60,11 @@ class ntlm(BaseModule): + """ + Todo: + Cancel web requests and break out of loop when valid endpoint is found + (waiting on https://github.com/encode/httpcore/discussions/783/ to be fixed first) + """ watched_events = ["URL", "HTTP_RESPONSE"] produced_events = ["FINDING", "DNS_NAME"] flags = ["active", "safe", "web-basic", "web-thorough"] @@ -78,21 +83,22 @@ async def setup(self): async def handle_event(self, event): found_hash = hash(f"{event.host}:{event.port}") if found_hash not in self.found: - result, request_url = await self.handle_url(event) - if result and request_url: - self.found.add(found_hash) - self.emit_event( - { - "host": str(event.host), - "url": request_url, 
- "description": f"NTLM AUTH: {result}", - }, - "FINDING", - source=event, - ) - fqdn = result.get("FQDN", "") - if fqdn: - self.emit_event(fqdn, "DNS_NAME", source=event) + for result, request_url in await self.handle_url(event): + if result and request_url: + self.found.add(found_hash) + self.emit_event( + { + "host": str(event.host), + "url": request_url, + "description": f"NTLM AUTH: {result}", + }, + "FINDING", + source=event, + ) + fqdn = result.get("FQDN", "") + if fqdn: + self.emit_event(fqdn, "DNS_NAME", source=event) + break async def filter_event(self, event): if self.try_all: @@ -125,36 +131,12 @@ async def handle_url(self, event): self.processed.add(url_hash) tasks.append(self.helpers.create_task(self.check_ntlm(url))) - result, url = None, None - - gen = self.helpers.as_completed(tasks) - async for task in gen: - try: - result, url = await task - if result: - # disabled until this is resolved - # https://github.com/encode/httpcore/discussions/783 - # await self.helpers.cancel_tasks(tasks) - # await gen.aclose() - break - except HTTPError as e: - if str(e): - self.warning(str(e)) - # cancel all the tasks if there's an error - # disabled until this is resolved - # https://github.com/encode/httpcore/discussions/783 - # await self.helpers.cancel_tasks(tasks) - # await gen.aclose() - break - - return result, url + return await self.helpers.gather(*tasks) async def check_ntlm(self, test_url): # use lower timeout value http_timeout = self.config.get("httpx_timeout", 5) - r = await self.helpers.request( - test_url, headers=NTLM_test_header, raise_error=True, allow_redirects=False, timeout=http_timeout - ) + r = await self.helpers.request(test_url, headers=NTLM_test_header, allow_redirects=False, timeout=http_timeout) ntlm_resp = r.headers.get("WWW-Authenticate", "") if ntlm_resp: ntlm_resp_b64 = max(ntlm_resp.split(","), key=lambda x: len(x)).split()[-1] From 12a1d433688764bb40f21486dd8bb1d293481fdf Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 30 Aug 2023 14:53:40 -0400 Subject: [PATCH 039/123] blacked --- bbot/modules/ntlm.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py index 63f6eba415..59f4f85c3e 100644 --- a/bbot/modules/ntlm.py +++ b/bbot/modules/ntlm.py @@ -65,6 +65,7 @@ class ntlm(BaseModule): Cancel web requests and break out of loop when valid endpoint is found (waiting on https://github.com/encode/httpcore/discussions/783/ to be fixed first) """ + watched_events = ["URL", "HTTP_RESPONSE"] produced_events = ["FINDING", "DNS_NAME"] flags = ["active", "safe", "web-basic", "web-thorough"] From 53bde9cae6d947bfe4af2613b39ba292352f54bb Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 30 Aug 2023 18:00:44 -0400 Subject: [PATCH 040/123] tests for web_spider_links_per_page --- bbot/modules/internal/excavate.py | 53 ++++++++++--------- bbot/modules/ntlm.py | 2 +- .../module_tests/test_module_excavate.py | 43 +++++++++++++++ 3 files changed, 71 insertions(+), 27 deletions(-) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 61222f5e1a..720b9b96b5 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -62,19 +62,38 @@ def __init__(self, *args, **kwargs): self.web_spider_links_per_page = self.excavate.scan.config.get("web_spider_links_per_page", 20) async def search(self, content, event, **kwargs): + consider_spider_danger = kwargs.get("consider_spider_danger", True) + web_spider_distance = getattr(event, "web_spider_distance", 0) + 
result_hashes = set() results = [] async for result in self._search(content, event, **kwargs): - result_hash = hash(result) + result_hash = hash(result[0]) if result_hash not in result_hashes: result_hashes.add(result_hash) results.append(result) - for i, (result, name) in enumerate(results): - new_kwargs = dict(kwargs) - if i > self.web_spider_links_per_page: - # self.excavate.critical(f"SPIDER DANGER: {result}") - new_kwargs["exceeded_max_links"] = True - self.report(result, name, event, **new_kwargs) + + urls_found = 0 + for result, name in results: + url_event = self.report(result, name, event, **kwargs) + if url_event is not None: + url_in_scope = self.excavate.scan.in_scope(url_event) + is_spider_danger = self.excavate.is_spider_danger(event, result) + if ( + ( + urls_found >= self.web_spider_links_per_page and url_in_scope + ) # if we exceeded the max number of links + or (consider_spider_danger and is_spider_danger) # or if there's spider danger + or ( + (not consider_spider_danger) and (web_spider_distance > self.excavate.max_redirects) + ) # or if the spider distance is way out of control (greater than max_redirects) + ): + url_event.add_tag("spider-danger") + + self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]") + self.excavate.emit_event(url_event) + if url_in_scope: + urls_found += 1 async def _search(self, content, event, **kwargs): parsed = getattr(event, "parsed", None) @@ -108,10 +127,6 @@ async def _search(self, content, event, **kwargs): yield result, name def report(self, result, name, event, **kwargs): - consider_spider_danger = kwargs.get("consider_spider_danger", True) - web_spider_distance = getattr(event, "web_spider_distance", 0) - exceeded_max_links = kwargs.get("exceeded_max_links", False) - parsed_uri = self.excavate.helpers.urlparse(result) host, port = self.excavate.helpers.split_host_port(parsed_uri.netloc) # Handle non-HTTP URIs (ftp, s3, etc.) 
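A quick sketch of the counting rule added in search() above; the names mirror the diff, while the loop and URLs are invented to line up with the new TestExcavateMaxLinksPerPage test below:

    # toy model: only in-scope links advance urls_found
    web_spider_links_per_page = 10
    urls_found = 0
    for i in range(1, 26):  # 25 in-scope links, as in the test page
        url = f"http://127.0.0.1:8888/{i}"
        if urls_found >= web_spider_links_per_page:
            print(f"{url} -> tagged spider-danger")  # fires for /11 through /25
        urls_found += 1
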
@@ -132,21 +147,7 @@ def report(self, result, name, event, **kwargs):
             )
             return
 
-        url_event = self.excavate.make_event(result, "URL_UNVERIFIED", source=event)
-        url_in_scope = self.excavate.scan.in_scope(url_event)
-
-        is_spider_danger = self.excavate.is_spider_danger(event, result)
-        if (
-            (exceeded_max_links and url_in_scope)  # if we exceeded the max number of links
-            or (consider_spider_danger and is_spider_danger)  # or if there's spider danger
-            or (
-                (not consider_spider_danger) and (web_spider_distance > self.excavate.max_redirects)
-            )  # or if the spider distance is way out of control (greater than max_redirects)
-        ):
-            url_event.add_tag("spider-danger")
-
-        self.excavate.debug(f"Found URL [{result}] from parsing [{event.data.get('url')}] with regex [{name}]")
-        self.excavate.emit_event(url_event)
+        return self.excavate.make_event(result, "URL_UNVERIFIED", source=event)
 
 
 class EmailExtractor(BaseExtractor):
diff --git a/bbot/modules/ntlm.py b/bbot/modules/ntlm.py
index 59f4f85c3e..76e93c595a 100644
--- a/bbot/modules/ntlm.py
+++ b/bbot/modules/ntlm.py
@@ -62,7 +62,7 @@ class ntlm(BaseModule):
     """
     Todo:
-        Cancel web requests and break out of loop when valid endpoint is found
+        Cancel pending requests and break out of loop when valid endpoint is found
        (waiting on https://github.com/encode/httpcore/discussions/783/ to be fixed first)
     """
diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py
index 284d941f5f..1d74aa2bad 100644
--- a/bbot/test/test_step_2/module_tests/test_module_excavate.py
+++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py
@@ -166,3 +166,46 @@ async def setup_before_prep(self, module_test):
     def check(self, module_test, events):
         assert any(e.data == "https://www.test.notreal/yep" for e in events)
         assert any(e.data == "http://127.0.0.1:8888/relative/owa/" for e in events)
+
+
+class TestExcavateMaxLinksPerPage(TestExcavate):
+    targets = ["http://127.0.0.1:8888/"]
+    config_overrides = {"web_spider_links_per_page": 10, "web_spider_distance": 1}
+
+    lots_of_links = """
+    <a href="http://127.0.0.1:8888/1"/>
+    <a href="http://127.0.0.1:8888/2"/>
+    <a href="http://127.0.0.1:8888/3"/>
+    <a href="http://127.0.0.1:8888/4"/>
+    <a href="http://127.0.0.1:8888/5"/>
+    <a href="http://127.0.0.1:8888/6"/>
+    <a href="http://127.0.0.1:8888/7"/>
+    <a href="http://127.0.0.1:8888/8"/>
+    <a href="http://127.0.0.1:8888/9"/>
+    <a href="http://127.0.0.1:8888/10"/>
+    <a href="http://127.0.0.1:8888/11"/>
+    <a href="http://127.0.0.1:8888/12"/>
+    <a href="http://127.0.0.1:8888/13"/>
+    <a href="http://127.0.0.1:8888/14"/>
+    <a href="http://127.0.0.1:8888/15"/>
+    <a href="http://127.0.0.1:8888/16"/>
+    <a href="http://127.0.0.1:8888/17"/>
+    <a href="http://127.0.0.1:8888/18"/>
+    <a href="http://127.0.0.1:8888/19"/>
+    <a href="http://127.0.0.1:8888/20"/>
+    <a href="http://127.0.0.1:8888/21"/>
+    <a href="http://127.0.0.1:8888/22"/>
+    <a href="http://127.0.0.1:8888/23"/>
+    <a href="http://127.0.0.1:8888/24"/>
+    <a href="http://127.0.0.1:8888/25"/>
+    """
+
+    async def setup_before_prep(self, module_test):
+        module_test.httpserver.expect_request("/").respond_with_data(self.lots_of_links)
+
+    def check(self, module_test, events):
+        url_events = [e for e in events if e.type == "URL_UNVERIFIED"]
+        assert len(url_events) == 26
+        url_data = [e.data for e in url_events if "spider-danger" not in e.tags]
+        assert "http://127.0.0.1:8888/10" in url_data
+        assert "http://127.0.0.1:8888/11" not in url_data

From ee16634329368e046cebc930493d1ae431f4a81d Mon Sep 17 00:00:00 2001
From: AkikoOrenji <30274508+AkikoOrenji@users.noreply.github.com>
Date: Tue, 29 Aug 2023 17:04:37 +1000
Subject: [PATCH 041/123] Update nuclei.py for usability and improved error handling

Ensured the proxy is passed through to nuclei when configured in bbot.
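For reference, a minimal sketch of the knob this change reads: setup() pulls "http_proxy" from the scan config, so a config line like the one below (proxy address is a placeholder) makes execute_nuclei() append "-proxy http://127.0.0.1:8080" to the nuclei command:

    # bbot config (YAML)
    http_proxy: http://127.0.0.1:8080
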
Fixed a failed nuclei scan upon correlation failure on an event (line 140 in nuclei.handle_batch())
---
 bbot/modules/deadly/nuclei.py | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py
index 33b033bbce..a6c9fc290d 100644
--- a/bbot/modules/deadly/nuclei.py
+++ b/bbot/modules/deadly/nuclei.py
@@ -66,7 +66,7 @@ async def setup(self):
                 self.warning(f"Failure while updating nuclei templates: {update_results.stderr}")
         else:
             self.warning("Error running nuclei template update command")
-
+        self.proxy = self.scan.config.get("http_proxy", "")
         self.mode = self.config.get("mode", "severe")
         self.ratelimit = int(self.config.get("ratelimit", 150))
         self.concurrency = int(self.config.get("concurrency", 25))
@@ -135,8 +135,10 @@ async def handle_batch(self, *events):
 
             cleaned_host = temp_target.get(host)
             source_event = self.correlate_event(events, cleaned_host)
-            if url == "":
-                url = str(source_event.data)
+            if url == "":
+                if not source_event:
+                    continue
+                url = str(source_event.data)
 
             description_string = f"template: [{template}], name: [{name}]"
             if len(extracted_results) > 0:
@@ -204,6 +206,10 @@ async def execute_nuclei(self, nuclei_input):
             command.append("-t")
             command.append(self.budget_templates_file)
 
+        if self.proxy:
+            command.append("-proxy")
+            command.append(f"{self.proxy}")
+
         stats_file = self.helpers.tempfile_tail(callback=self.log_nuclei_status)
         try:
             with open(stats_file, "w") as stats_fh:

From ea66ab7fe0a43daa394020679b80181df5fa68d3 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 30 Aug 2023 12:06:33 -0400
Subject: [PATCH 042/123] code formatting, tweaking source correlation logic

---
 bbot/modules/deadly/nuclei.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py
index a6c9fc290d..d45edd2863 100644
--- a/bbot/modules/deadly/nuclei.py
+++ b/bbot/modules/deadly/nuclei.py
@@ -135,10 +135,11 @@ async def handle_batch(self, *events):
 
             cleaned_host = temp_target.get(host)
             source_event = self.correlate_event(events, cleaned_host)
-            if url == "":
-                if not source_event:
+            if not source_event:
                 continue
-                url = str(source_event.data)
+
+            if url == "":
+                url = str(source_event.data)
 
             description_string = f"template: [{template}], name: [{name}]"
             if len(extracted_results) > 0:

From 6b1bd1cf65c2caf0df074c42ed44cff1303c40ad Mon Sep 17 00:00:00 2001
From: Jason Booth
Date: Thu, 24 Aug 2023 15:28:57 -0400
Subject: [PATCH 043/123] add '/control/' to websocket url

---
 bbot/agent/agent.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bbot/agent/agent.py b/bbot/agent/agent.py
index f3b2969270..a24f1504d1 100644
--- a/bbot/agent/agent.py
+++ b/bbot/agent/agent.py
@@ -49,8 +49,8 @@ async def ws(self, rebuild=False):
             verbs = ("Building", "Built")
             if rebuild:
                 verbs = ("Rebuilding", "Rebuilt")
-            log.debug(f"{verbs[0]} websocket connection to {self.url}")
-            self._ws = await websockets.connect(self.url, **kwargs)
+            log.debug(f"{verbs[0]} websocket connection to {self.url}/control/")
+            self._ws = await websockets.connect(f"{self.url}/control/", **kwargs)
             log.debug(f"{verbs[1]} websocket connection to {self.url}")
         return self._ws

From b5e9e620c4d388891b79ce5fe727045fe6ca058a Mon Sep 17 00:00:00 2001
From: Jason Booth
Date: Fri, 25 Aug 2023 09:17:21 -0400
Subject: [PATCH 044/123] add '/control/' to another spot

---
 bbot/agent/agent.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/agent/agent.py 
b/bbot/agent/agent.py index a24f1504d1..519d11ea31 100644 --- a/bbot/agent/agent.py +++ b/bbot/agent/agent.py @@ -114,7 +114,7 @@ async def start_scan(self, scan_id, name=None, targets=[], modules=[], output_mo f"Starting scan with targets={targets}, modules={modules}, output_modules={output_modules}" ) output_module_config = OmegaConf.create( - {"output_modules": {"websocket": {"url": f"{self.url}/scan/{scan_id}/", "token": self.token}}} + {"output_modules": {"websocket": {"url": f"{self.url}/control/scan/{scan_id}/", "token": self.token}}} ) config = OmegaConf.create(config) config = OmegaConf.merge(self.config, config, output_module_config) From 3656e302681381b779f58b778c16cdf773989025 Mon Sep 17 00:00:00 2001 From: Jason Booth Date: Fri, 25 Aug 2023 10:14:48 -0400 Subject: [PATCH 045/123] formatting --- bbot/agent/agent.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bbot/agent/agent.py b/bbot/agent/agent.py index 519d11ea31..f5ec42ca22 100644 --- a/bbot/agent/agent.py +++ b/bbot/agent/agent.py @@ -114,7 +114,11 @@ async def start_scan(self, scan_id, name=None, targets=[], modules=[], output_mo f"Starting scan with targets={targets}, modules={modules}, output_modules={output_modules}" ) output_module_config = OmegaConf.create( - {"output_modules": {"websocket": {"url": f"{self.url}/control/scan/{scan_id}/", "token": self.token}}} + { + "output_modules": { + "websocket": {"url": f"{self.url}/control/scan/{scan_id}/", "token": self.token} + } + } ) config = OmegaConf.create(config) config = OmegaConf.merge(self.config, config, output_module_config) From 7bb559718691bfe43b0bc32a9053ced94e7cfe5e Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 31 Aug 2023 14:35:15 -0400 Subject: [PATCH 046/123] updated agent tests --- bbot/agent/agent.py | 7 ++++--- bbot/test/test_step_1/test_agent.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/bbot/agent/agent.py b/bbot/agent/agent.py index f5ec42ca22..f30fb94d15 100644 --- a/bbot/agent/agent.py +++ b/bbot/agent/agent.py @@ -49,9 +49,10 @@ async def ws(self, rebuild=False): verbs = ("Building", "Built") if rebuild: verbs = ("Rebuilding", "Rebuilt") - log.debug(f"{verbs[0]} websocket connection to {self.url}/control/") - self._ws = await websockets.connect(f"{self.url}/control/", **kwargs) - log.debug(f"{verbs[1]} websocket connection to {self.url}") + url = f"{self.url}/control/" + log.debug(f"{verbs[0]} websocket connection to {url}") + self._ws = await websockets.connect(url, **kwargs) + log.debug(f"{verbs[1]} websocket connection to {url}") return self._ws async def start(self): diff --git a/bbot/test/test_step_1/test_agent.py b/bbot/test/test_step_1/test_agent.py index e9da3ba455..a4b8e447e9 100644 --- a/bbot/test/test_step_1/test_agent.py +++ b/bbot/test/test_step_1/test_agent.py @@ -20,7 +20,7 @@ async def websocket_handler(websocket, path, scan_done=None): # control channel or event channel? 
control = True - if path == "/" and first_run: + if path == "/control/" and first_run: # test ping await websocket.send(json.dumps({"conversation": "90196cc1-299f-4555-82a0-bc22a4247590", "command": "ping"})) _first_run = False From 5dd6da39e85093c6b82b05b18f6879c24eff61ae Mon Sep 17 00:00:00 2001 From: ip2location Date: Fri, 8 Sep 2023 12:44:12 +0800 Subject: [PATCH 047/123] Create ip2locationio.py --- bbot/modules/ip2locationio.py | 63 +++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 bbot/modules/ip2locationio.py diff --git a/bbot/modules/ip2locationio.py b/bbot/modules/ip2locationio.py new file mode 100644 index 0000000000..8460fb54a4 --- /dev/null +++ b/bbot/modules/ip2locationio.py @@ -0,0 +1,63 @@ +from .shodan_dns import shodan_dns + + +class IP2Locationio(shodan_dns): + ''' + IP2Location.io Geolocation API. + ''' + + watched_events = ["IP_ADDRESS"] + produced_events = ["GEOLOCATION"] + flags = ["passive", "safe"] + meta = {"description": "Query IP2location.io's API for geolocation information. ", "auth_required": True} + options = {"api_key": "", "lang": ""} + options_desc = {"api_key": "IP2location.io API Key", "lang": "Translation information(ISO639-1). The translation is only applicable for continent, country, region and city name."} + scope_distance_modifier = 1 + _priority = 2 + suppress_dupes = False + + base_url = "http://api.ip2location.io/" + + async def filter_event(self, event): + return True + + async def handle_event(self, event): + try: + # url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&format=json&source=bbot" + # if self.lang != "": + if self.config.get("lang") != "": + url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&lang={self.config.get('lang')}&format=json&source=bbot" + else: + url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&format=json&source=bbot" + result = await self.request_with_fail_count(url) + if result: + j = result.json() + if not j: + self.verbose(f"No JSON response from {url}") + else: + self.verbose(f"No response from {url}") + # except Exception: + except Exception as err: + self.verbose(f"Error retrieving results for {event.data}", trace=True) + # print(f"Unexpected {err=}, {type(err)=}") + return + ''' + geo_data = { + "ip": j.get("ip"), + "country": j.get("country_name"), + "city": j.get("city_name"), + "zip_code": j.get("zip_code"), + "region": j.get("region_name"), + "latitude": j.get("latitude"), + "longitude": j.get("longitude"), + }''' + geo_data = j + geo_data = {k: v for k, v in geo_data.items() if v is not None} + if geo_data: + # event_data = ", ".join(f"{k.capitalize()}: {v}" for k, v in geo_data.items()) + event_data = ", ".join(f"{k}: {v}" for k, v in geo_data.items()) + self.emit_event(event_data, "GEOLOCATION", event) + elif "error" in j: + error_msg = j.get("error").get("error_message", "") + if error_msg: + self.warning(error_msg) \ No newline at end of file From d224aa2a5bfe010d2dc6bd73719552dfe7534a23 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 10:42:28 -0400 Subject: [PATCH 048/123] blacked, code cleanup --- bbot/modules/ip2location.py | 54 ++++++++++++++++++++++++++++++ bbot/modules/ip2locationio.py | 63 ----------------------------------- 2 files changed, 54 insertions(+), 63 deletions(-) create mode 100644 bbot/modules/ip2location.py delete mode 100644 bbot/modules/ip2locationio.py diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py new file mode 100644 index 0000000000..851bf1e040 --- /dev/null +++ 
b/bbot/modules/ip2location.py @@ -0,0 +1,54 @@ +from .shodan_dns import shodan_dns + + +class IP2Location(shodan_dns): + """ + IP2Location.io Geolocation API. + """ + + watched_events = ["IP_ADDRESS"] + produced_events = ["GEOLOCATION"] + flags = ["passive", "safe"] + meta = {"description": "Query IP2location.io's API for geolocation information. ", "auth_required": True} + options = {"api_key": "", "lang": ""} + options_desc = { + "api_key": "IP2location.io API Key", + "lang": "Translation information(ISO639-1). The translation is only applicable for continent, country, region and city name.", + } + scope_distance_modifier = 1 + _priority = 2 + suppress_dupes = False + + base_url = "http://api.ip2location.io/" + + async def filter_event(self, event): + return True + + async def setup(self): + self.lang = self.config.get("lang", "") + return await super().setup() + + async def handle_event(self, event): + try: + url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&format=json&source=bbot" + if self.lang: + url = f"{url}&lang={self.lang}" + result = await self.request_with_fail_count(url) + if result: + geo_data = result.json() + if not geo_data: + self.verbose(f"No JSON response from {url}") + else: + self.verbose(f"No response from {url}") + except Exception: + self.verbose(f"Error retrieving results for {event.data}", trace=True) + return + + geo_data = {k: v for k, v in geo_data.items() if v is not None} + if geo_data: + event_data = ", ".join(f"{k}: {v}" for k, v in geo_data.items()) + self.emit_event(event_data, "GEOLOCATION", event) + elif "error" in geo_data: + error_msg = geo_data.get("error").get("error_message", "") + if error_msg: + self.warning(error_msg) diff --git a/bbot/modules/ip2locationio.py b/bbot/modules/ip2locationio.py deleted file mode 100644 index 8460fb54a4..0000000000 --- a/bbot/modules/ip2locationio.py +++ /dev/null @@ -1,63 +0,0 @@ -from .shodan_dns import shodan_dns - - -class IP2Locationio(shodan_dns): - ''' - IP2Location.io Geolocation API. - ''' - - watched_events = ["IP_ADDRESS"] - produced_events = ["GEOLOCATION"] - flags = ["passive", "safe"] - meta = {"description": "Query IP2location.io's API for geolocation information. ", "auth_required": True} - options = {"api_key": "", "lang": ""} - options_desc = {"api_key": "IP2location.io API Key", "lang": "Translation information(ISO639-1). 
The translation is only applicable for continent, country, region and city name."} - scope_distance_modifier = 1 - _priority = 2 - suppress_dupes = False - - base_url = "http://api.ip2location.io/" - - async def filter_event(self, event): - return True - - async def handle_event(self, event): - try: - # url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&format=json&source=bbot" - # if self.lang != "": - if self.config.get("lang") != "": - url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&lang={self.config.get('lang')}&format=json&source=bbot" - else: - url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&format=json&source=bbot" - result = await self.request_with_fail_count(url) - if result: - j = result.json() - if not j: - self.verbose(f"No JSON response from {url}") - else: - self.verbose(f"No response from {url}") - # except Exception: - except Exception as err: - self.verbose(f"Error retrieving results for {event.data}", trace=True) - # print(f"Unexpected {err=}, {type(err)=}") - return - ''' - geo_data = { - "ip": j.get("ip"), - "country": j.get("country_name"), - "city": j.get("city_name"), - "zip_code": j.get("zip_code"), - "region": j.get("region_name"), - "latitude": j.get("latitude"), - "longitude": j.get("longitude"), - }''' - geo_data = j - geo_data = {k: v for k, v in geo_data.items() if v is not None} - if geo_data: - # event_data = ", ".join(f"{k.capitalize()}: {v}" for k, v in geo_data.items()) - event_data = ", ".join(f"{k}: {v}" for k, v in geo_data.items()) - self.emit_event(event_data, "GEOLOCATION", event) - elif "error" in j: - error_msg = j.get("error").get("error_message", "") - if error_msg: - self.warning(error_msg) \ No newline at end of file From 2e2acbe8639ae3138add109c79dd6b7728f4e37c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 11:02:32 -0400 Subject: [PATCH 049/123] wrote module tests --- bbot/modules/ip2location.py | 21 +++++++++++++----- bbot/modules/ipstack.py | 22 +++++-------------- .../module_tests/test_module_ipstack.py | 9 +++----- 3 files changed, 24 insertions(+), 28 deletions(-) diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index 851bf1e040..823ede94da 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -19,7 +19,7 @@ class IP2Location(shodan_dns): _priority = 2 suppress_dupes = False - base_url = "http://api.ip2location.io/" + base_url = "http://api.ip2location.io" async def filter_event(self, event): return True @@ -28,11 +28,21 @@ async def setup(self): self.lang = self.config.get("lang", "") return await super().setup() + async def ping(self): + url = self.build_url("8.8.8.8") + r = await self.request_with_fail_count(url) + resp_content = getattr(r, "text", "") + assert getattr(r, "status_code", 0) == 200, resp_content + + def build_url(self, data): + url = f"{self.base_url}/?key={self.api_key}&ip={data}&format=json&source=bbot" + if self.lang: + url = f"{url}&lang={self.lang}" + return url + async def handle_event(self, event): try: - url = f"{self.base_url}/?key={self.api_key}&ip={event.data}&format=json&source=bbot" - if self.lang: - url = f"{url}&lang={self.lang}" + url = self.build_url(event.data) result = await self.request_with_fail_count(url) if result: geo_data = result.json() @@ -46,8 +56,7 @@ async def handle_event(self, event): geo_data = {k: v for k, v in geo_data.items() if v is not None} if geo_data: - event_data = ", ".join(f"{k}: {v}" for k, v in geo_data.items()) - self.emit_event(event_data, "GEOLOCATION", event) + 
self.emit_event(geo_data, "GEOLOCATION", event) elif "error" in geo_data: error_msg = geo_data.get("error").get("error_message", "") if error_msg: diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index 798065d635..22dce58be5 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -17,7 +17,7 @@ class Ipstack(shodan_dns): _priority = 2 suppress_dupes = False - base_url = "http://api.ipstack.com/" + base_url = "http://api.ipstack.com" async def filter_event(self, event): return True @@ -33,28 +33,18 @@ async def handle_event(self, event): url = f"{self.base_url}/{event.data}?access_key={self.api_key}" result = await self.request_with_fail_count(url) if result: - j = result.json() - if not j: + geo_data = result.json() + if not geo_data: self.verbose(f"No JSON response from {url}") else: self.verbose(f"No response from {url}") except Exception: self.verbose(f"Error retrieving results for {event.data}", trace=True) return - geo_data = { - "ip": j.get("ip"), - "country": j.get("country_name"), - "city": j.get("city"), - "zip_code": j.get("zip"), - "region": j.get("region_name"), - "latitude": j.get("latitude"), - "longitude": j.get("longitude"), - } geo_data = {k: v for k, v in geo_data.items() if v is not None} if geo_data: - event_data = ", ".join(f"{k.capitalize()}: {v}" for k, v in geo_data.items()) - self.emit_event(event_data, "GEOLOCATION", event) - elif "error" in j: - error_msg = j.get("error").get("info", "") + self.emit_event(geo_data, "GEOLOCATION", event) + elif "error" in geo_data: + error_msg = geo_data.get("error").get("info", "") if error_msg: self.warning(error_msg) diff --git a/bbot/test/test_step_2/module_tests/test_module_ipstack.py b/bbot/test/test_step_2/module_tests/test_module_ipstack.py index 0e848ac6fb..dea0b28657 100644 --- a/bbot/test/test_step_2/module_tests/test_module_ipstack.py +++ b/bbot/test/test_step_2/module_tests/test_module_ipstack.py @@ -7,7 +7,7 @@ class TestIPStack(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url="http://api.ipstack.com//check?access_key=asdf", + url="http://api.ipstack.com/check?access_key=asdf", json={ "ip": "1.2.3.4", "type": "ipv4", @@ -34,7 +34,7 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url="http://api.ipstack.com//8.8.8.8?access_key=asdf", + url="http://api.ipstack.com/8.8.8.8?access_key=asdf", json={ "ip": "8.8.8.8", "type": "ipv4", @@ -63,8 +63,5 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert any( - e.type == "GEOLOCATION" - and e.data - == "Ip: 8.8.8.8, Country: United States, City: Glenmont, Zip_code: 44628, Region: Ohio, Latitude: 40.5369987487793, Longitude: -82.12859344482422" - for e in events + e.type == "GEOLOCATION" and e.data["ip"] == "8.8.8.8" and e.data["city"] == "Glenmont" for e in events ), "Failed to geolocate IP" From 786f950946003b189a4b518cdb0ad54ec55e1e2b Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 11:08:46 -0400 Subject: [PATCH 050/123] add test file --- .../module_tests/test_module_ip2location.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 bbot/test/test_step_2/module_tests/test_module_ip2location.py diff --git a/bbot/test/test_step_2/module_tests/test_module_ip2location.py b/bbot/test/test_step_2/module_tests/test_module_ip2location.py new file mode 100644 index 0000000000..2a63607207 --- /dev/null +++ 
b/bbot/test/test_step_2/module_tests/test_module_ip2location.py @@ -0,0 +1,31 @@ +from .base import ModuleTestBase + + +class TestIP2Location(ModuleTestBase): + targets = ["8.8.8.8"] + config_overrides = {"modules": {"ip2location": {"api_key": "asdf"}}} + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url="http://api.ip2location.io/?key=asdf&ip=8.8.8.8&format=json&source=bbot", + json={ + "ip": "8.8.8.8", + "country_code": "US", + "country_name": "United States of America", + "region_name": "California", + "city_name": "Mountain View", + "latitude": 37.405992, + "longitude": -122.078515, + "zip_code": "94043", + "time_zone": "-07:00", + "asn": "15169", + "as": "Google LLC", + "is_proxy": False, + }, + ) + + def check(self, module_test, events): + assert any( + e.type == "GEOLOCATION" and e.data["ip"] == "8.8.8.8" and e.data["city_name"] == "Mountain View" + for e in events + ), "Failed to geolocate IP" From bb250eb743eb9e50140b07fe2f3a3d4b2441261a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 11:22:21 -0400 Subject: [PATCH 051/123] azure_tenant bugfix --- bbot/modules/azure_tenant.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index 1ea068e8f1..8ba59dcfcd 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -84,7 +84,7 @@ async def query(self, domain): status_code = getattr(r, "status_code", 0) if status_code not in (200, 421): self.verbose(f'Error retrieving azure_tenant domains for "{domain}" (status code: {status_code})') - return set(), set() + return set(), dict() found_domains = list(set(self.d_xml_regex.findall(r.text))) domains = set() From 896f74eeb03fcc7de90a38e4a116d7dec4690701 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 11:28:22 -0400 Subject: [PATCH 052/123] fix produced_events for azure_realm --- bbot/modules/azure_realm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/azure_realm.py b/bbot/modules/azure_realm.py index 869183a884..f299ca3dc1 100644 --- a/bbot/modules/azure_realm.py +++ b/bbot/modules/azure_realm.py @@ -3,7 +3,7 @@ class azure_realm(BaseModule): watched_events = ["DNS_NAME"] - produced_events = ["DNS_NAME"] + produced_events = ["URL_UNVERIFIED"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "web-basic", "passive", "safe"] meta = {"description": 'Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm'} From ba4fc612350d957ad07fa5925439b224c8b224e7 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Tue, 12 Sep 2023 17:44:14 +0000 Subject: [PATCH 053/123] Refresh module docs --- docs/modules/list_of_modules.md | 3 +- docs/scanning/advanced.md | 2 +- docs/scanning/configuration.md | 2 ++ docs/scanning/events.md | 50 ++++++++++++++++----------------- docs/scanning/index.md | 44 ++++++++++++++--------------- 5 files changed, 52 insertions(+), 49 deletions(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index cb1b304cee..391f744475 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -43,7 +43,7 @@ | affiliates | scan | No | Summarize affiliate domains at the end of a scan | affiliates, passive, report, safe | * | | | anubisdb | scan | No | Query jldc.me's database for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | asn | scan | No | Query ripe and bgpview.io for ASNs | passive, report, safe, subdomain-enum | IP_ADDRESS 
| ASN | -| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | DNS_NAME | +| azure_realm | scan | No | Retrieves the "AuthURL" from login.microsoftonline.com/getuserrealm | affiliates, cloud-enum, passive, safe, subdomain-enum, web-basic | DNS_NAME | URL_UNVERIFIED | | azure_tenant | scan | No | Query Azure for tenant sister domains | affiliates, cloud-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | bevigil | scan | Yes | Retrieve OSINT data from mobile applications using BeVigil | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, URL_UNVERIFIED | | binaryedge | scan | Yes | Query the BinaryEdge API | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | @@ -63,6 +63,7 @@ | github | scan | Yes | Query Github's API for related repositories | passive, safe, subdomain-enum | DNS_NAME | URL_UNVERIFIED | | hackertarget | scan | No | Query the hackertarget.com API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | | hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | +| ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION | | ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS | | ipstack | scan | Yes | Query IPStack's API for GeoIP | passive, safe | IP_ADDRESS | GEOLOCATION | | leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME | diff --git a/docs/scanning/advanced.md b/docs/scanning/advanced.md index 852aae1228..2509946bdd 100644 --- a/docs/scanning/advanced.md +++ b/docs/scanning/advanced.md @@ -61,7 +61,7 @@ Target: Modules: -m MODULE [MODULE ...], --modules MODULE [MODULE ...] - Modules to enable. Choices: affiliates,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_aws,bucket_azure,bucket_digitalocean,bucket_firebase,bucket_gcp,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,crobat,crt,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,fingerprintx,fullhunt,generic_ssrf,git,github,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,ipneighbor,ipstack,leakix,masscan,massdns,myssl,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye + Modules to enable. 
Choices: affiliates,anubisdb,asn,azure_realm,azure_tenant,badsecrets,bevigil,binaryedge,bucket_aws,bucket_azure,bucket_digitalocean,bucket_firebase,bucket_gcp,builtwith,bypass403,c99,censys,certspotter,chaos,columbus,crobat,crt,digitorus,dnscommonsrv,dnsdumpster,dnszonetransfer,emailformat,ffuf,ffuf_shortnames,fingerprintx,fullhunt,generic_ssrf,git,github,gowitness,hackertarget,host_header,httpx,hunt,hunterio,iis_shortnames,ip2location,ipneighbor,ipstack,leakix,masscan,massdns,myssl,nmap,nsec,ntlm,nuclei,oauth,otx,paramminer_cookies,paramminer_getparams,paramminer_headers,passivetotal,pgp,rapiddns,riddler,robots,secretsdb,securitytrails,shodan_dns,sitedossier,skymem,smuggler,social,sslcert,subdomain_hijack,subdomaincenter,sublist3r,telerik,threatminer,url_manipulation,urlscan,vhost,viewdns,virustotal,wafw00f,wappalyzer,wayback,zoomeye -l, --list-modules List available modules. -em MODULE [MODULE ...], --exclude-modules MODULE [MODULE ...] Exclude these modules. diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 0c5037b152..2b76fa99c9 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -296,6 +296,8 @@ Many modules accept their own configuration options. These options have the abil | modules.fullhunt.api_key | str | FullHunt API Key | | | modules.github.api_key | str | Github token | | | modules.hunterio.api_key | str | Hunter.IO API key | | +| modules.ip2location.api_key | str | IP2location.io API Key | | +| modules.ip2location.lang | str | Translation information(ISO639-1). The translation is only applicable for continent, country, region and city name. | | | modules.ipneighbor.num_bits | int | Netmask size (in CIDR notation) to check. Default is 4 bits (16 hosts) | 4 | | modules.ipstack.api_key | str | IPStack GeoIP API Key | | | modules.leakix.api_key | str | LeakIX API Key | | diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 4567b784fc..0f0487073b 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -47,31 +47,31 @@ Below is a full list of event types along with which modules produce/consume the ## List of Event Types -| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | -|---------------------|-----------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | -| ASN | 0 | 1 | | asn | -| DNS_NAME | 54 | 44 | anubisdb, 
asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github, hackertarget, hunterio, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | -| DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | -| EMAIL_ADDRESS | 0 | 5 | | emailformat, hunterio, pgp, skymem, sslcert | -| FINDING | 2 | 21 | asset_inventory, web_report | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, git, host_header, hunt, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | -| GEOLOCATION | 0 | 1 | | ipstack | -| HTTP_RESPONSE | 11 | 1 | badsecrets, excavate, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, wappalyzer | httpx | -| IP_ADDRESS | 6 | 3 | asn, asset_inventory, ipneighbor, ipstack, nmap, speculate | asset_inventory, ipneighbor, speculate | -| IP_RANGE | 1 | 0 | speculate | | -| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, masscan, nmap, speculate | -| PROTOCOL | 0 | 1 | | fingerprintx | -| SCAN | 1 | 0 | masscan | | -| SOCIAL | 0 | 1 | | social | -| STORAGE_BUCKET | 6 | 5 | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, speculate | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp | -| TECHNOLOGY | 2 | 2 | asset_inventory, web_report | gowitness, wappalyzer | -| URL | 18 | 2 | asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | -| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | -| URL_UNVERIFIED | 4 | 10 | httpx, oauth, social, speculate | bevigil, excavate, ffuf, ffuf_shortnames, github, gowitness, hunterio, robots, urlscan, wayback | -| VHOST | 1 | 1 | web_report | vhost | -| VULNERABILITY | 2 | 4 | asset_inventory, web_report | badsecrets, generic_ssrf, nuclei, telerik | -| WAF | 0 | 1 | | wafw00f | -| WEBSCREENSHOT | 0 | 1 | | gowitness | +| Event Type | # Consuming Modules | # Producing Modules | Consuming Modules | Producing Modules | 
+|---------------------|-----------------------|-----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| * | 11 | 0 | affiliates, csv, discord, http, human, json, neo4j, python, slack, teams, websocket | | +| ASN | 0 | 1 | | asn | +| DNS_NAME | 54 | 43 | anubisdb, asset_inventory, azure_realm, azure_tenant, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fullhunt, github, hackertarget, hunterio, leakix, massdns, myssl, nmap, nsec, oauth, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomain_hijack, subdomaincenter, subdomains, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | anubisdb, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, hackertarget, hunterio, leakix, massdns, myssl, nsec, ntlm, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, speculate, sslcert, subdomaincenter, sublist3r, threatminer, urlscan, vhost, viewdns, virustotal, wayback, zoomeye | +| DNS_NAME_UNRESOLVED | 3 | 0 | speculate, subdomain_hijack, subdomains | | +| EMAIL_ADDRESS | 0 | 5 | | emailformat, hunterio, pgp, skymem, sslcert | +| FINDING | 2 | 21 | asset_inventory, web_report | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, git, host_header, hunt, ntlm, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, smuggler, speculate, subdomain_hijack, telerik, url_manipulation | +| GEOLOCATION | 0 | 2 | | ip2location, ipstack | +| HTTP_RESPONSE | 11 | 1 | badsecrets, excavate, host_header, hunt, ntlm, paramminer_cookies, paramminer_getparams, paramminer_headers, secretsdb, speculate, wappalyzer | httpx | +| IP_ADDRESS | 7 | 3 | asn, asset_inventory, ip2location, ipneighbor, ipstack, nmap, speculate | asset_inventory, ipneighbor, speculate | +| IP_RANGE | 1 | 0 | speculate | | +| OPEN_TCP_PORT | 4 | 4 | asset_inventory, fingerprintx, httpx, sslcert | asset_inventory, masscan, nmap, speculate | +| PROTOCOL | 0 | 1 | | fingerprintx | +| SCAN | 1 | 0 | masscan | | +| SOCIAL | 0 | 1 | | social | +| STORAGE_BUCKET | 6 | 5 | bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, speculate | bucket_aws, bucket_azure, 
bucket_digitalocean, bucket_firebase, bucket_gcp | +| TECHNOLOGY | 2 | 2 | asset_inventory, web_report | gowitness, wappalyzer | +| URL | 18 | 2 | asset_inventory, bypass403, ffuf, generic_ssrf, git, gowitness, httpx, iis_shortnames, ntlm, nuclei, robots, smuggler, speculate, telerik, url_manipulation, vhost, wafw00f, web_report | gowitness, httpx | +| URL_HINT | 1 | 1 | ffuf_shortnames | iis_shortnames | +| URL_UNVERIFIED | 4 | 11 | httpx, oauth, social, speculate | azure_realm, bevigil, excavate, ffuf, ffuf_shortnames, github, gowitness, hunterio, robots, urlscan, wayback | +| VHOST | 1 | 1 | web_report | vhost | +| VULNERABILITY | 2 | 4 | asset_inventory, web_report | badsecrets, generic_ssrf, nuclei, telerik | +| WAF | 0 | 1 | | wafw00f | +| WEBSCREENSHOT | 0 | 1 | | gowitness | ## Findings Vs. Vulnerabilties diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 3051888a1f..7aed1e0062 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -107,28 +107,28 @@ A single module can have multiple flags. For example, the `securitytrails` modul ### List of Flags -| Flag | # Modules | Description | Modules | -|------------------|-------------|-----------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| safe | 66 | Non-intrusive, safe to run | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fingerprintx, fullhunt, git, github, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | -| passive | 49 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github, hackertarget, hunterio, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | -| subdomain-enum | 44 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github, hackertarget, httpx, hunterio, ipneighbor, leakix, massdns, myssl, nsec, oauth, 
otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | -| active | 37 | Makes active connections to target systems | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, dnszonetransfer, ffuf, ffuf_shortnames, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, social, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | -| web-thorough | 24 | More advanced web scanning functionality | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | -| aggressive | 18 | Generates a large amount of network traffic | bypass403, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | -| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_aws, bucket_azure, bucket_firebase, bucket_gcp, git, httpx, hunt, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | -| cloud-enum | 10 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, httpx, oauth, subdomain_hijack | -| slow | 9 | May take a long time to complete | bucket_digitalocean, fingerprintx, massdns, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, vhost | -| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | -| email-enum | 5 | Enumerates email addresses | emailformat, hunterio, pgp, skymem, sslcert | -| deadly | 3 | Highly aggressive | ffuf, nuclei, vhost | -| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | -| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | -| portscan | 2 | Discovers open ports | masscan, nmap | -| report | 2 | Generates a report at the end of the scan | affiliates, asn | -| social-enum | 2 | Enumerates social media | httpx, social | -| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | -| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | -| web-screenshots | 1 | Takes screenshots of web pages | gowitness | +| Flag | # Modules | Description | Modules | 
+|------------------|-------------|-----------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| safe | 67 | Non-intrusive, safe to run | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, badsecrets, bevigil, binaryedge, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, emailformat, fingerprintx, fullhunt, git, github, gowitness, hackertarget, httpx, hunt, hunterio, iis_shortnames, ip2location, ipstack, leakix, myssl, nsec, ntlm, oauth, otx, passivetotal, pgp, rapiddns, riddler, robots, secretsdb, securitytrails, shodan_dns, sitedossier, skymem, social, sslcert, subdomain_hijack, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wappalyzer, wayback, zoomeye | +| passive | 50 | Never connects to target systems | affiliates, aggregate, anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crobat, crt, digitorus, dnscommonsrv, dnsdumpster, emailformat, excavate, fullhunt, github, hackertarget, hunterio, ip2location, ipneighbor, ipstack, leakix, massdns, myssl, nsec, otx, passivetotal, pgp, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, skymem, speculate, subdomaincenter, sublist3r, threatminer, urlscan, viewdns, virustotal, wayback, zoomeye | +| subdomain-enum | 44 | Enumerates subdomains | anubisdb, asn, azure_realm, azure_tenant, bevigil, binaryedge, builtwith, c99, censys, certspotter, chaos, columbus, crt, digitorus, dnscommonsrv, dnsdumpster, dnszonetransfer, fullhunt, github, hackertarget, httpx, hunterio, ipneighbor, leakix, massdns, myssl, nsec, oauth, otx, passivetotal, rapiddns, riddler, securitytrails, shodan_dns, sitedossier, sslcert, subdomain_hijack, subdomaincenter, subdomains, threatminer, urlscan, virustotal, wayback, zoomeye | +| active | 37 | Makes active connections to target systems | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, dnszonetransfer, ffuf, ffuf_shortnames, fingerprintx, generic_ssrf, git, gowitness, host_header, httpx, hunt, iis_shortnames, masscan, nmap, ntlm, nuclei, oauth, paramminer_cookies, paramminer_getparams, paramminer_headers, robots, secretsdb, smuggler, social, sslcert, subdomain_hijack, telerik, url_manipulation, vhost, wafw00f, wappalyzer | +| web-thorough | 24 | More advanced web scanning functionality | badsecrets, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, bypass403, ffuf_shortnames, generic_ssrf, git, host_header, httpx, hunt, iis_shortnames, nmap, ntlm, robots, secretsdb, smuggler, sslcert, subdomain_hijack, telerik, url_manipulation, wappalyzer | +| aggressive | 18 | Generates a large amount of network traffic 
| bypass403, ffuf, ffuf_shortnames, generic_ssrf, host_header, ipneighbor, masscan, massdns, nmap, nuclei, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, url_manipulation, vhost, wafw00f | +| web-basic | 17 | Basic, non-intrusive web scan functionality | azure_realm, badsecrets, bucket_aws, bucket_azure, bucket_firebase, bucket_gcp, git, httpx, hunt, iis_shortnames, ntlm, oauth, robots, secretsdb, sslcert, subdomain_hijack, wappalyzer | +| cloud-enum | 10 | Enumerates cloud resources | azure_realm, azure_tenant, bucket_aws, bucket_azure, bucket_digitalocean, bucket_firebase, bucket_gcp, httpx, oauth, subdomain_hijack | +| slow | 9 | May take a long time to complete | bucket_digitalocean, fingerprintx, massdns, paramminer_cookies, paramminer_getparams, paramminer_headers, smuggler, telerik, vhost | +| affiliates | 8 | Discovers affiliated hostnames/domains | affiliates, azure_realm, azure_tenant, builtwith, oauth, sslcert, viewdns, zoomeye | +| email-enum | 5 | Enumerates email addresses | emailformat, hunterio, pgp, skymem, sslcert | +| deadly | 3 | Highly aggressive | ffuf, nuclei, vhost | +| web-paramminer | 3 | Discovers HTTP parameters through brute-force | paramminer_cookies, paramminer_getparams, paramminer_headers | +| iis-shortnames | 2 | Scans for IIS Shortname vulnerability | ffuf_shortnames, iis_shortnames | +| portscan | 2 | Discovers open ports | masscan, nmap | +| report | 2 | Generates a report at the end of the scan | affiliates, asn | +| social-enum | 2 | Enumerates social media | httpx, social | +| service-enum | 1 | Identifies protocols running on open ports | fingerprintx | +| subdomain-hijack | 1 | Detects hijackable subdomains | subdomain_hijack | +| web-screenshots | 1 | Takes screenshots of web pages | gowitness | ## Dependencies From 995358f2718f34e553fba38e4b1092463cee4d1e Mon Sep 17 00:00:00 2001 From: Nic Date: Sat, 16 Sep 2023 19:27:46 -0500 Subject: [PATCH 054/123] Update json.py to NDJSON terminology Change output.json to output.ndjson, and update the output text to the end user for clarification. 
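For context on the rename: NDJSON ("newline-delimited JSON") means one complete JSON object per line, so consumers can stream events instead of parsing a single large array. A minimal sketch of reading the renamed file — the `type`/`data` fields match what the scan's jq examples filter on, but the path here is illustrative:

```python
import json

# Stream events from BBOT's NDJSON output one line at a time
# (illustrative path; real scans write to ~/.bbot/scans/<scan_name>/output.ndjson).
with open("output.ndjson") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue  # tolerate trailing blank lines
        event = json.loads(line)  # each line is a self-contained JSON object
        print(event["type"], event["data"])
```

Because each line stands alone, partial files from an interrupted scan remain parseable up to the last complete line — one of the practical reasons for the NDJSON naming change.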
--- bbot/modules/output/json.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index 561354c45c..5b1c95195b 100644 --- a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -6,12 +6,12 @@ class JSON(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to JSON"} + meta = {"description": "Output to NDJSON"} options = {"output_file": "", "console": False} options_desc = {"output_file": "Output to file", "console": "Output to console"} async def setup(self): - self._prep_output_dir("output.json") + self._prep_output_dir("output.ndjson") return True async def handle_event(self, event): @@ -29,4 +29,4 @@ async def cleanup(self): async def report(self): if self._file is not None: - self.info(f"Saved JSON output to {self.output_file}") + self.info(f"Saved NDJSON output to {self.output_file}") From 82b6f91b39a98b4d7b6096e8e358ed675676fa70 Mon Sep 17 00:00:00 2001 From: Nic Date: Sat, 16 Sep 2023 19:32:45 -0500 Subject: [PATCH 055/123] Update output.md to use output.ndjson Update docs to reflect .ndjson file change --- docs/scanning/output.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/output.md b/docs/scanning/output.md index edfb0c9926..b2ae1c8667 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -64,7 +64,7 @@ You can filter on the JSON output with `jq`: ```bash # pull out only the .data attribute of every DNS_NAME -$ jq -r 'select(.type=="DNS_NAME") | .data' ~/.bbot/scans/extreme_johnny/output.json +$ jq -r 'select(.type=="DNS_NAME") | .data' ~/.bbot/scans/extreme_johnny/output.ndjson evilcorp.com www.evilcorp.com mail.evilcorp.com From 3170cadbe108ab7f7ffd8879c8a103c9a35faf90 Mon Sep 17 00:00:00 2001 From: Nic Date: Sat, 16 Sep 2023 19:34:06 -0500 Subject: [PATCH 056/123] Rename test_output.json to test_output.ndjson Updating to .ndjson --- bbot/test/{test_output.json => test_output.ndjson} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename bbot/test/{test_output.json => test_output.ndjson} (100%) diff --git a/bbot/test/test_output.json b/bbot/test/test_output.ndjson similarity index 100% rename from bbot/test/test_output.json rename to bbot/test/test_output.ndjson From 4935d03324d724ba7c8afc906d6b5b54e6753e96 Mon Sep 17 00:00:00 2001 From: Nic Date: Sat, 16 Sep 2023 19:36:00 -0500 Subject: [PATCH 057/123] Update test_python_api.py to use output.ndjson Updating for output.ndjson change --- bbot/test/test_step_1/test_python_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/test/test_step_1/test_python_api.py b/bbot/test/test_step_1/test_python_api.py index 7a632ed3d3..b393158b64 100644 --- a/bbot/test/test_step_1/test_python_api.py +++ b/bbot/test/test_step_1/test_python_api.py @@ -14,7 +14,7 @@ async def test_python_api(bbot_config): # make sure output files work scan2 = Scanner("127.0.0.1", config=bbot_config, output_modules=["json"], name="python_api_test") await scan2.async_start_without_generator() - out_file = scan2.helpers.scans_dir / "python_api_test" / "output.json" + out_file = scan2.helpers.scans_dir / "python_api_test" / "output.ndjson" assert list(scan2.helpers.read_file(out_file)) # make sure config loads properly bbot_home = "/tmp/.bbot_python_api_test" @@ -34,7 +34,7 @@ def test_python_api_sync(bbot_config): # make sure output files work scan2 = Scanner("127.0.0.1", config=bbot_config, output_modules=["json"], name="python_api_test") scan2.start_without_generator() - 
out_file = scan2.helpers.scans_dir / "python_api_test" / "output.json" + out_file = scan2.helpers.scans_dir / "python_api_test" / "output.ndjson" assert list(scan2.helpers.read_file(out_file)) # make sure config loads properly bbot_home = "/tmp/.bbot_python_api_test" From d3c84e4fb4be9affad1f2a9313190873cbe9c153 Mon Sep 17 00:00:00 2001 From: Nic Date: Sat, 16 Sep 2023 19:37:12 -0500 Subject: [PATCH 058/123] Update test_module_json.py to use output.ndjson Updating for ndjson filename use. --- bbot/test/test_step_2/module_tests/test_module_json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_2/module_tests/test_module_json.py b/bbot/test/test_step_2/module_tests/test_module_json.py index dd552742a5..6dafb68a5b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_json.py +++ b/bbot/test/test_step_2/module_tests/test_module_json.py @@ -6,7 +6,7 @@ class TestJSON(ModuleTestBase): def check(self, module_test, events): - txt_file = module_test.scan.home / "output.json" + txt_file = module_test.scan.home / "output.ndjson" lines = list(module_test.scan.helpers.read_file(txt_file)) assert lines e = event_from_json(json.loads(lines[0])) From d7b5673d35597d7807fcfc96586bdb22502bbe7f Mon Sep 17 00:00:00 2001 From: Nic Date: Sat, 16 Sep 2023 19:39:02 -0500 Subject: [PATCH 059/123] Update test_cli.py to use output.ndjson Update filename to make use of ndjson extension. --- bbot/test/test_step_1/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index a0e821a12e..e2b7a25837 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -42,7 +42,7 @@ async def test_cli(monkeypatch, bbot_config): assert (scan_home / "wordcloud.tsv").is_file() assert (scan_home / "output.txt").is_file() assert (scan_home / "output.csv").is_file() - assert (scan_home / "output.json").is_file() + assert (scan_home / "output.ndjson").is_file() with open(scan_home / "output.csv") as f: lines = f.readlines() assert lines[0] == "Event type,Event data,IP Address,Source Module,Scope Distance,Event Tags\n" From bc5b82fa737168ecc34db7d8ae4844731172ab2f Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sun, 17 Sep 2023 18:46:05 -0400 Subject: [PATCH 060/123] fix NSEC infinite loop bug --- bbot/modules/nsec.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bbot/modules/nsec.py b/bbot/modules/nsec.py index 12c4582738..2ab254da7f 100644 --- a/bbot/modules/nsec.py +++ b/bbot/modules/nsec.py @@ -33,11 +33,13 @@ async def get_nsec_record(self, domain): self.warning(f"Error getting NSEC record for {domain}: {e}") async def nsec_walk(self, domain): + encountered = set() current_domain = domain while 1: next_domain = await self.get_nsec_record(current_domain) - if next_domain == domain or next_domain is None: + if next_domain is None or next_domain in encountered: break + encountered.add(next_domain) if not next_domain.startswith("\\"): yield next_domain current_domain = next_domain From 9c6c490edf3b630825c2a5c9e12defb302d31b25 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 08:58:02 -0400 Subject: [PATCH 061/123] fixed tests, updated docs image --- bbot/modules/output/json.py | 4 ++-- bbot/test/test_step_1/test_python_api.py | 2 +- docs/scanning/output.md | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bbot/modules/output/json.py b/bbot/modules/output/json.py index 5b1c95195b..e37a3829d8 100644 --- 
a/bbot/modules/output/json.py +++ b/bbot/modules/output/json.py @@ -6,7 +6,7 @@ class JSON(BaseOutputModule): watched_events = ["*"] - meta = {"description": "Output to NDJSON"} + meta = {"description": "Output to Newline-Delimited JSON (NDJSON)"} options = {"output_file": "", "console": False} options_desc = {"output_file": "Output to file", "console": "Output to console"} @@ -29,4 +29,4 @@ async def cleanup(self): async def report(self): if self._file is not None: - self.info(f"Saved NDJSON output to {self.output_file}") + self.info(f"Saved JSON output to {self.output_file}") diff --git a/bbot/test/test_step_1/test_python_api.py b/bbot/test/test_step_1/test_python_api.py index d2a9958f85..00ad2d9722 100644 --- a/bbot/test/test_step_1/test_python_api.py +++ b/bbot/test/test_step_1/test_python_api.py @@ -31,7 +31,7 @@ async def test_python_api(bbot_config): assert "scan_logging_test" not in open(debug_log).read() scan_home = scan3.helpers.scans_dir / "scan_logging_test" - out_file = scan_home / "output.json" + out_file = scan_home / "output.ndjson" assert list(scan3.helpers.read_file(out_file)) scan_log = scan_home / "scan.log" debug_log = scan_home / "debug.log" diff --git a/docs/scanning/output.md b/docs/scanning/output.md index c6543309b1..310b92288e 100644 --- a/docs/scanning/output.md +++ b/docs/scanning/output.md @@ -1,7 +1,7 @@ # Output By default, BBOT saves its output in TXT, JSON, and CSV formats: -![image](https://github.com/blacklanternsecurity/bbot/assets/20261699/779207f4-1c2f-4f65-a132-794ca8bd2f8a) +![bbot output](https://github.com/blacklanternsecurity/bbot/assets/20261699/bb3da441-2682-408f-b955-19b268823b82) Every BBOT scan gets a unique and mildly-entertaining name like **`demonic_jimmy`**. Output for that scan, including scan stats and any web screenshots, etc., are saved to a folder by that name in `~/.bbot/scans`. The most recent 20 scans are kept, and older ones are removed. You can change the location of BBOT's output with `--output`, and you can also pick a custom scan name with `--name`. 
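Stepping back to the NSEC infinite-loop fix (patch 060 above): the old walk only terminated when the chain returned to the starting domain or dead-ended, so a cycle anywhere *else* in the chain spun forever. Tracking every name seen breaks any cycle. A self-contained sketch of the same pattern outside BBOT — `resolve_next` is a stand-in for the module's `get_nsec_record` helper:

```python
import asyncio

async def walk_chain(start, resolve_next):
    """Follow a linked chain of DNS names, yielding each new name.

    Stops on a dead end (None) or on any name already seen, so a cycle
    anywhere in the chain - not just one returning to `start` - terminates.
    """
    seen = set()
    current = start
    while True:
        nxt = await resolve_next(current)
        if nxt is None or nxt in seen:
            break
        seen.add(nxt)
        yield nxt
        current = nxt

# Toy usage: a chain whose cycle never revisits the start.
async def main():
    chain = {"a.test": "b.test", "b.test": "c.test", "c.test": "b.test"}

    async def resolve_next(name):
        return chain.get(name)

    names = [n async for n in walk_chain("a.test", resolve_next)]
    print(names)  # ['b.test', 'c.test'] - would loop forever without `seen`

asyncio.run(main())
```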
From 36e07b21de49765af0a418c56005addd147f1dfe Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 09:28:40 -0400 Subject: [PATCH 062/123] update nuclei version --- bbot/modules/deadly/nuclei.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index d45edd2863..0ea880b317 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -13,7 +13,7 @@ class nuclei(BaseModule): batch_size = 25 options = { - "version": "2.9.9", + "version": "2.9.15", "tags": "", "templates": "", "severity": "", From 763268fc9dc7c81d1e92f9a76677d7144eab6314 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 10:21:52 -0400 Subject: [PATCH 063/123] improve punycode support --- bbot/core/event/base.py | 4 +-- bbot/core/event/helpers.py | 4 +-- bbot/core/helpers/punycode.py | 14 ++++----- bbot/core/helpers/regexes.py | 2 +- bbot/core/helpers/validators.py | 6 ++-- bbot/test/test_step_1/test_events.py | 44 ++++++++++++++++++++++++---- 6 files changed, 52 insertions(+), 22 deletions(-) diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index c4fca0d83d..261d637ca3 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -24,7 +24,7 @@ smart_decode, get_file_extension, validators, - smart_decode_punycode, + smart_encode_punycode, tagify, ) @@ -983,7 +983,7 @@ def make_event( else: if event_type is None: if isinstance(data, str): - data = smart_decode_punycode(data) + data = smart_encode_punycode(data) event_type = get_event_type(data) if not dummy: log.debug(f'Autodetected event type "{event_type}" based on data: "{data}"') diff --git a/bbot/core/event/helpers.py b/bbot/core/event/helpers.py index 6df0fe2ee8..1d7043de61 100644 --- a/bbot/core/event/helpers.py +++ b/bbot/core/event/helpers.py @@ -3,7 +3,7 @@ from contextlib import suppress from bbot.core.errors import ValidationError -from bbot.core.helpers import sha1, smart_decode, smart_decode_punycode +from bbot.core.helpers import sha1, smart_decode, smart_encode_punycode from bbot.core.helpers.regexes import event_type_regexes, event_id_regex @@ -14,7 +14,7 @@ def get_event_type(data): """ Attempt to divine event type from data """ - data = smart_decode_punycode(smart_decode(data).strip()) + data = smart_encode_punycode(smart_decode(data).strip()) # IP address with suppress(Exception): diff --git a/bbot/core/helpers/punycode.py b/bbot/core/helpers/punycode.py index d7055f6dbc..466e5f7ebc 100644 --- a/bbot/core/helpers/punycode.py +++ b/bbot/core/helpers/punycode.py @@ -2,16 +2,15 @@ import idna -alphanum_regex = re.compile(r"([\w-]+)") -alphanum_anchored = re.compile(r"^[\w-]+$") +split_regex = re.compile(r"([/:@\[\]]+)") def split_text(text): - # Split text into segments by special characters - # We assume that only alphanumeric segments should be encoded + # We have to split this way in order to handle URLs and email addresses + # which the idna library is not equipped to deal with if not isinstance(text, str): raise ValueError(f"data must be a string, not {type(text)}") - segments = alphanum_regex.split(text) + segments = split_regex.split(text) return segments @@ -24,7 +23,7 @@ def smart_encode_punycode(text: str) -> str: for segment in segments: try: - if alphanum_anchored.match(segment): # Only encode alphanumeric segments + if not split_regex.match(segment): segment = idna.encode(segment).decode(errors="ignore") except UnicodeError: pass # If encoding fails, leave the segment as it is @@ -43,8 +42,7 
@@ def smart_decode_punycode(text: str) -> str: for segment in segments: try: - if alphanum_anchored.match(segment): # Only decode alphanumeric segments - segment = idna.decode(segment) + segment = idna.decode(segment) except UnicodeError: pass # If decoding fails, leave the segment as it is diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 5ed1693455..846d9d9b58 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -22,7 +22,7 @@ _ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*" ipv6_regex = re.compile(_ipv6_regex, re.I) # dns names with periods -_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+[^\W_]{1,63}\.?" +_dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?" # dns names without periods _hostname_regex = r"(?!\w*\.\w+)\w(?:[\w-]{0,100}\w)?" _email_regex = r"(?:[^\W_][\w\-\.\+]{,100})@" + _dns_name_regex diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index 3fa759b95e..9a672a79bc 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -4,7 +4,7 @@ from bbot.core.helpers import regexes from bbot.core.helpers.url import parse_url, hash_url -from bbot.core.helpers.punycode import smart_decode_punycode +from bbot.core.helpers.punycode import smart_encode_punycode from bbot.core.helpers.misc import split_host_port, make_netloc, is_ip log = logging.getLogger("bbot.core.helpers.validators") @@ -57,7 +57,7 @@ def validate_host(host): return str(ip) except Exception: # finally, try DNS_NAME - host = smart_decode_punycode(host) + host = smart_encode_punycode(host) # clean asterisks and clinging dashes host = host.strip("*.-").replace("*", "") for r in regexes.event_type_regexes["DNS_NAME"]: @@ -89,7 +89,7 @@ def validate_severity(severity): @validator def validate_email(email): - email = smart_decode_punycode(str(email).strip().lower()) + email = smart_encode_punycode(str(email).strip().lower()) if any(r.match(email) for r in regexes.event_type_regexes["EMAIL_ADDRESS"]): return email assert False, f'Invalid email: "{email}"' diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 7c7563c1ae..ee96be6a21 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -245,22 +245,54 @@ async def test_events(events, scan, helpers, bbot_config): {"host": "evilcorp.com", "severity": "WACK", "description": "asdf"}, "VULNERABILITY", dummy=True ) - # punycode + # punycode - event type detection + + # japanese assert scan.make_event("ドメイン.テスト", dummy=True).type == "DNS_NAME" assert scan.make_event("bob@ドメイン.テスト", dummy=True).type == "EMAIL_ADDRESS" assert scan.make_event("ドメイン.テスト:80", dummy=True).type == "OPEN_TCP_PORT" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).type == "URL_UNVERIFIED" - assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).data == "ドメイン.テスト" - assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).data == "bob@ドメイン.テスト" - assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "ドメイン.テスト:80" - assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "http://ドメイン.テスト/" - assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).type == "DNS_NAME" assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).type == "EMAIL_ADDRESS" assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).type == "OPEN_TCP_PORT" assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", 
dummy=True).type == "URL_UNVERIFIED" + # thai + assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).type == "DNS_NAME" + assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).type == "OPEN_TCP_PORT" + assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).type == "URL_UNVERIFIED" + + assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "DNS_NAME" + assert scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).type == "OPEN_TCP_PORT" + assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).type == "URL_UNVERIFIED" + + # punycode - encoding / decoding tests + + # japanese + assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).data == "xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).data == "bob@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" + assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" + + assert scan.make_event("ドメイン.テスト", dummy=True).data == "xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("bob@ドメイン.テスト", dummy=True).data == "bob@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" + assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" + + # thai + assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" + assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + + assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" + assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + # test event serialization from bbot.core.event import event_from_json From 5d4d434f13eea14d38d9b879d1046146ed239434 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 10:22:04 -0400 Subject: [PATCH 064/123] blacked --- bbot/test/test_step_1/test_events.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index ee96be6a21..ec26adf11c 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -283,15 +283,29 @@ async def test_events(events, scan, helpers, bbot_config): assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" # thai - assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" - assert 
scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" - assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" - assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + assert ( + scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + ) + assert ( + scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data + == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + ) + assert ( + scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data + == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" + ) + assert ( + scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + ) assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" - assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + assert ( + scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" + ) # test event serialization from bbot.core.event import event_from_json From 72524fc9479b16ebe7765c299ff81fd1775d9449 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 11:27:18 -0400 Subject: [PATCH 065/123] added pre-commit --- poetry.lock | 169 +++++++++++++++++++++++++++++++++++++------------ pyproject.toml | 2 + 2 files changed, 129 insertions(+), 42 deletions(-) diff --git a/poetry.lock b/poetry.lock index 358dac3899..edda4d0057 100644 --- a/poetry.lock +++ b/poetry.lock @@ -130,36 +130,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.3.0" +version = "23.9.1" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, - {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, - {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, - {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, - {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, - {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, - {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, - {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, - {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, - {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, - {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, - {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, - {file = "black-23.3.0-py3-none-any.whl", hash = 
"sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, - {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, + {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, + {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, + {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, + {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, + {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, + {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, + {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, + {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, + {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, + {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, ] [package.dependencies] @@ -169,7 +166,7 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -279,6 +276,17 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.1.0" @@ -528,6 +536,17 @@ ordered-set = ">=4.0.2,<4.2.0" cli = ["click (==8.1.3)", "pyyaml (==6.0)"] optimize = ["orjson"] +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + [[package]] name = "dnspython" version = "2.4.2" @@ -603,19 +622,19 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "p [[package]] name = "flake8" -version = "6.0.0" +version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "h11" @@ -710,6 +729,20 @@ files = [ {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, ] +[[package]] +name = "identify" +version = "2.5.29" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.29-py2.py3-none-any.whl", hash = "sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b"}, + {file = "identify-2.5.29.tar.gz", hash = "sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.4" @@ -933,6 +966,20 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = 
"sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + [[package]] name = "omegaconf" version = "2.3.0" @@ -1047,6 +1094,24 @@ tomlkit = ">=0.4" [package.extras] plugin = ["poetry (>=1.2.0,<2.0.0)"] +[[package]] +name = "pre-commit" +version = "3.4.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, + {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "psutil" version = "5.9.5" @@ -1086,13 +1151,13 @@ files = [ [[package]] name = "pycodestyle" -version = "2.10.0" +version = "2.11.0" description = "Python style guide checker" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, ] [[package]] @@ -1201,13 +1266,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyflakes" -version = "3.0.1" +version = "3.1.0" description = "passive checker of Python programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] [[package]] @@ -1659,6 +1724,26 @@ secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17. 
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "virtualenv" +version = "20.24.1" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.1-py3-none-any.whl", hash = "sha256:01aacf8decd346cf9a865ae85c0cdc7f64c8caa07ff0d8b1dfc1733d10677442"}, + {file = "virtualenv-20.24.1.tar.gz", hash = "sha256:2ef6a237c31629da6442b0bcaa3999748108c7166318d1f55cc9f8d7294e97bd"}, +] + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.12,<4" +platformdirs = ">=3.5.1,<4" + +[package.extras] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] + [[package]] name = "websockets" version = "11.0.3" @@ -1793,4 +1878,4 @@ xmltodict = ">=0.12.0,<0.13.0" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "22dc78b01007c5fb6a7d8729277e75b0382e5571ba5ec23bd1d53a8467f368bc" +content-hash = "caa5eee14e9087c9a47b3f25af5e0a453269fe68eaa8617d141c6b1169250c3f" diff --git a/pyproject.toml b/pyproject.toml index 70e6b185ed..56d7cac487 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,7 @@ pytest-env = "^0.8.2" pytest-httpx = "^0.22.0" pytest-timeout = "^2.1.0" pytest = "^7.4.0" +pre-commit = "^3.4.0" [tool.pytest.ini_options] env = [ @@ -72,6 +73,7 @@ env = [ [build-system] requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] build-backend = "poetry_dynamic_versioning.backend" +script = "pre-commit install" [tool.black] line-length = 119 From 5e3de3a3b7a8b27883d90d912e6dddd13082d54a Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Mon, 18 Sep 2023 17:00:06 +0000 Subject: [PATCH 066/123] Refresh module docs --- docs/scanning/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 2b76fa99c9..66d86991cb 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -257,7 +257,7 @@ Many modules accept their own configuration options. These options have the abil | modules.nuclei.severity | str | Filter based on severity field available in the template. | | | modules.nuclei.tags | str | execute a subset of templates that contain the provided tags | | | modules.nuclei.templates | str | template or template directory paths to include in the scan | | -| modules.nuclei.version | str | nuclei version | 2.9.9 | +| modules.nuclei.version | str | nuclei version | 2.9.15 | | modules.oauth.try_all | bool | Check for OAUTH/IODC on every subdomain and URL. 
| False | | modules.paramminer_cookies.http_extract | bool | Attempt to find additional wordlist words from the HTTP Response | True | | modules.paramminer_cookies.skip_boring_words | bool | Remove commonly uninteresting words from the wordlist | True | From 283306021dc5beb592719fab101a112160c64ce0 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 13:03:45 -0400 Subject: [PATCH 067/123] update docs, custom poetry script --- docs/contribution.md | 15 ++++++++------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/docs/contribution.md b/docs/contribution.md index 6815944e2f..c8cf360924 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -13,13 +13,16 @@ We welcome contributions! If you have an idea for a new module, or are a Python ```bash # clone your forked repo and cd into it -git clone git@github.com//bbot.git && cd bbot +git clone git@github.com//bbot.git +cd bbot # install poetry curl -sSL https://install.python-poetry.org | python3 - # install pip dependencies poetry install +# install pre-commit hooks, etc. +poetry postinstall # enter virtual environment poetry shell @@ -28,12 +31,7 @@ bbot --help ``` - Now, any changes you make in the code will be reflected in the `bbot` command. -- Run the tests locally to ensure they pass. -- Finally, commit and push your changes, and create a pull request to the `dev` branch of the main BBOT repo. - -## Running Tests - -BBOT makes use of pytest for its unit testing. You can run the tests simply by executing the bash script at `bbot/test/run_tests.sh`: +- After making your changes, run the tests locally to ensure they pass. ```bash # auto-format code indentation, etc. @@ -43,6 +41,9 @@ black . ./bbot/test/run_tests.sh ``` +- Finally, commit and push your changes, and create a pull request to the `dev` branch of the main BBOT repo. + + ## Creating a Module Writing a module is easy and requires only a basic understanding of Python. It consists of a few steps: diff --git a/pyproject.toml b/pyproject.toml index 56d7cac487..d5d60fbe97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ classifiers = [ [tool.poetry.scripts] bbot = 'bbot.cli:main' +postinstall = "pre-commit install" [tool.poetry.dependencies] python = "^3.9" @@ -73,7 +74,6 @@ env = [ [build-system] requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"] build-backend = "poetry_dynamic_versioning.backend" -script = "pre-commit install" [tool.black] line-length = 119 From e4af629e9d541db671658ffe02dafaae963288a2 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 13:05:28 -0400 Subject: [PATCH 068/123] fix pyproject.toml --- docs/contribution.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/contribution.md b/docs/contribution.md index c8cf360924..c2bc805819 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -22,7 +22,7 @@ curl -sSL https://install.python-poetry.org | python3 - # install pip dependencies poetry install # install pre-commit hooks, etc. 
-poetry postinstall
+poetry run postinstall
 
 # enter virtual environment
 poetry shell

From b32a8a60e2ca5e15166871920ac9f8483c9f7659 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 18 Sep 2023 13:08:07 -0400
Subject: [PATCH 069/123] update docs

---
 docs/contribution.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/contribution.md b/docs/contribution.md
index c2bc805819..2bc585898f 100644
--- a/docs/contribution.md
+++ b/docs/contribution.md
@@ -22,7 +22,7 @@ curl -sSL https://install.python-poetry.org | python3 -
 # install pip dependencies
 poetry install
 # install pre-commit hooks, etc.
-poetry run postinstall
+poetry run pre-commit install
 
 # enter virtual environment
 poetry shell

From 0640bce7ca4b2dee76ef2a0c261437665ca71dd0 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 18 Sep 2023 13:08:54 -0400
Subject: [PATCH 070/123] fix tests

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index d5d60fbe97..3b8ed842b7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,7 +23,6 @@ classifiers = [
 
 [tool.poetry.scripts]
 bbot = 'bbot.cli:main'
-postinstall = "pre-commit install"
 
 [tool.poetry.dependencies]
 python = "^3.9"

From c43b30c8452a44c5138d94de018cd86046e36522 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 18 Sep 2023 16:10:30 -0400
Subject: [PATCH 071/123] fix minor bug with waf events scope distance

---
 bbot/core/event/base.py | 15 +++++++++++++++
 bbot/modules/wafw00f.py | 13 +++++++++----
 2 files changed, 24 insertions(+), 4 deletions(-)

diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py
index c4fca0d83d..22326fb496 100644
--- a/bbot/core/event/base.py
+++ b/bbot/core/event/base.py
@@ -947,6 +947,21 @@ class AZURE_TENANT(DictEvent):
     _always_emit = True
 
 
+class WAF(DictHostEvent):
+    _always_emit = True
+
+    class _data_validator(BaseModel):
+        url: str
+        host: str
+        WAF: str
+        info: Optional[str]
+        _validate_url = validator("url", allow_reuse=True)(validators.validate_url)
+        _validate_host = validator("host", allow_reuse=True)(validators.validate_host)
+
+    def _pretty_string(self):
+        return self.data["WAF"]
+
+
 def make_event(
     data,
     event_type=None,
diff --git a/bbot/modules/wafw00f.py b/bbot/modules/wafw00f.py
index 192cf83c88..f15b82263d 100644
--- a/bbot/modules/wafw00f.py
+++ b/bbot/modules/wafw00f.py
@@ -21,18 +21,23 @@ class wafw00f(BaseModule):
     per_host_only = True
 
     async def handle_event(self, event):
-        host = f"{event.parsed.scheme}://{event.parsed.netloc}/"
-        WW = await self.scan.run_in_executor(wafw00f_main.WAFW00F, host)
+        url = f"{event.parsed.scheme}://{event.parsed.netloc}/"
+        WW = await self.scan.run_in_executor(wafw00f_main.WAFW00F, url)
         waf_detections = await self.scan.run_in_executor(WW.identwaf)
         if waf_detections:
             for waf in waf_detections:
-                self.emit_event({"host": host, "WAF": waf}, "WAF", source=event)
+                self.emit_event({"host": str(event.host), "url": url, "WAF": waf}, "WAF", source=event)
         else:
            if self.config.get("generic_detect") == True:
                generic = await self.scan.run_in_executor(WW.genericdetect)
                if generic:
                    self.emit_event(
-                        {"host": host, "WAF": "generic detection", "info": WW.knowledge["generic"]["reason"]},
+                        {
+                            "host": str(event.host),
+                            "url": url,
+                            "WAF": "generic detection",
+                            "info": WW.knowledge["generic"]["reason"],
+                        },
                         "WAF",
                         source=event,
                     )

From 7c3449903d5c81fae9d36d1e5e55034d8af3d6d2 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Tue, 19 Sep 2023 11:24:30 -0400
Subject: [PATCH 072/123] don't punycode-encode non-host segments
--- bbot/core/event/base.py | 5 +- bbot/core/event/helpers.py | 11 ++-- bbot/core/helpers/misc.py | 80 ++++++++++++++++++++++++++- bbot/core/helpers/punycode.py | 51 ----------------- bbot/core/helpers/regexes.py | 3 + bbot/core/helpers/validators.py | 3 +- bbot/test/test_step_1/test_events.py | 29 +++++++++- bbot/test/test_step_1/test_helpers.py | 50 +++++++++++++++++ bbot/test/test_step_1/test_regexes.py | 14 +++-- 9 files changed, 176 insertions(+), 70 deletions(-) delete mode 100644 bbot/core/helpers/punycode.py diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 261d637ca3..098f2c5c81 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -24,7 +24,6 @@ smart_decode, get_file_extension, validators, - smart_encode_punycode, tagify, ) @@ -982,9 +981,7 @@ def make_event( return data else: if event_type is None: - if isinstance(data, str): - data = smart_encode_punycode(data) - event_type = get_event_type(data) + event_type, data = get_event_type(data) if not dummy: log.debug(f'Autodetected event type "{event_type}" based on data: "{data}"') diff --git a/bbot/core/event/helpers.py b/bbot/core/event/helpers.py index 1d7043de61..228be7c335 100644 --- a/bbot/core/event/helpers.py +++ b/bbot/core/event/helpers.py @@ -14,25 +14,26 @@ def get_event_type(data): """ Attempt to divine event type from data """ - data = smart_encode_punycode(smart_decode(data).strip()) # IP address with suppress(Exception): ipaddress.ip_address(data) - return "IP_ADDRESS" + return "IP_ADDRESS", data # IP network with suppress(Exception): ipaddress.ip_network(data, strict=False) - return "IP_RANGE" + return "IP_RANGE", data + + data = smart_encode_punycode(smart_decode(data).strip()) # Strict regexes for t, regexes in event_type_regexes.items(): for r in regexes: if r.match(data): if t == "URL": - return "URL_UNVERIFIED" - return t + return "URL_UNVERIFIED", data + return t, data raise ValidationError(f'Unable to autodetect event type from "{data}"') diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 3f9e864257..d985b67e78 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -2,6 +2,7 @@ import re import sys import copy +import idna import json import atexit import codecs @@ -34,7 +35,6 @@ from .url import * # noqa F401 from .. import errors -from .punycode import * # noqa F401 from .logger import log_to_stderr from . import regexes as bbot_regexes from .names_generator import random_name, names, adjectives # noqa F401 @@ -898,10 +898,88 @@ def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, def extract_emails(s): + """ + Extract email addresses from a body of text + """ for email in bbot_regexes.email_regex.findall(smart_decode(s)): yield email.lower() +def extract_host(s): + """ + Attempts to find and extract the host portion of a string. + + Args: + s (str): The string from which to extract the host. + + Returns: + tuple: A tuple containing three strings: + (hostname (None if not found), string_before_hostname, string_after_hostname). 
+
+    Examples:
+        >>> extract_host("evilcorp.com:80")
+        ("evilcorp.com", "", ":80")
+
+        >>> extract_host("http://evilcorp.com:80/asdf.php?a=b")
+        ("evilcorp.com", "http://", ":80/asdf.php?a=b")
+
+        >>> extract_host("bob@evilcorp.com")
+        ("evilcorp.com", "bob@", "")
+
+        >>> extract_host("[dead::beef]:22")
+        ("dead::beef", "[", "]:22")
+    """
+    match = bbot_regexes.extract_host_regex.search(s)
+
+    if match:
+        hostname = match.group(1)
+        before = s[: match.start(1)]
+        after = s[match.end(1) :]
+        host, port = split_host_port(hostname)
+        if host is not None:
+            hostname = str(host)
+            if port is not None:
+                after = f":{port}{after}"
+            if is_ip(hostname, version=6):
+                before = f"{before}["
+                after = f"]{after}"
+        return (hostname, before, after)
+
+    return (None, s, "")
+
+
+def smart_encode_punycode(text: str) -> str:
+    """
+    ドメイン.テスト --> xn--eckwd4c7c.xn--zckzah
+    """
+    host, before, after = extract_host(text)
+    if host is None:
+        return text
+
+    try:
+        host = idna.encode(host).decode(errors="ignore")
+    except UnicodeError:
+        pass  # If encoding fails, leave the host as it is
+
+    return f"{before}{host}{after}"
+
+
+def smart_decode_punycode(text: str) -> str:
+    """
+    xn--eckwd4c7c.xn--zckzah --> ドメイン.テスト
+    """
+    host, before, after = extract_host(text)
+    if host is None:
+        return text
+
+    try:
+        host = idna.decode(host)
+    except UnicodeError:
+        pass  # If decoding fails, leave the host as it is
+
+    return f"{before}{host}{after}"
+
+
 def can_sudo_without_password():
     """
     Return True if the current user can sudo without a password
diff --git a/bbot/core/helpers/punycode.py b/bbot/core/helpers/punycode.py
deleted file mode 100644
index 466e5f7ebc..0000000000
--- a/bbot/core/helpers/punycode.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import re
-import idna
-
-
-split_regex = re.compile(r"([/:@\[\]]+)")
-
-
-def split_text(text):
-    # We have to split this way in order to handle URLs and email addresses
-    # which the idna library is not equipped to deal with
-    if not isinstance(text, str):
-        raise ValueError(f"data must be a string, not {type(text)}")
-    segments = split_regex.split(text)
-    return segments
-
-
-def smart_encode_punycode(text: str) -> str:
-    """
-    ドメイン.テスト --> xn--eckwd4c7c.xn--zckzah
-    """
-    segments = split_text(text)
-    result_segments = []
-
-    for segment in segments:
-        try:
-            if not split_regex.match(segment):
-                segment = idna.encode(segment).decode(errors="ignore")
-        except UnicodeError:
-            pass  # If encoding fails, leave the segment as it is
-
-        result_segments.append(segment)
-
-    return "".join(result_segments)
-
-
-def smart_decode_punycode(text: str) -> str:
-    """
-    xn--eckwd4c7c.xn--zckzah --> ドメイン.テスト
-    """
-    segments = split_text(text)
-    result_segments = []
-
-    for segment in segments:
-        try:
-            segment = idna.decode(segment)
-        except UnicodeError:
-            pass  # If decoding fails, leave the segment as it is
-
-        result_segments.append(segment)
-
-    return "".join(result_segments)
diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py
index 846d9d9b58..d8e980a831 100644
--- a/bbot/core/helpers/regexes.py
+++ b/bbot/core/helpers/regexes.py
@@ -87,3 +87,6 @@
 jquery_get_regex = re.compile(r"url:\s?[\"\'].+?\?(\w+)=")
 jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}")
 a_tag_regex = re.compile(r"<a[^>]*href=[\"\'][^\"\'?>]*\?([^&\"\'=]+)")
+
+_extract_host_regex = r"(?:[a-z0-9]{1,20}://)?(?:[^?]*@)?([^\s!@#$%^&()=/?\\]+)"
+extract_host_regex = re.compile(_extract_host_regex, re.I)
diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py
index 
9a672a79bc..82d7a38d4f 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -4,8 +4,7 @@ from bbot.core.helpers import regexes from bbot.core.helpers.url import parse_url, hash_url -from bbot.core.helpers.punycode import smart_encode_punycode -from bbot.core.helpers.misc import split_host_port, make_netloc, is_ip +from bbot.core.helpers.misc import smart_encode_punycode, split_host_port, make_netloc, is_ip log = logging.getLogger("bbot.core.helpers.validators") diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index ec26adf11c..842b91f9cb 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -250,38 +250,52 @@ async def test_events(events, scan, helpers, bbot_config): # japanese assert scan.make_event("ドメイン.テスト", dummy=True).type == "DNS_NAME" assert scan.make_event("bob@ドメイン.テスト", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("テスト@ドメイン.テスト", dummy=True).type == "EMAIL_ADDRESS" assert scan.make_event("ドメイン.テスト:80", dummy=True).type == "OPEN_TCP_PORT" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).type == "URL_UNVERIFIED" assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).type == "DNS_NAME" assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("テスト@xn--eckwd4c7c.xn--zckzah", dummy=True).type == "EMAIL_ADDRESS" assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).type == "OPEN_TCP_PORT" assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80/テスト", dummy=True).type == "URL_UNVERIFIED" # thai assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).type == "DNS_NAME" assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).type == "EMAIL_ADDRESS" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).type == "OPEN_TCP_PORT" assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).type == "URL_UNVERIFIED" assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "DNS_NAME" assert scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "EMAIL_ADDRESS" + assert scan.make_event("ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).type == "EMAIL_ADDRESS" assert scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).type == "OPEN_TCP_PORT" assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).type == "URL_UNVERIFIED" + assert scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80/ทดสอบ", dummy=True).type == "URL_UNVERIFIED" # punycode - encoding / decoding tests # japanese assert scan.make_event("xn--eckwd4c7c.xn--zckzah", dummy=True).data == "xn--eckwd4c7c.xn--zckzah" assert scan.make_event("bob@xn--eckwd4c7c.xn--zckzah", dummy=True).data == "bob@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("テスト@xn--eckwd4c7c.xn--zckzah", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert scan.make_event("xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80", dummy=True).data == 
"http://xn--eckwd4c7c.xn--zckzah/" + assert ( + scan.make_event("http://xn--eckwd4c7c.xn--zckzah:80/テスト", dummy=True).data + == "http://xn--eckwd4c7c.xn--zckzah/テスト" + ) assert scan.make_event("ドメイン.テスト", dummy=True).data == "xn--eckwd4c7c.xn--zckzah" assert scan.make_event("bob@ドメイン.テスト", dummy=True).data == "bob@xn--eckwd4c7c.xn--zckzah" + assert scan.make_event("テスト@ドメイン.テスト", dummy=True).data == "テスト@xn--eckwd4c7c.xn--zckzah" assert scan.make_event("ドメイン.テスト:80", dummy=True).data == "xn--eckwd4c7c.xn--zckzah:80" assert scan.make_event("http://ドメイン.テスト:80", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/" - + assert scan.make_event("http://ドメイン.テスト:80/テスト", dummy=True).data == "http://xn--eckwd4c7c.xn--zckzah/テスト" # thai assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" @@ -290,6 +304,10 @@ async def test_events(events, scan, helpers, bbot_config): scan.make_event("bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" ) + assert ( + scan.make_event("ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com", dummy=True).data + == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + ) assert ( scan.make_event("xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" @@ -298,14 +316,23 @@ async def test_events(events, scan, helpers, bbot_config): scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) + assert ( + scan.make_event("http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80/ทดสอบ", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/ทดสอบ" + ) assert scan.make_event("เราเที่ยวด้วยกัน.com", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("bob@เราเที่ยวด้วยกัน.com", dummy=True).data == "bob@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" + assert scan.make_event("ทดสอบ@เราเที่ยวด้วยกัน.com", dummy=True).data == "ทดสอบ@xn--12c1bik6bbd8ab6hd1b5jc6jta.com" assert scan.make_event("เราเที่ยวด้วยกัน.com:80", dummy=True).data == "xn--12c1bik6bbd8ab6hd1b5jc6jta.com:80" assert ( scan.make_event("http://เราเที่ยวด้วยกัน.com:80", dummy=True).data == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/" ) + assert ( + scan.make_event("http://เราเที่ยวด้วยกัน.com:80/ทดสอบ", dummy=True).data + == "http://xn--12c1bik6bbd8ab6hd1b5jc6jta.com/ทดสอบ" + ) # test event serialization from bbot.core.event import event_from_json diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 016e6d79f9..98488607a8 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -108,6 +108,54 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "b@b.com", ) + assert helpers.extract_host("evilcorp.com:80") == ("evilcorp.com", "", ":80") + assert helpers.extract_host("http://evilcorp.com:80/asdf.php?a=b") == ( + "evilcorp.com", + "http://", + ":80/asdf.php?a=b", + ) + assert helpers.extract_host("http://evilcorp.com:80/asdf.php?a=b@a.com") == ( + "evilcorp.com", + "http://", + ":80/asdf.php?a=b@a.com", + ) + assert helpers.extract_host("bob@evilcorp.com") == ("evilcorp.com", "bob@", "") + assert helpers.extract_host("[dead::beef]:22") == ("dead::beef", "[", "]:22") + assert helpers.extract_host("scp://[dead::beef]:22") == ("dead::beef", "scp://[", "]:22") + assert helpers.extract_host("https://[dead::beef]:22?a=b") == ("dead::beef", "https://[", "]:22?a=b") + assert 
helpers.extract_host("https://[dead::beef]/?a=b") == ("dead::beef", "https://[", "]/?a=b") + assert helpers.extract_host("https://[dead::beef]?a=b") == ("dead::beef", "https://[", "]?a=b") + assert helpers.extract_host("ftp://username:password@my-ftp.com/my-file.csv") == ( + "my-ftp.com", + "ftp://username:password@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:p@ssword@my-ftp.com/my-file.csv") == ( + "my-ftp.com", + "ftp://username:p@ssword@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@my-ftp.com/my-file.csv") == ( + "my-ftp.com", + "ftp://username:password:/@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@dead::beef/my-file.csv") == ( + "my-ftp.com", + "ftp://username:password:/@", + "/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@[dead::beef]/my-file.csv") == ( + "dead::beef", + "ftp://username:password:/@[", + "]/my-file.csv", + ) + assert helpers.extract_host("ftp://username:password:/@[dead::beef]:22/my-file.csv") == ( + "dead::beef", + "ftp://username:password:/@[", + "]:22/my-file.csv", + ) + assert helpers.split_domain("www.evilcorp.co.uk") == ("www", "evilcorp.co.uk") assert helpers.split_domain("asdf.www.test.notreal") == ("asdf.www", "test.notreal") assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") @@ -120,6 +168,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.split_host_port("evilcorp.co.uk") == ("evilcorp.co.uk", None) assert helpers.split_host_port("d://wat:wat") == ("wat", None) assert helpers.split_host_port("https://[dead::beef]:8338") == (ipaddress.ip_address("dead::beef"), 8338) + assert helpers.split_host_port("[dead::beef]") == (ipaddress.ip_address("dead::beef"), None) + assert helpers.split_host_port("dead::beef") == (ipaddress.ip_address("dead::beef"), None) extracted_words = helpers.extract_words("blacklanternsecurity") assert "black" in extracted_words # assert "blacklantern" in extracted_words diff --git a/bbot/test/test_step_1/test_regexes.py b/bbot/test/test_step_1/test_regexes.py index db889ec9c7..bb31f1dc05 100644 --- a/bbot/test/test_step_1/test_regexes.py +++ b/bbot/test/test_step_1/test_regexes.py @@ -40,7 +40,7 @@ def test_dns_name_regexes(): assert not r.match(dns), f"BAD DNS NAME: {dns} matched regex: {r}" try: - event_type = get_event_type(dns) + event_type, _ = get_event_type(dns) if event_type == "OPEN_TCP_PORT": assert dns == "evilcorp.com:80" continue @@ -56,7 +56,7 @@ def test_dns_name_regexes(): for dns in good_dns: matches = list(r.match(dns) for r in dns_name_regexes) assert any(matches), f"Good DNS_NAME {dns} did not match regexes" - event_type = get_event_type(dns) + event_type, _ = get_event_type(dns) if not event_type == "DNS_NAME": assert ( dns == "1.2.3.4" and event_type == "IP_ADDRESS" @@ -102,7 +102,7 @@ def test_open_port_regexes(): assert not r.match(open_port), f"BAD OPEN_TCP_PORT: {open_port} matched regex: {r}" try: - event_type = get_event_type(open_port) + event_type, _ = get_event_type(open_port) if event_type == "IP_ADDRESS": assert open_port in ("1.2.3.4", "[dead::beef]") continue @@ -118,7 +118,7 @@ def test_open_port_regexes(): for open_port in good_ports: matches = list(r.match(open_port) for r in open_port_regexes) assert any(matches), f"Good OPEN_TCP_PORT {open_port} did not match regexes" - event_type = get_event_type(open_port) + event_type, _ = get_event_type(open_port) assert event_type == "OPEN_TCP_PORT" @@ -170,7 
+170,7 @@ def test_url_regexes(): event_type = "" try: - event_type = get_event_type(bad_url) + event_type, _ = get_event_type(bad_url) if event_type == "DNS_NAME": assert bad_url == "evilcorp.com" continue @@ -183,4 +183,6 @@ def test_url_regexes(): for good_url in good_urls: matches = list(r.match(good_url) for r in url_regexes) assert any(matches), f"Good URL {good_url} did not match regexes" - assert get_event_type(good_url) == "URL_UNVERIFIED", f"Event type for URL {good_url} was not properly detected" + assert ( + get_event_type(good_url)[0] == "URL_UNVERIFIED" + ), f"Event type for URL {good_url} was not properly detected" From 7daaf75b0657231ee3fb5b7d0485c1933d4ca6d4 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 19 Sep 2023 11:26:04 -0400 Subject: [PATCH 073/123] smart decode in extract_host --- bbot/core/helpers/misc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index d985b67e78..9c9d6d467e 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -929,6 +929,7 @@ def extract_host(s): >>> extract_host("[dead::beef]:22") ("dead::beef", "[", "]:22") """ + s = smart_decode(s) match = bbot_regexes.extract_host_regex.search(s) if match: From 3c795dc72474b464f3a1835533a11efb7180b2af Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 19 Sep 2023 11:38:03 -0400 Subject: [PATCH 074/123] fixed tests --- bbot/core/helpers/misc.py | 10 ++++++---- bbot/core/helpers/regexes.py | 2 +- bbot/test/test_step_1/test_helpers.py | 6 +----- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 9c9d6d467e..6c10d30c64 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -106,11 +106,13 @@ def split_host_port(d): "192.168.1.1:443" --> (IPv4Address('192.168.1.1'), 443) "[dead::beef]:443" --> (IPv6Address('dead::beef'), 443) """ + port = None + host = None + if is_ip(d): + return make_ip_type(d), port if not "://" in d: d = f"d://{d}" parsed = urlparse(d) - port = None - host = None with suppress(ValueError): if parsed.port is None: if parsed.scheme in ("https", "wss"): @@ -938,12 +940,12 @@ def extract_host(s): after = s[match.end(1) :] host, port = split_host_port(hostname) if host is not None: - hostname = str(host) if port is not None: after = f":{port}{after}" - if is_ip(hostname, version=6): + if is_ip(host, version=6) and hostname.startswith("["): before = f"{before}[" after = f"]{after}" + hostname = str(host) return (hostname, before, after) return (None, s, "") diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index d8e980a831..6fc108e9e9 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -88,5 +88,5 @@ jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}") a_tag_regex = re.compile(r"]*href=[\"\'][^\"\'?>]*\?([^&\"\'=]+)") -_extract_host_regex = r"(?:[a-z0-9]{1,20}://)?(?:[^?]*@)?([^\s!@#$%^&()=/?\\]+)" +_extract_host_regex = r"(?:[a-z0-9]{1,20}://)?(?:[^?]*@)?([^\s!@#$%^&()=/?\\'\";~`<>]+)" extract_host_regex = re.compile(_extract_host_regex, re.I) diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 98488607a8..b74cc41c11 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -141,7 +141,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "/my-file.csv", ) assert 
helpers.extract_host("ftp://username:password:/@dead::beef/my-file.csv") == ( - "my-ftp.com", + "dead::beef", "ftp://username:password:/@", "/my-file.csv", ) @@ -396,10 +396,6 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.smart_decode_punycode("bob_smith@xn--eckwd4c7c.xn--zckzah") == "bob_smith@ドメイン.テスト" assert helpers.smart_encode_punycode("ドメイン.テスト:80") == "xn--eckwd4c7c.xn--zckzah:80" assert helpers.smart_decode_punycode("xn--eckwd4c7c.xn--zckzah:80") == "ドメイン.テスト:80" - with pytest.raises(ValueError): - helpers.smart_decode_punycode(b"asdf") - with pytest.raises(ValueError): - helpers.smart_encode_punycode(b"asdf") assert helpers.recursive_decode("Hello%20world%21") == "Hello world!" assert helpers.recursive_decode("Hello%20%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442") == "Hello Привет" From edb4f599ab9ebbe7b2db141edbaa74d6c40b94ae Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 19 Sep 2023 12:43:44 -0400 Subject: [PATCH 075/123] continued work on tests --- bbot/core/helpers/misc.py | 16 ++++++++++++++-- bbot/test/test_step_1/test_helpers.py | 6 +++--- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 6c10d30c64..c164144e73 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -634,12 +634,13 @@ def make_netloc(host, port): ("192.168.1.1", None) --> "192.168.1.1" ("192.168.1.1", 443) --> "192.168.1.1:443" ("evilcorp.com", 80) --> "evilcorp.com:80" + ("dead::beef", None) --> "[dead::beef]" ("dead::beef", 443) --> "[dead::beef]:443" """ - if port is None: - return host if is_ip(host, version=6): host = f"[{host}]" + if port is None: + return host return f"{host}:{port}" @@ -930,6 +931,13 @@ def extract_host(s): >>> extract_host("[dead::beef]:22") ("dead::beef", "[", "]:22") + + >>> extract_host("ftp://username:password@my-ftp.com/my-file.csv") + ( + "my-ftp.com", + "ftp://username:password@", + "/my-file.csv", + ) """ s = smart_decode(s) match = bbot_regexes.extract_host_regex.search(s) @@ -939,6 +947,10 @@ def extract_host(s): before = s[: match.start(1)] after = s[match.end(1) :] host, port = split_host_port(hostname) + netloc = make_netloc(host, port) + if netloc != hostname: + # invalid host / port + return (None, s, "") if host is not None: if port is not None: after = f":{port}{after}" diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index b74cc41c11..e8d74ec823 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -141,9 +141,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https "/my-file.csv", ) assert helpers.extract_host("ftp://username:password:/@dead::beef/my-file.csv") == ( - "dead::beef", - "ftp://username:password:/@", - "/my-file.csv", + None, + "ftp://username:password:/@dead::beef/my-file.csv", + "", ) assert helpers.extract_host("ftp://username:password:/@[dead::beef]/my-file.csv") == ( "dead::beef", From 54d33d6a1884daf66f01bfe2b331607dda9300db Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 19 Sep 2023 15:53:46 -0400 Subject: [PATCH 076/123] include traceback --- bbot/test/test_step_1/test_regexes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_1/test_regexes.py b/bbot/test/test_step_1/test_regexes.py index bb31f1dc05..7807e6c79f 100644 --- a/bbot/test/test_step_1/test_regexes.py +++ b/bbot/test/test_step_1/test_regexes.py @@ -1,4 +1,5 @@ 
 import pytest
+import traceback
 
 from bbot.core.event.helpers import get_event_type
 from bbot.core.helpers import regexes
@@ -178,7 +179,7 @@ def test_url_regexes():
         except ValidationError:
             continue
         except Exception as e:
-            pytest.fail(f"BAD URL: {bad_url} raised unknown error: {e}")
+            pytest.fail(f"BAD URL: {bad_url} raised unknown error: {e}: {traceback.format_exc()}")

From 457453219867d2cd09ac4b8697481e4c143139ce Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Tue, 19 Sep 2023 17:05:56 -0400
Subject: [PATCH 077/123] revised split_host_port() helper

---
 bbot/core/helpers/misc.py             | 46 +++++++++++++++++++--------
 bbot/core/helpers/regexes.py          | 10 +++++-
 bbot/test/test_step_1/test_helpers.py |  3 ++
 3 files changed, 44 insertions(+), 15 deletions(-)

diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py
index c164144e73..14b58b73a3 100644
--- a/bbot/core/helpers/misc.py
+++ b/bbot/core/helpers/misc.py
@@ -106,23 +106,41 @@ def split_host_port(d):
     "192.168.1.1:443" --> (IPv4Address('192.168.1.1'), 443)
     "[dead::beef]:443" --> (IPv6Address('dead::beef'), 443)
     """
-    port = None
+    d = str(d)
     host = None
+    port = None
+    scheme = None
     if is_ip(d):
         return make_ip_type(d), port
-    if not "://" in d:
-        d = f"d://{d}"
-    parsed = urlparse(d)
-    with suppress(ValueError):
-        if parsed.port is None:
-            if parsed.scheme in ("https", "wss"):
-                port = 443
-            elif parsed.scheme in ("http", "ws"):
-                port = 80
-        else:
-            port = int(parsed.port)
-    with suppress(ValueError):
-        host = parsed.hostname
+
+    match = bbot_regexes.split_host_port_regex.match(d)
+    if match is None:
+        raise ValueError(f'split_port() failed to parse "{d}"')
+    scheme = match.group("scheme")
+    netloc = match.group("netloc")
+    if netloc is None:
+        raise ValueError(f'split_port() failed to parse "{d}"')
+
+    match = bbot_regexes.extract_open_port_regex.match(netloc)
+    if match is None:
+        raise ValueError(f'split_port() failed to parse netloc "{netloc}"')
+
+    host = match.group(2)
+    if host is None:
+        host = match.group(1)
+    if host is None:
+        raise ValueError(f'split_port() failed to locate host in netloc "{netloc}"')
+
+    port = match.group(3)
+    if port is None and scheme is not None:
+        if scheme in ("https", "wss"):
+            port = 443
+        elif scheme in ("http", "ws"):
+            port = 80
+    elif port is not None:
+        with suppress(ValueError):
+            port = int(port)
+
     return make_ip_type(host), port
diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py
index 6fc108e9e9..3761b09e7f 100644
--- a/bbot/core/helpers/regexes.py
+++ b/bbot/core/helpers/regexes.py
@@ -88,5 +88,13 @@
 jquery_post_regex = re.compile(r"\$.post\([\'\"].+[\'\"].+\{(.+)\}")
 a_tag_regex = re.compile(r"<a[^>]*href=[\"\'][^\"\'?>]*\?([^&\"\'=]+)")
 
-_extract_host_regex = r"(?:[a-z0-9]{1,20}://)?(?:[^?]*@)?([^\s!@#$%^&()=/?\\'\";~`<>]+)"
+valid_netloc = r"[^\s!@#$%^&()=/?\\'\";~`<>]+"
+
+_split_host_port_regex = r"(?:(?P<scheme>[a-z0-9]{1,20})://)?(?:[^?]*@)?(?P<netloc>" + valid_netloc + ")"
+split_host_port_regex = re.compile(_split_host_port_regex, re.I)
+
+_extract_open_port_regex = r"(?:(?:\[([0-9a-f:]+)\])|([^\s:]+))(?::(\d{1,5}))?"
+extract_open_port_regex = re.compile(_extract_open_port_regex) + +_extract_host_regex = r"(?:[a-z0-9]{1,20}://)?(?:[^?]*@)?(" + valid_netloc + ")" extract_host_regex = re.compile(_extract_host_regex, re.I) diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index e8d74ec823..abf09cadc2 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -166,6 +166,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) assert helpers.split_host_port("evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) assert helpers.split_host_port("evilcorp.co.uk") == ("evilcorp.co.uk", None) + assert helpers.split_host_port("192.168.0.1") == (ipaddress.ip_address("192.168.0.1"), None) + assert helpers.split_host_port("192.168.0.1:80") == (ipaddress.ip_address("192.168.0.1"), 80) + assert helpers.split_host_port("[e]:80") == ("e", 80) assert helpers.split_host_port("d://wat:wat") == ("wat", None) assert helpers.split_host_port("https://[dead::beef]:8338") == (ipaddress.ip_address("dead::beef"), 8338) assert helpers.split_host_port("[dead::beef]") == (ipaddress.ip_address("dead::beef"), None) From 60d1af1f0b91a2b34fed45b99650131e81a86345 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 20 Sep 2023 12:05:24 -0400 Subject: [PATCH 078/123] utilize target for source event correlation in massdns --- bbot/core/helpers/helper.py | 2 +- bbot/modules/deadly/nuclei.py | 2 +- bbot/modules/massdns.py | 15 +++------------ bbot/modules/nmap.py | 2 +- 4 files changed, 6 insertions(+), 15 deletions(-) diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index b052a63fbd..241ec15d89 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -70,7 +70,7 @@ def clean_old_scans(self): _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name) self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter) - def make_target(self, events): + def make_target(self, *events): return Target(self.scan, *events) @property diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 0ea880b317..35eb6aabf9 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -128,7 +128,7 @@ async def setup(self): return True async def handle_batch(self, *events): - temp_target = self.helpers.make_target(events) + temp_target = self.helpers.make_target(*events) nuclei_input = [str(e.data) for e in events] async for severity, template, host, url, name, extracted_results in self.execute_nuclei(nuclei_input): # this is necessary because sometimes nuclei is inconsistent about the data returned in the host field diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 2ec1031afe..ed54fc6550 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -65,7 +65,7 @@ class massdns(crobat): async def setup(self): self.found = dict() self.mutations_tried = set() - self.source_events = dict() + self.source_events = self.helpers.make_target() self.subdomain_file = await self.helpers.wordlist(self.config.get("wordlist")) self.max_resolvers = self.config.get("max_resolvers", 1000) self.max_mutations = self.config.get("max_mutations", 500) @@ -94,9 +94,7 @@ async def filter_event(self, event): async def handle_event(self, event): query = self.make_query(event) - h = hash(query) - if not h in self.source_events: - self.source_events[h] = event + 
self.source_events.add_target(event) self.info(f"Brute-forcing subdomains for {query} (source: {event.data})") for hostname in await self.massdns(query, self.helpers.read_file(self.subdomain_file)): @@ -354,7 +352,7 @@ def add_mutation(_domain_hash, m): self.info(f"Trying {len(mutations):,} mutations against {domain} ({i+1}/{len(found)})") results = list(await self.massdns(query, mutations)) for hostname in results: - source_event = self.get_source_event(hostname) + source_event = self.source_events.get(hostname) if source_event is None: self.warning(f"Could not correlate source event from: {hostname}") source_event = self.scan.root_event @@ -395,10 +393,3 @@ def gen_random_subdomains(self, n=50): yield subdomain for _ in range(5): yield self.helpers.rand_string(length=8, digits=False) - - def get_source_event(self, hostname): - for p in self.helpers.domain_parents(hostname): - try: - return self.source_events[hash(p)] - except KeyError: - continue diff --git a/bbot/modules/nmap.py b/bbot/modules/nmap.py index d0671d16b2..e2900ab1b1 100644 --- a/bbot/modules/nmap.py +++ b/bbot/modules/nmap.py @@ -35,7 +35,7 @@ async def setup(self): return True async def handle_batch(self, *events): - target = self.helpers.make_target(events) + target = self.helpers.make_target(*events) targets = list(set(str(e.data) for e in events)) command, output_file = self.construct_command(targets) try: From 02faebe529a484e8d0bac93ff373f7bb40eb6474 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 20 Sep 2023 13:23:19 -0400 Subject: [PATCH 079/123] reintroduce custom dns resolvers for projectdiscovery tools --- bbot/modules/deadly/nuclei.py | 5 +++-- bbot/modules/httpx.py | 7 +++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index 0ea880b317..bb8d1ed9a2 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -185,10 +185,11 @@ async def execute_nuclei(self, nuclei_input): self.concurrency, "-disable-update-check", "-stats-json", - # "-r", - # self.helpers.resolver_file, ] + if self.helpers.system_resolvers: + command += ["-r", self.helpers.resolver_file] + for cli_option in ("severity", "templates", "iserver", "itoken", "tags", "etags"): option = getattr(self, cli_option) diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index aa375f7ea6..ef77668db0 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -98,9 +98,12 @@ async def handle_batch(self, *events): f"User-Agent: {self.scan.useragent}", "-response-size-to-read", f"{self.max_response_size}", - # "-r", - # self.helpers.resolver_file, ] + + dns_resolvers = ",".join(self.helpers.system_resolvers) + if dns_resolvers: + command += ["-r", dns_resolvers] + for hk, hv in self.scan.config.get("http_headers", {}).items(): command += ["-header", f"{hk}: {hv}"] proxy = self.scan.config.get("http_proxy", "") From 9cb214b2c020d3db94f97d5e7bb910e1f89ee98b Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 20 Sep 2023 14:27:20 -0400 Subject: [PATCH 080/123] Added ability to kill modules during scan --- bbot/cli.py | 14 +++++++++++++- bbot/modules/base.py | 9 +++++++-- bbot/scanner/manager.py | 4 ++++ 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index 4069867158..675f320df4 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import os +import re import sys import asyncio import logging @@ -307,6 +308,7 @@ async def _main(): def keyboard_listen(): allowed_errors = 
10
+        kill_regex = re.compile(r"kill (?P<module>[a-z0-9_]+)")
         while 1:
             keyboard_input = "a"
             try:
@@ -314,7 +316,17 @@
                 allowed_errors = 10
             except Exception:
                 allowed_errors -= 1
-            if not keyboard_input:
+            if keyboard_input:
+                log.verbose(f'Got keyboard input: "{keyboard_input}"')
+                kill_match = kill_regex.match(keyboard_input)
+                if kill_match:
+                    module = kill_match.group("module")
+                    if module in scanner.modules:
+                        log.hugewarning(f'Killing module: "{module}"')
+                        scanner.manager.kill_module(module, message="killed by user")
+                    else:
+                        log.warning(f'Invalid module: "{module}"')
+            else:
                 toggle_log_level(logger=log)
                 scanner.manager.modules_status(_log=True)
             if allowed_errors <= 0:
diff --git a/bbot/modules/base.py b/bbot/modules/base.py
index 2057212b63..3bb206f983 100644
--- a/bbot/modules/base.py
+++ b/bbot/modules/base.py
@@ -324,7 +324,7 @@ async def _setup(self):
 
     async def _worker(self):
         async with self.scan.acatch(context=self._worker):
             try:
-                while not self.scan.stopping:
+                while not self.scan.stopping and not self.errored:
                     # hold the reigns if our outgoing queue is full
                     if self._qsize > 0 and self.outgoing_event_queue.qsize() >= self._qsize:
                         await asyncio.sleep(0.1)
@@ -505,7 +505,7 @@ def queue_outgoing_event(self, event, **kwargs):
         except AttributeError:
             self.debug(f"Not in an acceptable state to queue outgoing event")
 
-    def set_error_state(self, message=None):
+    def set_error_state(self, message=None, clear_outgoing_queue=False):
         if not self.errored:
             log_msg = f"Setting error state for module {self.name}"
             if message is not None:
@@ -522,6 +522,11 @@
             # if there are leftover objects in the queue, the scan will hang.
             self._incoming_event_queue = False
 
+        if clear_outgoing_queue:
+            with suppress(asyncio.queues.QueueEmpty):
+                while 1:
+                    self.outgoing_event_queue.get_nowait()
+
     # override in the module to define different values to comprise the hash
     def get_per_host_hash(self, event):
         parsed = getattr(event, "parsed", None)
diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py
index d085a60b0d..1ed44d37ff 100644
--- a/bbot/scanner/manager.py
+++ b/bbot/scanner/manager.py
@@ -346,6 +346,10 @@ async def _worker_loop(self):
             except Exception:
                 log.critical(traceback.format_exc())
 
+    def kill_module(self, module_name, message=None):
+        module = self.scan.modules[module_name]
+        module.set_error_state(message=message, clear_outgoing_queue=True)
+
     @property
     def modules_by_priority(self):
         if not self._modules_by_priority:

From aaa3abafa0e219449f72e2ca38d6314ecc2eea2b Mon Sep 17 00:00:00 2001
From: BBOT Docs Autopublish
Date: Fri, 22 Sep 2023 14:46:45 +0000
Subject: [PATCH 081/123] Refresh module docs

---
 docs/modules/list_of_modules.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md
index 391f744475..b3abd7b4ca 100644
--- a/docs/modules/list_of_modules.md
+++ b/docs/modules/list_of_modules.md
@@ -92,7 +92,7 @@
 | discord | output | No | Message a Discord channel when certain events are encountered |  | * |  |
 | http | output | No | Send every event to a custom URL via a web request |  | * |  |
 | human | output | No | Output to text |  | * |  |
-| json | output | No | Output to JSON |  | * |  |
+| json | output | No | Output to Newline-Delimited JSON (NDJSON) |  | * |  |
 | neo4j | output | No | Output to Neo4j |  | * |  |
 | python | output | No | Output via Python API |  | * |  |
 | slack | output | No | Message a Slack channel when certain events are encountered |  | * |  |
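PATCH 080 above wires a small command parser into the CLI's interactive keyboard listener so that individual modules can be killed mid-scan. Below is a minimal runnable sketch of how that `kill <module>` input is parsed; the module names typed here are illustrative, and the `scanner.manager.kill_module()` call is only referenced in a comment rather than invoked:

```python
import re

# Same pattern added to bbot/cli.py in PATCH 080; the named group "module"
# is what keyboard_listen() reads back via kill_match.group("module").
kill_regex = re.compile(r"kill (?P<module>[a-z0-9_]+)")

# Hypothetical keystrokes typed during a running scan
for keyboard_input in ("kill massdns", "kill nuclei", "status"):
    kill_match = kill_regex.match(keyboard_input)
    if kill_match:
        # the real CLI calls scanner.manager.kill_module(module, message="killed by user")
        print(f"would kill module: {kill_match.group('module')!r}")
    else:
        # any other non-empty input toggles the log level and prints module status
        print(f"not a kill command: {keyboard_input!r}")
```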
From a11e7b8c9730a398ebe846a679e0d8e049df7277 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 20 Sep 2023 10:36:35 -0400 Subject: [PATCH 082/123] fix poetry.lock conflicts --- bbot/core/helpers/interactsh.py | 2 +- bbot/modules/base.py | 14 +- bbot/scanner/manager.py | 14 +- bbot/scanner/scanner.py | 73 +- docs/contribution.md | 6 +- docs/dev/scanner.md | 3 + docs/how_it_works.md | 2 +- docs/index.md | 4 +- docs/modules/list_of_modules.md | 2 +- docs/scanning/configuration.md | 2 +- docs/scanning/events.md | 2 +- docs/scanning/index.md | 20 +- docs/scanning/output.md | 4 +- docs/scanning/tips_and_tricks.md | 6 +- mkdocs.yml | 18 + poetry.lock | 1506 ++++++++++++++++++++---------- pyproject.toml | 9 + 17 files changed, 1144 insertions(+), 543 deletions(-) create mode 100644 docs/dev/scanner.md diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 205fc2bb31..8f81ec0af1 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -126,7 +126,7 @@ async def poll(self): return ret async def poll_loop(self, callback): - async with self.parent_helper.scan.acatch(context=self._poll_loop): + async with self.parent_helper.scan._acatch(context=self._poll_loop): return await self._poll_loop(callback) async def _poll_loop(self, callback): diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 3bb206f983..a5faa45108 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -219,12 +219,12 @@ async def _handle_batch(self): if events and not self.errored: self.debug(f"Handling batch of {len(events):,} events") submitted = True - async with self.scan.acatch(f"{self.name}.handle_batch()"): + async with self.scan._acatch(f"{self.name}.handle_batch()"): await self.handle_batch(*events) self.debug(f"Finished handling batch of {len(events):,} events") if finish: context = f"{self.name}.finish()" - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.finish() return submitted @@ -322,7 +322,7 @@ async def _setup(self): return self.name, status, str(msg) async def _worker(self): - async with self.scan.acatch(context=self._worker): + async with self.scan._acatch(context=self._worker): try: while not self.scan.stopping and not self.errored: # hold the reigns if our outgoing queue is full @@ -351,13 +351,13 @@ async def _worker(self): if acceptable: if event.type == "FINISHED": context = f"{self.name}.finish()" - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.finish() else: context = f"{self.name}.handle_event({event})" self.scan.stats.event_consumed(event, self) self.debug(f"Handling {event}") - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.handle_event(event) self.debug(f"Finished handling {event}") else: @@ -427,7 +427,7 @@ async def _event_postcheck(self, event): return filter_result, reason # custom filtering - async with self.scan.acatch(context=self.filter_event): + async with self.scan._acatch(context=self.filter_event): filter_result = await self.filter_event(event) msg = str(self._custom_filter_criteria_msg) with suppress(ValueError, TypeError): @@ -469,7 +469,7 @@ async def _cleanup(self): for callback in [self.cleanup] + self.cleanup_callbacks: context = f"{self.name}.cleanup()" if 
callable(callback): - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.helpers.execute_sync_or_async(callback) async def queue_event(self, event): diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py index 1ed44d37ff..4797bf1367 100644 --- a/bbot/scanner/manager.py +++ b/bbot/scanner/manager.py @@ -35,7 +35,7 @@ async def init_events(self): seed scanner with target events """ context = f"manager.init_events()" - async with self.scan.acatch(context), self._task_counter.count(context): + async with self.scan._acatch(context), self._task_counter.count(context): await self.distribute_event(self.scan.root_event) sorted_events = sorted(self.scan.target.events, key=lambda e: len(e.data)) for event in sorted_events: @@ -66,10 +66,10 @@ async def emit_event(self, event, *args, **kwargs): event._resolved.set() for kwarg in ["abort_if", "on_success_callback"]: kwargs.pop(kwarg, None) - async with self.scan.acatch(context=self.distribute_event): + async with self.scan._acatch(context=self.distribute_event): await self.distribute_event(event, *args, **kwargs) else: - async with self.scan.acatch(context=self._emit_event, finally_callback=event._resolved.set): + async with self.scan._acatch(context=self._emit_event, finally_callback=event._resolved.set): await self._emit_event(event, *args, **kwargs) def _event_precheck(self, event, exclude=("DNS_NAME",)): @@ -194,7 +194,7 @@ async def _emit_event(self, event, *args, **kwargs): # now that the event is properly tagged, we can finally make decisions about it abort_result = False if callable(abort_if): - async with self.scan.acatch(context=abort_if): + async with self.scan._acatch(context=abort_if): abort_result = await self.scan.helpers.execute_sync_or_async(abort_if, event) msg = f"{event.module}: not raising event {event} due to custom criteria in abort_if()" with suppress(ValueError, TypeError): @@ -210,7 +210,7 @@ async def _emit_event(self, event, *args, **kwargs): # run success callback before distributing event (so it can add tags, etc.) 
if distribute_event: if callable(on_success_callback): - async with self.scan.acatch(context=on_success_callback): + async with self.scan._acatch(context=on_success_callback): await self.scan.helpers.execute_sync_or_async(on_success_callback, event) if not event.host or (event.always_emit and not event_is_duplicate): @@ -244,7 +244,7 @@ async def _emit_event(self, event, *args, **kwargs): ### Emit DNS children ### if self.dns_resolution: - emit_children = -1 < event.scope_distance < self.scan.dns_search_distance + emit_children = -1 < event.scope_distance < self.scan.scope_dns_search_distance if emit_children: # only emit DNS children once for each unique host host_hash = hash(str(event.host)) @@ -317,7 +317,7 @@ async def distribute_event(self, *args, **kwargs): """ Queue event with modules """ - async with self.scan.acatch(context=self.distribute_event): + async with self.scan._acatch(context=self.distribute_event): event = self.scan.make_event(*args, **kwargs) event_hash = hash(event) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 9d71706a4a..49965b4c72 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -41,6 +41,44 @@ class Scanner: + """A class representing a single BBOT scan + + Examples: + Create scan with multiple targets: + >>> my_scan = Scanner("evilcorp.com", "1.2.3.0/24", modules=["nmap", "sslcert", "httpx"]) + + Create scan with custom config: + >>> config = {"http_proxy": "http://127.0.0.1:8080", "modules": {"nmap": {"top_ports": 2000}}} + >>> my_scan = Scanner("www.evilcorp.com", modules=["nmap", "httpx"], config=config) + + Synchronous, iterating over events as they're discovered: + >>> for event in my_scan.start(): + >>> print(event) + + Asynchronous, iterating over events as they're discovered: + >>> async for event in my_scan.async_start(): + >>> print(event) + + Synchronous, without consuming events: + >>> my_scan.start_without_generator() + + Asynchronous, without consuming events: + >>> await my_scan.start_without_generator() + + Attributes: + status (str): Status of scan + target (ScanTarget): Target of scan + config (omegaconf.dictconfig.DictConfig): BBOT config + whitelist (ScanTarget): Scan whitelist (by default this is the same as `target`) + blacklist (ScanTarget): Scan blacklist (this takes ultimate precedence) + helpers (ConfigAwareHelper): Helper containing various reusable functions, regexes, etc. + manager (ScanManager): Coordinates and monitors the flow of events between modules during a scan + dispatcher (Dispatcher): Triggers certain events when the scan `status` changes + modules (dict): Holds all loaded modules in this format: `{"module_name": Module()}` + stats (ScanStats): Holds high-level scan statistics such as how many events have been produced and consumed by each module + home (pathlib.Path): Base output directory of the scan (default: `~/.bbot/scans/`) + """ + _status_codes = { "NOT_STARTED": 0, "STARTING": 1, @@ -68,6 +106,23 @@ def __init__( strict_scope=False, force_start=False, ): + """ + Initializes the Scanner class. + + Args: + *targets (str): Target(s) to scan. + whitelist (ScanTarget, optional): Whitelisted target(s) to scan. Defaults to the same as `targets`. + blacklist (ScanTarget, optional): Blacklisted target(s). Takes ultimate precedence. Defaults to empty. + scan_id (str, optional): Unique identifier for the scan. Auto-generates if None. + name (str, optional): Human-readable name of the scan. Auto-generates if None. 
+ modules (list[str], optional): List of module names to use during the scan. Defaults to empty list. + output_modules (list[str], optional): List of output modules to use. Defaults to ['python']. + output_dir (str or Path, optional): Directory to store scan output. Defaults to BBOT home directory (`~/.bbot`). + config (dict, optional): Configuration settings. Merged with BBOT config. + dispatcher (Dispatcher, optional): Dispatcher object to use. Defaults to new Dispatcher. + strict_scope (bool, optional): If True, only targets explicitly in whitelist are scanned. Defaults to False. + force_start (bool, optional): If True, forces the scan to start even with warnings. Defaults to False. + """ if modules is None: modules = [] if output_modules is None: @@ -102,19 +157,15 @@ def __init__( if name is None: tries = 0 - while 1: if tries > 5: self.name = f"{self.helpers.rand_string(4)}_{self.helpers.rand_string(4)}" break - self.name = random_name() - if output_dir is not None: home_path = Path(output_dir).resolve() / self.name else: home_path = self.helpers.bbot_home / "scans" / self.name - if not home_path.exists(): break tries += 1 @@ -153,7 +204,7 @@ def __init__( # scope distance self.scope_search_distance = max(0, int(self.config.get("scope_search_distance", 0))) - self.dns_search_distance = max( + self.scope_dns_search_distance = max( self.scope_search_distance, int(self.config.get("scope_dns_search_distance", 2)) ) self.scope_report_distance = int(self.config.get("scope_report_distance", 1)) @@ -173,7 +224,7 @@ def __init__( self._cleanedup = False self.__loop = None - self.manager_worker_loop_tasks = [] + self._manager_worker_loop_tasks = [] self.init_events_task = None self.ticker_task = None self.dispatcher_tasks = [] @@ -256,7 +307,7 @@ async def async_start(self): await self.dispatcher.on_start(self) # start manager worker loops - self.manager_worker_loop_tasks = [ + self._manager_worker_loop_tasks = [ asyncio.create_task(self.manager._worker_loop()) for _ in range(self.max_workers) ] @@ -423,7 +474,7 @@ def cancel_tasks(self): # dispatcher tasks += self.dispatcher_tasks # manager worker loops - tasks += self.manager_worker_loop_tasks + tasks += self._manager_worker_loop_tasks self.helpers.cancel_tasks_sync(tasks) # process pool self.process_pool.shutdown(cancel_futures=True) @@ -431,7 +482,7 @@ def cancel_tasks(self): async def report(self): for mod in self.modules.values(): context = f"{mod.name}.report()" - async with self.acatch(context), mod._task_counter.count(context): + async with self._acatch(context), mod._task_counter.count(context): await mod.report() async def cleanup(self): @@ -749,7 +800,7 @@ def _load_modules(self, modules): return loaded_modules, failed async def _status_ticker(self, interval=15): - async with self.acatch(): + async with self._acatch(): while 1: await asyncio.sleep(interval) self.manager.modules_status(_log=True) @@ -768,7 +819,7 @@ def catch(self, context="scan", finally_callback=None): self._handle_exception(e, context=context) @contextlib.asynccontextmanager - async def acatch(self, context="scan", finally_callback=None): + async def _acatch(self, context="scan", finally_callback=None): """ Async version of catch() diff --git a/docs/contribution.md b/docs/contribution.md index 2bc585898f..65b074adba 100644 --- a/docs/contribution.md +++ b/docs/contribution.md @@ -94,7 +94,7 @@ This will produce the output: [INFO] Finishing scan ``` -But something's wrong! 
We're emitting `IP_ADDRESS` [events](./scanning/events/), but they're not showing up in the output. This is because by default, BBOT only shows in-scope [events](./scanning/events/). To see them, we need to increase the report distance:
+But something's wrong! We're emitting `IP_ADDRESS` [events](./scanning/events.md), but they're not showing up in the output. This is because by default, BBOT only shows in-scope [events](./scanning/events.md). To see them, we need to increase the report distance:
 
 ```bash
 # run the module again but with a higher report distance
@@ -123,9 +123,9 @@ Now, with the `report_distance=1`:
 
 ### `handle_event()` and `emit_event()`
 
-The `handle_event()` method is the most important part of the module. By overriding this method, you control what the module does. During a scan, when an [event](./scanning/events/) from your `watched_events` is encountered (a `DNS_NAME` in this example), `handle_event()` is automatically called with that [event](./scanning/events/).
+The `handle_event()` method is the most important part of the module. By overriding this method, you control what the module does. During a scan, when an [event](./scanning/events.md) from your `watched_events` is encountered (a `DNS_NAME` in this example), `handle_event()` is automatically called with that event.
 
-The `emit_event()` method is how modules return data. When you call `emit_event()`, it creates an [event](./scanning/events/) and prints it to the console. It also distributes it any modules that are interested in that data type.
+The `emit_event()` method is how modules return data. When you call `emit_event()`, it creates an [event](./scanning/events.md) and prints it to the console. It also distributes it to any modules that are interested in that data type.
 
 ### Module Dependencies
 
diff --git a/docs/dev/scanner.md b/docs/dev/scanner.md
new file mode 100644
index 0000000000..f5388688c0
--- /dev/null
+++ b/docs/dev/scanner.md
@@ -0,0 +1,3 @@
+# `bbot.scanner.Scanner()`
+
+::: bbot.scanner.Scanner
diff --git a/docs/how_it_works.md b/docs/how_it_works.md
index 3423e28a81..1d42389d4c 100644
--- a/docs/how_it_works.md
+++ b/docs/how_it_works.md
@@ -40,4 +40,4 @@ This allows for some interesting chains of events. Given a single target such as
 
 This is a simple example with only a few modules, but you can begin to see how if 30 or 40 modules were enabled, they could feed each other exponentially to produce an immense amount of data. This recursion is exactly how BBOT is able to outperform other tools.
 
-For a full list of event types and which modules consume/produce them, see [List of Event Types](../scanning/events/#list-of-event-types).
+For a full list of event types and which modules consume/produce them, see [List of Event Types](scanning/events.md#list-of-event-types).
diff --git a/docs/index.md b/docs/index.md
index ac00172125..f5ef3b332f 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -113,6 +113,6 @@ Or on the command-line:
 bbot -t evilcorp.com -f subdomain-enum -c modules.shodan_dns.api_key=deadbeef modules.virustotal.api_key=cafebabe
 ```
 
-For more information, see [Configuration](./scanning/configuration/). For a full list of modules, including which ones require API keys, see [List of Modules](./modules/list_of_modules/).
+For more information, see [Configuration](./scanning/configuration.md). For a full list of modules, including which ones require API keys, see [List of Modules](./modules/list_of_modules.md).
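The same configuration can also be supplied programmatically. As a minimal sketch (the API key values are placeholders, and it assumes the `Scanner` class documented elsewhere in this patch), the equivalent scan via the Python API might look like:

```python
from bbot.scanner import Scanner

# the config dict is merged into the main BBOT config, just like -c on the CLI
config = {"modules": {"shodan_dns": {"api_key": "deadbeef"}, "virustotal": {"api_key": "cafebabe"}}}

scan = Scanner("evilcorp.com", modules=["shodan_dns", "virustotal"], config=config)

# consume events synchronously as they're discovered
for event in scan.start():
    print(event)
```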
-[Next Up: Scanning -->](./scanning/){ .md-button .md-button--primary } +[Next Up: Scanning -->](./scanning/index.md){ .md-button .md-button--primary } diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index b3abd7b4ca..cbe682be8d 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -105,4 +105,4 @@ | speculate | internal | No | Derive certain event types from others by common sense | passive | DNS_NAME, DNS_NAME_UNRESOLVED, HTTP_RESPONSE, IP_ADDRESS, IP_RANGE, STORAGE_BUCKET, URL, URL_UNVERIFIED | DNS_NAME, FINDING, IP_ADDRESS, OPEN_TCP_PORT | -For a list of module config options, see [Module Options](../configurations/#module-config-options). +For a list of module config options, see [Module Options](../scanning/configuration.md#module-config-options). diff --git a/docs/scanning/configuration.md b/docs/scanning/configuration.md index 66d86991cb..c85796860f 100644 --- a/docs/scanning/configuration.md +++ b/docs/scanning/configuration.md @@ -7,7 +7,7 @@ For a list of all possible config options, see: - [Global Options](#global-config-options) - [Module Options](#module-config-options) -For examples of common config changes, see [Tips and Tricks](../tips_and_tricks/). +For examples of common config changes, see [Tips and Tricks](tips_and_tricks.md). ## Configuration Files diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 0f0487073b..9a0fc19a79 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -16,7 +16,7 @@ In addition to the obvious data (e.g. `www.evilcorp.com`), an event also contain - its `.scope_distance` (how many hops it is from the main scope, 0 == in-scope) - a list of `.tags` that describe the data (`mx-record`, `http-title`, etc.) -These attributes allow us to construct a visual graph of events (e.g. in [Neo4j](../output#neo4j)) and query/filter/grep them more easily. Here is what a typical event looks like in JSON format: +These attributes allow us to construct a visual graph of events (e.g. in [Neo4j](output.md#neo4j)) and query/filter/grep them more easily. Here is what a typical event looks like in JSON format: ```json { diff --git a/docs/scanning/index.md b/docs/scanning/index.md index 7aed1e0062..f47c50ba93 100644 --- a/docs/scanning/index.md +++ b/docs/scanning/index.md @@ -40,13 +40,13 @@ https://www.evilcorp.co.uk $ bbot -t targets.txt fsociety.com 5.6.7.0/24 -m nmap ``` -On start, BBOT automatically converts Targets into [Events](./events/). +On start, BBOT automatically converts Targets into [Events](events.md). ## Modules (`-m`) -To see a full list of modules and their descriptions, use `bbot -l` or see [List of Modules](./list_of_modules/). +To see a full list of modules and their descriptions, use `bbot -l` or see [List of Modules](../modules/list_of_modules.md). -Modules are the part of BBOT that does the work -- port scanning, subdomain brute-forcing, API querying, etc. Modules consume [Events](./events/) (`IP_ADDRESS`, `DNS_NAME`, etc.) from each other, process the data in a useful way, then emit the results as new events. You can enable individual modules with `-m`. +Modules are the part of BBOT that does the work -- port scanning, subdomain brute-forcing, API querying, etc. Modules consume [Events](events.md) (`IP_ADDRESS`, `DNS_NAME`, etc.) from each other, process the data in a useful way, then emit the results as new events. You can enable individual modules with `-m`. 
```bash
# Enable modules: nmap, sslcert, and httpx
@@ -60,14 +60,14 @@ Modules fall into three categories:
 
 - **Scan Modules**:
     - These make up the majority of modules. Examples are `nmap`, `sslcert`, `httpx`, etc. Enable with `-m`.
 - **Output Modules**:
-    - These output scan data to different formats/destinations. `human`, `json`, and `csv` are enabled by default. Enable others with `-om`. (See: [Output](./output/))
+    - These output scan data to different formats/destinations. `human`, `json`, and `csv` are enabled by default. Enable others with `-om`. (See: [Output](output.md))
 - **Internal Modules**:
     - These modules perform essential, common-sense tasks. They are always enabled, unless explicitly disabled via the config (e.g. `-c speculate=false`).
     - `aggregate`: Summarizes results at the end of a scan
     - `excavate`: Extracts useful data such as subdomains from webpages, etc.
     - `speculate`: Intelligently infers new events, e.g. `OPEN_TCP_PORT` from `URL` or `IP_ADDRESS` from `IP_NETWORK`.
 
-For details in the inner workings of modules, see [Creating a Module](../contribution/#creating-a-module).
+For details on the inner workings of modules, see [Creating a Module](../contribution.md#creating-a-module).
 
 ## Flags (`-f`)
 
@@ -141,7 +141,7 @@ BBOT modules have external dependencies ranging from OS packages (`openssl`) to
 - `--ignore-failed-deps` - Run modules even if they have failed dependencies
 - `--install-all-deps` - Install dependencies for all modules (useful if you are provisioning a pentest system and want to install everything ahead of time)
 
-For details on how Ansible playbooks are attached to BBOT modules, see [How to Write a Module](../contribution/#module-dependencies).
+For details on how Ansible playbooks are attached to BBOT modules, see [How to Write a Module](../contribution.md#module-dependencies).
 
 ## Scope
 
@@ -151,15 +151,15 @@ By default, scope is whatever you specify with `-t`. This includes child subdoma
 
 ### Scope Distance
 
-Since BBOT is recursive, it would quickly resort to scanning the entire internet without some kind of restraining mechanism. To solve this problem, every [event](./events/) discovered by BBOT is assigned a **Scope Distance**. Scope distance represents how far out from the main scope that data was discovered.
+Since BBOT is recursive, it would quickly resort to scanning the entire internet without some kind of restraining mechanism. To solve this problem, every [event](events.md) discovered by BBOT is assigned a **Scope Distance**. Scope distance represents how far out from the main scope that data was discovered.
 
 For example, if your target is `evilcorp.com`, `www.evilcorp.com` would have a scope distance of `0` (i.e. in-scope). If BBOT discovers that `www.evilcorp.com` resolves to `1.2.3.4`, `1.2.3.4` is one hop away, which means it would have a scope distance of `1`. If `1.2.3.4` has a PTR record that points to `ecorp.blob.core.windows.net`, `ecorp.blob.core.windows.net` is two hops away, so its scope distance is `2`.
 
-Scope distance continues to increase the further out you get. Most modules (e.g. `nuclei` and `nmap`) only consume in-scope events. Certain other passive modules such as `asn` accept out to distance `1`. By default, DNS resolution happens out to a distance of `2`. Upon its discovery, any [event](./events/) that's determined to be in-scope (e.g. `www.evilcorp.com`) immediately becomes distance `0`, and the cycle starts over.
+Scope distance continues to increase the further out you get. Most modules (e.g. `nuclei` and `nmap`) only consume in-scope events. Certain other passive modules such as `asn` accept out to distance `1`. By default, DNS resolution happens out to a distance of `2`. Upon its discovery, any [event](events.md) that's determined to be in-scope (e.g. `www.evilcorp.com`) immediately becomes distance `0`, and the cycle starts over.
 
 #### Displaying Out-of-scope Events
 
-By default, BBOT only displays in-scope events (with a few exceptions such as `STORAGE_BUCKET`s). If you want to see more, you must increase the [config](./configuration/) value of `scope_report_distance`:
+By default, BBOT only displays in-scope events (with a few exceptions such as `STORAGE_BUCKET`s). If you want to see more, you must increase the [config](configuration.md) value of `scope_report_distance`:
 
 ```bash
 # display out-of-scope events up to one hop away from the main scope
@@ -206,7 +206,7 @@ Wildcard hosts are collapsed into a single host beginning with `_wildcard`:
     ^^^^^^^^^
 ```
 
-If you don't want this, you can disable wildcard detection on a domain-to-domain basis in the [config](./configuration/):
+If you don't want this, you can disable wildcard detection on a domain-to-domain basis in the [config](configuration.md):
 
 ```yaml title="~/.bbot/config/bbot.yml"
 dns_wildcard_ignore:
diff --git a/docs/scanning/output.md b/docs/scanning/output.md
index 310b92288e..394700dce3 100644
--- a/docs/scanning/output.md
+++ b/docs/scanning/output.md
@@ -42,7 +42,7 @@ If you manually enable the `json` output module, it will go to stdout:
 bbot -t evilcorp.com -om json | jq
 ```
 
-You will then see [events](./events) like this:
+You will then see [events](events.md) like this:
 
 ```json
 {
@@ -114,7 +114,7 @@ output_modules:
 
 ### HTTP
 
-The `http` output module sends [events](./events) in JSON format to a desired HTTP endpoint.
+The `http` output module sends [events](events.md) in JSON format to a desired HTTP endpoint.
 
 ```bash
 # POST scan results to localhost
diff --git a/docs/scanning/tips_and_tricks.md b/docs/scanning/tips_and_tricks.md
index f8afedbfd6..aaafb15ca2 100644
--- a/docs/scanning/tips_and_tricks.md
+++ b/docs/scanning/tips_and_tricks.md
@@ -24,9 +24,9 @@ The web spider is great for finding juicy data like subdomains, email addresses,
 
 The web spider is controlled with three config values:
 
-- `web_spider_distance` (`0` == all spidering disabled, default: `0`): the maximum number of links that can be followed in a row. This is designed to limit the spider in cases where `web_spider_depth` fails (e.g. for an ecommerce website with thousands of base-level URLs).
 - `web_spider_depth` (default: `1`): the maximum directory depth allowed. This is to prevent the spider from delving too deep into a website.
-- `web_spider_links_per_page` (default: `25`): the maximum number of links per page that can be followed. This is designed specifically for cases where a single page has hundreds or thousands of links.
+- `web_spider_distance` (`0` == all spidering disabled, default: `0`): the maximum number of links that can be followed in a row. This is designed to limit the spider in cases where `web_spider_depth` fails (e.g. for an ecommerce website with thousands of base-level URLs).
+- `web_spider_links_per_page` (default: `25`): the maximum number of links per page that can be followed. This is designed to save you in cases where a single page has hundreds or thousands of links.
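These values can be set per-scan with `-c`, or persistently in the config file. A minimal sketch (assuming the standard `~/.bbot/config/bbot.yml` location; the values are illustrative, not recommendations):

```yaml title="~/.bbot/config/bbot.yml"
# enable the spider: follow up to two links in a row, two directory levels deep
web_spider_distance: 2
web_spider_depth: 2
# keep the default cap on links followed from any single page
web_spider_links_per_page: 25
```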
Here is a typical example:
 
@@ -87,7 +87,7 @@ bbot -m httpx gowitness wappalyzer -t urls.txt -c dns_resolution=false
 
 `URL_UNVERIFIED` events are URLs that haven't yet been visited by `httpx`. Once `httpx` visits them, it reraises them as `URL`s, tagged with their resulting status code.
 
-For example, when [`excavate`](../#types-of-modules) gets an `HTTP_RESPONSE` event, it extracts links from the raw HTTP response as `URL_UNVERIFIED`s and then passes them back to `httpx` to be visited.
+For example, when [`excavate`](index.md#types-of-modules) gets an `HTTP_RESPONSE` event, it extracts links from the raw HTTP response as `URL_UNVERIFIED`s and then passes them back to `httpx` to be visited.
 
 By default, `URL_UNVERIFIED`s are hidden from output. If you want to see all of them including the out-of-scope ones, you can do it by changing `omit_event_types` and `scope_report_distance` in the config like so:
diff --git a/mkdocs.yml b/mkdocs.yml
index a6ae600fa3..21a0172047 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -7,6 +7,10 @@ site_description: >-
 # Repository
 repo_name: blacklanternsecurity/bbot
 repo_url: https://github.com/blacklanternsecurity/bbot
+watch:
+  - "mkdocs.yml"
+  - "bbot"
+  - "docs"
 
 # Page tree
 nav:
@@ -26,6 +30,8 @@ nav:
     - Nuclei: modules/nuclei.md
   - Contribution:
     - How to Write a Module: contribution.md
+  - Developer Reference:
+    - Scanner: dev/scanner.md
   - Misc:
     - Release History: release_history.md
     - Troubleshooting: troubleshooting.md
@@ -46,6 +52,18 @@ theme:
 plugins:
   - search
   - extra-sass
+  - mkdocstrings:
+      enable_inventory: true
+      handlers:
+        python:
+          options:
+            show_signature_annotations: true
+            show_root_toc_entry: false
+            separate_signature: true
+            docstring_section_style: "list"
+          import:
+            - https://docs.python.org/3.11/objects.inv
+            - https://omegaconf.readthedocs.io/en/latest/objects.inv
 
 markdown_extensions:
   - attr_list
diff --git a/poetry.lock b/poetry.lock
index edda4d0057..2f1daa6c83 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,27 +2,27 @@
 
 [[package]]
 name = "ansible"
-version = "7.5.0"
+version = "7.7.0"
 description = "Radically simple IT automation"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "ansible-7.5.0-py3-none-any.whl", hash = "sha256:a2deadeb8a199abfbd7c1960bc126697be517ac4310b2f59eb2190706e6a2637"},
-    {file = "ansible-7.5.0.tar.gz", hash = "sha256:4f08ca25bb29005c1afc4125e837882ad7a2c67ff0cc9d1a361b89ad09cf8c44"},
+    {file = "ansible-7.7.0-py3-none-any.whl", hash = "sha256:4f8d346db1b6cec0f30b77935b3dce5633d76881186da839b58b34b48a089b92"},
+    {file = "ansible-7.7.0.tar.gz", hash = "sha256:9c206ba515f13a0cc9c919d496218ba26df581755bdc39be85b074066c699a02"},
 ]
 
 [package.dependencies]
-ansible-core = ">=2.14.5,<2.15.0"
+ansible-core = ">=2.14.7,<2.15.0"
 
 [[package]]
 name = "ansible-core"
-version = "2.14.6"
+version = "2.14.10"
 description = "Radically simple IT automation"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "ansible-core-2.14.6.tar.gz", hash = "sha256:0cddb0df454561981f1c541db7ac5398d5e9de452ea1f01847acbd031fd7d2b2"},
-    {file = "ansible_core-2.14.6-py3-none-any.whl", hash = "sha256:08963309f44cd98862aba8d887ac5c2b4159cd1c2e31ac9cf47d661e985e6bb9"},
+    {file = "ansible-core-2.14.10.tar.gz", hash = "sha256:2c5d26d0f8d152020dd92d98f595f63c248a1997b8d74c5c3fb0d2408ec5a487"},
+    {file = "ansible_core-2.14.10-py3-none-any.whl", hash = "sha256:97ab6c2d62940a7a02c4f2505aad5c5e30bb486e8122899c5c147b16e98ef51e"},
 ]
 
 [package.dependencies]
@@ -34,16 +34,17 @@ resolvelib = ">=0.5.3,<0.9.0"
 
 [[package]]
 name = "ansible-runner"
-version = "2.3.2" +version = "2.3.4" description = "\"Consistent Ansible Python API and CLI with container and process isolation runtime capabilities\"" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "ansible-runner-2.3.2.tar.gz", hash = "sha256:c420e76ba18311d6350c8982fc3c0519b00624654053e538b0ea630651b08921"}, - {file = "ansible_runner-2.3.2-py3-none-any.whl", hash = "sha256:21f94eeaa536e19ab3913ad882c0722c86aad9cb371eebf99361b8c1fb38ee8c"}, + {file = "ansible-runner-2.3.4.tar.gz", hash = "sha256:79a1bd134d813c8ea3740599c6fd961a11425ce7757f2fd725cf56d6a1a7236c"}, + {file = "ansible_runner-2.3.4-py3-none-any.whl", hash = "sha256:73cca4fe509c8f4f0e93bf8ae13492c98454c62152685e43bdbd0f51907682bb"}, ] [package.dependencies] +importlib-metadata = {version = ">=4.6,<6.3", markers = "python_version < \"3.10\""} packaging = "*" pexpect = ">=4.5" python-daemon = "*" @@ -110,6 +111,17 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -176,28 +188,38 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cattrs" -version = "22.2.0" +version = "23.1.2" description = "Composable complex class support for attrs and dataclasses." optional = false python-versions = ">=3.7" files = [ - {file = "cattrs-22.2.0-py3-none-any.whl", hash = "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21"}, - {file = "cattrs-22.2.0.tar.gz", hash = "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d"}, + {file = "cattrs-23.1.2-py3-none-any.whl", hash = "sha256:b2bb14311ac17bed0d58785e5a60f022e5431aca3932e3fc5cc8ed8639de50a4"}, + {file = "cattrs-23.1.2.tar.gz", hash = "sha256:db1c821b8c537382b2c7c66678c3790091ca0275ac486c76f3c8f3920e83c657"}, ] [package.dependencies] attrs = ">=20" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.2.0,<5.0.0)"] +cbor2 = ["cbor2 (>=5.4.6,<6.0.0)"] +msgpack = ["msgpack (>=1.0.2,<2.0.0)"] +orjson = ["orjson (>=3.5.2,<4.0.0)"] +pyyaml = ["PyYAML (>=6.0,<7.0)"] +tomlkit = ["tomlkit (>=0.11.4,<0.12.0)"] +ujson = ["ujson (>=5.4.0,<6.0.0)"] [[package]] name = "certifi" -version = "2023.5.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] @@ -289,97 +311,97 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -413,62 +435,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.5" +version = "7.3.1" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = 
"coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = 
"coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = 
"coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, ] [package.dependencies] @@ -479,30 +502,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = 
"cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, ] [package.dependencies] @@ 
-511,29 +538,29 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = ["setuptools-rust (>=0.11.4)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "deepdiff" -version = "6.3.0" +version = "6.5.0" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." optional = false python-versions = ">=3.7" files = [ - {file = "deepdiff-6.3.0-py3-none-any.whl", hash = "sha256:15838bd1cbd046ce15ed0c41e837cd04aff6b3e169c5e06fca69d7aa11615ceb"}, - {file = "deepdiff-6.3.0.tar.gz", hash = "sha256:6a3bf1e7228ac5c71ca2ec43505ca0a743ff54ec77aa08d7db22de6bc7b2b644"}, + {file = "deepdiff-6.5.0-py3-none-any.whl", hash = "sha256:acdc1651a3e802415e0337b7e1192df5cd7c17b72fbab480466fdd799b9a72e7"}, + {file = "deepdiff-6.5.0.tar.gz", hash = "sha256:080b1359d6128f3f5f1738c6be3064f0ad9b0cc41994aa90a028065f6ad11f25"}, ] [package.dependencies] ordered-set = ">=4.0.2,<4.2.0" [package.extras] -cli = ["click (==8.1.3)", "pyyaml (==6.0)"] +cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"] optimize = ["orjson"] [[package]] @@ -579,13 +606,13 @@ files = [ [[package]] name = "dunamai" -version = "1.17.0" +version = "1.18.0" description = "Dynamic version generation" optional = false python-versions = ">=3.5,<4.0" files = [ - {file = "dunamai-1.17.0-py3-none-any.whl", hash = "sha256:5aa4ac1085de10691269af021b10497261a5dd644f277e2a21822212604d877b"}, - {file = "dunamai-1.17.0.tar.gz", hash = "sha256:459381b585a1e78e4070f0d38a6afb4d67de2ee95064bf6b0438ec620dde0820"}, + {file = "dunamai-1.18.0-py3-none-any.whl", hash = "sha256:f9284a9f4048f0b809d11539896e78bde94c05b091b966a04a44ab4c48df03ce"}, + {file = "dunamai-1.18.0.tar.gz", hash = "sha256:5200598561ea5ba956a6174c36e402e92206c6a6aa4a93a6c5cb8003ee1e0997"}, ] [package.dependencies] @@ -593,13 +620,13 @@ packaging = ">=20.9" [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -607,18 +634,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.12.0" +version = "3.12.4" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, - {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, + {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, + {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] +typing = ["typing-extensions (>=4.7.1)"] [[package]] name = "flake8" @@ -636,6 +664,37 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.1.0,<3.2.0" +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." +optional = false +python-versions = "*" +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + +[[package]] +name = "griffe" +version = "0.36.2" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "griffe-0.36.2-py3-none-any.whl", hash = "sha256:ba71895a3f5f606b18dcd950e8a1f8e7332a37f90f24caeb002546593f2e0eee"}, + {file = "griffe-0.36.2.tar.gz", hash = "sha256:333ade7932bb9096781d83092602625dfbfe220e87a039d2801259a1bd41d1c2"}, +] + +[package.dependencies] +colorama = ">=0.4" + [[package]] name = "h11" version = "0.14.0" @@ -754,6 +813,25 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "importlib-metadata" +version = "6.2.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.2.1-py3-none-any.whl", hash = "sha256:f65e478a7c2177bd19517a3a15dac094d253446d8690c5f3e71e735a04312374"}, + {file = "importlib_metadata-6.2.1.tar.gz", hash = "sha256:5a66966b39ff1c14ef5b2d60c1d842b0141fefff0f4cc6365b4bc9446c652807"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -782,6 +860,20 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "libsass" +version = "0.22.0" +description = "Sass for Python: A straightforward binding of libsass for Python." +optional = false +python-versions = ">=3.6" +files = [ + {file = "libsass-0.22.0-cp36-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f1efc1b612299c88aec9e39d6ca0c266d360daa5b19d9430bdeaffffa86993f9"}, + {file = "libsass-0.22.0-cp37-abi3-macosx_10_15_x86_64.whl", hash = "sha256:081e256ab3c5f3f09c7b8dea3bf3bf5e64a97c6995fd9eea880639b3f93a9f9a"}, + {file = "libsass-0.22.0-cp37-abi3-win32.whl", hash = "sha256:89c5ce497fcf3aba1dd1b19aae93b99f68257e5f2026b731b00a872f13324c7f"}, + {file = "libsass-0.22.0-cp37-abi3-win_amd64.whl", hash = "sha256:65455a2728b696b62100eb5932604aa13a29f4ac9a305d95773c14aaa7200aaf"}, + {file = "libsass-0.22.0.tar.gz", hash = "sha256:3ab5ad18e47db560f4f0c09e3d28cf3bb1a44711257488ac2adad69f4f7f8425"}, +] + [[package]] name = "lockfile" version = "0.12.2" @@ -795,153 +887,196 @@ files = [ [[package]] name = "lxml" -version = "4.9.2" +version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ - {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, - {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, - {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, - {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, - {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, - {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, - {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, - {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, - {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, - {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, - {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, - {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, - {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, - {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, - {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, - {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, - {file = 
"lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, - {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, - {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, - {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, - {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, - {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, - {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, - {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, - {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, - {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, - {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, - {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, - {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, - {file = 
"lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, - {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, - {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, - {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, - {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, - {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, - {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, - {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, - {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, - {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, - {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, - {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, - {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, - {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = 
"lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = 
"lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = 
"sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] +source = ["Cython (>=0.29.35)"] + +[[package]] +name = "markdown" +version = "3.4.4" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, + {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] +testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] @@ -955,6 +1090,160 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "mkdocs" +version = "1.5.3" +description = "Project documentation with Markdown." +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"}, + {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +jinja2 = ">=2.11.1" +markdown = ">=3.2.1" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +pathspec = ">=0.11.1" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.5.0" +description = "Automatically link across pages in MkDocs." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"}, + {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"}, +] + +[package.dependencies] +Markdown = ">=3.3" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-extra-sass-plugin" +version = "0.1.0" +description = "This plugin adds stylesheets to your mkdocs site from `Sass`/`SCSS`." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "mkdocs-extra-sass-plugin-0.1.0.tar.gz", hash = "sha256:cca7ae778585514371b22a63bcd69373d77e474edab4b270cf2924e05c879219"}, + {file = "mkdocs_extra_sass_plugin-0.1.0-py3-none-any.whl", hash = "sha256:10aa086fa8ef1fc4650f7bb6927deb7bf5bbf5a2dd3178f47e4ef44546b156db"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.6.3" +libsass = ">=0.15" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-material" +version = "9.3.2" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material-9.3.2-py3-none-any.whl", hash = "sha256:f2fd5cef6f0266b4caad6414f31c6a51e3183dbdd341995ad8fa7f33bc998c3d"}, + {file = "mkdocs_material-9.3.2.tar.gz", hash = "sha256:7b3a35a7731af02d70d120224fcec053ce09bebbf83dff3366ab72abc4d5fc89"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.5,<2.0" +mkdocs-material-extensions = ">=1.1,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4,<2023.0" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material-extensions" +version = "1.1.1" +description = "Extension pack for Python Markdown and MkDocs Material." +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, + {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.22.0" +description = "Automatic documentation from sources, for MkDocs." +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocstrings-0.22.0-py3-none-any.whl", hash = "sha256:2d4095d461554ff6a778fdabdca3c00c468c2f1459d469f7a7f622a2b23212ba"}, + {file = "mkdocstrings-0.22.0.tar.gz", hash = "sha256:82a33b94150ebb3d4b5c73bab4598c3e21468c79ec072eff6931c8f3bfc38256"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.2" +mkdocs-autorefs = ">=0.3.1" +pymdown-extensions = ">=6.3" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.7.0" +description = "A Python handler for mkdocstrings." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocstrings_python-1.7.0-py3-none-any.whl", hash = "sha256:85c5f009a5a0ebb6076b7818c82a2bb0eebd0b54662628fa8b25ee14a6207951"}, + {file = "mkdocstrings_python-1.7.0.tar.gz", hash = "sha256:5dac2712bd38a3ff0812b8650a68b232601d1474091b380a8b5bc102c8c0d80a"}, +] + +[package.dependencies] +griffe = ">=0.35" +mkdocstrings = ">=0.20" + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1020,15 +1309,25 @@ files = [ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -1047,28 +1346,28 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1173,88 +1472,88 @@ files = [ [[package]] name = "pycryptodome" -version = "3.18.0" +version = "3.19.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.18.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d1497a8cd4728db0e0da3c304856cb37c0c4e3d0b36fcbabcc1600f18504fc54"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:928078c530da78ff08e10eb6cada6e0dff386bf3d9fa9871b4bbc9fbc1efe024"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:157c9b5ba5e21b375f052ca78152dd309a09ed04703fd3721dce3ff8ecced148"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:d20082bdac9218649f6abe0b885927be25a917e29ae0502eaf2b53f1233ce0c2"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad74044e5f5d2456c11ed4cfd3e34b8d4898c0cb201c4038fe41458a82ea27"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win32.whl", hash = "sha256:62a1e8847fabb5213ccde38915563140a5b338f0d0a0d363f996b51e4a6165cf"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win_amd64.whl", hash = "sha256:16bfd98dbe472c263ed2821284118d899c76968db1a6665ade0c46805e6b29a4"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7a3d22c8ee63de22336679e021c7f2386f7fc465477d59675caa0e5706387944"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:78d863476e6bad2a592645072cc489bb90320972115d8995bcfbee2f8b209918"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b6a610f8bfe67eab980d6236fdc73bfcdae23c9ed5548192bb2d530e8a92780e"}, - {file = 
"pycryptodome-3.18.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:422c89fd8df8a3bee09fb8d52aaa1e996120eafa565437392b781abec2a56e14"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:9ad6f09f670c466aac94a40798e0e8d1ef2aa04589c29faa5b9b97566611d1d1"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:53aee6be8b9b6da25ccd9028caf17dcdce3604f2c7862f5167777b707fbfb6cb"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:10da29526a2a927c7d64b8f34592f461d92ae55fc97981aab5bbcde8cb465bb6"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f21efb8438971aa16924790e1c3dba3a33164eb4000106a55baaed522c261acf"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4944defabe2ace4803f99543445c27dd1edbe86d7d4edb87b256476a91e9ffa4"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:51eae079ddb9c5f10376b4131be9589a6554f6fd84f7f655180937f611cd99a2"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:83c75952dcf4a4cebaa850fa257d7a860644c70a7cd54262c237c9f2be26f76e"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:957b221d062d5752716923d14e0926f47670e95fead9d240fa4d4862214b9b2f"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win32.whl", hash = "sha256:795bd1e4258a2c689c0b1f13ce9684fa0dd4c0e08680dcf597cf9516ed6bc0f3"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win_amd64.whl", hash = "sha256:b1d9701d10303eec8d0bd33fa54d44e67b8be74ab449052a8372f12a66f93fb9"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:cb1be4d5af7f355e7d41d36d8eec156ef1382a88638e8032215c215b82a4b8ec"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-win32.whl", hash = "sha256:fc0a73f4db1e31d4a6d71b672a48f3af458f548059aa05e83022d5f61aac9c08"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f022a4fd2a5263a5c483a2bb165f9cb27f2be06f2f477113783efe3fe2ad887b"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:363dd6f21f848301c2dcdeb3c8ae5f0dee2286a5e952a0f04954b82076f23825"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12600268763e6fec3cefe4c2dcdf79bde08d0b6dc1813887e789e495cb9f3403"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4604816adebd4faf8810782f137f8426bf45fee97d8427fa8e1e49ea78a52e2c"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01489bbdf709d993f3058e2996f8f40fee3f0ea4d995002e5968965fa2fe89fb"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3811e31e1ac3069988f7a1c9ee7331b942e605dfc0f27330a9ea5997e965efb2"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4b967bb11baea9128ec88c3d02f55a3e338361f5e4934f5240afcb667fdaec"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9c8eda4f260072f7dbe42f473906c659dcbadd5ae6159dfb49af4da1293ae380"}, - {file = "pycryptodome-3.18.0.tar.gz", hash = "sha256:c9adee653fc882d98956e33ca2c1fb582e23a8af7ac82fee75bd6113c55a0413"}, + {file = 
"pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = 
"sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, + {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, ] [[package]] name = "pydantic" -version = "1.10.7" +version = "1.10.12" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, - {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, - {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, - {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, - {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, - {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, - {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, - {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, - {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, - {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", 
hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, ] [package.dependencies] @@ -1275,15 +1574,29 @@ files = [ {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyjwt" -version = "2.7.0" +version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" files = [ - {file = "PyJWT-2.7.0-py3-none-any.whl", hash = "sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1"}, - {file = "PyJWT-2.7.0.tar.gz", hash = "sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074"}, + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] [package.extras] @@ -1292,15 +1605,33 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pymdown-extensions" +version = "10.3" +description = "Extension pack for Python Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pymdown_extensions-10.3-py3-none-any.whl", hash = "sha256:77a82c621c58a83efc49a389159181d570e370fff9f810d3a4766a75fc678b66"}, + {file = "pymdown_extensions-10.3.tar.gz", hash = "sha256:94a0d8a03246712b64698af223848fd80aaf1ae4c4be29c8c61939b0467b5722"}, +] + +[package.dependencies] +markdown = ">=3.2" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.12)"] + [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -1316,13 +1647,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.21.0" +version = "0.21.1" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, - {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, ] [package.dependencies] @@ -1334,13 +1665,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring 
coverage." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -1448,53 +1779,188 @@ setuptools = ">=62.4.0" devel = ["coverage", "docutils", "isort", "testscenarios (>=0.4)", "testtools", "twine"] test = ["coverage", "docutils", "testscenarios (>=0.4)", "testtools"] +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash 
= "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "regex" +version = "2022.10.31" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.6" +files = [ + {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, + {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"}, + {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"}, + {file = 
"regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"}, + {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"}, + {file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"}, + {file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"}, + {file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"}, + {file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"}, + {file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"}, + {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"}, + {file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"}, + {file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"}, + {file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"}, + {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"}, + {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"}, + {file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"}, + {file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"}, + {file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"}, + {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"}, + {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"}, + {file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = 
"sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"}, + {file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"}, + {file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"}, + {file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"}, + {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"}, + {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"}, + {file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"}, + {file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"}, + {file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"}, + {file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"}, + {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"}, + {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"}, + {file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"}, + {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, + {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, ] [[package]] @@ -1582,19 +2048,19 @@ test = ["commentjson", "packaging", "pytest"] [[package]] name = "setuptools" -version = "67.8.0" +version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1620,13 +2086,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -1645,13 +2111,13 @@ widechars = ["wcwidth"] [[package]] name = "tldextract" -version = "3.4.4" +version = "3.6.0" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
optional = false python-versions = ">=3.7" files = [ - {file = "tldextract-3.4.4-py3-none-any.whl", hash = "sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2"}, - {file = "tldextract-3.4.4.tar.gz", hash = "sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234"}, + {file = "tldextract-3.6.0-py3-none-any.whl", hash = "sha256:30a492de80f4de215aa998588ba5c2e625ee74ace3a2705cfb52b0021053bcbe"}, + {file = "tldextract-3.6.0.tar.gz", hash = "sha256:a5d8b6583791daca268a7592ebcf764152fa49617983c49916ee9de99b366222"}, ] [package.dependencies] @@ -1673,24 +2139,24 @@ files = [ [[package]] name = "tomlkit" -version = "0.11.8" +version = "0.12.1" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, ] [[package]] name = "typing-extensions" -version = "4.5.0" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -1709,13 +2175,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.0.2" +version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, - {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, + {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, + {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, ] [package.extras] @@ -1726,23 +2192,62 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.24.1" +version = "20.24.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.1-py3-none-any.whl", hash = "sha256:01aacf8decd346cf9a865ae85c0cdc7f64c8caa07ff0d8b1dfc1733d10677442"}, - {file = "virtualenv-20.24.1.tar.gz", hash = "sha256:2ef6a237c31629da6442b0bcaa3999748108c7166318d1f55cc9f8d7294e97bd"}, + {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, + {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, ] [package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.12,<4" -platformdirs = ">=3.5.1,<4" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] +watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "websockets" @@ -1825,13 +2330,13 @@ files = [ [[package]] name = "werkzeug" -version = "2.3.4" +version = "2.3.7" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.3.4-py3-none-any.whl", hash = "sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f"}, - {file = "Werkzeug-2.3.4.tar.gz", hash = "sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76"}, + {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, + {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, ] [package.dependencies] @@ -1875,7 +2380,22 @@ files = [ [package.dependencies] xmltodict = ">=0.12.0,<0.13.0" +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "caa5eee14e9087c9a47b3f25af5e0a453269fe68eaa8617d141c6b1169250c3f" +content-hash = "fefc413a0132045bbdb1665144f272f9ca328dfc0db48926c2585a8927dd0af1" diff --git a/pyproject.toml b/pyproject.toml index 3b8ed842b7..32df2045be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,15 @@ pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" + +[tool.poetry.group.docs.dependencies] +mkdocs = "^1.5.2" +mkdocs-extra-sass-plugin = "^0.1.0" +mkdocs-material = "^9.2.5" +mkdocs-material-extensions = "^1.1.1" +mkdocstrings = "^0.22.0" +mkdocstrings-python = "^1.6.0" + [tool.pytest.ini_options] env = [ "BBOT_TESTING = True", From c36be66a0cde70a8c2d315a2ea2e2e7e5dd41c57 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 12:11:11 -0400 Subject: [PATCH 083/123] documenting Scanner() class --- bbot/cli.py | 2 +- bbot/scanner/scanner.py | 388 ++++++++++++++------- bbot/test/test_step_1/test_manager.py | 2 +- bbot/test/test_step_2/module_tests/base.py | 2 +- 4 files changed, 260 insertions(+), 134 deletions(-) diff --git a/bbot/cli.py b/bbot/cli.py index 675f320df4..4c45254d64 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -299,7 +299,7 @@ async def _main(): scanner.helpers.word_cloud.load() - await scanner.prep() + await scanner._prep() if not options.dry_run: if not options.agent_mode and not options.yes and sys.stdin.isatty(): diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 49965b4c72..c3c3f6070a 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -66,7 +66,19 @@ class Scanner: >>> await my_scan.start_without_generator() Attributes: - status (str): Status of scan + status (str): Status of scan, representing its current state. It can take on the following string values, each of which is mapped to an integer code in `_status_codes`: + ```markdown + - "NOT_STARTED" (0): Initial status before the scan starts. + - "STARTING" (1): Status when the scan is initializing. 
+            - "RUNNING" (2): Status when the scan is in progress.
+            - "FINISHING" (3): Status when the scan is in the process of finalizing.
+            - "CLEANING_UP" (4): Status when the scan is cleaning up resources.
+            - "ABORTING" (5): Status when the scan is in the process of being aborted.
+            - "ABORTED" (6): Status when the scan has been aborted.
+            - "FAILED" (7): Status when the scan has encountered a failure.
+            - "FINISHED" (8): Status when the scan has successfully completed.
+            ```
+        _status_code (int): The numerical representation of the current scan status, stored for internal use. It is mapped according to the values in `_status_codes`.
         target (ScanTarget): Target of scan
         config (omegaconf.dictconfig.DictConfig): BBOT config
         whitelist (ScanTarget): Scan whitelist (by default this is the same as `target`)
@@ -77,6 +89,11 @@ class Scanner:
         modules (dict): Holds all loaded modules in this format: `{"module_name": Module()}`
         stats (ScanStats): Holds high-level scan statistics such as how many events have been produced and consumed by each module
         home (pathlib.Path): Base output directory of the scan (default: `~/.bbot/scans/`)
+
+    Notes:
+        - The status is read-only once set to "ABORTING" until it transitions to "ABORTED."
+        - Invalid statuses are logged but not applied.
+        - Setting a status will trigger the `on_status` event in the dispatcher.
     """
 
     _status_codes = {
@@ -239,15 +256,16 @@ def __init__(
         self._stopping = False
 
         self._dns_regexes = None
-        self._log_handlers = None
+        self.__log_handlers = None
         self._log_handler_backup = []
 
     def _on_keyboard_interrupt(self, loop, event):
         self.stop()
 
-    async def prep(self):
-        # event = asyncio.Event()
-        # self._loop.add_signal_handler(signal.SIGINT, self._on_keyboard_interrupt, loop, event)
+    async def _prep(self):
+        """
+        Calls .load_modules() and .setup_modules() in preparation for a scan
+        """
 
         self.helpers.mkdir(self.home)
         if not self._prepped:
@@ -282,12 +300,13 @@ async def async_start_without_generator(self):
             pass
 
     async def async_start(self):
+        """Start the scan, yielding events as they are discovered (asynchronous generator)."""
        failed = True
         scan_start_time = datetime.now()
         try:
-            await self.prep()
+            await self._prep()
 
-            self.start_log_handlers()
+            self._start_log_handlers()
 
             if not self.target:
                 self.warning(f"No scan targets specified")
@@ -315,14 +334,14 @@ async def async_start(self):
             self.init_events_task = asyncio.create_task(self.manager.init_events())
 
             self.status = "RUNNING"
-            self.start_modules()
+            self._start_modules()
             self.verbose(f"{len(self.modules):,} modules started")
 
             # main scan loop
             while 1:
                 # abort if we're aborting
                 if self.aborting:
-                    self.drain_queues()
+                    self._drain_queues()
                     break
 
                 if "python" in self.modules:
@@ -358,9 +377,9 @@ async def async_start(self):
             self.critical(f"Unexpected error during scan:\n{traceback.format_exc()}")
 
         finally:
-            self.cancel_tasks()
-            await self.report()
-            await self.cleanup()
+            self._cancel_tasks()
+            await self._report()
+            await self._cleanup()
 
             log_fn = self.hugesuccess
             if self.status == "ABORTING":
@@ -378,16 +397,35 @@ async def async_start(self):
 
             await self.dispatcher.on_finish(self)
 
-            self.stop_log_handlers()
+            self._stop_log_handlers()
 
-    def start_modules(self):
+    def _start_modules(self):
         self.verbose(f"Starting module worker loops")
         for module_name, module in self.modules.items():
             module.start()
 
     async def setup_modules(self, remove_failed=True):
+        """Asynchronously initializes all loaded modules by invoking their `setup()` methods.
+
+        Args:
+            remove_failed (bool): Flag indicating whether to remove modules that fail setup.
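+                Defaults to True.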
+
+        Returns:
+            dict: Dictionary containing lists of module names categorized by their setup status.
+                'succeeded' - List of modules that successfully set up.
+                'hard_failed' - List of modules that encountered a hard failure during setup.
+                'soft_failed' - List of modules that encountered a soft failure during setup.
+
+        Raises:
+            ScanError: If no output modules could be loaded.
+
+        Notes:
+            Hard-failed modules are set to an error state and removed if `remove_failed` is True.
+            Soft-failed modules are not set to an error state but are also removed if `remove_failed` is True.
+        """
         await self.load_modules()
         self.verbose(f"Setting up modules")
+        succeeded = []
         hard_failed = []
         soft_failed = []
@@ -395,6 +433,7 @@ async def setup_modules(self, remove_failed=True):
             module_name, status, msg = await task
             if status == True:
                 self.debug(f"Setup succeeded for {module_name} ({msg})")
+                succeeded.append(module_name)
             elif status == False:
                 self.error(f"Setup hard-failed for {module_name}: {msg}")
                 self.modules[module_name].set_error_state()
@@ -411,23 +450,107 @@ async def setup_modules(self, remove_failed=True):
         total_failed = len(hard_failed + soft_failed)
         if hard_failed:
             msg = f"Setup hard-failed for {len(hard_failed):,} modules ({','.join(hard_failed)})"
-            self.fail_setup(msg)
+            self._fail_setup(msg)
         elif total_failed > 0:
             self.warning(f"Setup failed for {total_failed:,} modules")
 
+        return {
+            "succeeded": succeeded,
+            "hard_failed": hard_failed,
+            "soft_failed": soft_failed,
+        }
+
+    async def load_modules(self):
+        """
+        Import and instantiate all scan modules (including internal ones).
+        Module dependencies will be installed as part of this process.
+        """
+        if not self._modules_loaded:
+            all_modules = list(set(self._scan_modules + self._output_modules + self._internal_modules))
+            if not all_modules:
+                self.warning(f"No modules to load")
+                return
+
+            if not self._scan_modules:
+                self.warning(f"No scan modules to load")
+
+            # install module dependencies
+            succeeded, failed = await self.helpers.depsinstaller.install(
+                *self._scan_modules, *self._output_modules, *self._internal_modules
+            )
+            if failed:
+                msg = f"Failed to install dependencies for {len(failed):,} modules: {','.join(failed)}"
+                self._fail_setup(msg)
+            modules = sorted([m for m in self._scan_modules if m in succeeded])
+            output_modules = sorted([m for m in self._output_modules if m in succeeded])
+            internal_modules = sorted([m for m in self._internal_modules if m in succeeded])
+
+            # Load scan modules
+            self.verbose(f"Loading {len(modules):,} scan modules: {','.join(modules)}")
+            loaded_modules, failed = self._load_modules(modules)
+            self.modules.update(loaded_modules)
+            if len(failed) > 0:
+                msg = f"Failed to load {len(failed):,} scan modules: {','.join(failed)}"
+                self._fail_setup(msg)
+            if loaded_modules:
+                self.info(
+                    f"Loaded {len(loaded_modules):,}/{len(self._scan_modules):,} scan modules ({','.join(loaded_modules)})"
+                )
+
+            # Load internal modules
+            self.verbose(f"Loading {len(internal_modules):,} internal modules: {','.join(internal_modules)}")
+            loaded_internal_modules, failed_internal = self._load_modules(internal_modules)
+            self.modules.update(loaded_internal_modules)
+            if len(failed_internal) > 0:
+                msg = f"Failed to load {len(failed_internal):,} internal modules: {','.join(failed_internal)}"
+                self._fail_setup(msg)
+            if loaded_internal_modules:
+                self.info(
+                    f"Loaded {len(loaded_internal_modules):,}/{len(self._internal_modules):,} internal modules ({','.join(loaded_internal_modules)})"
+                )
+
+            # Load output modules
+            self.verbose(f"Loading {len(output_modules):,} output modules: {','.join(output_modules)}")
+            loaded_output_modules, failed_output = self._load_modules(output_modules)
+            self.modules.update(loaded_output_modules)
+            if len(failed_output) > 0:
+                msg = f"Failed to load {len(failed_output):,} output modules: {','.join(failed_output)}"
+                self._fail_setup(msg)
+            if loaded_output_modules:
+                self.info(
+                    f"Loaded {len(loaded_output_modules):,}/{len(self._output_modules):,} output modules ({','.join(loaded_output_modules)})"
+                )
+
+            self.modules = OrderedDict(sorted(self.modules.items(), key=lambda x: getattr(x[-1], "_priority", 0)))
+            self._modules_loaded = True
+
     def stop(self):
+        """
+        Forcefully stop an in-progress scan
+        """
         if not self._stopping:
             self._stopping = True
             self.status = "ABORTING"
             self.hugewarning(f"Aborting scan")
             self.trace()
-            self.cancel_tasks()
-            self.drain_queues()
+            self._cancel_tasks()
+            self._drain_queues()
             self.helpers.kill_children()
-            self.drain_queues()
+            self._drain_queues()
             self.helpers.kill_children()
 
     async def finish(self):
+        """Finalizes the scan by invoking the `finished()` method on all active modules if new activity is detected.
+
+        The method is idempotent and will return False if no new activity has been recorded since the last invocation.
+
+        Returns:
+            bool: True if new activity has been detected and the `finished()` method is invoked on all modules.
+                False if no new activity has been detected since the last invocation.
+
+        Notes:
+            This method alters the scan's status to "FINISHING" if new activity is detected.
+        """
         # if new events were generated since last time we were here
         if self.manager._new_activity:
             self.manager._new_activity = False
@@ -443,7 +566,7 @@ async def finish(self):
             self.verbose("Completed final finish()")
             return False
 
-    def drain_queues(self):
+    def _drain_queues(self):
         # Empty event queues
         self.debug("Draining queues")
         for module in self.modules.values():
@@ -460,7 +583,7 @@ def drain_queues(self):
                 self.manager.incoming_event_queue.get_nowait()
         self.debug("Finished draining queues")
 
-    def cancel_tasks(self):
+    def _cancel_tasks(self):
         tasks = []
         # module workers
         for m in self.modules.values():
@@ -479,13 +602,13 @@ def cancel_tasks(self):
         # process pool
         self.process_pool.shutdown(cancel_futures=True)
 
-    async def report(self):
+    async def _report(self):
         for mod in self.modules.values():
             context = f"{mod.name}.report()"
             async with self._acatch(context), mod._task_counter.count(context):
                 await mod.report()
 
-    async def cleanup(self):
+    async def _cleanup(self):
         # clean up modules
         self.status = "CLEANING_UP"
         for mod in self.modules.values():
@@ -498,7 +621,16 @@ async def cleanup(self):
 
     def in_scope(self, e):
         """
-        Checks whitelist and blacklist, also taking scope_distance into account
+        Check whether a hostname, url, IP, etc. is in scope.
+        Accepts either events or string data.
+
+        Checks whitelist and blacklist.
+        If `e` is an event and its scope distance is zero, it will be considered in-scope.
+
+        Examples:
+            Check if a URL is in scope:
+            >>> scan.in_scope("http://www.evilcorp.com")
+            True
         """
         try:
             e = make_event(e, dummy=True)
@@ -508,10 +640,16 @@ def in_scope(self, e):
         return in_scope and not self.blacklisted(e)
 
     def blacklisted(self, e):
+        """
+        Check whether a hostname, url, IP, etc. is blacklisted.
+        """
         e = make_event(e, dummy=True)
         return e in self.blacklist
 
     def whitelisted(self, e):
+        """
+        Check whether a hostname, url, IP, etc. is whitelisted.
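+
+        Examples:
+            >>> scan.whitelisted("www.evilcorp.com")
+            True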
+ """ e = make_event(e, dummy=True) return e in self.whitelist @@ -521,6 +659,9 @@ def word_cloud(self): @property def stopping(self): + """ + Returns True if the scan is not running + """ return not self.running @property @@ -573,6 +714,25 @@ def log(self): @property def root_event(self): + """ + The root scan event, e.g.: + ```json + { + "type": "SCAN", + "id": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "data": "pixilated_kathryn (SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54)", + "scope_distance": 0, + "scan": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "timestamp": 1694548779.616255, + "source": "SCAN:1188928d942ace8e3befae0bdb9c3caa22705f54", + "tags": [ + "distance-0" + ], + "module": "TARGET", + "module_sequence": "TARGET" + } + ``` + """ root_event = self.make_event(data=f"{self.name} ({self.id})", event_type="SCAN", dummy=True) root_event._id = self.id root_event.scope_distance = 0 @@ -581,12 +741,69 @@ def root_event(self): root_event.module = self.helpers._make_dummy_module(name="TARGET", _type="TARGET") return root_event + def run_in_executor(self, callback, *args, **kwargs): + """ + Run a synchronous task in the event loop's default thread pool executor + + Examples: + Execute callback: + >>> result = await self.scan.run_in_executor(callback_fn, arg1, arg2) + """ + callback = partial(callback, **kwargs) + return self._loop.run_in_executor(None, callback, *args) + + def run_in_executor_mp(self, callback, *args, **kwargs): + """ + Same as run_in_executor() except with a process pool executor + Use only in cases where callback is CPU-bound + + Examples: + Execute callback: + >>> result = await self.scan.run_in_executor_mp(callback_fn, arg1, arg2) + """ + callback = partial(callback, **kwargs) + return self._loop.run_in_executor(self.process_pool, callback, *args) + + @property + def dns_regexes(self): + """ + A list of DNS hostname regexes generated from the scan target + For the purpose of extracting hostnames + + Examples: + + Extract hostnames from text: + >>> for regex in scan.dns_regexes: + >>> for match in regex.finditer(response.text): + >>> hostname = match.group().lower() + """ + if self._dns_regexes is None: + dns_targets = set(t.host for t in self.target if t.host and isinstance(t.host, str)) + dns_whitelist = set(t.host for t in self.whitelist if t.host and isinstance(t.host, str)) + dns_targets.update(dns_whitelist) + dns_targets = sorted(dns_targets, key=len) + dns_targets_set = set() + dns_regexes = [] + for t in dns_targets: + if not any(x in dns_targets_set for x in self.helpers.domain_parents(t, include_self=True)): + dns_targets_set.add(t) + dns_regexes.append(re.compile(r"((?:(?:[\w-]+)\.)+" + re.escape(t) + ")", re.I)) + self._dns_regexes = dns_regexes + + return self._dns_regexes + @property def useragent(self): + """ + Convenient shortcut to the HTTP user-agent configured for the scan + """ return self.config.get("user_agent", "BBOT") @property def json(self): + """ + A dictionary representation of the scan including its name, ID, targets, whitelist, blacklist, and modules + """ j = dict() for i in ("id", "name"): v = getattr(self, i, "") @@ -663,8 +880,15 @@ def critical(self, *args, trace=True, **kwargs): self.trace() @property - def log_handlers(self): - if self._log_handlers is None: + def log_level(self): + """ + Return the current log level, e.g. 
logging.INFO + """ + return get_log_level() + + @property + def _log_handlers(self): + if self.__log_handlers is None: self.helpers.mkdir(self.home) main_handler = logging.handlers.TimedRotatingFileHandler( str(self.home / "scan.log"), when="d", interval=1, backupCount=14 @@ -676,12 +900,12 @@ def log_handlers(self): str(self.home / "debug.log"), when="d", interval=1, backupCount=14 ) debug_handler.addFilter(lambda x: x.levelno != logging.STDOUT and x.levelno >= logging.DEBUG) - self._log_handlers = [main_handler, debug_handler] - return self._log_handlers + self.__log_handlers = [main_handler, debug_handler] + return self.__log_handlers - def start_log_handlers(self): + def _start_log_handlers(self): # add log handlers - for handler in self.log_handlers: + for handler in self._log_handlers: add_log_handler(handler) # temporarily disable main ones for handler_name in ("file_main", "file_debug"): @@ -690,9 +914,9 @@ def start_log_handlers(self): self._log_handler_backup.append(handler) remove_log_handler(handler) - def stop_log_handlers(self): + def _stop_log_handlers(self): # remove log handlers - for handler in self.log_handlers: + for handler in self._log_handlers: remove_log_handler(handler) # restore main ones for handler in self._log_handler_backup: @@ -703,67 +927,7 @@ def _internal_modules(self): if self.config.get(modname, True): yield modname - async def load_modules(self): - if not self._modules_loaded: - all_modules = list(set(self._scan_modules + self._output_modules + self._internal_modules)) - if not all_modules: - self.warning(f"No modules to load") - return - - if not self._scan_modules: - self.warning(f"No scan modules to load") - - # install module dependencies - succeeded, failed = await self.helpers.depsinstaller.install( - *self._scan_modules, *self._output_modules, *self._internal_modules - ) - if failed: - msg = f"Failed to install dependencies for {len(failed):,} modules: {','.join(failed)}" - self.fail_setup(msg) - modules = sorted([m for m in self._scan_modules if m in succeeded]) - output_modules = sorted([m for m in self._output_modules if m in succeeded]) - internal_modules = sorted([m for m in self._internal_modules if m in succeeded]) - - # Load scan modules - self.verbose(f"Loading {len(modules):,} scan modules: {','.join(modules)}") - loaded_modules, failed = self._load_modules(modules) - self.modules.update(loaded_modules) - if len(failed) > 0: - msg = f"Failed to load {len(failed):,} scan modules: {','.join(failed)}" - self.fail_setup(msg) - if loaded_modules: - self.info( - f"Loaded {len(loaded_modules):,}/{len(self._scan_modules):,} scan modules ({','.join(loaded_modules)})" - ) - - # Load internal modules - self.verbose(f"Loading {len(internal_modules):,} internal modules: {','.join(internal_modules)}") - loaded_internal_modules, failed_internal = self._load_modules(internal_modules) - self.modules.update(loaded_internal_modules) - if len(failed_internal) > 0: - msg = f"Failed to load {len(loaded_internal_modules):,} internal modules: {','.join(loaded_internal_modules)}" - self.fail_setup(msg) - if loaded_internal_modules: - self.info( - f"Loaded {len(loaded_internal_modules):,}/{len(self._internal_modules):,} internal modules ({','.join(loaded_internal_modules)})" - ) - - # Load output modules - self.verbose(f"Loading {len(output_modules):,} output modules: {','.join(output_modules)}") - loaded_output_modules, failed_output = self._load_modules(output_modules) - self.modules.update(loaded_output_modules) - if len(failed_output) > 0: - msg = f"Failed 
to load {len(failed_output):,} output modules: {','.join(failed_output)}" - self.fail_setup(msg) - if loaded_output_modules: - self.info( - f"Loaded {len(loaded_output_modules):,}/{len(self._output_modules):,} output modules, ({','.join(loaded_output_modules)})" - ) - - self.modules = OrderedDict(sorted(self.modules.items(), key=lambda x: getattr(x[-1], "_priority", 0))) - self._modules_loaded = True - - def fail_setup(self, msg): + def _fail_setup(self, msg): msg = str(msg) if not self.force_start: msg += " (--force to run module anyway)" @@ -772,10 +936,6 @@ def fail_setup(self, msg): else: raise ScanError(msg) - @property - def log_level(self): - return get_log_level() - @property def _loop(self): if self.__loop is None: @@ -806,7 +966,7 @@ async def _status_ticker(self, interval=15): self.manager.modules_status(_log=True) @contextlib.contextmanager - def catch(self, context="scan", finally_callback=None): + def _catch(self, context="scan", finally_callback=None): """ Handle common errors by stopping scan, logging tracebacks, etc. @@ -831,40 +991,6 @@ async def _acatch(self, context="scan", finally_callback=None): except BaseException as e: self._handle_exception(e, context=context) - def run_in_executor(self, callback, *args, **kwargs): - """ - Run a synchronous task in the event loop's default thread pool executor - """ - callback = partial(callback, **kwargs) - return self._loop.run_in_executor(None, callback, *args) - - def run_in_executor_mp(self, callback, *args, **kwargs): - """ - Same as run_in_executor() except with a process pool executor - """ - callback = partial(callback, **kwargs) - return self._loop.run_in_executor(self.process_pool, callback, *args) - - @property - def dns_regexes(self): - """ - Return a list of regexes for extracting target hostnames - """ - if self._dns_regexes is None: - dns_targets = set(t.host for t in self.target if t.host and isinstance(t.host, str)) - dns_whitelist = set(t.host for t in self.whitelist if t.host and isinstance(t.host, str)) - dns_targets.update(dns_whitelist) - dns_targets = sorted(dns_targets, key=len) - dns_targets_set = set() - dns_regexes = [] - for t in dns_targets: - if not any(x in dns_targets_set for x in self.helpers.domain_parents(t, include_self=True)): - dns_targets_set.add(t) - dns_regexes.append(re.compile(r"((?:(?:[\w-]+)\.)+" + re.escape(t) + ")", re.I)) - self._dns_regexes = dns_regexes - - return self._dns_regexes - def _handle_exception(self, e, context="scan", finally_callback=None): if callable(context): context = f"{context.__qualname__}()" diff --git a/bbot/test/test_step_1/test_manager.py b/bbot/test/test_step_1/test_manager.py index 7804836f50..16e6db7f56 100644 --- a/bbot/test/test_step_1/test_manager.py +++ b/bbot/test/test_step_1/test_manager.py @@ -108,7 +108,7 @@ class DummyModule3: msg = "Ignore this error, it belongs here" exceptions = (Exception(msg), KeyboardInterrupt(msg), BrokenPipeError(msg)) for e in exceptions: - with manager.scan.catch(): + with manager.scan._catch(): raise e diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index 1392e557a6..a4562cfc76 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -105,7 +105,7 @@ async def module_test(self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, mo module_test = self.ModuleTest(self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request) module_test.log.info(f"Starting {self.name} module test") await 
self.setup_before_prep(module_test) - await module_test.scan.prep() + await module_test.scan._prep() await self.setup_after_prep(module_test) module_test.events = [e async for e in module_test.scan.async_start()] yield module_test From 590b4a229ca18c0d563a09c15d36f067f90e8768 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 12 Sep 2023 17:05:29 -0400 Subject: [PATCH 084/123] steadily working through helpers --- bbot/core/helpers/misc.py | 262 ++++++++++++++++++++++++++++++++++---- bbot/scanner/scanner.py | 10 +- mkdocs.yml | 5 + pyproject.toml | 1 - 4 files changed, 245 insertions(+), 33 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 14b58b73a3..09a7b485f9 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -44,8 +44,26 @@ def is_domain(d): """ - "evilcorp.co.uk" --> True - "www.evilcorp.co.uk" --> False + Check if the given input represents a domain without subdomains. + + This function takes an input string `d` and returns True if it represents a domain without any subdomains. + Otherwise, it returns False. + + Args: + d (str): The input string containing the domain. + + Returns: + bool: True if the input is a domain without subdomains, False otherwise. + + Examples: + >>> is_domain("evilcorp.co.uk") + True + + >>> is_domain("www.evilcorp.co.uk") + False + + Notes: + - Port, if present in input, is ignored. """ d, _ = split_host_port(d) extracted = tldextract(d) @@ -56,8 +74,26 @@ def is_domain(d): def is_subdomain(d): """ - "www.evilcorp.co.uk" --> True - "evilcorp.co.uk" --> False + Check if the given input represents a subdomain. + + This function takes an input string `d` and returns True if it represents a subdomain. + Otherwise, it returns False. + + Args: + d (str): The input string containing the domain or subdomain. + + Returns: + bool: True if the input is a subdomain, False otherwise. + + Examples: + >>> is_subdomain("www.evilcorp.co.uk") + True + + >>> is_subdomain("evilcorp.co.uk") + False + + Notes: + - Port, if present in input, is ignored. """ d, _ = split_host_port(d) extracted = tldextract(d) @@ -68,13 +104,47 @@ def is_subdomain(d): def is_ptr(d): """ - "wsc-11-22-33-44.evilcorp.com" --> True - "www2.evilcorp.com" --> False + Check if the given input represents a PTR record domain. + + This function takes an input string `d` and returns True if it matches the PTR record format. + Otherwise, it returns False. + + Args: + d (str): The input string potentially representing a PTR record domain. + + Returns: + bool: True if the input matches PTR record format, False otherwise. + + Examples: + >>> is_ptr("wsc-11-22-33-44.evilcorp.com") + True + + >>> is_ptr("www2.evilcorp.com") + False """ return bool(bbot_regexes.ptr_regex.search(str(d))) def is_url(u): + """ + Check if the given input represents a valid URL. + + This function takes an input string `u` and returns True if it matches any of the predefined URL formats. + Otherwise, it returns False. + + Args: + u (str): The input string potentially representing a URL. + + Returns: + bool: True if the input matches a valid URL format, False otherwise. 
+ + Examples: + >>> is_url("https://evilcorp.com") + True + + >>> is_url("not-a-url") + False + """ u = str(u) for r in bbot_regexes.event_type_regexes["URL"]: if r.match(u): @@ -87,10 +157,30 @@ def is_url(u): def is_uri(u, return_scheme=False): """ - is_uri("http://evilcorp.com") --> True - is_uri("ftp://evilcorp.com") --> True - is_uri("evilcorp.com") --> False - is_uri("ftp://evilcorp.com", return_scheme=True) --> "ftp" + Check if the given input represents a URI and optionally return its scheme. + + This function takes an input string `u` and returns True if it matches a URI format. + When `return_scheme` is True, it returns the URI scheme instead of a boolean. + + Args: + u (str): The input string potentially representing a URI. + return_scheme (bool, optional): Whether to return the URI scheme. Defaults to False. + + Returns: + Union[bool, str]: True if the input matches a URI format; the URI scheme if `return_scheme` is True. + + Examples: + >>> is_uri("http://evilcorp.com") + True + + >>> is_uri("ftp://evilcorp.com") + True + + >>> is_uri("evilcorp.com") + False + + >>> is_uri("ftp://evilcorp.com", return_scheme=True) + "ftp" """ match = uri_regex.match(u) if return_scheme: @@ -102,9 +192,32 @@ def is_uri(u, return_scheme=False): def split_host_port(d): """ - "evilcorp.com:443" --> ("evilcorp.com", 443) - "192.168.1.1:443" --> (IPv4Address('192.168.1.1'), 443) - "[dead::beef]:443" --> (IPv6Address('dead::beef'), 443) + Parse a string containing a host and port into a tuple. + + This function takes an input string `d` and returns a tuple containing the host and port. + The host is converted to its appropriate IP address type if possible. The port is inferred + based on the scheme if not provided. + + Args: + d (str): The input string containing the host and possibly the port. + + Returns: + Tuple[Union[IPv4Address, IPv6Address, str], Optional[int]]: Tuple containing the host and port. + + Examples: + >>> split_host_port("evilcorp.com:443") + ("evilcorp.com", 443) + + >>> split_host_port("192.168.1.1:443") + (IPv4Address('192.168.1.1'), 443) + + >>> split_host_port("[dead::beef]:443") + (IPv6Address('dead::beef'), 443) + + Notes: + - If port is not provided, it is inferred based on the scheme: + - For "https" and "wss", port 443 is used. + - For "http" and "ws", port 80 is used. """ d = str(d) host = None @@ -146,10 +259,32 @@ def split_host_port(d): def parent_domain(d): """ - "www.internal.evilcorp.co.uk" --> "internal.evilcorp.co.uk" - "www.internal.evilcorp.co.uk:8080" --> "internal.evilcorp.co.uk:8080" - "www.evilcorp.co.uk" --> "evilcorp.co.uk" - "evilcorp.co.uk" --> "evilcorp.co.uk" + Retrieve the parent domain of a given subdomain string. + + This function takes an input string `d` representing a subdomain and returns its parent domain. + If the input does not represent a subdomain, it returns the input as is. + + Args: + d (str): The input string representing a subdomain or domain. + + Returns: + str: The parent domain of the subdomain, or the original input if it is not a subdomain. + + Examples: + >>> parent_domain("www.internal.evilcorp.co.uk") + "internal.evilcorp.co.uk" + + >>> parent_domain("www.internal.evilcorp.co.uk:8080") + "internal.evilcorp.co.uk:8080" + + >>> parent_domain("www.evilcorp.co.uk") + "evilcorp.co.uk" + + >>> parent_domain("evilcorp.co.uk") + "evilcorp.co.uk" + + Notes: + - Port, if present in input, is preserved in the output. 
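+        - Public suffixes are respected (via tldextract), so the parent of "www.evilcorp.co.uk" is "evilcorp.co.uk", never "co.uk".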
""" host, port = split_host_port(d) if is_subdomain(d): @@ -159,8 +294,26 @@ def parent_domain(d): def domain_parents(d, include_self=False): """ - "test.www.evilcorp.co.uk" --> ["www.evilcorp.co.uk", "evilcorp.co.uk"] + Generate a list of parent domains for a given domain string. + + This function takes an input string `d` and generates a list of parent domains in decreasing order of specificity. + If `include_self` is set to True, the list will also include the input domain if it is not a top-level domain. + + Args: + d (str): The input string representing a domain or subdomain. + include_self (bool, optional): Whether to include the input domain itself. Defaults to False. + + Yields: + str: Parent domains of the input string in decreasing order of specificity. + + Examples: + >>> list(domain_parents("test.www.evilcorp.co.uk")) + ["www.evilcorp.co.uk", "evilcorp.co.uk"] + + Notes: + - Port, if present in input, is preserved in the output. """ + parent = str(d) if include_self and not is_domain(parent): yield parent @@ -175,6 +328,29 @@ def domain_parents(d, include_self=False): def parent_url(u): + """ + Retrieve the parent URL of a given URL. + + This function takes an input string `u` representing a URL and returns its parent URL. + If the input URL does not have a parent (i.e., it's already the top-level), it returns None. + + Args: + u (str): The input string representing a URL. + + Returns: + Union[str, None]: The parent URL of the input URL, or None if it has no parent. + + Examples: + >>> parent_url("https://evilcorp.com/sub/path/") + "https://evilcorp.com/sub/" + + >>> parent_url("https://evilcorp.com/") + None + + Notes: + - Only the path component of the URL is modified. + - All other components like scheme, netloc, query, and fragment are preserved. + """ parsed = urlparse(u) path = Path(parsed.path) if path.parent == path: @@ -185,16 +361,31 @@ def parent_url(u): def url_parents(u): """ - "http://www.evilcorp.co.uk/admin/tools/cmd.php" --> ["http://www.evilcorp.co.uk/admin/tools/","http://www.evilcorp.co.uk/admin/", "http://www.evilcorp.co.uk/"] - """ + Generate a list of parent URLs for a given URL string. + + This function takes an input string `u` representing a URL and generates a list of its parent URLs in decreasing order of specificity. + + Args: + u (str): The input string representing a URL. + + Returns: + List[str]: A list of parent URLs of the input URL in decreasing order of specificity. - parent_list = set() + Examples: + >>> url_parents("http://www.evilcorp.co.uk/admin/tools/cmd.php") + ["http://www.evilcorp.co.uk/admin/tools/", "http://www.evilcorp.co.uk/admin/", "http://www.evilcorp.co.uk/"] + + Notes: + - The list is generated by continuously calling `parent_url` until it returns None. + - All components of the URL except for the path are preserved. + """ + parent_list = [] while 1: parent = parent_url(u) if parent == None: - return list(parent_list) - else: - parent_list.add(parent) + return parent_list + elif parent not in parent_list: + parent_list.append(parent) u = parent @@ -282,9 +473,26 @@ def is_ip_type(i): def make_ip_type(s): """ - "dead::beef" --> IPv6Address('dead::beef') - "192.168.1.0/24" --> IPv4Network('192.168.1.0/24') - "evilcorp.com" --> "evilcorp.com" + Convert a string to its corresponding IP address or network type. + + This function attempts to convert the input string `s` into either an IPv4 or IPv6 address object, + or an IPv4 or IPv6 network object. If none of these conversions are possible, the original string is returned. 
+ + Args: + s (str): The input string to be converted. + + Returns: + Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network, str]: The converted object or original string. + + Examples: + >>> make_ip_type("dead::beef") + IPv6Address('dead::beef') + + >>> make_ip_type("192.168.1.0/24") + IPv4Network('192.168.1.0/24') + + >>> make_ip_type("evilcorp.com") + 'evilcorp.com' """ # IP address with suppress(Exception): diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index c3c3f6070a..686794ce69 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -51,19 +51,19 @@ class Scanner: >>> config = {"http_proxy": "http://127.0.0.1:8080", "modules": {"nmap": {"top_ports": 2000}}} >>> my_scan = Scanner("www.evilcorp.com", modules=["nmap", "httpx"], config=config) - Synchronous, iterating over events as they're discovered: + Start the scan, iterating over events as they're discovered (synchronous): >>> for event in my_scan.start(): >>> print(event) - Asynchronous, iterating over events as they're discovered: + Start the scan, iterating over events as they're discovered (asynchronous): >>> async for event in my_scan.async_start(): >>> print(event) - Synchronous, without consuming events: + Start the scan without consuming events (synchronous): >>> my_scan.start_without_generator() - Asynchronous, without consuming events: - >>> await my_scan.start_without_generator() + Start the scan without consuming events (asynchronous): + >>> await my_scan.async_start_without_generator() Attributes: status (str): Status of scan, representing its current state. It can take on the following string values, each of which is mapped to an integer code in `_status_codes`: diff --git a/mkdocs.yml b/mkdocs.yml index 21a0172047..3b5118b30e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -32,6 +32,9 @@ nav: - How to Write a Module: contribution.md - Developer Reference: - Scanner: dev/scanner.md + - Helpers: + # dev/helpers/index.md + - Miscellaneous: dev/helpers/misc.md - Misc: - Release History: release_history.md - Troubleshooting: troubleshooting.md @@ -59,6 +62,8 @@ plugins: options: show_signature_annotations: true show_root_toc_entry: false + show_root_heading: true + show_root_full_path: false separate_signature: true docstring_section_style: "list" import: diff --git a/pyproject.toml b/pyproject.toml index 32df2045be..f8436f3bc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,6 @@ pytest-timeout = "^2.1.0" pytest = "^7.4.0" pre-commit = "^3.4.0" - [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2" mkdocs-extra-sass-plugin = "^0.1.0" From ae0f173d489ba92d610e033abc5b13f7a843e081 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 13 Sep 2023 14:09:45 -0400 Subject: [PATCH 085/123] refactor module inheritance --- bbot/core/helpers/modules.py | 3 + bbot/modules/anubisdb.py | 4 +- bbot/modules/azure_tenant.py | 4 +- bbot/modules/bevigil.py | 4 +- bbot/modules/binaryedge.py | 4 +- bbot/modules/builtwith.py | 4 +- bbot/modules/c99.py | 4 +- bbot/modules/censys.py | 4 +- bbot/modules/certspotter.py | 4 +- bbot/modules/chaos.py | 4 +- bbot/modules/columbus.py | 4 +- bbot/modules/crobat.py | 139 +-------------- bbot/modules/crt.py | 4 +- bbot/modules/digitorus.py | 4 +- bbot/modules/dnsdumpster.py | 4 +- bbot/modules/emailformat.py | 4 +- bbot/modules/fullhunt.py | 4 +- bbot/modules/github.py | 4 +- bbot/modules/hackertarget.py | 4 +- bbot/modules/hunterio.py | 4 +- bbot/modules/ipstack.py | 4 +- bbot/modules/leakix.py | 7 +- bbot/modules/massdns.py | 4 +- bbot/modules/myssl.py 
| 4 +- bbot/modules/otx.py | 4 +- bbot/modules/pgp.py | 4 +- bbot/modules/rapiddns.py | 4 +- bbot/modules/riddler.py | 4 +- bbot/modules/shodan_dns.py | 13 +- bbot/modules/sitedossier.py | 4 +- bbot/modules/subdomaincenter.py | 4 +- bbot/modules/sublist3r.py | 4 +- bbot/modules/templates/root_domains.py | 69 ++++++++ bbot/modules/templates/subdomain_enum.py | 163 ++++++++++++++++++ bbot/modules/threatminer.py | 4 +- bbot/modules/urlscan.py | 4 +- bbot/modules/wayback.py | 4 +- bbot/scripts/docs.py | 7 +- .../module_tests/test_module_leakix.py | 19 ++ 39 files changed, 327 insertions(+), 217 deletions(-) create mode 100644 bbot/modules/templates/root_domains.py create mode 100644 bbot/modules/templates/subdomain_enum.py diff --git a/bbot/core/helpers/modules.py b/bbot/core/helpers/modules.py index fe449a3ee2..c6dddab7ff 100644 --- a/bbot/core/helpers/modules.py +++ b/bbot/core/helpers/modules.py @@ -18,6 +18,9 @@ def __init__(self): self._configs = {} def file_filter(self, file): + file = file.resolve() + if "mixins" in file.parts: + return False return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] def preload(self, module_dir): diff --git a/bbot/modules/anubisdb.py b/bbot/modules/anubisdb.py index c580c9e9da..7b0cda171a 100644 --- a/bbot/modules/anubisdb.py +++ b/bbot/modules/anubisdb.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class anubisdb(crobat): +class anubisdb(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index 8ba59dcfcd..b9ada3d186 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -1,10 +1,10 @@ import re from contextlib import suppress -from .viewdns import viewdns +from bbot.modules.templates.root_domains import root_domains -class azure_tenant(viewdns): +class azure_tenant(root_domains): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "passive", "safe"] diff --git a/bbot/modules/bevigil.py b/bbot/modules/bevigil.py index 87d81d838c..0b54d40f40 100644 --- a/bbot/modules/bevigil.py +++ b/bbot/modules/bevigil.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class bevigil(shodan_dns): +class bevigil(subdomain_enum_apikey): """ Retrieve OSINT data from mobile applications using BeVigil """ diff --git a/bbot/modules/binaryedge.py b/bbot/modules/binaryedge.py index 637585f9fc..64970c861d 100644 --- a/bbot/modules/binaryedge.py +++ b/bbot/modules/binaryedge.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class binaryedge(shodan_dns): +class binaryedge(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/builtwith.py b/bbot/modules/builtwith.py index 4adcd2cb03..25a46ddf53 100644 --- a/bbot/modules/builtwith.py +++ b/bbot/modules/builtwith.py @@ -10,10 +10,10 @@ # # ############################################################ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class builtwith(shodan_dns): +class builtwith(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] 
flags = ["affiliates", "subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/c99.py b/bbot/modules/c99.py index 5b0179def4..8e05a1c4b8 100644 --- a/bbot/modules/c99.py +++ b/bbot/modules/c99.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class c99(shodan_dns): +class c99(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/censys.py b/bbot/modules/censys.py index ed7a62f3bd..339f10bf7c 100644 --- a/bbot/modules/censys.py +++ b/bbot/modules/censys.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class censys(shodan_dns): +class censys(subdomain_enum_apikey): """ thanks to https://github.com/owasp-amass/amass/blob/master/resources/scripts/cert/censys.ads """ diff --git a/bbot/modules/certspotter.py b/bbot/modules/certspotter.py index d943bf8eb3..4441b9d98f 100644 --- a/bbot/modules/certspotter.py +++ b/bbot/modules/certspotter.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class certspotter(crobat): +class certspotter(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/chaos.py b/bbot/modules/chaos.py index f247ce18db..3eb7635732 100644 --- a/bbot/modules/chaos.py +++ b/bbot/modules/chaos.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class chaos(shodan_dns): +class chaos(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/columbus.py b/bbot/modules/columbus.py index 057a35c90d..2e89013591 100644 --- a/bbot/modules/columbus.py +++ b/bbot/modules/columbus.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class columbus(crobat): +class columbus(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/crobat.py b/bbot/modules/crobat.py index 5aecf3f6e7..7ece44fdbc 100644 --- a/bbot/modules/crobat.py +++ b/bbot/modules/crobat.py @@ -1,145 +1,10 @@ -from bbot.modules.base import BaseModule +from bbot.modules.templates.subdomain_enum import subdomain_enum -class crobat(BaseModule): - """ - A typical free API-based subdomain enumeration module - Inherited by several other modules including sublist3r, dnsdumpster, etc. 
- """ - +class crobat(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] # tag "subdomain-enum" removed 2023-02-24 because API is offline flags = ["passive", "safe"] meta = {"description": "Query Project Crobat for subdomains"} - base_url = "https://sonar.omnisint.io" - # set module error state after this many failed requests in a row - abort_after_failures = 5 - # whether to reject wildcard DNS_NAMEs - reject_wildcards = "strict" - # this helps combat rate limiting by ensuring that a query doesn't execute - # until the queue is ready to receive its results - _qsize = 1 - - async def setup(self): - self.processed = set() - self.http_timeout = self.scan.config.get("http_timeout", 10) - self._failures = 0 - return True - - async def _is_wildcard(self, query): - if self.helpers.is_dns_name(query): - for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query)).items(): - if any(t in wildcard_rdtypes for t in ("A", "AAAA", "CNAME")): - return True - return False - - async def filter_event(self, event): - """ - This filter_event is used across many modules - """ - query = self.make_query(event) - # reject if already processed - if self.already_processed(query): - return False, "Event was already processed" - eligible, reason = await self.eligible_for_enumeration(event) - if eligible: - self.processed.add(hash(query)) - return True, reason - return False, reason - - async def eligible_for_enumeration(self, event): - query = self.make_query(event) - # check if wildcard - is_wildcard = await self._is_wildcard(query) - # check if cloud - is_cloud = False - if any(t.startswith("cloud-") for t in event.tags): - is_cloud = True - # reject if it's a cloud resource and not in our target - if is_cloud and event not in self.scan.target: - return False, "Event is a cloud resource and not a direct target" - # optionally reject events with wildcards / errors - if self.reject_wildcards: - if any(t in event.tags for t in ("a-error", "aaaa-error")): - return False, "Event has a DNS resolution error" - if self.reject_wildcards == "strict": - if is_wildcard: - return False, "Event is a wildcard domain" - elif self.reject_wildcards == "cloud_only": - if is_wildcard and is_cloud: - return False, "Event is both a cloud resource and a wildcard domain" - return True, "" - - def already_processed(self, hostname): - for parent in self.helpers.domain_parents(hostname, include_self=True): - if hash(parent) in self.processed: - return True - return False - - async def abort_if(self, event): - # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains - if "in-scope" not in event.tags: - return True - if await self._is_wildcard(event.data): - return True - return False - - async def handle_event(self, event): - query = self.make_query(event) - results = await self.query(query) - if results: - for hostname in set(results): - if hostname: - try: - hostname = self.helpers.validators.validate_host(hostname) - except ValueError as e: - self.verbose(e) - continue - if hostname and hostname.endswith(f".{query}") and not hostname == event.data: - self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) - - async def request_url(self, query): - url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" - return await self.request_with_fail_count(url) - - def make_query(self, event): - if "target" in event.tags: - query = str(event.data) - else: - query = self.helpers.parent_domain(event.data).lower() - return ".".join([s for s in query.split(".") 
if s != "_wildcard"]) - - def parse_results(self, r, query=None): - json = r.json() - if json: - for hostname in json: - yield hostname - - async def query(self, query, parse_fn=None, request_fn=None): - if parse_fn is None: - parse_fn = self.parse_results - if request_fn is None: - request_fn = self.request_url - try: - response = await request_fn(query) - if response is None: - self.info(f'Query "{query}" failed (no response)') - return [] - try: - results = list(parse_fn(response, query)) - except Exception as e: - if response: - self.info( - f'Error parsing results for query "{query}" (status code {response.status_code})', trace=True - ) - self.log.trace(response.text) - else: - self.info(f'Error parsing results for "{query}": {e}', trace=True) - return - if results: - return results - self.debug(f'No results for "{query}"') - except Exception as e: - self.info(f"Error retrieving results for {query}: {e}", trace=True) diff --git a/bbot/modules/crt.py b/bbot/modules/crt.py index b2b5837c30..9773f72d48 100644 --- a/bbot/modules/crt.py +++ b/bbot/modules/crt.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class crt(crobat): +class crt(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/digitorus.py b/bbot/modules/digitorus.py index 3047652b2c..0da4877440 100644 --- a/bbot/modules/digitorus.py +++ b/bbot/modules/digitorus.py @@ -1,9 +1,9 @@ import re -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class digitorus(crobat): +class digitorus(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index 00a557a3ad..8bb1fa1ede 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -1,10 +1,10 @@ import re from bs4 import BeautifulSoup -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class dnsdumpster(crobat): +class dnsdumpster(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 82b5797445..3817cb3f36 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -1,7 +1,7 @@ -from .viewdns import viewdns +from bbot.modules.templates.root_domains import root_domains -class emailformat(viewdns): +class emailformat(root_domains): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] diff --git a/bbot/modules/fullhunt.py b/bbot/modules/fullhunt.py index 8bc5d23265..1485dc6b5c 100644 --- a/bbot/modules/fullhunt.py +++ b/bbot/modules/fullhunt.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class fullhunt(shodan_dns): +class fullhunt(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/github.py b/bbot/modules/github.py index f6a933ea50..25ef862ef6 100644 --- a/bbot/modules/github.py +++ b/bbot/modules/github.py @@ -1,7 +1,7 @@ -from bbot.modules.shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class github(shodan_dns): +class 
github(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED"] flags = ["passive", "subdomain-enum", "safe"] diff --git a/bbot/modules/hackertarget.py b/bbot/modules/hackertarget.py index d6c3b4e3b7..d23f5c6cf4 100644 --- a/bbot/modules/hackertarget.py +++ b/bbot/modules/hackertarget.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class hackertarget(crobat): +class hackertarget(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/hunterio.py b/bbot/modules/hunterio.py index 8bb9f74744..1e65c6e4c7 100644 --- a/bbot/modules/hunterio.py +++ b/bbot/modules/hunterio.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class hunterio(shodan_dns): +class hunterio(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS", "DNS_NAME", "URL_UNVERIFIED"] flags = ["passive", "email-enum", "subdomain-enum", "safe"] diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index 22dce58be5..19e7cacda6 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class Ipstack(shodan_dns): +class Ipstack(subdomain_enum_apikey): """ Ipstack GeoIP Leverages the ipstack.com API to geolocate a host by IP address. diff --git a/bbot/modules/leakix.py b/bbot/modules/leakix.py index 4ebf895703..45053755a1 100644 --- a/bbot/modules/leakix.py +++ b/bbot/modules/leakix.py @@ -1,8 +1,7 @@ -from .crobat import crobat -from .shodan_dns import shodan_dns +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class leakix(shodan_dns): +class leakix(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] @@ -14,7 +13,7 @@ class leakix(shodan_dns): base_url = "https://leakix.net" async def setup(self): - ret = await crobat.setup(self) + ret = await super(subdomain_enum_apikey, self).setup() self.headers = {"Accept": "application/json"} self.api_key = self.config.get("api_key", "") if self.api_key: diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index ed54fc6550..1dee1580d8 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -3,10 +3,10 @@ import random import subprocess -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class massdns(crobat): +class massdns(subdomain_enum): flags = ["subdomain-enum", "passive", "slow", "aggressive"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/myssl.py b/bbot/modules/myssl.py index 7edd3fbc0b..a08c885eda 100644 --- a/bbot/modules/myssl.py +++ b/bbot/modules/myssl.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class myssl(crobat): +class myssl(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/otx.py b/bbot/modules/otx.py index abe856e47e..72f2e1d5ba 100644 --- a/bbot/modules/otx.py +++ b/bbot/modules/otx.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class otx(crobat): +class otx(subdomain_enum): flags = 
["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/pgp.py b/bbot/modules/pgp.py index ce7098e27d..c1e0773c37 100644 --- a/bbot/modules/pgp.py +++ b/bbot/modules/pgp.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class pgp(crobat): +class pgp(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] diff --git a/bbot/modules/rapiddns.py b/bbot/modules/rapiddns.py index 0af7e39306..088288ddbf 100644 --- a/bbot/modules/rapiddns.py +++ b/bbot/modules/rapiddns.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class rapiddns(crobat): +class rapiddns(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/riddler.py b/bbot/modules/riddler.py index c6f865ee10..d525acbade 100644 --- a/bbot/modules/riddler.py +++ b/bbot/modules/riddler.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class riddler(crobat): +class riddler(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/shodan_dns.py b/bbot/modules/shodan_dns.py index c94d0ac203..7780120b60 100644 --- a/bbot/modules/shodan_dns.py +++ b/bbot/modules/shodan_dns.py @@ -1,12 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey -class shodan_dns(crobat): - """ - A typical module for authenticated, API-based subdomain enumeration - Inherited by several other modules including securitytrails, c99.nl, etc. 
- """ - +class shodan_dns(subdomain_enum_apikey): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] @@ -16,10 +11,6 @@ class shodan_dns(crobat): base_url = "https://api.shodan.io" - async def setup(self): - await super().setup() - return await self.require_api_key() - async def ping(self): url = f"{self.base_url}/api-info?key={self.api_key}" r = await self.request_with_fail_count(url) diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index f9180f1db7..87358a955d 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class sitedossier(crobat): +class sitedossier(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/subdomaincenter.py b/bbot/modules/subdomaincenter.py index 218c49d306..6d1825b8b2 100644 --- a/bbot/modules/subdomaincenter.py +++ b/bbot/modules/subdomaincenter.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class subdomaincenter(crobat): +class subdomaincenter(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] diff --git a/bbot/modules/sublist3r.py b/bbot/modules/sublist3r.py index ee15a145b0..3c13cf3081 100644 --- a/bbot/modules/sublist3r.py +++ b/bbot/modules/sublist3r.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class sublist3r(crobat): +class sublist3r(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] # tag "subdomain-enum" removed 2023-02-24 because API is offline diff --git a/bbot/modules/templates/root_domains.py b/bbot/modules/templates/root_domains.py new file mode 100644 index 0000000000..a1eaf8c99c --- /dev/null +++ b/bbot/modules/templates/root_domains.py @@ -0,0 +1,69 @@ +import re + +from bbot.modules.base import BaseModule + + +class root_domains(BaseModule): + """ + Used as a base for modules that only act on root domains and not individual hostnames + """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["passive", "safe"] + meta = { + "description": "", + } + in_scope_only = True + _qsize = 1 + + async def setup(self): + self.processed = set() + self.date_regex = re.compile(r"\d{4}-\d{2}-\d{2}") + return True + + async def filter_event(self, event): + _, domain = self.helpers.split_domain(event.data) + if hash(domain) in self.processed: + return False + self.processed.add(hash(domain)) + return True + + async def handle_event(self, event): + _, query = self.helpers.split_domain(event.data) + for domain, _ in await self.query(query): + self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) + # todo: registrar? 
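+            # illustrative example (hypothetical data): for an event "www.evilcorp.com",
+            # the query is "evilcorp.com" and each (domain, registrar) result might look
+            # like ("evilcorp.net", "NameCheap, Inc.")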
+ + async def query(self, query): + results = set() + url = f"{self.base_url}/reversewhois/?q={query}" + r = await self.helpers.request(url) + status_code = getattr(r, "status_code", 0) + if status_code not in (200,): + self.verbose(f"Error retrieving reverse whois results (status code: {status_code})") + + content = getattr(r, "content", b"") + from bs4 import BeautifulSoup + + html = BeautifulSoup(content, "html.parser") + found = set() + for table_row in html.findAll("tr"): + table_cells = table_row.findAll("td") + # make double-sure we're in the right table by checking the date field + try: + if self.date_regex.match(table_cells[1].text.strip()): + # domain == first cell + domain = table_cells[0].text.strip().lower() + # registrar == last cell + registrar = table_cells[-1].text.strip() + if domain and not domain == query: + result = (domain, registrar) + result_hash = hash(result) + if result_hash not in found: + found.add(result_hash) + results.add(result) + except IndexError: + self.debug(f"Invalid row {str(table_row)[:40]}...") + continue + return results diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py new file mode 100644 index 0000000000..6bf380e8fd --- /dev/null +++ b/bbot/modules/templates/subdomain_enum.py @@ -0,0 +1,163 @@ +from bbot.modules.base import BaseModule + + +class subdomain_enum(BaseModule): + """ + A typical free API-based subdomain enumeration module + Inherited by many other modules including sublist3r, dnsdumpster, etc. + """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = {"description": "Query an API for subdomains"} + + base_url = "https://api.example.com" + + # set module error state after this many failed requests in a row + abort_after_failures = 5 + # whether to reject wildcard DNS_NAMEs + reject_wildcards = "strict" + # this helps combat rate limiting by ensuring that a query doesn't execute + # until the queue is ready to receive its results + _qsize = 1 + + async def setup(self): + self.processed = set() + self.http_timeout = self.scan.config.get("http_timeout", 10) + self._failures = 0 + return True + + async def handle_event(self, event): + query = self.make_query(event) + results = await self.query(query) + if results: + for hostname in set(results): + if hostname: + try: + hostname = self.helpers.validators.validate_host(hostname) + except ValueError as e: + self.verbose(e) + continue + if hostname and hostname.endswith(f".{query}") and not hostname == event.data: + self.emit_event(hostname, "DNS_NAME", event, abort_if=self.abort_if) + + async def request_url(self, query): + url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}" + return await self.request_with_fail_count(url) + + def make_query(self, event): + if "target" in event.tags: + query = str(event.data) + else: + query = self.helpers.parent_domain(event.data).lower() + return ".".join([s for s in query.split(".") if s != "_wildcard"]) + + def parse_results(self, r, query=None): + json = r.json() + if json: + for hostname in json: + yield hostname + + async def query(self, query, parse_fn=None, request_fn=None): + if parse_fn is None: + parse_fn = self.parse_results + if request_fn is None: + request_fn = self.request_url + try: + response = await request_fn(query) + if response is None: + self.info(f'Query "{query}" failed (no response)') + return [] + try: + results = list(parse_fn(response, query)) + except Exception as e: + if response: + self.info( + 
f'Error parsing results for query "{query}" (status code {response.status_code})', trace=True + ) + self.log.trace(response.text) + else: + self.info(f'Error parsing results for "{query}": {e}', trace=True) + return + if results: + return results + self.debug(f'No results for "{query}"') + except Exception as e: + self.info(f"Error retrieving results for {query}: {e}", trace=True) + + async def _is_wildcard(self, query): + if self.helpers.is_dns_name(query): + for domain, wildcard_rdtypes in (await self.helpers.is_wildcard_domain(query)).items(): + if any(t in wildcard_rdtypes for t in ("A", "AAAA", "CNAME")): + return True + return False + + async def filter_event(self, event): + """ + This filter_event is used across many modules + """ + query = self.make_query(event) + # reject if already processed + if self.already_processed(query): + return False, "Event was already processed" + eligible, reason = await self.eligible_for_enumeration(event) + if eligible: + self.processed.add(hash(query)) + return True, reason + return False, reason + + async def eligible_for_enumeration(self, event): + query = self.make_query(event) + # check if wildcard + is_wildcard = await self._is_wildcard(query) + # check if cloud + is_cloud = False + if any(t.startswith("cloud-") for t in event.tags): + is_cloud = True + # reject if it's a cloud resource and not in our target + if is_cloud and event not in self.scan.target: + return False, "Event is a cloud resource and not a direct target" + # optionally reject events with wildcards / errors + if self.reject_wildcards: + if any(t in event.tags for t in ("a-error", "aaaa-error")): + return False, "Event has a DNS resolution error" + if self.reject_wildcards == "strict": + if is_wildcard: + return False, "Event is a wildcard domain" + elif self.reject_wildcards == "cloud_only": + if is_wildcard and is_cloud: + return False, "Event is both a cloud resource and a wildcard domain" + return True, "" + + def already_processed(self, hostname): + for parent in self.helpers.domain_parents(hostname, include_self=True): + if hash(parent) in self.processed: + return True + return False + + async def abort_if(self, event): + # this helps weed out unwanted results when scanning IP_RANGES and wildcard domains + if "in-scope" not in event.tags: + return True + if await self._is_wildcard(event.data): + return True + return False + + +class subdomain_enum_apikey(subdomain_enum): + """ + A typical module for authenticated, API-based subdomain enumeration + Inherited by several other modules including securitytrails, c99.nl, etc. 
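+
+    Example (a minimal hypothetical subclass; the URL and response format are
+    illustrative placeholders, not a real API):
+
+        class example_api(subdomain_enum_apikey):
+            base_url = "https://api.example.com"
+
+            async def request_url(self, query):
+                url = f"{self.base_url}/subdomains/{self.helpers.quote(query)}?key={self.api_key}"
+                return await self.request_with_fail_count(url)
+
+            def parse_results(self, r, query=None):
+                for hostname in r.json().get("subdomains", []):
+                    yield hostname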
+ """ + + watched_events = ["DNS_NAME"] + produced_events = ["DNS_NAME"] + flags = ["subdomain-enum", "passive", "safe"] + meta = {"description": "Query API for subdomains", "auth_required": True} + options = {"api_key": ""} + options_desc = {"api_key": "API key"} + + async def setup(self): + await super().setup() + return await self.require_api_key() diff --git a/bbot/modules/threatminer.py b/bbot/modules/threatminer.py index 0613c59028..bbc1e23c3d 100644 --- a/bbot/modules/threatminer.py +++ b/bbot/modules/threatminer.py @@ -1,7 +1,7 @@ -from bbot.modules.crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class threatminer(crobat): +class threatminer(subdomain_enum): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["subdomain-enum", "passive", "safe"] diff --git a/bbot/modules/urlscan.py b/bbot/modules/urlscan.py index 320c327618..f1efe08e54 100644 --- a/bbot/modules/urlscan.py +++ b/bbot/modules/urlscan.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class urlscan(crobat): +class urlscan(subdomain_enum): flags = ["subdomain-enum", "passive", "safe"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME", "URL_UNVERIFIED"] diff --git a/bbot/modules/wayback.py b/bbot/modules/wayback.py index 08e32926a4..d5b8f320f8 100644 --- a/bbot/modules/wayback.py +++ b/bbot/modules/wayback.py @@ -1,7 +1,7 @@ -from .crobat import crobat +from bbot.modules.templates.subdomain_enum import subdomain_enum -class wayback(crobat): +class wayback(subdomain_enum): flags = ["passive", "subdomain-enum", "safe"] watched_events = ["DNS_NAME"] produced_events = ["URL_UNVERIFIED", "DNS_NAME"] diff --git a/bbot/scripts/docs.py b/bbot/scripts/docs.py index 9969a8989c..b664881592 100755 --- a/bbot/scripts/docs.py +++ b/bbot/scripts/docs.py @@ -117,9 +117,10 @@ def update_md_files(keyword, s): bbot_docs_toc += f"- **{section_title}**\n" for subsection in subsections: for subsection_title, subsection_path in subsection.items(): - path = subsection_path.split("index.md")[0] - path = path.split(".md")[0] - bbot_docs_toc += f" - [{subsection_title}]({base_url}/{path})\n" + if isinstance(subsection_path, str): + path = subsection_path.split("index.md")[0] + path = path.split(".md")[0] + bbot_docs_toc += f" - [{subsection_title}]({base_url}/{path})\n" bbot_docs_toc = bbot_docs_toc.strip() assert len(bbot_docs_toc.splitlines()) > 5 update_md_files("BBOT DOCS TOC", bbot_docs_toc) diff --git a/bbot/test/test_step_2/module_tests/test_module_leakix.py b/bbot/test/test_step_2/module_tests/test_module_leakix.py index b6bfbd8bf4..aad4a095c4 100644 --- a/bbot/test/test_step_2/module_tests/test_module_leakix.py +++ b/bbot/test/test_step_2/module_tests/test_module_leakix.py @@ -24,3 +24,22 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" + + +class TestLeakIX_NoAPIKey(ModuleTestBase): + modules_overrides = ["leakix"] + + async def setup_before_prep(self, module_test): + module_test.httpx_mock.add_response( + url=f"https://leakix.net/api/subdomains/blacklanternsecurity.com", + json=[ + { + "subdomain": "asdf.blacklanternsecurity.com", + "distinct_ips": 3, + "last_seen": "2023-04-02T09:38:30.02Z", + }, + ], + ) + + def check(self, module_test, events): + assert any(e.data == "asdf.blacklanternsecurity.com" for e in events), "Failed to detect subdomain" From 
3753724907709f8b0eaa46feab2ebf1f4e89bc61 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 13 Sep 2023 14:17:37 -0400 Subject: [PATCH 086/123] code cleanup in geoip modules --- bbot/modules/base.py | 7 +++++++ bbot/modules/ip2location.py | 10 ++++------ bbot/modules/ipstack.py | 9 +++++---- bbot/modules/templates/subdomain_enum.py | 2 -- 4 files changed, 16 insertions(+), 12 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index a5faa45108..b66c1e3466 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -614,6 +614,13 @@ def priority(self): def auth_required(self): return self.meta.get("auth_required", False) + @property + def http_timeout(self): + """ + Convenience shortcut to `http_timeout` in the config + """ + return self.scan.config.get("http_timeout", 10) + @property def log(self): if getattr(self, "_log", None) is None: diff --git a/bbot/modules/ip2location.py b/bbot/modules/ip2location.py index 823ede94da..4a203e55bb 100644 --- a/bbot/modules/ip2location.py +++ b/bbot/modules/ip2location.py @@ -1,7 +1,7 @@ -from .shodan_dns import shodan_dns +from bbot.modules.base import BaseModule -class IP2Location(shodan_dns): +class IP2Location(BaseModule): """ IP2Location.io Geolocation API. """ @@ -21,12 +21,10 @@ class IP2Location(shodan_dns): base_url = "http://api.ip2location.io" - async def filter_event(self, event): - return True - async def setup(self): + await self.require_api_key() self.lang = self.config.get("lang", "") - return await super().setup() + return True async def ping(self): url = self.build_url("8.8.8.8") diff --git a/bbot/modules/ipstack.py b/bbot/modules/ipstack.py index 19e7cacda6..031ac272c9 100644 --- a/bbot/modules/ipstack.py +++ b/bbot/modules/ipstack.py @@ -1,7 +1,7 @@ -from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey +from bbot.modules.base import BaseModule -class Ipstack(subdomain_enum_apikey): +class Ipstack(BaseModule): """ Ipstack GeoIP Leverages the ipstack.com API to geolocate a host by IP address. 
@@ -10,7 +10,7 @@ class Ipstack(subdomain_enum_apikey): watched_events = ["IP_ADDRESS"] produced_events = ["GEOLOCATION"] flags = ["passive", "safe"] - meta = {"description": "Query IPStack's API for GeoIP ", "auth_required": True} + meta = {"description": "Query IPStack's GeoIP API", "auth_required": True} options = {"api_key": ""} options_desc = {"api_key": "IPStack GeoIP API Key"} scope_distance_modifier = 1 @@ -19,7 +19,8 @@ class Ipstack(subdomain_enum_apikey): base_url = "http://api.ipstack.com" - async def filter_event(self, event): + async def setup(self): + await self.require_api_key() return True async def ping(self): diff --git a/bbot/modules/templates/subdomain_enum.py b/bbot/modules/templates/subdomain_enum.py index 6bf380e8fd..61c2de6a4e 100644 --- a/bbot/modules/templates/subdomain_enum.py +++ b/bbot/modules/templates/subdomain_enum.py @@ -24,8 +24,6 @@ class subdomain_enum(BaseModule): async def setup(self): self.processed = set() - self.http_timeout = self.scan.config.get("http_timeout", 10) - self._failures = 0 return True async def handle_event(self, event): From 0fd2ad252706146a6e83e3927c44233ddd62e682 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 13 Sep 2023 15:06:58 -0400 Subject: [PATCH 087/123] fix tests --- bbot/core/helpers/modules.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/core/helpers/modules.py b/bbot/core/helpers/modules.py index c6dddab7ff..7a6f822214 100644 --- a/bbot/core/helpers/modules.py +++ b/bbot/core/helpers/modules.py @@ -19,7 +19,7 @@ def __init__(self): def file_filter(self, file): file = file.resolve() - if "mixins" in file.parts: + if "templates" in file.parts: return False return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] From 4e3b107419bc888f3011c55792042713c3e63f3a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 22 Sep 2023 11:50:15 -0400 Subject: [PATCH 088/123] resolve conflicts in modules/base.py --- bbot/core/helpers/web.py | 12 + bbot/modules/base.py | 751 +++++++++++++++++++++++++----- bbot/modules/gowitness.py | 2 +- bbot/modules/internal/excavate.py | 2 +- bbot/modules/robots.py | 2 +- bbot/scanner/scanner.py | 2 +- docs/dev/scanner.md | 2 - mkdocs.yml | 3 + 8 files changed, 665 insertions(+), 111 deletions(-) diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index 7d801ca5af..c5ad95cff7 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -393,6 +393,18 @@ async def curl(self, *args, **kwargs): output = (await self.parent_helper.run(curl_command)).stdout return output + def is_spider_danger(self, source_event, url): + """ + Todo: write tests for this + """ + url_depth = self.parent_helper.url_depth(url) + web_spider_depth = self.parent_helper.scan.config.get("web_spider_depth", 1) + spider_distance = getattr(source_event, "web_spider_distance", 0) + 1 + web_spider_distance = self.parent_helper.scan.config.get("web_spider_distance", 0) + if (url_depth > web_spider_depth) or (spider_distance > web_spider_distance): + return True + return False + user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]] pass_keywords = [re.compile(r, re.I) for r in ["pass"]] diff --git a/bbot/modules/base.py b/bbot/modules/base.py index b66c1e3466..a5bf776873 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -10,79 +10,114 @@ class BaseModule: - # Event types to watch + """The base class for all BBOT modules. + + Attributes: + watched_events (List): Event types to watch. 
+
+        produced_events (List): Event types to produce.
+
+        meta (Dict): Metadata about the module, such as whether authentication is required and a description.
+
+        flags (List): Flags indicating the type of module (must have at least "safe" or "aggressive" and "passive" or "active").
+
+        deps_pip (List): Python dependencies to install via pip. Empty list by default.
+
+        deps_apt (List): APT package dependencies to install. Empty list by default.
+
+        deps_shell (List): Other dependencies installed via shell commands. Uses [ansible.builtin.shell](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html). Empty list by default.
+
+        deps_ansible (List): Additional Ansible tasks for complex dependencies. Empty list by default.
+
+        accept_dupes (bool): Whether to accept incoming duplicate events. Default is False.
+
+        suppress_dupes (bool): Whether to suppress outgoing duplicate events. Default is True.
+
+        per_host_only (bool): Limit the module to only scanning once per host. Default is False.
+
+        scope_distance_modifier (int, None): Modifies scope distance acceptance for events. Default is 0.
+        ```
+        None == accept all events
+        2 == accept events up to and including the scan's configured search distance plus two
+        1 == accept events up to and including the scan's configured search distance plus one
+        0 == (DEFAULT) accept events up to and including the scan's configured search distance
+        ```
+
+        target_only (bool): Accept only the initial target event(s). Default is False.
+
+        in_scope_only (bool): Accept only explicitly in-scope events. Default is False.
+
+        options (Dict): Customizable options for the module, e.g., {"api_key": ""}. Empty dict by default.
+
+        options_desc (Dict): Descriptions for options, e.g., {"api_key": "API Key"}. Empty dict by default.
+
+        max_event_handlers (int): Maximum concurrent instances of handle_event() or handle_batch(). Default is 1.
+
+        batch_size (int): Size of batches processed by handle_batch(). Default is 1.
+
+        batch_wait (int): Seconds to wait before force-submitting a batch. Default is 10.
+
+        failed_request_abort_threshold (int): Threshold for setting error state after failed HTTP requests (only takes effect when `request_with_fail_count()` is used). Default is 5.
+
+        _scope_shepherding (bool): When set to false, prevents events generated by this module from being automatically marked as in-scope. Default is True. Useful for low-confidence modules like speculate and ipneighbor.
+
+        _stats_exclude (bool): Whether to exclude this module from scan statistics. Default is False.
+
+        _qsize (int): Outgoing queue size (0 for infinite). Default is 0.
+
+        _priority (int): Priority level of events raised by this module, 1-5. Default is 3.
+
+        _name (str): Module name, overridden automatically. Default is 'base'.
+
+        _type (str): Module type, for differentiating between normal and output modules. Default is 'scan'.
+    """
+
     watched_events = []
-    # Event types to produce
     produced_events = []
-    # Module description, etc.
meta = {"auth_required": False, "description": "Base module"} - # Flags, must include either "passive" or "active" flags = [] + options = {} + options_desc = {} - # python dependencies (pip install ____) deps_pip = [] - # apt dependencies (apt install ____) deps_apt = [] - # other dependences as shell commands - # uses ansible.builtin.shell (https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html) deps_shell = [] - # list of ansible tasks for when other dependency installation methods aren't enough deps_ansible = [] - # Whether to accept incoming duplicate events + accept_dupes = False - # Whether to block outgoing duplicate events suppress_dupes = True - # Limit the module to only scanning once per host. By default, defined by event.host, but can be customized by overriding per_host_only = False - - # Scope distance modifier - accept/deny events based on scope distance - # None == accept all events - # 2 == accept events up to and including the scan's configured search distance plus two - # 1 == accept events up to and including the scan's configured search distance plus one - # 0 == (DEFAULT) accept events up to and including the scan's configured search distance - # -1 == accept events up to and including the scan's configured search distance minus one - # -2 == accept events up to and including the scan's configured search distance minus two scope_distance_modifier = 0 - # Only accept the initial target event(s) target_only = False - # Only accept explicitly in-scope events (scope distance == 0) - # Use this options if your module is aggressive or if you don't want it to scale with - # the scan's search distance in_scope_only = False - # Options, e.g. {"api_key": ""} - options = {} - # Options description, e.g. {"api_key": "API Key"} - options_desc = {} - # Maximum concurrent instances of handle_event() or handle_batch() max_event_handlers = 1 - # Batch size - # If batch size > 1, override handle_batch() instead of handle_event() batch_size = 1 - # Seconds to wait before force-submitting batch batch_wait = 10 - # Use in conjunction with .request_with_fail_count() to set_error_state() after this many failed HTTP requests failed_request_abort_threshold = 5 - # When set to false, prevents events generated by this module from being automatically marked as in-scope - # Useful for low-confidence modules like speculate and ipneighbor + _scope_shepherding = True - # Exclude from scan statistics _stats_exclude = False - # outgoing queue size (0 == infinite) _qsize = 0 - # Priority of events raised by this module, 1-5, lower numbers == higher priority _priority = 3 - # Name, overridden automatically _name = "base" - # Type, for differentiating between normal modules and output modules, etc. _type = "scan" def __init__(self, scan): + """Initializes a module instance. + + Args: + scan: The BBOT scan object associated with this module instance. + + Attributes: + scan: The scan object associated with this module. + + errored (bool): Whether the module has errored out. Default is False. + """ self.scan = scan self.errored = False self._log = None self._incoming_event_queue = None - # seconds since we've submitted a batch self._outgoing_event_queue = None # seconds since we've submitted a batch self._last_submitted_batch = None @@ -107,66 +142,116 @@ def __init__(self, scan): self._per_host_tracker = set() async def setup(self): - """ - Perform setup functions at the beginning of the scan. - Optionally override this method. 
+ """Asynchronously sets up the module at the beginning of the scan. + + This method can be overridden to perform any necessary setup logic. - Must return True or False based on whether the setup was successful + Returns: + bool or None: True if setup was successful. None for a soft-fail, which will produce a warning but not abort the scan. False for a hard-fail, which will abort the scan. """ return True async def handle_event(self, event): - """ - Override this method if batch_size == 1. + """Asynchronously handles incoming events that the module is configured to watch. + + This method is automatically invoked when an event that matches any in `watched_events` is encountered during a scan. Override this method to implement custom event-handling logic for your module. + + Args: + event (Event): The event object containing details about the incoming event. + + Note: + This method should be overridden if the `batch_size` attribute of the module is set to 1. + + Returns: + None """ pass def handle_batch(self, *events): - """ - Override this method if batch_size > 1. + """Handles incoming events in batches for optimized processing. + + This method is automatically called when multiple events that match any in `watched_events` are encountered and the `batch_size` attribute is set to a value greater than 1. Override this method to implement custom batch event-handling logic for your module. + + Args: + *events (Event): A variable number of Event objects to be processed in a batch. + + Note: + This method should be overridden if the `batch_size` attribute of the module is set to a value greater than 1. + + Returns: + None """ pass async def filter_event(self, event): - """ - Accept/reject events based on custom criteria + """Asynchronously filters incoming events based on custom criteria. + + Override this method for more granular control over which events are accepted by your module. This method is called automatically before `handle_event()` for each incoming event that matches any in `watched_events`. + + Args: + event (Event): The incoming Event object to be filtered. - Override this method if you need more granular control - over which events are distributed to your module + Returns: + tuple: A 2-tuple where the first value is a bool indicating whether the event should be accepted, and the second value is a string explaining the reason for its acceptance or rejection. By default, returns `(True, None)` to indicate acceptance without reason. + + Note: + This method should be overridden if the module requires custom logic for event filtering. """ return True async def finish(self): - """ - Perform final functions when scan is nearing completion + """Asynchronously performs final tasks as the scan nears completion. - For example, if your module relies on the word cloud, you may choose to wait until - the scan is finished (and the word cloud is most complete) before running an operation. + This method can be overridden to execute any necessary finalization logic. For example, if the module relies on a word cloud, you might wait for the scan to finish to ensure the word cloud is most complete before running an operation. - Note that this method may be called multiple times, because it may raise events. - Optionally override this method. + Returns: + None by default, but can return additional data if overridden. + + Warnings: + This method may be called multiple times since it can raise events, which may re-trigger the "finish" phase of the scan. Optional to override. 
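+
+        Example (a hypothetical sketch; assumes the module collected hostnames in self.found during the scan):
+            async def finish(self):
+                for hostname in self.found:
+                    self.emit_event(hostname, "DNS_NAME", source=self.scan.root_event)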
""" return async def report(self): - """ - Perform a final task when the scan is finished, but before cleanup happens + """Asynchronously executes a final task after the scan is complete but before cleanup. - This is useful for modules that aggregate data and raise summary events at the end of a scan + This method can be overridden to aggregate data and raise summary events at the end of the scan. + + Returns: + None by default, but can return additional data if overridden. + + Note: + This method is called only once per scan. """ return async def cleanup(self): - """ - Perform final cleanup after the scan has finished - This method is called only once, and may not raise events. - Optionally override this method. + """Asynchronously performs final cleanup operations after the scan is complete. + + This method can be overridden to implement custom cleanup logic. It is called only once per scan and may not raise events. + + Returns: + None by default, but can return additional data if overridden. + + Note: + This method is called only once per scan and may not raise events. """ return async def require_api_key(self): - """ - Use in setup() to ensure the module is configured with an API key + """Asynchronously checks if the module is configured with a valid API key. + + This method is typically used within the setup() method to ensure that an API key is provided in the module configuration. Your module must define an 'api_key' in its config options for this method to work properly. + + Example Usage: + def setup(self): + return await self.require_api_key() + + Returns: + Tuple (bool, str): The first element is a boolean indicating whether the API is ready to use. The second element is a string message, either indicating that the API is ready or describing the error. + + Raises: + Exception: Any exceptions raised by the self.ping() method will propagate. """ self.api_key = self.config.get("api_key", "") if self.auth_secret: @@ -180,29 +265,43 @@ async def require_api_key(self): return None, "No API key set" async def ping(self): - """ - Used in conjuction with require_api_key to ensure an API is up and responding + """Asynchronously checks the health of the configured API. - Requires the use of an assert statement. + This method is used in conjunction with require_api_key() to verify that the API is not just configured, but also responsive. This method should include an assert statement to validate the API's health, typically by making a test request to a known endpoint. - E.g. if your API has a "/ping" endpoint, you can use it like this: - def ping(self): - r = self.request_with_fail_count(f"{self.base_url}/ping") + Example Usage: + In your implementation, if the API has a "/ping" endpoint: + async def ping(self): + r = await self.request_with_fail_count(f"{self.base_url}/ping") resp_content = getattr(r, "text", "") assert getattr(r, "status_code", 0) == 200, resp_content + + Returns: + None + + Raises: + AssertionError: If the API does not respond as expected. """ return @property def auth_secret(self): - """ - Use this to indicate whether the module has everything it needs for authentication + """Indicates if the module is properly configured for authentication. + + This read-only property should be used to check whether all necessary attributes (e.g., API keys, tokens, etc.) are configured to perform authenticated requests in the module. Commonly used in setup or initialization steps. + + Returns: + bool: True if the module is properly configured for authentication, otherwise False. 
""" return getattr(self, "api_key", "") def get_watched_events(self): - """ - Override if you need your watched_events to be dynamic + """Retrieve the set of events that the module is interested in observing. + + Override this method if the set of events the module should watch needs to be determined dynamically, e.g., based on configuration options or other runtime conditions. + + Returns: + set: The set of event types that this module will handle. """ if self._watched_events is None: self._watched_events = set(self.watched_events) @@ -215,7 +314,7 @@ async def _handle_batch(self): if self.batch_size <= 1: return if self.num_incoming_events > 0: - events, finish = await self.events_waiting() + events, finish = await self._events_waiting() if events and not self.errored: self.debug(f"Handling batch of {len(events):,} events") submitted = True @@ -229,6 +328,21 @@ async def _handle_batch(self): return submitted def make_event(self, *args, **kwargs): + """Create an event for the scan. + + Raises a validation error if the event could not be created, unless raise_error is set to False. + + Args: + *args: Positional arguments to be passed to the scan's make_event method. + **kwargs: Keyword arguments to be passed to the scan's make_event method. + raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False. + + Returns: + Event or None: The created event, or None if a validation error occurred and raise_error was False. + + Raises: + ValidationError: If the event could not be validated and raise_error is True. + """ raise_error = kwargs.pop("raise_error", False) try: event = self.scan.make_event(*args, **kwargs) @@ -242,6 +356,26 @@ def make_event(self, *args, **kwargs): return event def emit_event(self, *args, **kwargs): + """Emit an event to the event queue and distribute it to interested modules. + + The method first creates an event object by calling `self.make_event()` with the provided arguments. + Then, the event is queued for outgoing distribution using `self.queue_outgoing_event()`. + + Args: + *args: Positional arguments to be passed to `self.make_event()` for event creation. + **kwargs: Keyword arguments to be passed for event creation or configuration of the emit action. + ```markdown + - on_success_callback: Optional callback function to execute upon successful event emission. + - abort_if: Optional condition under which the event emission should be aborted. + - quick: Optional flag to indicate whether the event should be processed quickly. + ``` + + Returns: + None + + Raises: + ValidationError: If the event cannot be validated (handled in `self.make_event()`). + """ event_kwargs = dict(kwargs) emit_kwargs = {} for o in ("on_success_callback", "abort_if", "quick"): @@ -253,16 +387,42 @@ def emit_event(self, *args, **kwargs): self.queue_outgoing_event(event, **emit_kwargs) async def emit_event_wait(self, *args, **kwargs): - """ - Same as emit_event except we wait on the outgoing queue + """Emit an event to the event queue and await until there is space in the outgoing queue. + + This method is similar to `emit_event`, but it waits until there's sufficient space in the outgoing + event queue before emitting the event. It utilizes the queue size threshold defined in `self._qsize`. + + Args: + *args: Positional arguments to be passed to `emit_event()` for event creation. + **kwargs: Keyword arguments to be passed to `emit_event()` for event creation or configuration. 
+ + Returns: + None + + See Also: + emit_event: For emitting an event without waiting on the queue size. """ while self.outgoing_event_queue.qsize() > self._qsize: await self.helpers.sleep(0.2) return self.emit_event(*args, **kwargs) - async def events_waiting(self): + async def _events_waiting(self): """ - yields all events in queue, up to maximum batch size + Asynchronously fetches events from the incoming_event_queue, up to a specified batch size. + + Args: + None + + Returns: + tuple: A tuple containing two elements: + - events (list): A list of acceptable events from the queue. + - finish (bool): A flag indicating if a "FINISHED" event is encountered. + + Notes: + - The method pulls events from incoming_event_queue using 'get_nowait()'. + - Events go through '_event_postcheck()' for validation. + - "FINISHED" events are handled differently and the finish flag is set to True. + - If the queue is empty or the batch size is reached, the loop breaks. """ events = [] finish = False @@ -300,6 +460,27 @@ def start(self): self._tasks = [asyncio.create_task(self._worker()) for _ in range(self._max_event_handlers)] async def _setup(self): + """ + Asynchronously sets up the module by invoking its 'setup()' method. + + This method catches exceptions during setup, sets the module's error state if necessary, and determines the + status code based on the result of the setup process. + + Args: + None + + Returns: + tuple: A tuple containing the module's name, status (True for success, False for hard-fail, None for soft-fail), + and an optional status message. + + Raises: + Exception: Captured exceptions from the 'setup()' method are logged, but not propagated. + + Notes: + - The 'setup()' method can return either a simple boolean status or a tuple of status and message. + - A WordlistError exception triggers a soft-fail status. + - The debug log will contain setup status information for the module. + """ status_codes = {False: "hard-fail", None: "soft-fail", True: "success"} status = False @@ -322,6 +503,29 @@ async def _setup(self): return self.name, status, str(msg) async def _worker(self): + """ + The core worker loop for the module, responsible for handling events from the incoming event queue. + + This method is a coroutine and is run asynchronously. Multiple instances can run simultaneously based on + the 'max_event_handlers' configuration. The worker dequeues events from 'incoming_event_queue', performs + necessary prechecks, and passes the event to the appropriate handler function. + + Args: + None + + Returns: + None + + Raises: + asyncio.CancelledError: If the worker is cancelled during its operation. + + Notes: + - The worker is sensitive to the 'stopping' flag of the scan. It will terminate if this flag is set. + - The worker handles backpressure by pausing when the outgoing event queue is full. + - Batch processing is supported and is activated when 'batch_size' > 1. + - Each event is subject to a post-check via '_event_postcheck()' to decide whether it should be handled. + - Special 'FINISHED' events trigger the 'finish()' method of the module. + """ async with self.scan._acatch(context=self._worker): try: while not self.scan.stopping and not self.errored: @@ -375,9 +579,33 @@ def max_scope_distance(self): def _event_precheck(self, event): """ - Check if an event should be accepted by the module - Used when putting an event INTO the modules' queue + Pre-checks an event to determine if it should be accepted by the module for queuing. 
+ + This method is called when an event is about to be enqueued into the module's incoming event queue. + It applies various filters such as special signal event types, module error state, watched event types, and more + to decide whether or not the event should be enqueued. + + Args: + event (Event): The event object to check. + + Returns: + tuple: A tuple (bool, str) where the bool indicates if the event should be accepted, and the str gives the reason. + + Examples: + >>> result, reason = self._event_precheck(event) + >>> if result: + ... self.incoming_event_queue.put_nowait(event) + ... else: + ... self.debug(f"Not accepting {event} because {reason}") + + Notes: + - The method considers special signal event types like "FINISHED". + - Checks whether the module is in an error state. + - Checks if the event type matches the types this module is interested in (`watched_events`). + - Checks for events tagged as 'target' if the module has `target_only` flag set. + - Applies specific filtering based on event type and module name. """ + # special signal event types if event.type in ("FINISHED",): return True, "its type is FINISHED" @@ -409,8 +637,29 @@ def _event_precheck(self, event): async def _event_postcheck(self, event): """ - Check if an event should be accepted by the module - Used when taking an event FROM the module's queue (immediately before it's handled) + Post-checks an event to determine if it should be accepted by the module for handling. + + This method is called when an event is dequeued from the module's incoming event queue, right before it is actually processed. + It applies various filters such as scope, custom filtering logic, and per-host tracking to decide the event's fate. + + Args: + event (Event): The event object to check. + + Returns: + tuple: A tuple (bool, str) where the bool indicates if the event should be accepted, and the str gives the reason. + + Examples: + >>> async def custom_filter(event): + ... if event.data not in ["evilcorp.com"]: + ... return False, "it's not on the cool list" + ... + >>> self.filter_event = custom_filter + >>> result, reason = await self._event_postcheck(event) + + Notes: + - Override the `filter_event` method for custom filtering logic. + - This method also maintains host-based tracking when the `per_host_only` flag is set. + - The method will also update event production stats for output modules. """ # special exception for "FINISHED" event if event.type in ("FINISHED",): @@ -474,7 +723,22 @@ async def _cleanup(self): async def queue_event(self, event): """ - Queue (incoming) event with module + Asynchronously queues an incoming event to the module's event queue for further processing. + + The function performs an initial check to see if the event is acceptable for queuing. + If the event passes the check, it is put into the `incoming_event_queue`. + + Args: + event: The event object to be queued. + + Returns: + None: The function doesn't return anything but modifies the state of the `incoming_event_queue`. + + Examples: + >>> await self.queue_event(some_event) + + Raises: + AttributeError: If the module is not in an acceptable state to queue incoming events. """ async with self._task_counter.count("queue_event()", _log=False): if self.incoming_event_queue is False: @@ -498,7 +762,23 @@ async def queue_event(self, event): def queue_outgoing_event(self, event, **kwargs): """ - Queue (outgoing) event with module + Queues an outgoing event to the module's outgoing event queue for further processing. 
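The `(accepted, reason)` contract shared by the prechecks is easy to demonstrate outside BBOT. A condensed, illustrative version, with events modeled as plain dicts and only a subset of the real checks:

```python
# Condensed model of _event_precheck()'s (accepted, reason) contract.
def event_precheck(event: dict, watched_events: set, errored: bool = False):
    if event["type"] == "FINISHED":
        return True, "its type is FINISHED"
    if errored:
        return False, "module is in an error state"
    if "*" not in watched_events and event["type"] not in watched_events:
        return False, "its type is not in watched_events"
    return True, "precheck succeeded"


accepted, reason = event_precheck({"type": "DNS_NAME"}, {"DNS_NAME"})
assert accepted
```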
+
+        The function attempts to put the event into the `outgoing_event_queue` immediately.
+        If the module is not in a state to accept events, the resulting AttributeError is caught and a debug message is logged instead.
+
+        Args:
+            event: The event object to be queued.
+            **kwargs: Additional keyword arguments to be associated with the event.
+
+        Returns:
+            None: The function doesn't return anything but modifies the state of the `outgoing_event_queue`.
+
+        Examples:
+            >>> self.queue_outgoing_event(some_outgoing_event, abort_if=lambda e: "unresolved" in e.tags)
+
+        Raises:
+            None: The AttributeError raised by an unready queue is caught internally and logged, not propagated.
         """
         try:
             self.outgoing_event_queue.put_nowait((event, kwargs))
@@ -506,6 +786,26 @@ def queue_outgoing_event(self, event, **kwargs):
             self.debug(f"Not in an acceptable state to queue outgoing event")

     def set_error_state(self, message=None, clear_outgoing_queue=False):
+        """
+        Puts the module into an errored state where it cannot accept new events. Optionally logs a warning message.
+
+        The function sets the module's `errored` attribute to True and logs a warning with the optional message.
+        It also clears the incoming event queue to prevent further processing and updates its status to False.
+
+        Args:
+            message (str, optional): Additional message to be logged along with the warning.
+
+        Returns:
+            None: The function doesn't return anything but updates the `errored` state and clears the incoming event queue.
+
+        Examples:
+            >>> self.set_error_state()
+            >>> self.set_error_state("Failed to connect to the server")
+
+        Notes:
+            - The function sets `self._incoming_event_queue` to False to prevent its further use.
+            - If the module was already in an errored state, the function will not reset the error state or the queue.
+        """
         if not self.errored:
             log_msg = f"Setting error state for module {self.name}"
             if message is not None:
@@ -527,8 +827,27 @@ def set_error_state(self, message=None, clear_outgoing_queue=False):
             while 1:
                 self.outgoing_event_queue.get_nowait()

-    # override in the module to define different values to comprise the hash
     def get_per_host_hash(self, event):
+        """
+        Computes a per-host hash value for a given event. This method may be optionally overridden in subclasses.
+
+        The function uses the event's `host` and `port` or the parsed URL to create a string to be hashed.
+        The hash value is used for distinguishing events related to the same host.
+
+        Args:
+            event (Event): The event object containing host, port, or parsed URL information.
+
+        Returns:
+            int: The hash value computed for the host.
+
+        Examples:
+            >>> event = self.make_event("https://example.com:8443")
+            >>> self.get_per_host_hash(event)
+
+        Notes:
+            - To change the behavior, override this method in your custom module.
+            - The hash value is dependent on the `host` and `port` or the `parsed` attribute in the event object.
+        """
         parsed = getattr(event, "parsed", None)
         if parsed is None:
             to_hash = self.helpers.make_netloc(event.host, event.port)
@@ -546,6 +865,22 @@ def helpers(self):

     @property
     def status(self):
+        """
+        Provides the current status of the module as a dictionary.
+
+        The dictionary contains the following keys:
+        - 'events': A sub-dictionary with 'incoming' and 'outgoing' keys, representing the number of events in the respective queues.
+        - 'tasks': The current value of the task counter.
+        - 'errored': A boolean value indicating if the module is in an error state.
+        - 'running': A boolean value indicating if the module is currently processing data.
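A self-contained sketch of the per-host deduplication idea behind `get_per_host_hash()`; the helper below is illustrative, not the BBOT implementation:

```python
# Events on the same host:port collapse to the same hash, so a
# per_host_only module can skip duplicates.
def per_host_hash(host, port=None):
    netloc = f"{host}:{port}" if port is not None else host
    return hash(netloc)


seen = set()
for host, port in [("evilcorp.com", 443), ("evilcorp.com", 443), ("evilcorp.com", 80)]:
    key = per_host_hash(host, port)
    if key in seen:
        continue  # duplicate host:port -- would be skipped
    seen.add(key)
```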
+ + Returns: + dict: A dictionary containing the current status of the module. + + Examples: + >>> self.status + {'events': {'incoming': 5, 'outgoing': 2}, 'tasks': 3, 'errored': False, 'running': True} + """ status = { "events": {"incoming": self.num_incoming_events, "outgoing": self.outgoing_event_queue.qsize()}, "tasks": self._task_counter.value, @@ -556,19 +891,47 @@ def status(self): @property def running(self): - """ - Indicates whether the module is currently processing data. + """Property indicating whether the module is currently processing data. + + This property checks if the task counter (`self._task_counter.value`) is greater than zero, + indicating that there are ongoing tasks in the module. + + Returns: + bool: True if the module is currently processing data, False otherwise. """ return self._task_counter.value > 0 @property def finished(self): - """ - Indicates whether the module is finished (not running and nothing in queues) + """Property indicating whether the module has finished processing. + + This property checks three conditions to determine if the module is finished: + 1. The module is not currently running (`self.running` is False). + 2. The number of incoming events in the queue is zero or less (`self.num_incoming_events <= 0`). + 3. The number of outgoing events in the queue is zero or less (`self.outgoing_event_queue.qsize() <= 0`). + + Returns: + bool: True if the module has finished processing, False otherwise. """ return not self.running and self.num_incoming_events <= 0 and self.outgoing_event_queue.qsize() <= 0 async def request_with_fail_count(self, *args, **kwargs): + """Asynchronously perform an HTTP request while keeping track of consecutive failures. + + This function wraps the `self.helpers.request` method, incrementing a failure counter if + the request returns None. When the failure counter exceeds `self.failed_request_abort_threshold`, + the module is set to an error state. + + Args: + *args: Positional arguments to pass to `self.helpers.request`. + **kwargs: Keyword arguments to pass to `self.helpers.request`. + + Returns: + Any: The response object or None if the request failed. + + Raises: + None: Sets the module to an error state when the failure threshold is reached. + """ r = await self.helpers.request(*args, **kwargs) if r is None: self._request_failures += 1 @@ -578,17 +941,16 @@ async def request_with_fail_count(self, *args, **kwargs): self.set_error_state(f"Setting error state due to {self._request_failures:,} failed HTTP requests") return r - def is_spider_danger(self, source_event, url): - url_depth = self.helpers.url_depth(url) - web_spider_depth = self.scan.config.get("web_spider_depth", 1) - spider_distance = getattr(source_event, "web_spider_distance", 0) + 1 - web_spider_distance = self.scan.config.get("web_spider_distance", 0) - if (url_depth > web_spider_depth) or (spider_distance > web_spider_distance): - return True - return False - @property def config(self): + """Property that provides easy access to the module's configuration in the scan's config. + + This property serves as a shortcut to retrieve the module-specific configuration from + `self.scan.config`. If no configuration is found for this module, an empty dictionary is returned. + + Returns: + dict: The configuration dictionary specific to this module. 
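For reference, the spider-danger heuristic that this series relocates into the helpers can be sketched stand-alone. This mirrors the method removed from `BaseModule` above; `url_depth()` here is an illustrative stand-in for the real helper:

```python
# A URL is dangerous to spider if it is nested too deep or too many
# link-hops from the seed. Thresholds mirror the removed method's config keys.
from urllib.parse import urlparse


def url_depth(url):
    return len([segment for segment in urlparse(url).path.split("/") if segment])


def is_spider_danger(source_spider_distance, url, web_spider_depth=1, web_spider_distance=0):
    spider_distance = source_spider_distance + 1
    return (url_depth(url) > web_spider_depth) or (spider_distance > web_spider_distance)
```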
+        """
         config = self.scan.config.get("modules", {}).get(self.name, {})
         if config is None:
             config = {}
@@ -608,6 +970,19 @@ def outgoing_event_queue(self):

     @property
     def priority(self):
+        """
+        Gets the priority level of the module as an integer.
+
+        The priority level is constrained to be between 1 and 5, inclusive.
+        A lower value indicates a higher priority.
+
+        Returns:
+            int: The priority level of the module, constrained between 1 and 5.
+
+        Examples:
+            >>> self.priority
+            3
+        """
         return int(max(1, min(5, self._priority)))

     @property
@@ -629,8 +1004,15 @@ def log(self):

     @property
     def memory_usage(self):
-        """
-        Return how much memory the module is currently using in bytes
+        """Property that calculates the current memory usage of the module in bytes.
+
+        This property uses the `get_size` function to estimate the memory consumption
+        of the module object. The depth of the object graph traversal is limited to 3 levels
+        to avoid performance issues. Commonly shared objects like `self.scan` and `self.helpers`
+        are excluded from the calculation to prevent double-counting.
+
+        Returns:
+            int: The estimated memory usage of the module in bytes.
         """
         seen = {self.scan, self.helpers, self.log}  # noqa
         return get_size(self, max_depth=3, seen=seen)
@@ -639,6 +1021,21 @@ def __str__(self):
         return self.name

     def log_table(self, *args, **kwargs):
+        """Logs a table to the console and optionally writes it to a file.
+
+        This function generates a table using `self.helpers.make_table`, then logs each line
+        of the table as an info-level log. If a table_name is provided, it also writes the table to a file.
+
+        Args:
+            *args: Variable length argument list to be passed to `self.helpers.make_table`.
+            **kwargs: Arbitrary keyword arguments. If 'table_name' is specified, the table will be written to a file.
+
+        Returns:
+            str: The generated table as a string.
+
+        Examples:
+            >>> self.log_table(['Header1', 'Header2'], [['row1col1', 'row1col2'], ['row2col1', 'row2col2']], table_name="my_table")
+        """
         table_name = kwargs.pop("table_name", None)
         table = self.helpers.make_table(*args, **kwargs)
         for line in table.splitlines():
@@ -652,64 +1049,208 @@ def log_table(self, *args, **kwargs):
         return table

     def stdout(self, *args, **kwargs):
+        """Writes log messages directly to standard output.
+
+        This is typically reserved for output modules only, e.g. `human` or `json`.
+
+        Args:
+            *args: Variable length argument list to be passed to `self.log.stdout`.
+            **kwargs: Arbitrary keyword arguments to be passed to `self.log.stdout`.
+
+        Examples:
+            >>> self.stdout("This will be printed to stdout")
+        """
         self.log.stdout(*args, extra={"scan_id": self.scan.id}, **kwargs)

     def debug(self, *args, trace=False, **kwargs):
+        """Logs debug messages and optionally the stack trace of the most recent exception.
+
+        Args:
+            *args: Variable-length argument list to pass to the logger.
+            trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False.
+            **kwargs: Arbitrary keyword arguments to pass to the logger.
+
+        Examples:
+            >>> self.debug("This is a debug message")
+            >>> self.debug("This is a debug message with a trace", trace=True)
+        """
         self.log.debug(*args, extra={"scan_id": self.scan.id}, **kwargs)
         if trace:
             self.trace()

     def verbose(self, *args, trace=False, **kwargs):
+        """Logs messages and optionally the stack trace of the most recent exception.
+
+        Args:
+            *args: Variable-length argument list to pass to the logger.
+ trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.verbose("This is a verbose message") + >>> self.verbose("This is a verbose message with a trace", trace=True) + """ self.log.verbose(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugeverbose(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened white text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.hugeverbose("This is a huge verbose message") + >>> self.hugeverbose("This is a huge verbose message with a trace", trace=True) + """ self.log.hugeverbose(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def info(self, *args, trace=False, **kwargs): + """Logs informational messages and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.info("This is an informational message") + >>> self.info("This is an informational message with a trace", trace=True) + """ self.log.info(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugeinfo(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened blue text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.hugeinfo("This is a huge informational message") + >>> self.hugeinfo("This is a huge informational message with a trace", trace=True) + """ self.log.hugeinfo(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def success(self, *args, trace=False, **kwargs): + """Logs a success message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.success("Operation completed successfully") + >>> self.success("Operation completed with a trace", trace=True) + """ self.log.success(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugesuccess(self, *args, trace=False, **kwargs): + """Logs a whole message in emboldened green text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to False. + **kwargs: Arbitrary keyword arguments to pass to the logger. 
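All of these logging helpers share one shape: forward to the underlying logger with the scan ID attached via `extra`, then optionally dump a traceback. A condensed, stand-alone equivalent (the logger name and scan ID are illustrative):

```python
# Stand-alone model of the logging-helper pattern above.
import logging
import traceback

log = logging.getLogger("bbot.modules.demo")


def verbose(*args, scan_id="SCAN:0000", trace=False, **kwargs):
    log.info(*args, extra={"scan_id": scan_id}, **kwargs)
    if trace:
        log.debug(traceback.format_exc())
```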
+ + Examples: + >>> self.hugesuccess("This is a huge success message") + >>> self.hugesuccess("This is a huge success message with a trace", trace=True) + """ self.log.hugesuccess(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def warning(self, *args, trace=True, **kwargs): + """Logs a warning message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.warning("This is a warning message") + >>> self.warning("This is a warning message with a trace", trace=False) + """ self.log.warning(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def hugewarning(self, *args, trace=True, **kwargs): + """Logs a whole message in emboldened orange text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.hugewarning("This is a huge warning message") + >>> self.hugewarning("This is a huge warning message with a trace", trace=False) + """ self.log.hugewarning(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def error(self, *args, trace=True, **kwargs): + """Logs an error message, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. + + Examples: + >>> self.error("This is an error message") + >>> self.error("This is an error message with a trace", trace=False) + """ self.log.error(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() def trace(self): + """Logs the stack trace of the most recently caught exception. + + This method captures the type, value, and traceback of the most recent exception and logs it using the trace level. It is typically used for debugging purposes. + + Anything logged using this method will always be written to the scan's `debug.log`, even if debugging is not enabled. + + Examples: + >>> try: + >>> 1 / 0 + >>> except ZeroDivisionError: + >>> self.trace() + """ e_type, e_val, e_traceback = exc_info() if e_type is not None: self.log.trace(traceback.format_exc()) def critical(self, *args, trace=True, **kwargs): + """Logs a whole message in emboldened red text, and optionally the stack trace of the most recent exception. + + Args: + *args: Variable-length argument list to pass to the logger. + trace (bool, optional): Whether to log the stack trace of the most recently caught exception. Defaults to True. + **kwargs: Arbitrary keyword arguments to pass to the logger. 
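The `trace()` idiom documented above can be reproduced with the standard library alone; a minimal, illustrative version:

```python
# Inspect the most recent exception and log its traceback without re-raising.
import logging
import traceback
from sys import exc_info

log = logging.getLogger("demo")


def trace():
    e_type, e_val, e_traceback = exc_info()
    if e_type is not None:
        log.debug(traceback.format_exc())


try:
    1 / 0
except ZeroDivisionError:
    trace()
```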
+ + Examples: + >>> self.critical("This is a critical message") + >>> self.critical("This is a critical message with a trace", trace=False) + """ self.log.critical(*args, extra={"scan_id": self.scan.id}, **kwargs) if trace: self.trace() diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 2ae676134d..f19c5ed49e 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -148,7 +148,7 @@ async def handle_batch(self, *events): _id = row["url_id"] source_url = self.screenshots_taken[_id] source_event = events[source_url] - if self.is_spider_danger(source_event, url): + if self.helpers.is_spider_danger(source_event, url): tags.append("spider-danger") if url and url.startswith("http"): self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags) diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index 720b9b96b5..51b8a4dc22 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -78,7 +78,7 @@ async def search(self, content, event, **kwargs): url_event = self.report(result, name, event, **kwargs) if url_event is not None: url_in_scope = self.excavate.scan.in_scope(url_event) - is_spider_danger = self.excavate.is_spider_danger(event, result) + is_spider_danger = self.excavate.helpers.is_spider_danger(event, result) if ( ( urls_found >= self.web_spider_links_per_page and url_in_scope diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index 48ce967097..98b114b75a 100644 --- a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -46,6 +46,6 @@ async def handle_event(self, event): continue tags = [] - if self.is_spider_danger(event, unverified_url): + if self.helpers.is_spider_danger(event, unverified_url): tags.append("spider-danger") self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 686794ce69..69815e2e0a 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -345,7 +345,7 @@ async def async_start(self): break if "python" in self.modules: - events, finish = await self.modules["python"].events_waiting() + events, finish = await self.modules["python"]._events_waiting() for e in events: yield e diff --git a/docs/dev/scanner.md b/docs/dev/scanner.md index f5388688c0..a03de4e4bb 100644 --- a/docs/dev/scanner.md +++ b/docs/dev/scanner.md @@ -1,3 +1 @@ -# `bbot.scanner.Scanner()` - ::: bbot.scanner.Scanner diff --git a/mkdocs.yml b/mkdocs.yml index 3b5118b30e..80dcdef6dc 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -32,6 +32,8 @@ nav: - How to Write a Module: contribution.md - Developer Reference: - Scanner: dev/scanner.md + - Target: dev/target.md + - BaseModule: dev/basemodule.md - Helpers: # dev/helpers/index.md - Miscellaneous: dev/helpers/misc.md @@ -60,6 +62,7 @@ plugins: handlers: python: options: + heading_level: 1 show_signature_annotations: true show_root_toc_entry: false show_root_heading: true From 8c8ff8d2748eb26da2be2630cb65f34733231769 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 13 Sep 2023 17:09:14 -0400 Subject: [PATCH 089/123] tweak scanner docs --- bbot/scanner/scanner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 69815e2e0a..ba6ecd453d 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -138,7 +138,7 @@ def __init__( config (dict, optional): Configuration settings. Merged with BBOT config. dispatcher (Dispatcher, optional): Dispatcher object to use. 
Defaults to new Dispatcher. strict_scope (bool, optional): If True, only targets explicitly in whitelist are scanned. Defaults to False. - force_start (bool, optional): If True, forces the scan to start even with warnings. Defaults to False. + force_start (bool, optional): If True, allows the scan to start even when module setups hard-fail. Defaults to False. """ if modules is None: modules = [] From 9b2a7ce26d2c1ffc774b2f8eda9e879718c5ea01 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 14 Sep 2023 11:24:18 -0400 Subject: [PATCH 090/123] documentation for Target --- bbot/modules/base.py | 41 +++++++--- bbot/scanner/target.py | 167 +++++++++++++++++++++++++++++++++++++++-- 2 files changed, 191 insertions(+), 17 deletions(-) diff --git a/bbot/modules/base.py b/bbot/modules/base.py index a5bf776873..0a4d524c39 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -205,7 +205,7 @@ async def finish(self): This method can be overridden to execute any necessary finalization logic. For example, if the module relies on a word cloud, you might wait for the scan to finish to ensure the word cloud is most complete before running an operation. Returns: - None by default, but can return additional data if overridden. + None Warnings: This method may be called multiple times since it can raise events, which may re-trigger the "finish" phase of the scan. Optional to override. @@ -218,7 +218,7 @@ async def report(self): This method can be overridden to aggregate data and raise summary events at the end of the scan. Returns: - None by default, but can return additional data if overridden. + None Note: This method is called only once per scan. @@ -231,7 +231,7 @@ async def cleanup(self): This method can be overridden to implement custom cleanup logic. It is called only once per scan and may not raise events. Returns: - None by default, but can return additional data if overridden. + None Note: This method is called only once per scan and may not raise events. @@ -239,19 +239,20 @@ async def cleanup(self): return async def require_api_key(self): - """Asynchronously checks if the module is configured with a valid API key. - - This method is typically used within the setup() method to ensure that an API key is provided in the module configuration. Your module must define an 'api_key' in its config options for this method to work properly. + """ + Asynchronously checks if an API key is required and valid. - Example Usage: - def setup(self): - return await self.require_api_key() + Args: + None Returns: - Tuple (bool, str): The first element is a boolean indicating whether the API is ready to use. The second element is a string message, either indicating that the API is ready or describing the error. + bool or tuple: Returns True if API key is valid and ready. + Returns a tuple (None, "error message") otherwise. - Raises: - Exception: Any exceptions raised by the self.ping() method will propagate. + Notes: + - Fetches the API key from the configuration. + - Calls the 'ping()' method to test API accessibility. + - Sets the API key readiness status accordingly. """ self.api_key = self.config.get("api_key", "") if self.auth_secret: @@ -308,6 +309,22 @@ def get_watched_events(self): return self._watched_events async def _handle_batch(self): + """ + Asynchronously handles a batch of events in the module. + + Args: + None + + Returns: + bool: True if events were submitted for processing, False otherwise. + + Notes: + - The method is wrapped in a task counter to monitor asynchronous operations. 
- Checks if there are any events in the incoming queue and module is not in an error state.
+            - Invokes '_events_waiting()' to fetch a batch of events.
+            - Calls the module's 'handle_batch()' method to process these events.
+            - If a "FINISHED" event is found, invokes 'finish()' method of the module.
+        """
         finish = False
         async with self._task_counter.count(f"{self.name}.handle_batch()"):
             submitted = False
diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py
index 8d1345d42f..a500b0e9e1 100644
--- a/bbot/scanner/target.py
+++ b/bbot/scanner/target.py
@@ -1,5 +1,6 @@
 import logging
 import ipaddress
+from copy import deepcopy
 from contextlib import suppress

 from bbot.core.errors import *
@@ -10,11 +11,81 @@


 class Target:
+    """
+    A class representing a target. Can contain an unlimited number of hosts, IPs or IP ranges, URLs, etc.
+
+    Attributes:
+        make_in_scope (bool): Specifies whether to mark contained events as in-scope.
+        scan (Scan): Reference to the Scan object that instantiated the Target.
+        _events (dict): Dictionary mapping hosts to events related to the target.
+        strict_scope (bool): Flag indicating whether to restrict scope to exactly-matching hosts.
+            If set to True, only the exact hosts specified and not their children are considered part of the target.
+
+    Examples:
+        Basic usage
+        >>> target = Target(scan, "evilcorp.com", "1.2.3.0/24")
+        >>> len(target)
+        257
+        >>> list(target.events)
+        [
+            DNS_NAME("evilcorp.com", module=TARGET, tags={'domain', 'distance-1', 'target'}),
+            IP_RANGE("1.2.3.0/24", module=TARGET, tags={'ipv4', 'distance-1', 'target'})
+        ]
+        >>> "www.evilcorp.com" in target
+        True
+        >>> "1.2.3.4" in target
+        True
+        >>> "4.3.2.1" in target
+        False
+        >>> "https://admin.evilcorp.com" in target
+        True
+        >>> "bob@evilcorp.com" in target
+        True
+
+        Event correlation
+        >>> target.get("www.evilcorp.com")
+        DNS_NAME("evilcorp.com", module=TARGET, tags={'domain', 'distance-1', 'target'})
+        >>> target.get("1.2.3.4")
+        IP_RANGE("1.2.3.0/24", module=TARGET, tags={'ipv4', 'distance-1', 'target'})
+
+        Target comparison
+        >>> target2 = Target(scan, "www.evilcorp.com")
+        >>> target2 == target
+        False
+        >>> target2 in target
+        True
+        >>> target in target2
+        False
+
+    Notes:
+        - Targets are only precise down to the individual host. Ports and protocols are not considered in scope calculations.
+        - If you specify "https://evilcorp.com:8443" as a target, all of evilcorp.com (including subdomains and other ports and protocols) will be considered part of the target
+        - If you do not want to include child subdomains, use `strict_scope=True`
+    """
+
     make_in_scope = False

     def __init__(self, scan, *targets, strict_scope=False):
+        """
+        Initialize a Target object.
+
+        Args:
+            scan (Scan): Reference to the Scan object that instantiated the Target.
+            *targets: One or more targets (e.g., domain names, IP ranges) to be included in this Target.
+            strict_scope (bool, optional): Flag to control whether only the exact hosts are considered in-scope.
+                Defaults to False.
+
+        Attributes:
+            scan (Scan): Reference to the Scan object.
+            strict_scope (bool): Flag to control in-scope conditions. If True, only exact hosts are considered.
+
+        Notes:
+            - If you are instantiating a target from within a BBOT module, use `self.helpers.make_target()` instead (this removes the need to pass in a scan object).
+            - The strict_scope flag can be set to restrict scope calculation to only exactly-matching hosts and not their child subdomains.
+            - Each target is processed and stored as an `Event` in the '_events' dictionary.
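The scoping rules this docstring describes can be approximated without BBOT at all. The sketch below is an illustrative model, not the real implementation; it reproduces the docstring's membership examples:

```python
# IPs match if a target network contains them; hostnames match exactly,
# or as subdomains unless strict_scope is set.
import ipaddress


def host_in_scope(host, domains, networks, strict_scope=False):
    try:
        ip = ipaddress.ip_address(host)
        return any(ip in net for net in networks)
    except ValueError:
        pass  # not an IP; fall through to hostname checks
    if host in domains:
        return True
    if strict_scope:
        return False
    return any(host.endswith("." + domain) for domain in domains)


nets = [ipaddress.ip_network("1.2.3.0/24")]
assert host_in_scope("www.evilcorp.com", {"evilcorp.com"}, nets)
assert host_in_scope("1.2.3.4", {"evilcorp.com"}, nets)
assert not host_in_scope("4.3.2.1", {"evilcorp.com"}, nets)
```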
+ """ self.scan = scan - self.dummy_module = ScanTargetDummyModule(scan) + self._dummy_module = ScanTargetDummyModule(scan) self._events = dict() if len(targets) > 0: log.verbose(f"Creating events from {len(targets):,} targets") @@ -25,6 +96,23 @@ def __init__(self, scan, *targets, strict_scope=False): self._hash = None def add_target(self, t): + """ + Add a target or merge events from another Target object into this Target. + + Args: + t: The target to be added. It can be either a string, an event object, or another Target object. + + Attributes Modified: + _events (dict): The dictionary is updated to include the new target's events. + + Examples: + >>> target.add_target('example.com') + + Notes: + - If `t` is of the same class as this Target, all its events are merged. + - If `t` is an event, it is directly added to `_events`. + - If `make_in_scope` is True, the scope distance of the event is set to 0. + """ if type(t) == self.__class__: for k, v in t._events.items(): try: @@ -35,7 +123,9 @@ def add_target(self, t): if is_event(t): event = t else: - event = self.scan.make_event(t, source=self.scan.root_event, module=self.dummy_module, tags=["target"]) + event = self.scan.make_event( + t, source=self.scan.root_event, module=self._dummy_module, tags=["target"] + ) if self.make_in_scope: event.set_scope_distance(0) try: @@ -47,18 +137,73 @@ def add_target(self, t): @property def events(self): + """ + A generator property that yields all events in the target. + + Yields: + Event object: One of the Event objects stored in the `_events` dictionary. + + Examples: + >>> target = Target(scan, "example.com") + >>> for event in target.events: + ... print(event) + + Notes: + - This property is read-only. + - Iterating over this property gives you one event at a time from the `_events` dictionary. + """ for _events in self._events.values(): yield from _events def copy(self): + """ + Creates and returns a copy of the Target object, including a deep copy of the `_events` attribute. + + Returns: + Target: A new Target object with the same `scan` and `strict_scope` attributes as the original. + A deep copy of the `_events` dictionary is made. + + Examples: + >>> original_target = Target(scan, "example.com") + >>> copied_target = original_target.copy() + >>> copied_target is original_target + False + >>> copied_target == original_target + True + >>> copied_target in original_target + True + >>> original_target in copied_target + True + + Notes: + - The `scan` object reference is kept intact in the copied Target object. + """ self_copy = self.__class__(self.scan, strict_scope=self.strict_scope) - self_copy._events = dict(self._events) + self_copy._events = deepcopy(self._events) return self_copy def get(self, host): """ - Get the matching target for a specified host. If not found, return None + Gets the event associated with the specified host from the target's `_events` dictionary. + + Args: + host (Event, Target, or str): The hostname, IP, URL, or event to look for. + + Returns: + Event or None: Returns the Event object associated with the given host if it exists, otherwise returns None. + + Examples: + >>> target = Target(scan, "evilcorp.com", "1.2.3.0/24") + >>> target.get("www.evilcorp.com") + DNS_NAME("evilcorp.com", module=TARGET, tags={'domain', 'distance-1', 'target'}) + >>> target.get("1.2.3.4") + IP_RANGE("1.2.3.0/24", module=TARGET, tags={'ipv4', 'distance-1', 'target'}) + + Notes: + - The method returns the first event that matches the given host. 
+ - If `strict_scope` is False, it will also consider parent domains and IP ranges. """ + try: other = make_event(host, dummy=True) except ValidationError: @@ -108,7 +253,19 @@ def __hash__(self): def __len__(self): """ - Returns the total number of HOSTS (not events) in the target + Calculates and returns the total number of hosts within this target, not counting duplicate events. + + Returns: + int: The total number of unique hosts present within the target's `_events`. + + Examples: + >>> target = Target(scan, "evilcorp.com", "1.2.3.0/24") + >>> len(target) + 257 + + Notes: + - If a host is represented as an IP network, all individual IP addresses in that network are counted. + - For other types of hosts, each unique event is counted as one. """ num_hosts = 0 for host, _events in self._events.items(): From d4d1cb361c3f5f424b8bd9d086934ab1e164b773 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 14 Sep 2023 15:36:16 -0400 Subject: [PATCH 091/123] documentation for ScanManager, Event --- bbot/core/event/base.py | 268 +++++++++++++++++++++++++++- bbot/modules/base.py | 12 ++ bbot/scanner/manager.py | 69 ++++++- bbot/scanner/scanner.py | 18 +- bbot/scanner/target.py | 20 +-- bbot/test/test_step_1/test_agent.py | 2 +- docs/dev/basemodule.md | 1 + docs/dev/event.md | 16 ++ docs/dev/helpers/misc.md | 15 ++ docs/dev/target.md | 1 + docs/scanning/events.md | 4 +- mkdocs.yml | 4 + 12 files changed, 398 insertions(+), 32 deletions(-) create mode 100644 docs/dev/basemodule.md create mode 100644 docs/dev/event.md create mode 100644 docs/dev/helpers/misc.md create mode 100644 docs/dev/target.md diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index 4e4ae54a2b..adfadb1acc 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -32,6 +32,60 @@ class BaseEvent: + """ + Represents a piece of data discovered during a BBOT scan. + + An Event contains various attributes that provide metadata about the discovered data. + The attributes assist in understanding the context of the Event and facilitate further + filtering and querying. Events are integral in the construction of visual graphs and + are the cornerstone of data exchange between BBOT modules. + + You can inherit from this class when creating a new event type. However, it's not always + necessary. You only need to subclass if you want to layer additional functionality on + top of the base class. + + Attributes: + type (str): Specifies the type of the event, e.g., `IP_ADDRESS`, `DNS_NAME`. + id (str): A unique identifier for the event. + data (str or dict): The main data for the event, e.g., a URL or IP address. + data_graph (str): Representation of `self.data` for Neo4j graph nodes. + data_human (str): Representation of `self.data` for human output. + data_id (str): Representation of `self.data` used to calculate the event's ID (and ultimately its hash, which is used for deduplication) + data_json (str): Representation of `self.data` to be used in JSON serialization. + host (str, IPvXAddress, or IPvXNetwork): The associated IP address or hostname for the event + host_stem (str): An abbreviated representation of hostname that removes the TLD, e.g. "www.evilcorp". Used by the word cloud. + port (int or None): The port associated with the event, if applicable, else None. + words (set): A list of relevant keywords extracted from the event. Used by the word cloud. + scope_distance (int): Indicates how many hops the event is from the main scope; 0 means in-scope. 
+        web_spider_distance (int): The spider distance from the web root, specific to web crawling.
+        scan (Scanner): The scan object that generated the event.
+        timestamp (datetime.datetime): The time at which the data was discovered.
+        resolved_hosts (list of str): List of hosts to which the event data resolves, applicable for URLs and DNS names.
+        source (BaseEvent): The source event that led to the discovery of this event.
+        source_id (str): The `id` attribute of the source event.
+        tags (set of str): Descriptive tags for the event, e.g., `mx-record`, `in-scope`.
+        module (BaseModule): The module that discovered the event.
+        module_sequence (str): The sequence of modules that participated in the discovery.
+
+    Examples:
+        ```json
+        {
+            "type": "URL",
+            "id": "URL:017ec8e5dc158c0fd46f07169f8577fb4b45e89a",
+            "data": "http://www.blacklanternsecurity.com/",
+            "web_spider_distance": 0,
+            "scope_distance": 0,
+            "scan": "SCAN:4d786912dbc97be199da13074699c318e2067a7f",
+            "timestamp": 1688526222.723366,
+            "resolved_hosts": ["185.199.108.153"],
+            "source": "OPEN_TCP_PORT:cf7e6a937b161217eaed99f0c566eae045d094c7",
+            "tags": ["in-scope", "distance-0", "dir", "ip-185-199-108-153", "status-301", "http-title-301-moved-permanently"],
+            "module": "httpx",
+            "module_sequence": "httpx"
+        }
+        ```
+    """
+
     # Always emit this event type even if it's not in scope
     _always_emit = False
     # Always emit events with these tags even if they're not in scope
@@ -46,7 +100,7 @@ class BaseEvent:
     def __init__(
         self,
         data,
-        event_type=None,
+        event_type,
         source=None,
         module=None,
         scan=None,
@@ -57,6 +111,29 @@ def __init__(
         _dummy=False,
         _internal=None,
     ):
+        """
+        Initializes an Event object with the given parameters.
+
+        In most cases, you should use `make_event()` instead of instantiating this class directly.
+        `make_event()` is much friendlier, and can auto-detect the event type for you.
+
+        Attributes:
+            data (str, dict): The primary data for the event.
+            event_type (str): Type of the event, e.g., 'IP_ADDRESS'.
+            source (BaseEvent, optional): Source event that led to this event's discovery. Defaults to None.
+            module (str, optional): Module that discovered the event. Defaults to None.
+            scan (Scan, optional): BBOT Scan object. Required unless _dummy is True. Defaults to None.
+            scans (list of Scan, optional): BBOT Scan objects, used primarily when unserializing an Event from the database. Defaults to None.
+            tags (list of str, optional): Descriptive tags for the event. Defaults to None.
+            confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5.
+            timestamp (datetime, optional): Time of event discovery. Defaults to current UTC time.
+            _dummy (bool, optional): If True, disables certain data validations. Defaults to False.
+            _internal (Any, optional): If specified, makes the event internal. Defaults to None.
+
+        Raises:
+            ValidationError: If neither `scan` nor `source` is specified and `_dummy` is False.
+        """
+
        self._id = None
        self._hash = None
        self.__host = None
@@ -229,6 +306,19 @@ def scope_distance(self):

     @scope_distance.setter
     def scope_distance(self, scope_distance):
+        """
+        Setter for the scope_distance attribute, ensuring it only decreases.
+
+        The scope_distance attribute is designed to never increase; it can only be set to smaller values than
+        the current one. If a larger value is provided, it is ignored. The setter also updates the event's
+        tags to reflect the new scope distance.
+ + Parameters: + scope_distance (int): The new scope distance to set, must be a non-negative integer. + + Note: + The method will automatically update the relevant 'distance-' tags associated with the event. + """ if scope_distance >= 0: new_scope_distance = None # ensure scope distance does not increase (only allow setting to smaller values) @@ -249,6 +339,19 @@ def source(self): @source.setter def source(self, source): + """ + Setter for the source attribute, ensuring it's a valid event and updating scope distance. + + Sets the source of the event and automatically adjusts the scope distance based on the source event's + scope distance. The scope distance is incremented by 1 if the host of the source event is different + from the current event's host. + + Parameters: + source (BaseEvent): The new source event to set. Must be a valid event object. + + Note: + If an invalid source is provided and the event is not a dummy, a warning will be logged. + """ if is_event(source): self._source = source if source.scope_distance >= 0: @@ -290,12 +393,47 @@ def get_sources(self, omit=False): return sources def make_internal(self): + """ + Marks the event as internal, excluding it from output but allowing normal exchange between scan modules. + + Internal events are typically speculative and may not be interesting by themselves but can lead to + the discovery of interesting events. This method sets the `_internal` attribute to True, adds the + "internal" tag, and ensures the event is marked as made internal (useful for later reversion). + + Examples of internal events include `OPEN_TCP_PORT`s from the `speculate` module, + `IP_ADDRESS`es from the `ipneighbor` module, or out-of-scope `DNS_NAME`s that originate + from DNS resolutions. + + Once an event is marked as internal, all of its future children become internal as well. + If `ScanManager._emit_event()` determines the event is interesting, it may be reverted back to its + original state and forcefully re-emitted along with the whole chain of internal events. + + The purpose of internal events is to enable speculative/explorative discovery without cluttering + the console with irrelevant or uninteresting events. + """ if not self._made_internal: self._internal = True self.add_tag("internal") self._made_internal = True def unmake_internal(self, set_scope_distance=None, force_output=False): + """ + Reverts the event from being internal, optionally forcing it to be included in output and setting its scope distance. + + Removes the 'internal' tag, resets the `_internal` attribute, and adjusts scope distance if specified. + Optionally, forces the event to be included in the output. Also, if any source events are internal, they + are also reverted recursively. + + This typically happens in `ScanManager._emit_event()` if the event is determined to be interesting. + + Parameters: + set_scope_distance (int, optional): If specified, sets the scope distance to this value. + force_output (bool or str, optional): If True, forces the event to be included in output. + If set to "trail_only", only its source events are modified. + + Returns: + list: A list of source events that were also reverted from being internal. 
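A toy model of the internal-event chain described above: un-making an event internal also reverts its internal ancestors and returns the reverted trail. The class below is purely illustrative:

```python
# Reverting an event also reverts any internal ancestors.
class ToyEvent:
    def __init__(self, name, source=None):
        self.name = name
        self.source = source
        self.internal = True

    def unmake_internal(self):
        source_trail = []
        self.internal = False
        if self.source is not None and self.source.internal:
            source_trail.append(self.source)
            source_trail += self.source.unmake_internal()
        return source_trail


a = ToyEvent("a")
b = ToyEvent("b", source=a)
c = ToyEvent("c", source=b)
assert [e.name for e in c.unmake_internal()] == ["b", "a"]
```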
+ """ source_trail = [] self.remove_tag("internal") if self._made_internal: @@ -322,7 +460,17 @@ def unmake_internal(self, set_scope_distance=None, force_output=False): def set_scope_distance(self, d=0): """ - Set the scope of an event and its parents + Sets the scope distance for the event and its parent events, while considering module-specific scoping rules. + + Unmakes the event internal if needed and adjusts its scope distance. If the distance is set to 0, + adds the 'in-scope' tag to the event. Takes into account module-specific scoping preferences unless + the event type is "DNS_NAME". + + Parameters: + d (int): The scope distance to set for this event. + + Returns: + list: A list of parent events whose scope distance was also set. """ source_trail = [] # keep the event internal if the module requests so, unless it's a DNS_NAME @@ -337,6 +485,19 @@ def _host(self): return "" def _sanitize_data(self, data): + """ + Validates and sanitizes the event's data during instantiation. + + By default, uses the '_data_load' method to pre-process the data and then applies the '_data_validator' + to validate and create a sanitized dictionary. Raises a ValidationError if any of the validations fail. + Subclasses can override this method to provide custom validation logic. + + Returns: + Any: The sanitized data. + + Raises: + ValidationError: If the data fails to validate. + """ data = self._data_load(data) if self._data_validator is not None: if not isinstance(data, dict): @@ -377,7 +538,15 @@ def _data_id(self): @property def pretty_string(self): """ - Graph representation of event.data + A human-friendly representation of the event's data. Used for graph representation. + + If the event's data is a dictionary, the function will try to return a JSON-formatted string. + Otherwise, it will use smart_decode to convert the data into a string representation. + + Override if necessary. + + Returns: + str: The graphical representation of the event's data. """ return self._pretty_string() @@ -424,6 +593,18 @@ def __contains__(self, other): return False def json(self, mode="json"): + """ + Serializes the event object to a JSON-compatible dictionary. + + By default, it includes attributes such as 'type', 'id', 'data', 'scope_distance', and others that are present. + Additional specific attributes can be serialized based on the mode specified. + + Parameters: + mode (str): Specifies the data serialization mode. Default is "json". Other options include "graph", "human", and "id". + + Returns: + dict: JSON-serializable dictionary representation of the event object. + """ j = dict() for i in ("type", "id"): v = getattr(self, i, "") @@ -466,14 +647,28 @@ def json(self, mode="json"): @staticmethod def from_json(j): + """ + Convenience shortcut to create an Event object from a JSON-compatible dictionary. + + Calls the `event_from_json()` function to deserialize the event. + + Parameters: + j (dict): The JSON-compatible dictionary containing event data. + + Returns: + Event: The deserialized Event object. + """ return event_from_json(j) @property def module_sequence(self): """ - A human-friendly representation of the module name that includes modules from omitted source events + Get a human-friendly string that represents the sequence of modules responsible for generating this event. + + Includes the names of omitted source events to provide a complete view of the module sequence leading to this event. - Helpful in identifying where a URL came from + Returns: + str: The module sequence in human-friendly format. 
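Taken together, `json()` and `event_from_json()` form a round trip. A hedged sketch using the scan-less "dummy" pattern from the `make_event()` examples (assumes BBOT is importable; exact serialized fields may vary):

```python
# Round-trip sketch built from the json()/from_json() docstrings above.
from bbot.core.event.base import event_from_json, make_event

event = make_event("1.2.3.4", dummy=True)
serialized = event.json()
assert serialized["type"] == "IP_ADDRESS"

rehydrated = event_from_json(serialized)
assert rehydrated.data == "1.2.3.4"
```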
""" module_name = getattr(self.module, "name", "") if getattr(self.source, "_omit", False): @@ -974,7 +1169,47 @@ def make_event( internal=None, ): """ - If data is already an event, simply return it + Creates and returns a new event object or modifies an existing one. + + This function serves as a factory for creating new event objects, either by generating a new `Event` + object or by updating an existing event with additional metadata. If `data` is already an event, + it updates the event based on the additional parameters provided. + + Parameters: + data (Union[str, dict, BaseEvent]): The primary data for the event or an existing event object. + event_type (str, optional): Type of the event, e.g., 'IP_ADDRESS'. Auto-detected if not provided. + source (BaseEvent, optional): Source event leading to this event's discovery. + module (str, optional): Module that discovered the event. + scan (Scan, optional): BBOT Scan object associated with the event. + scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization. + tags (Union[str, List[str]], optional): Descriptive tags for the event, as a list or a single string. + confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5. + dummy (bool, optional): Disables data validations if set to True. Defaults to False. + internal (Any, optional): Makes the event internal if set to True. Defaults to None. + + Returns: + BaseEvent: A new or updated event object. + + Raises: + ValidationError: Raised when there's an error in event data or type sanitization. + + Examples: + If inside a module, e.g. from within its `handle_event()`: + >>> self.make_event("1.2.3.4", source=event) + IP_ADDRESS("1.2.3.4", module=nmap, tags={'ipv4', 'distance-1'}) + + If you're outside a module but you have a scan object: + >>> scan.make_event("1.2.3.4", source=scan.root_event) + IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4', 'distance-1'}) + + If you're outside a scan and just messing around: + >>> from bbot.core.event.base import make_event + >>> make_event("1.2.3.4", dummy=True) + IP_ADDRESS("1.2.3.4", module=None, tags={'ipv4'}) + + Note: + When working within a module's `handle_event()`, use the instance method + `self.make_event()` instead of calling this function directly. """ # allow tags to be either a string or an array @@ -1037,6 +1272,27 @@ def make_event( def event_from_json(j): + """ + Creates an event object from a JSON dictionary. + + This function deserializes a JSON dictionary to create a new event object, using the `make_event` function + for the actual object creation. It sets additional attributes such as the timestamp and scope distance + based on the input JSON. + + Parameters: + j (Dict): JSON dictionary containing the event attributes. + Must include keys "data" and "type". + + Returns: + BaseEvent: A new event object initialized with attributes from the JSON dictionary. + + Raises: + ValidationError: Raised when the JSON dictionary is missing required fields. + + Note: + The function assumes that the input JSON dictionary is valid and may raise exceptions + if required keys are missing. Make sure to validate the JSON input beforehand. + """ try: kwargs = { "data": j["data"], diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 0a4d524c39..981caf61d6 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -354,6 +354,10 @@ def make_event(self, *args, **kwargs): **kwargs: Keyword arguments to be passed to the scan's make_event method. 
raise_error (bool, optional): Whether to raise a validation error if the event could not be created. Defaults to False.
 
+        Examples:
+            >>> new_event = self.make_event("1.2.3.4", source=event)
+            >>> self.emit_event(new_event)
+
         Returns:
             Event or None: The created event, or None if a validation error occurred and raise_error was False.
 
@@ -375,9 +379,8 @@ def make_event(self, *args, **kwargs):
     def emit_event(self, *args, **kwargs):
         """Emit an event to the event queue and distribute it to interested modules.
 
+        This is how modules "return" data.
+
         The method first creates an event object by calling `self.make_event()` with the provided arguments.
         Then, the event is queued for outgoing distribution using `self.queue_outgoing_event()`.
 
@@ -387,6 +393,12 @@ def emit_event(self, *args, **kwargs):
             - quick: Optional flag to indicate whether the event should be processed quickly.
 
+        Examples:
+            >>> self.emit_event("www.evilcorp.com", source=event, tags=["affiliate"])
+
+            >>> new_event = self.make_event("1.2.3.4", source=event)
+            >>> self.emit_event(new_event)
+
         Returns:
             None
 
diff --git a/bbot/scanner/manager.py b/bbot/scanner/manager.py
index 4797bf1367..572ce4cd1f 100644
--- a/bbot/scanner/manager.py
+++ b/bbot/scanner/manager.py
@@ -11,10 +11,32 @@
 
 
 class ScanManager:
     """
-    Manages modules and events during a scan
+    Manages the modules, event queues, and overall event flow during a scan.
+
+    Simultaneously serves as a shepherd, policeman, judge, jury, and executioner for events.
+    It is responsible for managing the incoming event queue and distributing events to modules.
+
+    Attributes:
+        scan (Scan): Reference to the Scan object that instantiated the ScanManager.
+        incoming_event_queue (asyncio.PriorityQueue): Queue storing incoming events for processing.
+        events_distributed (set): Set tracking globally unique events.
+        events_accepted (set): Set tracking events accepted by individual modules.
+        dns_resolution (bool): Flag to enable or disable DNS resolution.
+        _task_counter (TaskCounter): Counter for ongoing tasks.
+        _new_activity (bool): Flag indicating new activity.
+        _modules_by_priority (dict): Modules sorted by their priorities.
+        _incoming_queues (list): List of incoming event queues from each module.
+        _module_priority_weights (list): Weight values for each module based on priority.
     """

     def __init__(self, scan):
+        """
+        Initializes the ScanManager object, setting up essential attributes for scan management.
+
+        Args:
+            scan (Scan): Reference to the Scan object that instantiated the ScanManager.
+        """
+
         self.scan = scan

         self.incoming_event_queue = asyncio.PriorityQueue()
@@ -32,8 +54,13 @@ def __init__(self, scan):

     async def init_events(self):
         """
-        seed scanner with target events
+        Initializes events by seeding the scanner with target events and distributing them for further processing.
+
+        Notes:
+            - This method populates the event queue with initial target events.
+            - It also marks the Scan object as finished with initialization by setting `_finished_init` to True.
         """
+
         context = f"manager.init_events()"
         async with self.scan._acatch(context), self._task_counter.count(context):
             await self.distribute_event(self.scan.root_event)
@@ -87,7 +114,41 @@ def _event_precheck(self, event, exclude=("DNS_NAME",)):
             return False
         return True

-    async def _emit_event(self, event, *args, **kwargs):
+    async def _emit_event(self, event, **kwargs):
+        """
+        Handles the emission, tagging, and distribution of events during a scan.
+
+        A lot of really important stuff happens here.
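The `incoming_event_queue` noted in the ScanManager attributes above is an `asyncio.PriorityQueue`; a minimal illustration of why that matters — entries with lower priority values are handed out first:

```python
# Self-contained demonstration of priority-ordered event intake.
import asyncio


async def main():
    queue = asyncio.PriorityQueue()
    await queue.put((4, "low-priority event"))
    await queue.put((1, "high-priority event"))
    priority, event = await queue.get()
    assert event == "high-priority event"


asyncio.run(main())
```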
Actually this is probably the most + important method in all of BBOT. It is basically the central intersection that + every event passes through. + + Probably it is also needless to say that it exists in a delicate balance. + Close to half of my debugging time has been spent in this function. + I have slain many dragons here and there may still be more yet to slay. + + Tread carefully, friend. -TheTechromancer + + Notes: + - Central function for decision-making in BBOT. + - Conducts DNS resolution, tagging, and scope calculations. + - Checks against whitelists and blacklists. + - Calls custom callbacks. + - Handles DNS wildcard events. + - Decides on event acceptance and distribution. + + Parameters: + event (Event): The event object to be emitted. + **kwargs: Arbitrary keyword arguments (e.g., `on_success_callback`, `abort_if`). + + Side Effects: + - Event tagging. + - Populating DNS data. + - Emitting new events. + - Queueing events for further processing. + - Adjusting event scopes. + - Running callbacks. + - Updating scan statistics. + """ log.debug(f"Emitting {event}") distribute_event = True event_distributed = False @@ -272,7 +333,7 @@ async def _emit_event(self, event, *args, **kwargs): self.queue_event(child_event) except ValidationError as e: - log.warning(f"Event validation failed with args={args}, kwargs={kwargs}: {e}") + log.warning(f"Event validation failed with kwargs={kwargs}: {e}") log.trace(traceback.format_exc()) finally: diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index ba6ecd453d..38b7eab23f 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -14,8 +14,8 @@ from bbot import config as bbot_config +from .target import Target from .stats import ScanStats -from .target import ScanTarget from .manager import ScanManager from .dispatcher import Dispatcher from bbot.modules import module_loader @@ -79,10 +79,10 @@ class Scanner: - "FINISHED" (8): Status when the scan has successfully completed. ``` _status_code (int): The numerical representation of the current scan status, stored for internal use. It is mapped according to the values in `_status_codes`. - target (ScanTarget): Target of scan + target (Target): Target of scan config (omegaconf.dictconfig.DictConfig): BBOT config - whitelist (ScanTarget): Scan whitelist (by default this is the same as `target`) - blacklist (ScanTarget): Scan blacklist (this takes ultimate precedence) + whitelist (Target): Scan whitelist (by default this is the same as `target`) + blacklist (Target): Scan blacklist (this takes ultimate precedence) helpers (ConfigAwareHelper): Helper containing various reusable functions, regexes, etc. manager (ScanManager): Coordinates and monitors the flow of events between modules during a scan dispatcher (Dispatcher): Triggers certain events when the scan `status` changes @@ -128,8 +128,8 @@ def __init__( Args: *targets (str): Target(s) to scan. - whitelist (ScanTarget, optional): Whitelisted target(s) to scan. Defaults to the same as `targets`. - blacklist (ScanTarget, optional): Blacklisted target(s). Takes ultimate precedence. Defaults to empty. + whitelist (list, optional): Whitelisted target(s) to scan. Defaults to the same as `targets`. + blacklist (list, optional): Blacklisted target(s). Takes ultimate precedence. Defaults to empty. scan_id (str, optional): Unique identifier for the scan. Auto-generates if None. name (str, optional): Human-readable name of the scan. Auto-generates if None. modules (list[str], optional): List of module names to use during the scan. 
Defaults to empty list. @@ -194,7 +194,7 @@ def __init__( else: self.home = self.helpers.bbot_home / "scans" / self.name - self.target = ScanTarget(self, *targets, strict_scope=strict_scope) + self.target = Target(self, *targets, strict_scope=strict_scope, make_in_scope=True) self.modules = OrderedDict({}) self._scan_modules = modules @@ -205,10 +205,10 @@ def __init__( if not whitelist: self.whitelist = self.target.copy() else: - self.whitelist = ScanTarget(self, *whitelist, strict_scope=strict_scope) + self.whitelist = Target(self, *whitelist, strict_scope=strict_scope) if not blacklist: blacklist = [] - self.blacklist = ScanTarget(self, *blacklist) + self.blacklist = Target(self, *blacklist) if dispatcher is None: self.dispatcher = Dispatcher() diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index a500b0e9e1..f79c6edf84 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -63,9 +63,7 @@ class Target: - If you do not want to include child subdomains, use `strict_scope=True` """ - make_in_scope = False - - def __init__(self, scan, *targets, strict_scope=False): + def __init__(self, scan, *targets, strict_scope=False, make_in_scope=False): """ Initialize a Target object. @@ -74,6 +72,8 @@ def __init__(self, scan, *targets, strict_scope=False): *targets: One or more targets (e.g., domain names, IP ranges) to be included in this Target. strict_scope (bool, optional): Flag to control whether only the exact hosts are considered in-scope. Defaults to False. + make_in_scope (bool, optional): Flag to control whether contained events are marked as in-scope. + Defaults to False. Attributes: scan (Scan): Reference to the Scan object. @@ -85,14 +85,16 @@ def __init__(self, scan, *targets, strict_scope=False): - Each target is processed and stored as an `Event` in the '_events' dictionary. 
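As a side note on this commit's design change: the `make_in_scope` behavior that previously required a dedicated `ScanTarget` subclass (whose only job was to flip a class attribute) is now selected with a plain constructor flag. A minimal standalone sketch of that pattern, using illustrative names rather than BBOT's real classes:

```python
# Sketch only: a constructor flag replaces a one-line subclass.
class Target:
    def __init__(self, *targets, strict_scope=False, make_in_scope=False):
        self.strict_scope = strict_scope
        # previously expressed as: class ScanTarget(Target): make_in_scope = True
        self.make_in_scope = make_in_scope
        self.targets = list(targets)

main_target = Target("evilcorp.com", make_in_scope=True)
whitelist = Target("evilcorp.com", strict_scope=True)
assert main_target.make_in_scope and not whitelist.make_in_scope
```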
""" self.scan = scan - self._dummy_module = ScanTargetDummyModule(scan) + self.strict_scope = strict_scope + self.make_in_scope = make_in_scope + + self._dummy_module = TargetDummyModule(scan) self._events = dict() if len(targets) > 0: log.verbose(f"Creating events from {len(targets):,} targets") for t in targets: self.add_target(t) - self.strict_scope = strict_scope self._hash = None def add_target(self, t): @@ -232,7 +234,7 @@ def __iter__(self): yield from self.events def __contains__(self, other): - # if "other" is a ScanTarget + # if "other" is a Target if type(other) == self.__class__: contained_in_self = [self._contains(e) for e in other.events] return all(contained_in_self) @@ -276,11 +278,7 @@ def __len__(self): return num_hosts -class ScanTarget(Target): - make_in_scope = True - - -class ScanTargetDummyModule(BaseModule): +class TargetDummyModule(BaseModule): _type = "TARGET" name = "TARGET" diff --git a/bbot/test/test_step_1/test_agent.py b/bbot/test/test_step_1/test_agent.py index a4b8e447e9..73bb503551 100644 --- a/bbot/test/test_step_1/test_agent.py +++ b/bbot/test/test_step_1/test_agent.py @@ -142,7 +142,7 @@ async def test_agent(agent): async with websockets.serve(_websocket_handler, "127.0.0.1", 8765): asyncio.create_task(agent.start()) # wait for 30 seconds - await asyncio.wait_for(scan_done.wait(), 10) + await asyncio.wait_for(scan_done.wait(), 30) assert success await agent.start_scan("scan_to_be_cancelled", targets=["127.0.0.1"], modules=["ipneighbor"]) diff --git a/docs/dev/basemodule.md b/docs/dev/basemodule.md new file mode 100644 index 0000000000..04e59042ec --- /dev/null +++ b/docs/dev/basemodule.md @@ -0,0 +1 @@ +::: bbot.modules.base.BaseModule diff --git a/docs/dev/event.md b/docs/dev/event.md new file mode 100644 index 0000000000..9169b80af0 --- /dev/null +++ b/docs/dev/event.md @@ -0,0 +1,16 @@ +This is a developer reference. For a high-level description of BBOT events, plus a full list of event types, see [Events](../../scanning/events) + +::: bbot.core.event.base.make_event +::: bbot.core.event.base.event_from_json + +::: bbot.core.event.base.BaseEvent + options: + members: + - __init__ + - json + - from_json + - pretty_string + - module_sequence + - make_internal + - unmake_internal + - set_scope_distance diff --git a/docs/dev/helpers/misc.md b/docs/dev/helpers/misc.md new file mode 100644 index 0000000000..5a390a520e --- /dev/null +++ b/docs/dev/helpers/misc.md @@ -0,0 +1,15 @@ +# Misc Helpers + +These are miscellaneous helpers, used throughout BBOT and its modules for simple tasks such as parsing domains, ports, urls, etc. + + +::: bbot.core.helpers.misc.is_domain +::: bbot.core.helpers.misc.is_subdomain +::: bbot.core.helpers.misc.is_ptr +::: bbot.core.helpers.misc.is_uri +::: bbot.core.helpers.misc.is_url +::: bbot.core.helpers.misc.parent_domain +::: bbot.core.helpers.misc.domain_parents +::: bbot.core.helpers.misc.parent_url +::: bbot.core.helpers.misc.url_parents + \ No newline at end of file diff --git a/docs/dev/target.md b/docs/dev/target.md new file mode 100644 index 0000000000..b2e4bffe31 --- /dev/null +++ b/docs/dev/target.md @@ -0,0 +1 @@ +::: bbot.scanner.target.Target diff --git a/docs/scanning/events.md b/docs/scanning/events.md index 9a0fc19a79..bf6f5de730 100644 --- a/docs/scanning/events.md +++ b/docs/scanning/events.md @@ -16,7 +16,7 @@ In addition to the obvious data (e.g. 
`www.evilcorp.com`), an event also contain - its `.scope_distance` (how many hops it is from the main scope, 0 == in-scope) - a list of `.tags` that describe the data (`mx-record`, `http-title`, etc.) -These attributes allow us to construct a visual graph of events (e.g. in [Neo4j](output.md#neo4j)) and query/filter/grep them more easily. Here is what a typical event looks like in JSON format: +These attributes allow us to construct a visual graph of events (e.g. in [Neo4j](../output#neo4j)) and query/filter/grep them more easily. Here is what a typical event looks like in JSON format: ```json { @@ -42,6 +42,8 @@ These attributes allow us to construct a visual graph of events (e.g. in [Neo4j] } ``` +For a more detailed description of BBOT events, see [Developer Documentation - Event](../../dev/event). + Below is a full list of event types along with which modules produce/consume them. ## List of Event Types diff --git a/mkdocs.yml b/mkdocs.yml index 80dcdef6dc..1ccedb94c7 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -32,6 +32,7 @@ nav: - How to Write a Module: contribution.md - Developer Reference: - Scanner: dev/scanner.md + - Event: dev/event.md - Target: dev/target.md - BaseModule: dev/basemodule.md - Helpers: @@ -69,6 +70,9 @@ plugins: show_root_full_path: false separate_signature: true docstring_section_style: "list" + filters: + - "!^_" + - "^__init__$" import: - https://docs.python.org/3.11/objects.inv - https://omegaconf.readthedocs.io/en/latest/objects.inv From baa1d4629ea96464a1bdde9560f9668408fed9c8 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 14 Sep 2023 16:01:00 -0400 Subject: [PATCH 092/123] fix agent tests --- bbot/scanner/target.py | 7 +++---- docs/dev/event.md | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index f79c6edf84..f733f82956 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -1,6 +1,5 @@ import logging import ipaddress -from copy import deepcopy from contextlib import suppress from bbot.core.errors import * @@ -159,11 +158,11 @@ def events(self): def copy(self): """ - Creates and returns a copy of the Target object, including a deep copy of the `_events` attribute. + Creates and returns a copy of the Target object, including a shallow copy of the `_events` attribute. Returns: Target: A new Target object with the same `scan` and `strict_scope` attributes as the original. - A deep copy of the `_events` dictionary is made. + A shallow copy of the `_events` dictionary is made. Examples: >>> original_target = Target(scan, "example.com") @@ -181,7 +180,7 @@ def copy(self): - The `scan` object reference is kept intact in the copied Target object. """ self_copy = self.__class__(self.scan, strict_scope=self.strict_scope) - self_copy._events = deepcopy(self._events) + self_copy._events = dict(self._events) return self_copy def get(self, host): diff --git a/docs/dev/event.md b/docs/dev/event.md index 9169b80af0..79f0cc7cb5 100644 --- a/docs/dev/event.md +++ b/docs/dev/event.md @@ -1,4 +1,4 @@ -This is a developer reference. For a high-level description of BBOT events, plus a full list of event types, see [Events](../../scanning/events) +This is a developer reference. 
For a high-level description of BBOT events including a full list of event types, see [Events](../../scanning/events) ::: bbot.core.event.base.make_event ::: bbot.core.event.base.event_from_json From 8d03b1ac11b72b33650573ebdb22c5860621bde6 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 14 Sep 2023 16:40:11 -0400 Subject: [PATCH 093/123] more scanner documentation --- bbot/scanner/scanner.py | 57 ++++++++++++++++++++++++++++++++++++----- 1 file changed, 50 insertions(+), 7 deletions(-) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 38b7eab23f..64d8526480 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -89,6 +89,10 @@ class Scanner: modules (dict): Holds all loaded modules in this format: `{"module_name": Module()}` stats (ScanStats): Holds high-level scan statistics such as how many events have been produced and consumed by each module home (pathlib.Path): Base output directory of the scan (default: `~/.bbot/scans/`) + running (bool): Whether the scan is currently running. + stopping (bool): Whether the scan is currently stopping. + stopped (bool): Whether the scan is currently stopped. + aborting (bool): Whether the scan is aborted or currently aborting. Notes: - The status is read-only once set to "ABORTING" until it transitions to "ABORTED." @@ -525,8 +529,12 @@ async def load_modules(self): self._modules_loaded = True def stop(self): - """ - Forcefully stop an in-progress scan + """Stops the in-progress scan and performs necessary cleanup. + + This method sets the scan's status to "ABORTING," cancels any pending tasks, and drains event queues. It also kills child processes spawned during the scan. + + Returns: + None """ if not self._stopping: self._stopping = True @@ -567,7 +575,13 @@ async def finish(self): return False def _drain_queues(self): - # Empty event queues + """Empties all the event queues for each loaded module and the manager's incoming event queue. + + This method iteratively empties both the incoming and outgoing event queues of each module, as well as the incoming event queue of the scan manager. + + Returns: + None + """ self.debug("Draining queues") for module in self.modules.values(): with contextlib.suppress(asyncio.queues.QueueEmpty): @@ -584,6 +598,16 @@ def _drain_queues(self): self.debug("Finished draining queues") def _cancel_tasks(self): + """Cancels all asynchronous tasks and shuts down the process pool. + + This method collects all pending tasks from each module, the dispatcher, + and the scan manager. After collecting these tasks, it cancels them synchronously + using a helper function. Finally, it shuts down the process pool, canceling any + pending futures. + + Returns: + None + """ tasks = [] # module workers for m in self.modules.values(): @@ -603,13 +627,35 @@ def _cancel_tasks(self): self.process_pool.shutdown(cancel_futures=True) async def _report(self): + """Asynchronously executes the `report()` method for each module in the scan. + + This method is called once at the end of each scan and is responsible for + triggering the `report()` function for each module. It executes irrespective + of whether the scan was aborted or completed successfully. The method makes + use of an asynchronous context manager (`_acatch`) to handle exceptions and + a task counter to keep track of the task's context. 
+
+        Returns:
+            None
+        """
         for mod in self.modules.values():
             context = f"{mod.name}.report()"
             async with self._acatch(context), mod._task_counter.count(context):
                 await mod.report()

     async def _cleanup(self):
-        # clean up modules
+        """Asynchronously executes the `cleanup()` method for each module in the scan.
+
+        This method is called once at the end of the scan to perform resource cleanup
+        tasks. It is executed regardless of whether the scan was aborted or completed
+        successfully. The scan status is set to "CLEANING_UP" during the execution.
+        After calling the `cleanup()` method for each module, it performs additional
+        cleanup tasks such as removing the scan's home directory if empty and cleaning
+        old scans.
+
+        Returns:
+            None
+        """
         self.status = "CLEANING_UP"
         for mod in self.modules.values():
             await mod._cleanup()
@@ -659,9 +705,6 @@ def word_cloud(self):

     @property
     def stopping(self):
-        """
-        Returns True if the scan is not running
-        """
         return not self.running

From b38ca864834d28dd334b95ab6a830e4fbda9804d Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Thu, 14 Sep 2023 16:46:04 -0400
Subject: [PATCH 094/123] even more scanner docs

---
 bbot/scanner/scanner.py | 28 +++++++++++++++++++++++++---
 1 file changed, 25 insertions(+), 3 deletions(-)

diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py
index 64d8526480..fd7847d05e 100644
--- a/bbot/scanner/scanner.py
+++ b/bbot/scanner/scanner.py
@@ -465,9 +465,31 @@ async def setup_modules(self, remove_failed=True):
         }

     async def load_modules(self):
-        """
-        Import and instantiate all scan modules (including internal ones).
-        Module dependencies will be installed as part of this process.
+        """Asynchronously import and instantiate all scan modules, including internal and output modules.
+
+        This method is automatically invoked by `setup_modules()`. It performs several key tasks in the following sequence:
+
+        1. Install dependencies for each module via `self.helpers.depsinstaller.install()`.
+        2. Load scan modules and update the `modules` dictionary.
+        3. Load internal modules and update the `modules` dictionary.
+        4. Load output modules and update the `modules` dictionary.
+        5. Sort modules based on their `_priority` attribute.
+
+        If any modules fail to load or their dependencies fail to install, a ScanError will be raised (unless `self.force_start` is set to True).
+
+        Attributes:
+            succeeded, failed (tuple): A tuple containing lists of modules that succeeded or failed during the dependency installation.
+            loaded_modules, loaded_internal_modules, loaded_output_modules (dict): Dictionaries of successfully loaded modules.
+            failed, failed_internal, failed_output (list): Lists of module names that failed to load.
+
+        Raises:
+            ScanError: If any module dependencies fail to install or modules fail to load, and if self.force_start is False.
+
+        Returns:
+            None
+
+        Note:
+            After all modules are loaded, they are sorted by `_priority` and stored in the `modules` dictionary.
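A rough illustration of the final sorting step described above — module names and priority values here are made up, and BBOT's real loader does considerably more:

```python
# Hypothetical loaded modules mapped to made-up _priority values (lower = earlier).
modules = {"speculate": 5, "httpx": 4, "sslcert": 3}

# Sort by priority, as step 5 of load_modules() does.
modules = dict(sorted(modules.items(), key=lambda kv: kv[1]))
print(list(modules))  # ['sslcert', 'httpx', 'speculate']
```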
""" if not self._modules_loaded: all_modules = list(set(self._scan_modules + self._output_modules + self._internal_modules)) From 64380fe5d9a497f466a9323e8485a7777fe2f2c7 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 14 Sep 2023 16:50:37 -0400 Subject: [PATCH 095/123] blacked --- bbot/scanner/scanner.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index fd7847d05e..2e7a21772d 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -474,14 +474,14 @@ async def load_modules(self): 3. Load internal modules and updates the `modules` dictionary. 4. Load output modules and updates the `modules` dictionary. 5. Sorts modules based on their `_priority` attribute. - + If any modules fail to load or their dependencies fail to install, a ScanError will be raised (unless `self.force_start` is set to True). Attributes: succeeded, failed (tuple): A tuple containing lists of modules that succeeded or failed during the dependency installation. loaded_modules, loaded_internal_modules, loaded_output_modules (dict): Dictionaries of successfully loaded modules. failed, failed_internal, failed_output (list): Lists of module names that failed to load. - + Raises: ScanError: If any module dependencies fail to install or modules fail to load, and if self.force_start is False. From 65041f5069f2f5ecd414336d09e9705584eb8a75 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 22 Sep 2023 11:51:59 -0400 Subject: [PATCH 096/123] resolve conflicts in helpers/misc.py --- bbot/core/event/helpers.py | 16 +- bbot/core/helpers/misc.py | 880 +++++++++++++++++++++++++++++++------ bbot/scanner/scanner.py | 5 +- docs/dev/helpers/misc.md | 14 +- 4 files changed, 775 insertions(+), 140 deletions(-) diff --git a/bbot/core/event/helpers.py b/bbot/core/event/helpers.py index 228be7c335..2b3164befb 100644 --- a/bbot/core/event/helpers.py +++ b/bbot/core/event/helpers.py @@ -12,7 +12,21 @@ def get_event_type(data): """ - Attempt to divine event type from data + Determines the type of event based on the given data. + + Args: + data (str): The data to be used for determining the event type. + + Returns: + str: The type of event such as "IP_ADDRESS", "IP_RANGE", or "URL_UNVERIFIED". + + Raises: + ValidationError: If the event type could not be determined. + + Notes: + - Utilizes `smart_decode_punycode` and `smart_decode` to preprocess the data. + - Makes use of `ipaddress` standard library to check for IP and network types. + - Checks against a set of predefined regular expressions stored in `event_type_regexes`. """ # IP address diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 09a7b485f9..6080ce8714 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -391,14 +391,41 @@ def url_parents(u): def tldextract(data): """ - "www.evilcorp.co.uk" --> ExtractResult(subdomain='www', domain='evilcorp', suffix='co.uk') + Extracts the subdomain, domain, and suffix from a URL string. + + Args: + data (str): The URL string to be processed. + + Returns: + ExtractResult: A named tuple containing the subdomain, domain, and suffix. + + Examples: + >>> tldextract("www.evilcorp.co.uk") + ExtractResult(subdomain='www', domain='evilcorp', suffix='co.uk') + + Notes: + - Utilizes `smart_decode` to preprocess the data. + - Makes use of the `tldextract` library for extraction. 
""" return _tldextract.extract(smart_decode(data)) def split_domain(hostname): """ - "www.internal.evilcorp.co.uk" --> ("www.internal", "evilcorp.co.uk") + Splits the hostname into its subdomain and registered domain components. + + Args: + hostname (str): The full hostname to be split. + + Returns: + tuple: A tuple containing the subdomain and registered domain. + + Examples: + >>> split_domain("www.internal.evilcorp.co.uk") + ("www.internal", "evilcorp.co.uk") + + Notes: + - Utilizes the `tldextract` function to first break down the hostname. """ parsed = tldextract(hostname) subdomain = parsed.subdomain @@ -412,8 +439,20 @@ def split_domain(hostname): def domain_stem(domain): """ - An abbreviated representation of hostname that removes the TLD - www.evilcorp.com --> www.evilcorp + Returns an abbreviated representation of the hostname by removing the TLD (Top-Level Domain). + + Args: + domain (str): The full domain name to be abbreviated. + + Returns: + str: An abbreviated domain string without the TLD. + + Examples: + >>> domain_stem("www.evilcorp.com") + "www.evilcorp" + + Notes: + - Utilizes the `tldextract` function for domain parsing. """ parsed = tldextract(str(domain)) return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".") @@ -421,7 +460,21 @@ def domain_stem(domain): def ip_network_parents(i, include_self=False): """ - "192.168.1.1" --> [192.168.1.0/31, 192.168.1.0/30 ... 128.0.0.0/1, 0.0.0.0/0] + Generates all parent IP networks for a given IP address or network, optionally including the network itself. + + Args: + i (str or ipaddress.IPv4Network/ipaddress.IPv6Network): The IP address or network to find parents for. + include_self (bool, optional): Whether to include the network itself in the result. Default is False. + + Yields: + ipaddress.IPv4Network or ipaddress.IPv6Network: Parent IP networks in descending order of prefix length. + + Examples: + >>> list(ip_network_parents("192.168.1.1")) + [ipaddress.IPv4Network('192.168.1.0/31'), ipaddress.IPv4Network('192.168.1.0/30'), ... , ipaddress.IPv4Network('0.0.0.0/0')] + + Notes: + - Utilizes Python's built-in `ipaddress` module for network operations. """ net = ipaddress.ip_network(i, strict=False) for i in range(net.prefixlen - (0 if include_self else 1), -1, -1): @@ -429,11 +482,44 @@ def ip_network_parents(i, include_self=False): def is_port(p): + """ + Checks if the given string represents a valid port number. + + Args: + p (str or int): The port number to check. + + Returns: + bool: True if the port number is valid, False otherwise. + + Examples: + >>> is_port('80') + True + >>> is_port('70000') + False + """ + p = str(p) return p and p.isdigit() and 0 <= int(p) <= 65535 def is_dns_name(d): + """ + Determines if the given string is a valid DNS name. + + Args: + d (str): The string to be checked. + + Returns: + bool: True if the string is a valid DNS name, False otherwise. + + Examples: + >>> is_dns_name('www.example.com') + True + >>> is_dns_name('localhost') + True + >>> is_dns_name('192.168.1.1') + False + """ if is_ip(d): return False d = smart_decode(d) @@ -446,9 +532,24 @@ def is_dns_name(d): def is_ip(d, version=None): """ - "192.168.1.1" --> True - "bad::c0de" --> True - "evilcorp.com" --> False + Checks if the given string or object represents a valid IP address. + + Args: + d (str or ipaddress.IPvXAddress): The IP address to check. + version (int, optional): The IP version to validate (4 or 6). Default is None. 
+
+    Returns:
+        bool: True if the string or object is a valid IP address, False otherwise.
+
+    Examples:
+        >>> is_ip('192.168.1.1')
+        True
+        >>> is_ip('bad::c0de', version=6)
+        True
+        >>> is_ip('bad::c0de', version=4)
+        False
+        >>> is_ip('evilcorp.com')
+        False
    """
    if isinstance(d, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
        if version is None or version == d.version:
@@ -464,11 +565,23 @@ def is_ip(d, version=None):

 def is_ip_type(i):
     """
-    IPv6Address('dead::beef') --> True
-    IPv4Network('192.168.1.0/24') --> True
-    "192.168.1.0/24" --> False
+    Checks if the given object is an instance of an IPv4 or IPv6 type from the ipaddress module.
+
+    Args:
+        i (ipaddress._BaseV4 or ipaddress._BaseV6): The IP object to check.
+
+    Returns:
+        bool: True if the object is an instance of ipaddress._BaseV4 or ipaddress._BaseV6, False otherwise.
+
+    Examples:
+        >>> is_ip_type(ipaddress.IPv6Address('dead::beef'))
+        True
+        >>> is_ip_type(ipaddress.IPv4Network('192.168.1.0/24'))
+        True
+        >>> is_ip_type("192.168.1.0/24")
+        False
     """
-    return hasattr(i, "is_multicast")
+    return isinstance(i, ipaddress._BaseV4) or isinstance(i, ipaddress._BaseV6)


 def make_ip_type(s):
@@ -504,12 +617,43 @@ def make_ip_type(s):

 def host_in_host(host1, host2):
+    """
+    Checks if host1 is included within host2, either as a subdomain, IP, or IP network.
+    Used for scope calculations/decisions within BBOT.
+
+    Args:
+        host1 (str or ipaddress.IPv4Address or ipaddress.IPv6Address or ipaddress.IPv4Network or ipaddress.IPv6Network):
+            The host to check for inclusion within host2.
+        host2 (str or ipaddress.IPv4Address or ipaddress.IPv6Address or ipaddress.IPv4Network or ipaddress.IPv6Network):
+            The host within which to check for the inclusion of host1.
+
+    Returns:
+        bool: True if host1 is included in host2, otherwise False.
+
+    Examples:
+        >>> host_in_host("www.evilcorp.com", "evilcorp.com")
+        True
+        >>> host_in_host("evilcorp.com", "www.evilcorp.com")
+        False
+        >>> host_in_host(ipaddress.IPv6Address('dead::beef'), ipaddress.IPv6Network('dead::/64'))
+        True
+        >>> host_in_host(ipaddress.IPv4Address('192.168.1.1'), ipaddress.IPv4Network('10.0.0.0/8'))
+        False
+
+    Notes:
+        - Very important! Used throughout BBOT for scope calculations/decisions.
+        - Works with hostnames, IPs, and IP networks.
+        - If checking an IP address/network, you MUST FIRST convert your IP into an ipaddress object (e.g. via `make_ip_type()`) before passing it to this function.
+    """

     if not host1 or not host2:
@@ -537,7 +681,17 @@ def host_in_host(host1, host2):

 def sha1(data):
     """
-    sha1("asdf").hexdigest() --> "3da541559918a808c2402bba5012f6c60b27661c"
+    Computes the SHA-1 hash of the given data.
+
+    Args:
+        data (str or dict): The data to hash. If a dictionary, it is first converted to a JSON string with sorted keys.
+
+    Returns:
+        hashlib.Hash: SHA-1 hash object of the input data.
+
+    Examples:
+        >>> sha1("asdf").hexdigest()
+        '3da541559918a808c2402bba5012f6c60b27661c'
     """
     if isinstance(data, dict):
         data = json.dumps(data, sort_keys=True)
@@ -546,9 +700,19 @@ def sha1(data):

 def smart_decode(data):
     """
-    Turn data into a string without complaining about it
-    b"asdf" --> "asdf"
-    "asdf" --> "asdf"
+    Decodes the input data to a UTF-8 string, silently ignoring errors.
+
+    Args:
+        data (str or bytes): The data to decode.
+
+    Returns:
+        str: The decoded string.
+
+    Examples:
+        >>> smart_decode(b"asdf")
+        "asdf"
+        >>> smart_decode("asdf")
+        "asdf"
     """
     if isinstance(data, bytes):
         return data.decode("utf-8", errors="ignore")
@@ -558,8 +722,19 @@ def smart_decode(data):

 def smart_encode(data):
     """
-    Turn data into bytes without complaining about it
-    "asdf" --> b"asdf"
+    Encodes the input data to bytes using UTF-8 encoding, silently ignoring errors.
+
+    Args:
+        data (str or bytes): The data to encode.
+
+    Returns:
+        bytes: The encoded bytes.
+
+    Examples:
+        >>> smart_encode("asdf")
+        b"asdf"
+        >>> smart_encode(b"asdf")
+        b"asdf"
     """
     if isinstance(data, bytes):
         return data
@@ -572,7 +747,24 @@ def smart_encode(data):

 def recursive_decode(data, max_depth=5):
     """
-    Encode double or triple-encoded strings
+    Recursively decodes doubly or triply-encoded strings to their original form.
+
+    Supports both URL-encoding and backslash-escapes (including unicode)
+
+    Args:
+        data (str): The data to decode.
+        max_depth (int, optional): Maximum recursion depth for decoding. Defaults to 5.
+
+    Returns:
+        str: The decoded string.
+
+    Examples:
+        >>> recursive_decode("Hello%20world%21")
+        "Hello world!"
+        >>> recursive_decode("Hello%20%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442")
+        "Hello Привет"
+        >>> recursive_decode("%5Cu0020%5Cu041f%5Cu0440%5Cu0438%5Cu0432%5Cu0435%5Cu0442%5Cu0021")
+        " Привет!"
     """
     # Decode newline and tab escapes
     data = backslash_regex.sub(
@@ -600,9 +792,22 @@ def recursive_decode(data, max_depth=5):

 def rand_string(length=10, digits=True):
     """
-    rand_string() --> "c4hp4i9jzx"
-    rand_string(20) --> "ap4rsdtg5iw7ey7y3oa5"
-    rand_string(30) --> "xdmyxtglqf0z3q8t46n430kesq68yu"
+    Generates a random string of specified length.
+
+    Args:
+        length (int, optional): The length of the random string. Defaults to 10.
+        digits (bool, optional): Whether to include digits in the string. Defaults to True.
+
+    Returns:
+        str: A random string of the specified length.
+
+    Examples:
+        >>> rand_string()
+        'c4hp4i9jzx'
+        >>> rand_string(20)
+        'ap4rsdtg5iw7ey7y3oa5'
+        >>> rand_string(30, digits=False)
+        'xdmyxtglqfzqktngkesyulwbfrihva'
     """
     pool = rand_pool
     if digits:
@@ -611,6 +816,22 @@ def rand_string(length=10, digits=True):

 def extract_params_json(json_data):
+    """
+    Extracts keys from a JSON object and returns them as a set. Used by the `paramminer_headers` module.
+
+    Args:
+        json_data (str): JSON-formatted string containing key-value pairs.
+
+    Returns:
+        set: A set containing the keys present in the JSON object.
+
+    Raises:
+        Logs a message if JSONDecodeError occurs.
+
+    Examples:
+        >>> extract_params_json('{"a": 1, "b": {"c": 2}}')
+        {'a', 'b', 'c'}
+    """
     try:
         data = json.loads(json_data)
     except json.JSONDecodeError:
@@ -636,6 +857,22 @@ def extract_params_json(json_data):

 def extract_params_xml(xml_data):
+    """
+    Extracts tags from an XML object and returns them as a set.
+
+    Args:
+        xml_data (str): XML-formatted string containing elements.
+
+    Returns:
+        set: A set containing the tags present in the XML object.
+
+    Raises:
+        Logs a message if ParseError occurs.
+
+    Examples:
+        >>> extract_params_xml('<root><child1></child1><child2></child2></root>')
+        {'child1', 'child2', 'root'}
+    """
     try:
         root = ET.fromstring(xml_data)
     except ET.ParseError:
@@ -654,6 +891,31 @@ def extract_params_xml(xml_data):

 def extract_params_html(html_data):
+    """
+    Extracts parameters from an HTML object, yielding them one at a time.
+
+    Args:
+        html_data (str): HTML-formatted string.
+
+    Yields:
+        str: A string containing the parameter found in HTML object.
+
+    Examples:
+        >>> html_data = '''
+        ... <html>
+        ... <body>
+        ...     <form action="/submit" method="post"><input type="text" name="user"></form>
+        ...     <a href="https://www.example.com?param2=value2">Click Me</a>
+        ...     <a href="https://www.example.com?param3=value3"></a>
+        ... </body>
+        ... </html>
+        ... '''
+        >>> list(extract_params_html(html_data))
+        ['user', 'param2', 'param3']
+    """
     input_tag = bbot_regexes.input_tag_regex.findall(html_data)

     for i in input_tag:
@@ -683,10 +945,27 @@ def extract_params_html(html_data):

 def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=100, word_regexes=None):
+    """Intelligently extracts words from given data.
+
+    This function uses regular expressions and optionally wordninja to extract words
+    from a given text string. Thanks to wordninja it can handle concatenated words intelligently.
+
+    Args:
+        data (str): The data from which words are to be extracted.
+        acronyms (bool, optional): Whether to include acronyms. Defaults to True.
+        wordninja (bool, optional): Whether to use the wordninja library to split concatenated words. Defaults to True.
+        model (object, optional): A custom wordninja model for special types of data such as DNS names.
+        max_length (int, optional): Maximum length for a word to be included. Defaults to 100.
+        word_regexes (list, optional): A list of compiled regular expression objects for word extraction. Defaults to None.
+
+    Returns:
+        set: A set of extracted words.
+
+    Examples:
+        >>> extract_words('blacklanternsecurity')
+        {'black', 'lantern', 'security', 'bls', 'blacklanternsecurity'}
     """
-    Intelligently extract words from given data
-    Returns set() of extracted words
-    """
+
     if word_regexes is None:
         word_regexes = bbot_regexes.word_regexes
     words = set()
@@ -706,6 +985,8 @@ def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=10
             subwords = model.split(word)
             for subword in subwords:
                 words.add(subword)
+    # this section generates compound words
+    # it is interesting but currently disabled because the quality of its output doesn't quite justify its quantity
     # blacklanternsecurity --> ['black', 'lantern', 'security', 'blacklantern', 'lanternsecurity']
     # for s, e in combinations(range(len(subwords) + 1), 2):
     #     if e - s <= max_slice_length:
@@ -720,11 +1001,25 @@ def extract_words(data, acronyms=True, wordninja=True, model=None, max_length=10

 def closest_match(s, choices, n=1, cutoff=0.0):
-    """
-    Given a string and a list of choices, returns the best match
+    """Finds the closest matching strings from a list of choices based on a given string.
+
+    This function uses the difflib library to find the closest matches to a given string `s` from a list of `choices`.
+    It can return either the single best match or a list of the top `n` best matches.

-    closest_match("asdf", ["asd", "fds"]) --> "asd"
-    closest_match("asdf", ["asd", "fds", "asdff"], n=3) --> ["asd", "asdff", "fds"]
+    Args:
+        s (str): The string for which to find the closest match.
+        choices (list): A list of strings to compare against.
+        n (int, optional): The number of best matches to return. Defaults to 1.
+        cutoff (float, optional): A float value that defines the similarity threshold. Strings with similarity below this value are not considered. Defaults to 0.0.
+
+    Returns:
+        str or list: Either the closest matching string or a list of the `n` closest matching strings.
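Since `closest_match()` is a thin wrapper around difflib, the underlying standard-library call can be exercised on its own; this reproduces the ordering shown in the docstring below:

```python
import difflib

# With cutoff=0.0, every choice is returned, best match first.
matches = difflib.get_close_matches("asdf", ["asd", "fds", "asdff"], n=3, cutoff=0.0)
print(matches)  # ['asdff', 'asd', 'fds']
```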
+ + Examples: + >>> closest_match("asdf", ["asd", "fds"]) + 'asd' + >>> closest_match("asdf", ["asd", "fds", "asdff"], n=3) + ['asdff', 'asd', 'fds'] """ matches = difflib.get_close_matches(s, choices, n=n, cutoff=cutoff) if not choices or not matches: @@ -735,8 +1030,21 @@ def closest_match(s, choices, n=1, cutoff=0.0): def match_and_exit(s, choices, msg=None, loglevel="HUGEWARNING", exitcode=2): - """ - Return the closest match, warn, and exit + """Finds the closest match from a list of choices for a given string, logs a warning, and exits the program. + + This function is particularly useful for CLI applications where you want to validate flags or modules. + + Args: + s (str): The string for which to find the closest match. + choices (list): A list of strings to compare against. + msg (str, optional): Additional message to prepend in the warning message. Defaults to None. + loglevel (str, optional): The log level to use for the warning message. Defaults to "HUGEWARNING". + exitcode (int, optional): The exit code to use when exiting the program. Defaults to 2. + + Examples: + >>> match_and_exit("some_module", ["some_mod", "some_other_mod"], msg="module") + # Output: Could not find module "some_module". Did you mean "some_mod"? + # Exits with code 2 """ if msg is None: msg = "" @@ -769,9 +1077,22 @@ def kill_children(parent_pid=None, sig=signal.SIGTERM): def str_or_file(s): - """ - "file.txt" --> ["file_line1", "file_line2", "file_line3"] - "not_a_file" --> ["not_a_file"] + """Reads a string or file and yields its content line-by-line. + + This function tries to open the given string `s` as a file and yields its lines. + If it fails to open `s` as a file, it treats `s` as a regular string and yields it as is. + + Args: + s (str): The string or file path to read. + + Yields: + str: Either lines from the file or the original string. + + Examples: + >>> list(str_or_file("file.txt")) + ['file_line1', 'file_line2', 'file_line3'] + >>> list(str_or_file("not_a_file")) + ['not_a_file'] """ try: with open(s, errors="ignore") as f: @@ -782,13 +1103,26 @@ def str_or_file(s): def chain_lists(l, try_files=False, msg=None, remove_blank=True): - """ - Chain together list, splitting entries on comma - - Optionally try to open entries as files and add their contents to the list - - Used for parsing a list of arguments that may include space and/or comma-separated values - - ["a", "b,c,d"] --> ["a", "b", "c", "d"] - - try_files=True: - - ["a,file.txt", "c,d"] --> ["a", "f_line1", "f_line2", "f_line3", "c", "d"] + """Chains together list elements, allowing for entries separated by commas. + + This function takes a list `l` and flattens it by splitting its entries on commas. + It also allows you to optionally open entries as files and add their contents to the list. + + Args: + l (list): The list of strings to chain together. + try_files (bool, optional): Whether to try to open entries as files. Defaults to False. + msg (str, optional): An optional message to log when reading from a file. Defaults to None. + remove_blank (bool, optional): Whether to remove blank entries from the list. Defaults to True. + + Returns: + list: The list of chained elements. 
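A simplified standalone version of the comma-splitting and deduplication behavior documented above (a sketch only — it omits the optional file-reading branch):

```python
def chain_lists_simple(items):
    final = {}  # a dict preserves insertion order while deduplicating
    for entry in items:
        for part in entry.split(","):
            part = part.strip()
            if part:  # remove_blank behavior
                final[part] = None
    return list(final)

assert chain_lists_simple(["a", "b,c,d", "a"]) == ["a", "b", "c", "d"]
```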
+
+    Examples:
+        >>> chain_lists(["a", "b,c,d"])
+        ['a', 'b', 'c', 'd']
+
+        >>> chain_lists(["a,file.txt", "c,d"], try_files=True)
+        ['a', 'f_line1', 'f_line2', 'f_line3', 'c', 'd']
     """
     final_list = dict()
     for entry in l:
@@ -811,8 +1145,21 @@ def chain_lists(l, try_files=False, msg=None, remove_blank=True):

 def list_files(directory, filter=lambda x: True):
-    """
-    "/tmp/test" --> ["file1.txt", "file2.txt"]
+    """Lists files in a given directory that meet a specified filter condition.
+
+    Args:
+        directory (str): The directory where to list files.
+        filter (callable, optional): A function to filter the files. Defaults to a lambda function that returns True for all files.
+
+    Yields:
+        Path: A Path object for each file that meets the filter condition.
+
+    Examples:
+        >>> list(list_files("/tmp/test"))
+        [Path('/tmp/test/file1.py'), Path('/tmp/test/file2.txt')]
+
+        >>> list(list_files("/tmp/test", filter=lambda f: f.suffix == ".py"))
+        [Path('/tmp/test/file1.py')]
     """
     directory = Path(directory).resolve()
     if directory.is_dir():
@@ -822,20 +1169,48 @@ def list_files(directory, filter=lambda x: True):

 def rm_at_exit(path):
+    """Registers a file to be automatically deleted when the program exits.
+
+    Args:
+        path (str or Path): The path to the file to be deleted upon program exit.
+
+    Examples:
+        >>> rm_at_exit("/tmp/test/file1.txt")
     """
-    Removes a file automatically when BBOT exits
-    """
-    atexit.register(_rm_at_exit, path)
+    atexit.register(delete_file, path)
+
+
+def delete_file(path):
+    """Deletes a file at the given path.
+
+    Args:
+        path (str or Path): The path to the file to be deleted.
+
+    Note:
+        This function suppresses all exceptions to ensure that the program continues running even if the file could not be deleted.

-def _rm_at_exit(path):
+    Examples:
+        >>> delete_file("/tmp/test/file1.txt")
+    """
     with suppress(Exception):
         Path(path).unlink(missing_ok=True)


 def read_file(filename):
+    """Reads a file line by line and yields each line without line breaks.
+
+    Args:
+        filename (str or Path): The path to the file to read.
+
+    Yields:
+        str: A line from the file without the trailing line break.
+
+    Examples:
+        >>> for line in read_file("/tmp/file.txt"):
+        ...     print(line)
+        file_line1
+        file_line2
+        file_line3
     """
-    "/tmp/file.txt" --> ["file_line1", "file_line2", "file_line3"]
-    """
     with open(filename, errors="ignore") as f:
         for line in f:
@@ -843,10 +1218,24 @@ def read_file(filename):

 def gen_numbers(n, padding=2):
+    """Generates numbers with variable padding and returns them as a set of strings.
+
+    Args:
+        n (int): The upper limit of numbers to generate, exclusive.
+        padding (int, optional): The maximum number of digits to pad the numbers with. Defaults to 2.
+
+    Returns:
+        set: A set of string representations of numbers with varying degrees of padding.
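The padding behavior can be reproduced in a few lines of standalone Python (a sketch, not necessarily BBOT's exact implementation):

```python
def gen_numbers_sketch(n, padding=2):
    results = set()
    for i in range(n):
        for width in range(1, padding + 1):
            results.add(str(i).zfill(width))  # e.g. 4 -> "4", "04"
    return results

assert gen_numbers_sketch(5, padding=1) == {"0", "1", "2", "3", "4"}
assert "04" in gen_numbers_sketch(5)  # default padding=2 adds zero-padded forms
```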
+ + Examples: + >>> gen_numbers(5) + {'0', '00', '01', '02', '03', '04', '1', '2', '3', '4'} + + >>> gen_numbers(3, padding=3) + {'0', '00', '000', '001', '002', '01', '02', '1', '2'} + + >>> gen_numbers(5, padding=1) + {'0', '1', '2', '3', '4'} """ results = set() for i in range(n): @@ -856,12 +1245,30 @@ def gen_numbers(n, padding=2): def make_netloc(host, port): - """ - ("192.168.1.1", None) --> "192.168.1.1" - ("192.168.1.1", 443) --> "192.168.1.1:443" - ("evilcorp.com", 80) --> "evilcorp.com:80" - ("dead::beef", None) --> "[dead::beef]" - ("dead::beef", 443) --> "[dead::beef]:443" + """Constructs a network location string from a given host and port. + + Args: + host (str): The hostname or IP address. + port (int, optional): The port number. If None, the port is omitted. + + Returns: + str: A network location string in the form 'host' or 'host:port'. + + Examples: + >>> make_netloc("192.168.1.1", None) + "192.168.1.1" + + >>> make_netloc("192.168.1.1", 443) + "192.168.1.1:443" + + >>> make_netloc("evilcorp.com", 80) + "evilcorp.com:80" + + >>> make_netloc("dead::beef", None) + "[dead::beef]" + + >>> make_netloc("dead::beef", 443) + "[dead::beef]:443" """ if is_ip(host, version=6): host = f"[{host}]" @@ -871,8 +1278,17 @@ def make_netloc(host, port): def which(*executables): - """ - "python" --> "/usr/bin/python" + """Finds the full path of the first available executable from a list of executables. + + Args: + *executables (str): One or more executable names to search for. + + Returns: + str: The full path of the first available executable, or None if none are found. + + Examples: + >>> which("python", "python3") + "/usr/bin/python" """ for e in executables: location = shutil.which(e) @@ -881,9 +1297,19 @@ def which(*executables): def search_dict_by_key(key, d): - """ - Search a dictionary by key name - Generator, yields all values with matching keys + """Search a nested dictionary or list of dictionaries by a key and yield all matching values. + + Args: + key (str): The key to search for. + d (Union[dict, list]): The dictionary or list of dictionaries to search. + + Yields: + Any: Yields all values that match the provided key. + + Examples: + >>> d = {'a': 1, 'b': {'c': 2, 'a': 3}, 'd': [{'a': 4}, {'e': 5}]} + >>> list(search_dict_by_key('a', d)) + [1, 3, 4] """ if isinstance(d, dict): if key in d: @@ -896,10 +1322,18 @@ def search_dict_by_key(key, d): def search_format_dict(d, **kwargs): - """ - Recursively .format() string values in dictionary values - search_format_dict({"test": "#{name} is awesome"}, name="keanu") - --> {"test": "keanu is awesome"} + """Recursively format string values in a dictionary or list using the provided keyword arguments. + + Args: + d (Union[dict, list, str]): The dictionary, list, or string to format. + **kwargs: Arbitrary keyword arguments used for string formatting. + + Returns: + Union[dict, list, str]: The formatted dictionary, list, or string. + + Examples: + >>> search_format_dict({"test": "#{name} is awesome"}, name="keanu") + {"test": "keanu is awesome"} """ if isinstance(d, dict): return {k: search_format_dict(v, **kwargs) for k, v in d.items()} @@ -913,21 +1347,30 @@ def search_format_dict(d, **kwargs): def search_dict_values(d, *regexes): - """ - Recursively search a dictionary's values based on regexes + """Recursively search a dictionary's values based on provided regex patterns. 
- dict_to_search = { - "key1": { - "key2": [ - { - "key3": "A URL: https://www.evilcorp.com" - } - ] - } - }) + Args: + d (Union[dict, list, str]): The dictionary, list, or string to search. + *regexes: Arbitrary number of compiled regex patterns. + + Returns: + Generator: Yields matching values based on the provided regex patterns. - search_dict_values(dict_to_search, url_regexes) --> "https://www.evilcorp.com" + Examples: + >>> dict_to_search = { + ... "key1": { + ... "key2": [ + ... { + ... "key3": "A URL: https://www.evilcorp.com" + ... } + ... ] + ... } + ... } + >>> url_regexes = re.compile(r'https?://[^\s<>"]+|www\.[^\s<>"]+') + >>> list(search_dict_values(dict_to_search, url_regexes)) + ["https://www.evilcorp.com"] """ + results = set() if isinstance(d, str): for r in regexes: @@ -945,11 +1388,25 @@ def search_dict_values(d, *regexes): yield from search_dict_values(v, *regexes) -def filter_dict(d, *key_names, fuzzy=False, invert=False, exclude_keys=None, prev_key=None): +def filter_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): """ - Recursively filter a dictionary based on key names - filter_dict({"key1": "test", "key2": "asdf"}, "key2") - --> {"key2": "asdf"} + Recursively filter a dictionary based on key names. + + Args: + d (dict): The input dictionary. + *key_names: Names of keys to filter for. + fuzzy (bool): Whether to perform fuzzy matching on keys. + exclude_keys (list, None): List of keys to be excluded from the final dict. + _prev_key (str, None): For internal recursive use; the previous key in the hierarchy. + + Returns: + dict: A dictionary containing only the keys specified in key_names. + + Examples: + >>> filter_dict({"key1": "test", "key2": "asdf"}, "key2") + {"key2": "asdf"} + >>> filter_dict({"key1": "test", "key2": {"key3": "asdf"}}, "key1", "key3", exclude_keys="key2") + {'key1': 'test'} """ if exclude_keys is None: exclude_keys = [] @@ -959,16 +1416,31 @@ def filter_dict(d, *key_names, fuzzy=False, invert=False, exclude_keys=None, pre if isinstance(d, dict): for key in d: if key in key_names or (fuzzy and any(k in key for k in key_names)): - if not prev_key in exclude_keys: + if not any(k in exclude_keys for k in [key, _prev_key]): ret[key] = copy.deepcopy(d[key]) elif isinstance(d[key], list) or isinstance(d[key], dict): - child = filter_dict(d[key], *key_names, fuzzy=fuzzy, prev_key=key, exclude_keys=exclude_keys) + child = filter_dict(d[key], *key_names, fuzzy=fuzzy, _prev_key=key, exclude_keys=exclude_keys) if child: ret[key] = child return ret -def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, prev_key=None): +def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, _prev_key=None): + """ + Recursively clean unwanted keys from a dictionary. + Useful for removing secrets from a config. + + Args: + d (dict): The input dictionary. + *key_names: Names of keys to remove. + fuzzy (bool): Whether to perform fuzzy matching on keys. + exclude_keys (list, None): List of keys to be excluded from removal. + _prev_key (str, None): For internal recursive use; the previous key in the hierarchy. + + Returns: + dict: A dictionary cleaned of the keys specified in key_names. 
+ + """ if exclude_keys is None: exclude_keys = [] if isinstance(exclude_keys, str): @@ -977,26 +1449,47 @@ def clean_dict(d, *key_names, fuzzy=False, exclude_keys=None, prev_key=None): if isinstance(d, dict): for key, val in list(d.items()): if key in key_names or (fuzzy and any(k in key for k in key_names)): - if prev_key not in exclude_keys: + if _prev_key not in exclude_keys: d.pop(key) else: - d[key] = clean_dict(val, *key_names, fuzzy=fuzzy, prev_key=key, exclude_keys=exclude_keys) + d[key] = clean_dict(val, *key_names, fuzzy=fuzzy, _prev_key=key, exclude_keys=exclude_keys) return d def grouper(iterable, n): """ - >>> list(grouper('ABCDEFG', 3)) - [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] + Grouper groups an iterable into chunks of a given size. + + Args: + iterable (iterable): The iterable to be chunked. + n (int): The size of each chunk. + + Returns: + iterator: An iterator that produces lists of elements from the original iterable, each of length `n` or less. + + Examples: + >>> list(grouper('ABCDEFG', 3)) + [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']] """ + iterable = iter(iterable) return iter(lambda: list(islice(iterable, n)), []) def split_list(alist, wanted_parts=2): """ - >>> split_list([1,2,3,4,5]) - [[1, 2], [3, 4, 5]] + Splits a list into a specified number of approximately equal parts. + + Args: + alist (list): The list to be split. + wanted_parts (int): The number of parts to split the list into. + + Returns: + list: A list of lists, each containing a portion of the original list. + + Examples: + >>> split_list([1, 2, 3, 4, 5]) + [[1, 2], [3, 4, 5]] """ length = len(alist) return [alist[i * length // wanted_parts : (i + 1) * length // wanted_parts] for i in range(wanted_parts)] @@ -1004,7 +1497,24 @@ def split_list(alist, wanted_parts=2): def mkdir(path, check_writable=True, raise_error=True): """ - Create a directory and ensure that it's writable + Creates a directory and optionally checks if it's writable. + + Args: + path (str or Path): The directory to create. + check_writable (bool, optional): Whether to check if the directory is writable. Default is True. + raise_error (bool, optional): Whether to raise an error if the directory creation fails. Default is True. + + Returns: + bool: True if the directory is successfully created (and writable, if check_writable=True); otherwise False. + + Raises: + DirectoryCreationError: Raised if the directory cannot be created and `raise_error=True`. + + Examples: + >>> mkdir("/tmp/new_dir") + True + >>> mkdir("/restricted_dir", check_writable=False, raise_error=False) + False """ path = Path(path).resolve() touchfile = path / f".{rand_string()}" @@ -1023,8 +1533,20 @@ def mkdir(path, check_writable=True, raise_error=True): def make_date(d=None, microseconds=False): """ - make_date() --> "20220707_1325_50" - make_date(microseconds=True) --> "20220707_1330_35167617" + Generates a string representation of the current date and time, with optional microsecond precision. + + Args: + d (datetime, optional): A datetime object to convert. Defaults to the current date and time. + microseconds (bool, optional): Whether to include microseconds. Defaults to False. + + Returns: + str: A string representation of the date and time, formatted as YYYYMMDD_HHMM_SS or YYYYMMDD_HHMM_SSFFFFFF if microseconds are included. 
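The two documented formats map directly onto `strftime`; a quick standalone check (the microseconds variant is inferred from the example output, which appends `%f`):

```python
from datetime import datetime

d = datetime(2022, 7, 7, 13, 25, 50)
print(d.strftime("%Y%m%d_%H%M_%S"))    # 20220707_1325_50
print(d.strftime("%Y%m%d_%H%M_%S%f"))  # same format with microseconds appended
```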
+ + Examples: + >>> make_date() + "20220707_1325_50" + >>> make_date(microseconds=True) + "20220707_1330_35167617" """ f = "%Y%m%d_%H%M_%S" if microseconds: @@ -1041,9 +1563,21 @@ def error_and_exit(msg): def get_file_extension(s): """ - https://evilcorp.com/api/test.php --> "php" - /etc/test.conf --> "conf" - /etc/passwd --> "" + Extracts the file extension from a given string representing a URL or file path. + + Args: + s (str): The string from which to extract the file extension. + + Returns: + str: The file extension, or an empty string if no extension is found. + + Examples: + >>> get_file_extension("https://evilcorp.com/api/test.php") + "php" + >>> get_file_extension("/etc/test.conf") + "conf" + >>> get_file_extension("/etc/passwd") + "" """ s = str(s).lower().strip() rightmost_section = s.rsplit("/", 1)[-1] @@ -1055,13 +1589,23 @@ def get_file_extension(s): def backup_file(filename, max_backups=10): """ - rename a file as a backup + Renames a file by appending an iteration number as a backup. Recursively renames + files up to a specified maximum number of backups. + + Args: + filename (str or pathlib.Path): The file to backup. + max_backups (int, optional): The maximum number of backups to keep. Defaults to 10. - recursively renames files up to max_backups + Returns: + pathlib.Path: The new backup filepath. - backup_file("/tmp/test.txt") --> "/tmp/test.0.txt" - backup_file("/tmp/test.0.txt") --> "/tmp/test.1.txt" - backup_file("/tmp/test.1.txt") --> "/tmp/test.2.txt" + Examples: + >>> backup_file("/tmp/test.txt") + PosixPath("/tmp/test.0.txt") + >>> backup_file("/tmp/test.0.txt") + PosixPath("/tmp/test.1.txt") + >>> backup_file("/tmp/test.1.txt") + PosixPath("/tmp/test.2.txt") """ filename = Path(filename).resolve() suffixes = [s.strip(".") for s in filename.suffixes] @@ -1079,11 +1623,21 @@ def backup_file(filename, max_backups=10): def latest_mtime(d): - """ - Given a directory, return the latest modified time of any contained file or directory (recursive) - Useful for sorting directories by modified time for the purpose of cleanup, etc. + """Get the latest modified time of any file or sub-directory in a given directory. + + This function takes a directory path as an argument and returns the latest modified time + of any contained file or directory, recursively. It's useful for sorting directories by + modified time for cleanup or other purposes. + + Args: + d (str or Path): The directory path to search for the latest modified time. + + Returns: + float: The latest modified time in Unix timestamp format. - latest_mtime("~/.bbot/scans/mushy_susan") --> 1659016928.2848816 + Examples: + >>> latest_mtime("~/.bbot/scans/mushy_susan") + 1659016928.2848816 """ d = Path(d).resolve() mtimes = [d.lstat().st_mtime] @@ -1097,6 +1651,21 @@ def latest_mtime(d): def filesize(f): + """Get the file size of a given file. + + This function takes a file path as an argument and returns its size in bytes. If the path + does not point to a file, the function returns 0. + + Args: + f (str or Path): The file path for which to get the size. + + Returns: + int: The size of the file in bytes, or 0 if the path does not point to a file. 
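Both `filesize()` and `latest_mtime()` boil down to `stat()` calls on `pathlib.Path` objects; outside BBOT the raw calls look like this:

```python
from pathlib import Path

f = Path("/etc/hostname")  # any existing file
size = f.stat().st_size if f.is_file() else 0
mtime = f.lstat().st_mtime  # modification time as a Unix timestamp
print(size, mtime)
```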
+ + Examples: + >>> filesize("/path/to/file.txt") + 1024 + """ f = Path(f) if f.is_file(): return f.stat().st_size @@ -1104,11 +1673,23 @@ def filesize(f): def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, raise_error=False): - """ - Given a directory "d", measure the number of subdirectories and files (matching "filter") - And remove (rm -r) the oldest ones past the threshold of "keep" + """Clean up old files and directories within a given directory based on various filtering and sorting options. + + This function removes the oldest files and directories in the provided directory 'd' that exceed a specified + threshold ('keep'). The items to be deleted can be filtered using a lambda function 'filter', and they are + sorted by a key function, defaulting to latest modification time. - clean_old_dirs("~/.bbot/scans", filter=lambda x: x.is_dir() and scan_name_regex.match(x.name)) + Args: + d (str or Path): The directory path to clean up. + keep (int): The number of items to keep. Ones beyond this count will be removed. + filter (Callable): A lambda function for filtering which files or directories to consider. + Defaults to a lambda function that returns True for all. + key (Callable): A function to sort the files and directories. Defaults to latest modification time. + reverse (bool): Whether to reverse the order of sorted items before removing. Defaults to True. + raise_error (bool): Whether to raise an error if directory deletion fails. Defaults to False. + + Examples: + >>> clean_old("~/.bbot/scans", filter=lambda x: x.is_dir() and scan_name_regex.match(x.name)) """ d = Path(d) if not d.is_dir(): @@ -1129,6 +1710,20 @@ def clean_old(d, keep=10, filter=lambda x: True, key=latest_mtime, reverse=True, def extract_emails(s): """ Extract email addresses from a body of text + + This function takes in a string and yields all email addresses found in it. + The emails are converted to lower case before yielding. It utilizes + regular expressions for email pattern matching. + + Args: + s (str): The input string from which to extract email addresses. + + Yields: + str: Yields email addresses found in the input string, in lower case. + + Examples: + >>> list(extract_emails("Contact us at info@evilcorp.com and support@evilcorp.com")) + ['info@evilcorp.com', 'support@evilcorp.com'] """ for email in bbot_regexes.email_regex.findall(smart_decode(s)): yield email.lower() @@ -1222,8 +1817,17 @@ def smart_decode_punycode(text: str) -> str: def can_sudo_without_password(): - """ - Return True if the current user can sudo without a password + """Check if the current user has passwordless sudo access. + + This function checks whether the current user can use sudo without entering a password. + It runs a command with sudo and checks the return code to determine this. + + Returns: + bool: True if the current user can use sudo without a password, False otherwise. + + Examples: + >>> can_sudo_without_password() + True """ if os.geteuid() != 0: env = dict(os.environ) @@ -1237,8 +1841,20 @@ def can_sudo_without_password(): def verify_sudo_password(sudo_pass): - """ - Return True if the sudo password is correct + """Verify if the given sudo password is correct. + + This function checks whether the sudo password provided is valid for the current user. + It runs a command with sudo, feeding in the password via stdin, and checks the return code. + + Args: + sudo_pass (str): The sudo password to verify. + + Returns: + bool: True if the sudo password is correct, False otherwise. 
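The general technique here is feeding the password on stdin with `sudo -S` and checking the exit code. A hedged sketch of that pattern — BBOT's exact flags may differ:

```python
import subprocess

def sudo_password_ok(password: str) -> bool:
    # -S reads the password from stdin; -k drops any cached credentials first,
    # so the check cannot be satisfied by a previous sudo session.
    result = subprocess.run(
        ["sudo", "-S", "-k", "true"],
        input=(password + "\n").encode(),
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return result.returncode == 0
```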
+ + Examples: + >>> verify_sudo_password("mysecretpassword") + True """ try: sp.run( @@ -1254,16 +1870,30 @@ def verify_sudo_password(sudo_pass): def make_table(*args, **kwargs): - """ - make_table([["row1", "row1"], ["row2", "row2"]], ["header1", "header2"]) --> + """Generate a formatted table from the given rows and headers. + + This function uses the `tabulate` package to generate a table with formatting options. + It can accept various input formats and table styles, which can be customized using optional arguments. - +-----------+-----------+ - | header1 | header2 | - +===========+===========+ - | row1 | row1 | - +-----------+-----------+ - | row2 | row2 | - +-----------+-----------+ + Args: + *args: Positional arguments to be passed to `tabulate.tabulate`. + **kwargs: Keyword arguments to customize table formatting. + - tablefmt (str, optional): Table format. Default is 'grid'. + - disable_numparse (bool, optional): Disable automatic number parsing. Default is True. + - maxcolwidths (int, optional): Maximum column width. Default is 40. + + Returns: + str: A string representing the formatted table. + + Examples: + >>> print(make_table([["row1", "row1"], ["row2", "row2"]], ["header1", "header2"])) + +-----------+-----------+ + | header1 | header2 | + +===========+===========+ + | row1 | row1 | + +-----------+-----------+ + | row2 | row2 | + +-----------+-----------+ """ # fix IndexError: list index out of range if args and not args[0]: diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 2e7a21772d..ecc4d31e44 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -836,11 +836,10 @@ def dns_regexes(self): For the purpose of extracting hostnames Examples: - Extract hostnames from text: >>> for regex in scan.dns_regexes: - >>> for match in regex.finditer(response.text): - >>> hostname = match.group().lower() + ... for match in regex.finditer(response.text): + ... hostname = match.group().lower() """ if self._dns_regexes is None: dns_targets = set(t.host for t in self.target if t.host and isinstance(t.host, str)) diff --git a/docs/dev/helpers/misc.md b/docs/dev/helpers/misc.md index 5a390a520e..3a95dc0d94 100644 --- a/docs/dev/helpers/misc.md +++ b/docs/dev/helpers/misc.md @@ -2,14 +2,6 @@ These are miscellaneous helpers, used throughout BBOT and its modules for simple tasks such as parsing domains, ports, urls, etc. 
-
-::: bbot.core.helpers.misc.is_domain
-::: bbot.core.helpers.misc.is_subdomain
-::: bbot.core.helpers.misc.is_ptr
-::: bbot.core.helpers.misc.is_uri
-::: bbot.core.helpers.misc.is_url
-::: bbot.core.helpers.misc.parent_domain
-::: bbot.core.helpers.misc.domain_parents
-::: bbot.core.helpers.misc.parent_url
-::: bbot.core.helpers.misc.url_parents
-
\ No newline at end of file
+::: bbot.core.helpers.misc
+    options:
+      show_root_heading: false

From 6c51348d1534d8b36696d11f5f11748460f4d9f1 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Fri, 15 Sep 2023 14:29:53 -0400
Subject: [PATCH 097/123] finished documenting misc helpers

---
 bbot/core/helpers/misc.py             | 399 +++++++++++++++++++++++---
 bbot/core/helpers/web.py              |   3 +-
 bbot/test/test_step_1/test_helpers.py |   2 +-
 3 files changed, 365 insertions(+), 39 deletions(-)

diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py
index 6080ce8714..a23d2f4b2a 100644
--- a/bbot/core/helpers/misc.py
+++ b/bbot/core/helpers/misc.py
@@ -1914,8 +1914,25 @@ def make_table(*args, **kwargs):
 
 
 def human_timedelta(d):
-    """
-    Format a TimeDelta object in human-readable form
+    """Convert a TimeDelta object into a human-readable string.
+
+    This function takes a datetime.timedelta object and converts it into a string format that
+    is easier to read and understand.
+
+    Args:
+        d (datetime.timedelta): The TimeDelta object to convert.
+
+    Returns:
+        str: A string representation of the TimeDelta object in human-readable form.
+
+    Examples:
+        >>> from datetime import datetime, timedelta
+        >>>
+        >>> start_time = datetime.now()
+        >>> end_time = start_time + timedelta(hours=2, minutes=30, seconds=15)
+        >>> elapsed_time = end_time - start_time
+        >>> human_timedelta(elapsed_time)
+        '2 hours, 30 minutes, 15 seconds'
     """
     hours, remainder = divmod(d.seconds, 3600)
     minutes, seconds = divmod(remainder, 60)
@@ -1933,9 +1950,21 @@ def human_timedelta(d):
 
 
 def bytes_to_human(_bytes):
-    """
-    Converts bytes to human-readable filesize
-    bytes_to_human(1234129384) --> "1.15GB"
+    """Convert a bytes size to a human-readable string.
+
+    This function converts a numeric bytes value into a human-readable string format, complete
+    with the appropriate unit symbol (B, KB, MB, GB, etc.).
+
+    Args:
+        _bytes (int): The number of bytes to convert.
+
+    Returns:
+        str: A string representing the number of bytes in a more readable format, rounded to two
+        decimal places.
+
+    Examples:
+        >>> bytes_to_human(1234129384)
+        '1.15GB'
     """
     sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]
    units = {}
@@ -1956,9 +1985,23 @@ def bytes_to_human(_bytes):
 
 
 def human_to_bytes(filesize):
-    """
-    Converts human-readable filesize to bytes
-    human_to_bytes("23.23gb") --> 24943022571
+    """Convert a human-readable file size string to its bytes equivalent.
+
+    This function takes a human-readable file size string, such as "2.5GB", and converts it
+    to its equivalent number of bytes.
+
+    Args:
+        filesize (str or int): The human-readable file size string or integer bytes value to convert.
+
+    Returns:
+        int: The number of bytes equivalent to the input human-readable file size.
+
+    Raises:
+        ValueError: If the input string cannot be converted to bytes.
+
+    Examples:
+        >>> human_to_bytes("23.23gb")
+        24943022571
     """
     if isinstance(filesize, int):
         return filesize
@@ -1982,8 +2025,17 @@ def human_to_bytes(filesize):
 
 
 def cpu_architecture():
-    """
-    Returns the CPU architecture, e.g. "amd64, "armv7", "arm64", etc.
+    """Return the CPU architecture of the current system.
+ + This function fetches and returns the architecture type of the CPU where the code is being executed. + It maps common identifiers like "x86_64" to more general types like "amd64". + + Returns: + str: A string representing the CPU architecture, such as "amd64", "armv7", or "arm64". + + Examples: + >>> cpu_architecture() + 'amd64' """ uname = platform.uname() arch = uname.machine.lower() @@ -1995,15 +2047,33 @@ def cpu_architecture(): def os_platform(): - """ - Returns the OS platform, e.g. "linux", "darwin", "windows", etc. + """Return the OS platform of the current system. + + This function fetches and returns the OS type where the code is being executed. + It converts the platform identifier to lowercase. + + Returns: + str: A string representing the OS platform, such as "linux", "darwin", or "windows". + + Examples: + >>> os_platform() + 'linux' """ return platform.system().lower() def os_platform_friendly(): - """ - Returns the OS platform in a more human-friendly format, because apple is indecisive + """Return a human-friendly OS platform string, suitable for golang release binaries. + + This function fetches the OS platform and modifies it to a more human-readable format if necessary. + Specifically, it changes "darwin" to "macOS". + + Returns: + str: A string representing the human-friendly OS platform, such as "macOS", "linux", or "windows". + + Examples: + >>> os_platform_friendly() + 'macOS' """ p = os_platform() if p == "darwin": @@ -2015,44 +2085,91 @@ def os_platform_friendly(): def tagify(s, maxlen=None): - """ - Sanitize a string into a tag-friendly format + """Sanitize a string into a tag-friendly format. + + Converts a given string to lowercase and replaces all characters not matching + [a-z0-9] with hyphens. Optionally truncates the result to 'maxlen' characters. + + Args: + s (str): The input string to sanitize. + maxlen (int, optional): The maximum length for the tag. Defaults to None. + + Returns: + str: A sanitized, tag-friendly string. - tagify("HTTP Web Title") --> "http-web-title" + Examples: + >>> tagify("HTTP Web Title") + 'http-web-title' + >>> tagify("HTTP Web Title", maxlen=8) + 'http-web' """ ret = str(s).lower() return tag_filter_regex.sub("-", ret)[:maxlen].strip("-") def memory_status(): - """ - Return statistics on system memory consumption + """Return statistics on system memory consumption. + + The function returns a `psutil` named tuple that contains statistics on + system virtual memory usage, such as total memory, used memory, available + memory, and more. + + Returns: + psutil._pslinux.svmem: A named tuple representing various statistics + about system virtual memory usage. - Example: to get available memory (not including swap): - memory_status().available + Examples: + >>> mem = memory_status() + >>> mem.available + 13195399168 - Example: to get percent memory used: - memory_status().percent + >>> mem = memory_status() + >>> mem.percent + 79.0 """ return psutil.virtual_memory() def swap_status(): - """ - Return statistics on swap memory consumption + """Return statistics on swap memory consumption. + + The function returns a `psutil` named tuple that contains statistics on + system swap memory usage, such as total swap, used swap, free swap, and more. - Example: to get total swap: - swap_status().total + Returns: + psutil._common.sswap: A named tuple representing various statistics + about system swap memory usage. 
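+
+    Note:
+        All values are reported in bytes. For readable output they pair naturally with
+        `bytes_to_human()` above, e.g. (illustrative, assuming ~2MiB of swap in use):
+
+        >>> bytes_to_human(swap_status().used)
+        '2.00MB'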
+ + Examples: + >>> swap = swap_status() + >>> swap.total + 4294967296 - Example: to get in-use swap: - swap_status().used + >>> swap = swap_status() + >>> swap.used + 2097152 """ return psutil.swap_memory() def get_size(obj, max_depth=5, seen=None): """ - Rough recursive measurement of a python object's memory footprint + Roughly estimate the memory footprint of a Python object using recursion. + + Parameters: + obj (any): The object whose size is to be determined. + max_depth (int, optional): Maximum depth to which nested objects will be inspected. Defaults to 5. + seen (set, optional): Objects that have already been accounted for, to avoid loops. + + Returns: + int: Approximate memory footprint of the object in bytes. + + Examples: + >>> get_size(my_list) + 4200 + + >>> get_size(my_dict, max_depth=3) + 8400 """ # If seen is not provided, initialize an empty set if seen is None: @@ -2094,6 +2211,22 @@ def get_size(obj, max_depth=5, seen=None): def is_file(f): + """ + Check if a path points to a file. + + Parameters: + f (str): Path to the file. + + Returns: + bool: True if the path is a file, False otherwise. + + Examples: + >>> is_file("/etc/passwd") + True + + >>> is_file("/nonexistent") + False + """ with suppress(Exception): return Path(f).is_file() return False @@ -2104,12 +2237,17 @@ def is_file(f): def cloudcheck(ip): """ - Check whether an IP address belongs to a cloud provider + Check whether an IP address belongs to a cloud provider and returns the provider name, type, and subnet. - provider, provider_type, subnet = cloudcheck("168.62.20.37") - print(provider) # "Azure" - print(provider_type) # "cloud" - print(subnet) # IPv4Network('168.62.0.0/19') + Args: + ip (str): The IP address to check. + + Returns: + tuple: A tuple containing provider name (str), provider type (str), and subnet (IPv4Network). + + Examples: + >>> cloudcheck("168.62.20.37") + ('Azure', 'cloud', IPv4Network('168.62.0.0/19')) """ provider, provider_type, subnet = _cloudcheck.check(ip) if provider: @@ -2119,10 +2257,48 @@ def cloudcheck(ip): def is_async_function(f): + """ + Check if a given function is an asynchronous function. + + Args: + f (function): The function to check. + + Returns: + bool: True if the function is asynchronous, False otherwise. + + Examples: + >>> async def foo(): + ... pass + >>> is_async_function(foo) + True + """ return inspect.iscoroutinefunction(f) async def execute_sync_or_async(callback, *args, **kwargs): + """ + Execute a function or coroutine, handling either synchronous or asynchronous invocation. + + Args: + callback (Union[Callable, Coroutine]): The function or coroutine to execute. + *args: Variable-length argument list to pass to the callback. + **kwargs: Arbitrary keyword arguments to pass to the callback. + + Returns: + Any: The return value from the executed function or coroutine. + + Examples: + >>> async def foo_async(x): + ... return x + 1 + >>> def foo_sync(x): + ... return x + 1 + + >>> asyncio.run(execute_sync_or_async(foo_async, 1)) + 2 + + >>> asyncio.run(execute_sync_or_async(foo_sync, 1)) + 2 + """ if is_async_function(callback): return await callback(*args, **kwargs) else: @@ -2131,7 +2307,22 @@ async def execute_sync_or_async(callback, *args, **kwargs): def get_exception_chain(e): """ - Get the full chain of exceptions that led to the current one + Retrieves the full chain of exceptions leading to the given exception. + + Args: + e (BaseException): The exception for which to get the chain. 
+
+    Returns:
+        list[BaseException]: List of exceptions in the chain, from the given exception back to the root cause.
+
+    Examples:
+        >>> try:
+        ...     raise ValueError("This is a value error")
+        ... except ValueError as e:
+        ...     exc_chain = get_exception_chain(e)
+        ...     for exc in exc_chain:
+        ...         print(exc)
+        This is a value error
     """
     exception_chain = []
     current_exception = e
@@ -2142,6 +2333,23 @@
 
 
 def get_traceback_details(e):
+    """
+    Retrieves detailed information from the traceback of an exception.
+
+    Args:
+        e (BaseException): The exception for which to get traceback details.
+
+    Returns:
+        tuple: A tuple containing filename (str), line number (int), and function name (str) where the exception was raised.
+
+    Examples:
+        >>> try:
+        ...     raise ValueError("This is a value error")
+        ... except ValueError as e:
+        ...     filename, lineno, funcname = get_traceback_details(e)
+        ...     print(f"File: {filename}, Line: {lineno}, Function: {funcname}")
+        File: <stdin>, Line: 2, Function: <module>
+    """
     tb = traceback.extract_tb(e.__traceback__)
     last_frame = tb[-1]  # Get the last frame in the traceback (the one where the exception was raised)
     filename = last_frame.filename
@@ -2151,6 +2359,24 @@ def get_traceback_details(e):
 
 
 async def cancel_tasks(tasks, ignore_errors=True):
+    """
+    Asynchronously cancels a list of asyncio tasks.
+
+    Args:
+        tasks (list[Task]): A list of asyncio Task objects to cancel.
+        ignore_errors (bool, optional): Whether to ignore errors other than asyncio.CancelledError. Defaults to True.
+
+    Examples:
+        >>> async def main():
+        ...     task1 = asyncio.create_task(async_function1())
+        ...     task2 = asyncio.create_task(async_function2())
+        ...     await cancel_tasks([task1, task2])
+        ...
+        >>> asyncio.run(main())
+
+    Note:
+        This function will not cancel the current task that it is called from.
+    """
     current_task = asyncio.current_task()
     tasks = [t for t in tasks if t != current_task]
     for task in tasks:
@@ -2166,6 +2392,21 @@
 
 
 def cancel_tasks_sync(tasks):
+    """
+    Synchronously cancels a list of asyncio tasks.
+
+    Args:
+        tasks (list[Task]): A list of asyncio Task objects to cancel.
+
+    Examples:
+        >>> loop = asyncio.get_event_loop()
+        >>> task1 = loop.create_task(some_async_function1())
+        >>> task2 = loop.create_task(some_async_function2())
+        >>> cancel_tasks_sync([task1, task2])
+
+    Note:
+        This function will not cancel the current task from which it is called.
+    """
     current_task = asyncio.current_task()
     for task in tasks:
         if task != current_task:
@@ -2174,6 +2415,31 @@
 
 
 def weighted_shuffle(items, weights):
+    """
+    Shuffles a list of items based on their corresponding weights.
+
+    Args:
+        items (list): The list of items to shuffle.
+        weights (list): The list of weights corresponding to each item.
+
+    Returns:
+        list: A new list containing the shuffled items.
+
+    Examples:
+        >>> items = ['apple', 'banana', 'cherry']
+        >>> weights = [0.4, 0.5, 0.1]
+        >>> weighted_shuffle(items, weights)
+        ['banana', 'apple', 'cherry']
+        >>> weighted_shuffle(items, weights)
+        ['apple', 'banana', 'cherry']
+        >>> weighted_shuffle(items, weights)
+        ['apple', 'banana', 'cherry']
+        >>> weighted_shuffle(items, weights)
+        ['banana', 'apple', 'cherry']
+
+    Note:
+        The sum of all weights does not have to be 1. They will be normalized internally.
+ """ # Create a list of tuples where each tuple is (item, weight) pool = list(zip(items, weights)) @@ -2196,6 +2462,28 @@ def weighted_shuffle(items, weights): def parse_port_string(port_string): + """ + Parses a string containing ports and port ranges into a list of individual ports. + + Args: + port_string (str): The string containing individual ports and port ranges separated by commas. + + Returns: + list: A list of individual ports parsed from the input string. + + Raises: + ValueError: If the input string contains invalid ports or port ranges. + + Examples: + >>> parse_port_string("22,80,1000-1002") + [22, 80, 1000, 1001, 1002] + + >>> parse_port_string("1-2,3-5") + [1, 2, 3, 4, 5] + + >>> parse_port_string("invalid") + ValueError: Invalid port or port range: invalid + """ elements = port_string.split(",") ports = [] @@ -2221,6 +2509,28 @@ def parse_port_string(port_string): def parse_list_string(list_string): + """ + Parses a comma-separated string into a list, removing invalid characters. + + Args: + list_string (str): The string containing elements separated by commas. + + Returns: + list: A list of individual elements parsed from the input string. + + Raises: + ValueError: If the input string contains invalid characters. + + Examples: + >>> parse_list_string("html,js,css") + ['html', 'js', 'css'] + + >>> parse_list_string("png,jpg,gif") + ['png', 'jpg', 'gif'] + + >>> parse_list_string("invalid<>char") + ValueError: Invalid character in string: invalid<>char + """ elements = list_string.split(",") result = [] @@ -2232,6 +2542,23 @@ def parse_list_string(list_string): async def as_completed(coros): + """ + Async generator that yields completed Tasks as they are completed. + + Args: + coros (iterable): An iterable of coroutine objects or asyncio Tasks. + + Yields: + asyncio.Task: A Task object that has completed its execution. + + Examples: + >>> async def main(): + ... async for task in as_completed([coro1(), coro2(), coro3()]): + ... result = task.result() + ... print(f'Task completed with result: {result}') + + >>> asyncio.run(main()) + """ tasks = {coro if isinstance(coro, asyncio.Task) else asyncio.create_task(coro): coro for coro in coros} while tasks: done, _ = await asyncio.wait(tasks.keys(), return_when=asyncio.FIRST_COMPLETED) diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index c5ad95cff7..f997ce1118 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -265,8 +265,7 @@ async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **re Note: You MUST break out of the loop when you stop getting useful results! Otherwise it will loop forever. 
- Example: - Here's a quick example of how to use this: + Examples: ``` agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}') try: diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index abf09cadc2..8838491237 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -338,7 +338,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https with pytest.raises(DirectoryCreationError, match="Failed to create.*"): helpers.mkdir(test_file) - helpers._rm_at_exit(test_file) + helpers.delete_file(test_file) assert not test_file.exists() timedelta = datetime.timedelta(hours=1, minutes=2, seconds=3) From 2f10635f456ca6ae31a606edf68eccaee195fed1 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 15 Sep 2023 16:51:16 -0400 Subject: [PATCH 098/123] steady work on developer documentation --- bbot/core/helpers/command.py | 108 +++++++++- bbot/core/helpers/dns.py | 368 ++++++++++++++++++++++++++++++++--- bbot/core/helpers/files.py | 67 ++++++- bbot/core/helpers/helper.py | 8 + mkdocs.yml | 2 + 5 files changed, 509 insertions(+), 44 deletions(-) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index 0d03ccac17..54fc209c79 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -10,10 +10,28 @@ async def run(self, *command, check=False, text=True, **kwargs): - """ - Simple helper for running a command, and getting its output as a string - process = await run(["ls", "/tmp"]) - process.stdout --> "file1.txt\nfile2.txt" + """Runs a command asynchronously and gets its output as a string. + + This method is a simple helper for executing a command and capturing its output. + If an error occurs during execution, it can optionally raise an error or just log the stderr. + + Args: + *command (str): The command to run as separate arguments. + check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. + Defaults to False. + text (bool, optional): If set to True, decodes the subprocess output to string. Defaults to True. + **kwargs (dict): Additional keyword arguments for the subprocess. + + Returns: + CompletedProcess: A completed process object with attributes for the command, return code, stdout, and stderr. + + Raises: + CalledProcessError: If the subprocess exits with a non-zero status and `check=True`. + + Examples: + >>> process = await run(["ls", "/tmp"]) + >>> process.stdout + "file1.txt\nfile2.txt" """ proc, _input, command = await self._spawn_proc(*command, **kwargs) if proc is not None: @@ -41,10 +59,28 @@ async def run(self, *command, check=False, text=True, **kwargs): async def run_live(self, *command, check=False, text=True, **kwargs): - """ - Simple helper for running a command and iterating through its output line by line in realtime - async for line in run_live(["ls", "/tmp"]): - log.info(line) + """Runs a command asynchronously and iterates through its output line by line in realtime. + + This method is useful for executing a command and capturing its output on-the-fly, as it is generated. + If an error occurs during execution, it can optionally raise an error or just log the stderr. + + Args: + *command (str): The command to run as separate arguments. + check (bool, optional): If set to True, raises an error if the subprocess exits with a non-zero status. + Defaults to False. + text (bool, optional): If set to True, decodes the subprocess output to string. 
Defaults to True.
+        **kwargs (dict): Additional keyword arguments for the subprocess.
+
+    Yields:
+        str or bytes: The output lines of the command, either as a decoded string (if `text=True`)
+            or as bytes (if `text=False`).
+
+    Raises:
+        CalledProcessError: If the subprocess exits with a non-zero status and `check=True`.
+
+    Examples:
+        >>> async for line in run_live(["tail", "-f", "/var/log/auth.log"]):
+        ...     log.info(line)
     """
     proc, _input, command = await self._spawn_proc(*command, **kwargs)
     if proc is not None:
@@ -91,7 +127,28 @@ async def run_live(self, *command, check=False, text=True, **kwargs):
             log.warning(f"Stderr for run_live({command_str}):\n\t{stderr}")
 
 
-async def _spawn_proc(self, *command, **kwargs):
+def _spawn_proc(self, *command, **kwargs):
+    """Spawns an asynchronous subprocess.
+
+    Prepares the command and associated keyword arguments. If the `input` argument is provided,
+    it checks to ensure that the `stdin` argument is not also provided. Once prepared, it creates
+    and returns the subprocess. If the command executable is not found, it logs a warning and traceback.
+
+    Args:
+        *command (str): The command to run as separate arguments.
+        **kwargs (dict): Additional keyword arguments for the subprocess.
+
+    Raises:
+        ValueError: If both stdin and input arguments are provided.
+
+    Returns:
+        tuple: A tuple containing the created process (or None if creation failed), the input (or None if not provided),
+            and the prepared command (or None if subprocess creation failed).
+
+    Examples:
+        >>> _spawn_proc("ls", "-l", input="data")
+        (<Process 1234>, "data", ["ls", "-l"])
+    """
     command, kwargs = self._prepare_command_kwargs(command, kwargs)
     _input = kwargs.pop("input", None)
     if _input is not None:
@@ -110,6 +167,17 @@ async def _spawn_proc(self, *command, **kwargs):
 
 
 async def _write_stdin(proc, _input):
+    """
+    Asynchronously writes input to an active subprocess's stdin.
+
+    This function takes an `_input` parameter, which can be of type str, bytes,
+    list, tuple, or an asynchronous generator. The input is then written line by
+    line to the stdin of the given `proc`.
+
+    Args:
+        proc (subprocess.Popen): An active subprocess object.
+        _input (str, bytes, list, tuple, async generator): The data to write to stdin.
+    """
     if _input is not None:
         if isinstance(_input, (str, bytes)):
             _input = [_input]
@@ -124,6 +192,28 @@ async def _write_stdin(proc, _input):
 
 
 def _prepare_command_kwargs(self, command, kwargs):
+    """
+    Prepare arguments for passing into `asyncio.create_subprocess_exec()`.
+
+    This method modifies the `kwargs` dictionary in place to prepare it for
+    use in the `asyncio.create_subprocess_exec()` method. It sets the default
+    values for keys like 'limit', 'stdout', and 'stderr' if they are not
+    already present. It also handles the case when 'sudo' needs to be run.
+
+    Args:
+        command (list): The command to be run in the subprocess.
+        kwargs (dict): The keyword arguments to be passed to `asyncio.create_subprocess_exec()`.
+
+    Returns:
+        tuple: A tuple containing the modified `command` and `kwargs`.
+ + Examples: + >>> _prepare_command_kwargs(['ls', '-l'], {}) + (['ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1}) + + >>> _prepare_command_kwargs(['ls', '-l'], {'sudo': True}) + (['sudo', '-E', '-A', 'LD_LIBRARY_PATH=...', 'PATH=...', 'ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1, 'env': environ(...)}) + """ # limit = 100MB (this is needed for cases like httpx that are sending large JSON blobs over stdout) if not "limit" in kwargs: kwargs["limit"] = 1024 * 1024 * 100 diff --git a/bbot/core/helpers/dns.py b/bbot/core/helpers/dns.py index 1d5fb7befe..8608536614 100644 --- a/bbot/core/helpers/dns.py +++ b/bbot/core/helpers/dns.py @@ -18,6 +18,21 @@ class BBOTAsyncResolver(dns.asyncresolver.Resolver): + """Custom asynchronous resolver for BBOT with rate limiting. + + This class extends dnspython's async resolver and provides additional support for rate-limiting DNS queries. + The maximum number of queries allowed per second can be customized via BBOT's config. + + Attributes: + _parent_helper: A reference to the instantiated `ConfigAwareHelper` (typically `scan.helpers`). + _dns_rate_limiter (RateLimiter): An instance of the RateLimiter class for DNS query rate-limiting. + + Args: + *args: Positional arguments passed to the base resolver. + **kwargs: Keyword arguments. '_parent_helper' is expected among these to provide configuration data for + rate-limiting. All other keyword arguments are passed to the base resolver. + """ + def __init__(self, *args, **kwargs): self._parent_helper = kwargs.pop("_parent_helper") dns_queries_per_second = self._parent_helper.config.get("dns_queries_per_second", 100) @@ -30,8 +45,37 @@ async def resolve(self, *args, **kwargs): class DNSHelper: - """ - For host resolution, automatic wildcard detection, etc. + """Helper class for DNS-related operations within BBOT. + + This class provides mechanisms for host resolution, wildcard domain detection, event tagging, and more. + It centralizes all DNS-related activities in BBOT, offering both synchronous and asynchronous methods + for DNS resolution, as well as various utilities for batch resolution and DNS query filtering. + + Attributes: + parent_helper: A reference to the instantiated `ConfigAwareHelper` (typically `scan.helpers`). + resolver (BBOTAsyncResolver): An asynchronous DNS resolver tailored for BBOT with rate-limiting capabilities. + timeout (int): The timeout value for DNS queries. Defaults to 5 seconds. + retries (int): The number of retries for failed DNS queries. Defaults to 1. + abort_threshold (int): The threshold for aborting after consecutive failed queries. Defaults to 50. + max_dns_resolve_distance (int): Maximum allowed distance for DNS resolution. Defaults to 4. + all_rdtypes (list): A list of DNS record types to be considered during operations. + wildcard_ignore (tuple): Domains to be ignored during wildcard detection. + wildcard_tests (int): Number of tests to be run for wildcard detection. Defaults to 5. + _wildcard_cache (dict): Cache for wildcard detection results. + _dns_cache (CacheDict): Cache for DNS resolution results, limited in size. + _event_cache (CacheDict): Cache for event resolution results, tags. Limited in size. + resolver_file (Path): File containing system's current resolver nameservers. + filter_bad_ptrs (bool): Whether to filter out DNS names that appear to be auto-generated PTR records. Defaults to True. + + Args: + parent_helper: The parent helper object with configuration details and utilities. 
+
+    Raises:
+        DNSError: If an issue arises when creating the BBOTAsyncResolver instance.
+
+    Examples:
+        >>> dns_helper = DNSHelper(parent_helper)
+        >>> resolved_host = dns_helper.resolver.resolve("example.com")
     """
 
     all_rdtypes = ["A", "AAAA", "SRV", "MX", "NS", "SOA", "CNAME", "TXT"]
 
@@ -44,7 +88,7 @@ def __init__(self, parent_helper):
             raise DNSError(f"Failed to create BBOT DNS resolver: {e}")
         self.timeout = self.parent_helper.config.get("dns_timeout", 5)
         self.retries = self.parent_helper.config.get("dns_retries", 1)
-        self.abort_threshold = self.parent_helper.config.get("dns_abort_threshold", 5)
+        self.abort_threshold = self.parent_helper.config.get("dns_abort_threshold", 50)
         self.max_dns_resolve_distance = self.parent_helper.config.get("max_dns_resolve_distance", 4)
         self.resolver.timeout = self.timeout
         self.resolver.lifetime = self.timeout
@@ -95,14 +139,25 @@ def __init__(self, parent_helper):
         self.filter_bad_ptrs = self.parent_helper.config.get("dns_filter_ptrs", True)
 
     async def resolve(self, query, **kwargs):
-        """
-        "1.2.3.4" --> {
-            "evilcorp.com",
-        }
-        "evilcorp.com" --> {
-            "1.2.3.4",
-            "dead::beef"
-        }
+        """Resolve DNS names and IP addresses to their corresponding results.
+
+        This is a high-level function that can translate a given domain name to its associated IP addresses
+        or an IP address to its corresponding domain names. It's structured for ease of use within modules
+        and will abstract away most of the complexity of DNS resolution, returning a simple set of results.
+
+        Args:
+            query (str): The domain name or IP address to resolve.
+            **kwargs: Additional arguments to be passed to the resolution process.
+
+        Returns:
+            set: A set containing resolved domain names or IP addresses.
+
+        Examples:
+            >>> results = await resolve("1.2.3.4")
+            {"evilcorp.com"}
+
+            >>> results = await resolve("evilcorp.com")
+            {"1.2.3.4", "dead::beef"}
         """
         results = set()
         try:
@@ -122,6 +177,32 @@ async def resolve(self, query, **kwargs):
         return results
 
     async def resolve_raw(self, query, **kwargs):
+        """Resolves the given query to its associated DNS records.
+
+        This function is a foundational method for DNS resolution in this class. It understands both IP addresses and
+        hostnames and returns their associated records in a raw format provided by the dnspython library.
+
+        Args:
+            query (str): The IP address or hostname to resolve.
+            type (str or list[str], optional): Specifies the DNS record type(s) to fetch. Can be a single type like 'A'
+                or a list like ['A', 'AAAA']. If set to 'any', 'all', or '*', it fetches all supported types. If not
+                specified, the function defaults to fetching 'A' and 'AAAA' records.
+            **kwargs: Additional arguments that might be passed to the resolver.
+
+        Returns:
+            tuple: A tuple containing two lists:
+                - list: A list of tuples where each tuple consists of a record type string (like 'A') and the associated
+                    raw dnspython answer.
+                - list: A list of tuples where each tuple consists of a record type string and the associated error if
+                    there was an issue fetching the record.
+
+        Examples:
+            >>> await resolve_raw("8.8.8.8")
+            ([('PTR', <dns.resolver.Answer>)], [])
+
+            >>> await resolve_raw("dns.google")
+            ([('A', <dns.resolver.Answer>), ('AAAA', <dns.resolver.Answer>)], [])
+        """
         # DNS over TCP is more reliable
         # But setting this breaks DNS resolution on Ubuntu because systemd-resolve doesn't support TCP
         # kwargs["tcp"] = True
@@ -160,6 +241,29 @@ async def resolve_raw(self, query, **kwargs):
         return (results, errors)
 
     async def _resolve_hostname(self, query, **kwargs):
+        """Translate a hostname into its corresponding IP addresses.
+
+        This is the foundational function for converting a domain name into its associated IP addresses. It's designed
+        for internal use within the class and handles retries, caching, and a variety of error/timeout scenarios.
+        It also respects certain configurations that might ask to skip certain types of queries. Results are returned
+        in the default dnspython answer object format.
+
+        Args:
+            query (str): The hostname to resolve.
+            rdtype (str, optional): The type of DNS record to query (e.g., 'A', 'AAAA'). Defaults to 'A'.
+            retries (int, optional): The number of times to retry on failure. Defaults to class-wide `retries`.
+            use_cache (bool, optional): Whether to check the cache before trying a fresh resolution. Defaults to True.
+            **kwargs: Additional arguments that might be passed to the resolver.
+
+        Returns:
+            tuple: A tuple containing:
+                - list: A list of resolved IP addresses.
+                - list: A list of errors encountered during the resolution process.
+
+        Examples:
+            >>> results, errors = await _resolve_hostname("google.com")
+            (<dns.resolver.Answer>, [])
+        """
         self.debug(f"Resolving {query} with kwargs={kwargs}")
         results = []
         errors = []
@@ -232,6 +336,27 @@ async def _resolve_hostname(self, query, **kwargs):
         return results, errors
 
     async def _resolve_ip(self, query, **kwargs):
+        """Translate an IP address into a corresponding DNS name.
+
+        This is the most basic function that will convert an IP address into its associated domain name. It handles
+        retries, caching, and multiple types of timeout/error scenarios internally. The function is intended for
+        internal use and should not be directly called by modules without understanding its intricacies.
+
+        Args:
+            query (str): The IP address to be reverse-resolved.
+            retries (int, optional): The number of times to retry on failure. Defaults to 0.
+            use_cache (bool, optional): Whether to check the cache for the result before attempting resolution. Defaults to True.
+            **kwargs: Additional arguments to be passed to the resolution process.
+
+        Returns:
+            tuple: A tuple containing:
+                - list: A list of resolved domain names (in default dnspython answer format).
+                - list: A list of errors encountered during resolution.
+
+        Examples:
+            >>> results, errors = await _resolve_ip("8.8.8.8")
+            (<dns.resolver.Answer>, [])
+        """
         self.debug(f"Reverse-resolving {query} with kwargs={kwargs}")
         retries = kwargs.pop("retries", 0)
         use_cache = kwargs.pop("use_cache", True)
@@ -271,6 +396,25 @@ async def _resolve_ip(self, query, **kwargs):
         return results, errors
 
     async def handle_wildcard_event(self, event, children):
+        """
+        Used within BBOT's scan manager to detect and tag DNS wildcard events.
+
+        Wildcards are detected for every major record type. If a wildcard is detected, its data
+        is overwritten, for example: `_wildcard.evilcorp.com`.
+
+        Args:
+            event (object): The event to check for wildcards.
+            children (list): A list of the event's resulting DNS children after resolution.
+
+        Returns:
+            None: This method modifies the `event` in place and does not return a value.
+
+        Examples:
+            >>> await handle_wildcard_event(event, children)
+            # The `event` might now have tags like ["wildcard", "a-wildcard", "aaaa-wildcard"] and
+            # its `data` attribute might be modified to "_wildcard.evilcorp.com" if it was detected
+            # as a wildcard.
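+            #
+            # A follow-up check might look like this (illustrative):
+            # >>> "wildcard" in event.tags
+            # True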
+ """ log.debug(f"Entering handle_wildcard_event({event}, children={children})") try: event_host = str(event.host) @@ -324,8 +468,29 @@ async def handle_wildcard_event(self, event, children): async def resolve_event(self, event, minimal=False): """ - Tag event with appropriate dns record types - Optionally create child events from dns resolutions + Tag the given event with the appropriate DNS record types and optionally create child + events based on DNS resolutions. + + Args: + event (object): The event to be resolved and tagged. + minimal (bool, optional): If set to True, the function will perform minimal DNS + resolution. Defaults to False. + + Returns: + tuple: A 4-tuple containing the following items: + - event_tags (set): Set of tags for the event. + - event_whitelisted (bool): Whether the event is whitelisted. + - event_blacklisted (bool): Whether the event is blacklisted. + - dns_children (dict): Dictionary containing child events from DNS resolutions. + + Examples: + >>> event = make_event("evilcorp.com") + >>> resolve_event(event) + ({'resolved', 'ns-record', 'a-record',}, False, False, {'A': {IPv4Address('1.2.3.4'), IPv4Address('1.2.3.5')}, 'NS': {'ns1.evilcorp.com'}}) + + Note: + This method does not modify the passed in `event`. Instead, it returns data + that can be used to modify or act upon the `event`. """ log.debug(f"Resolving {event}") event_host = str(event.host) @@ -430,8 +595,33 @@ async def resolve_event(self, event, minimal=False): log.debug(f"Finished resolving {event}") def event_cache_get(self, host): + """ + Retrieves cached event data based on the given host. + + Args: + host (str): The host for which the event data is to be retrieved. + + Returns: + tuple: A 4-tuple containing the following items: + - event_tags (set): Set of tags for the event. + - event_whitelisted (bool or None): Whether the event is whitelisted. Returns None if not found. + - event_blacklisted (bool or None): Whether the event is blacklisted. Returns None if not found. + - dns_children (set): Set containing child events from DNS resolutions. + + Examples: + Assuming an event with host "www.evilcorp.com" has been cached: + + >>> event_cache_get("www.evilcorp.com") + ({"resolved", "a-record"}, False, False, {'1.2.3.4'}) + + Assuming no event with host "www.notincache.com" has been cached: + + >>> event_cache_get("www.notincache.com") + (set(), None, None, set()) + """ try: - return self._event_cache[host] + event_tags, event_whitelisted, event_blacklisted, dns_children = self._event_cache[host] + return (event_tags, event_whitelisted, event_blacklisted, dns_children) except KeyError: return set(), None, None, set() @@ -444,10 +634,27 @@ async def _resolve_batch_coro_wrapper(self, q, **kwargs): async def resolve_batch(self, queries, **kwargs): """ - await resolve_batch(["www.evilcorp.com", "evilcorp.com"]) --> [ - ("www.evilcorp.com", {"1.1.1.1"}), - ("evilcorp.com", {"2.2.2.2"}) - ] + Asynchronously resolves a batch of queries in parallel and yields the results as they are completed. + + This method wraps around `_resolve_batch_coro_wrapper` to resolve a list of queries in parallel. + It batches the queries to a manageable size and executes them asynchronously, respecting + global rate limits. + + Args: + queries (list): List of queries to resolve. + **kwargs: Additional keyword arguments to pass to `_resolve_batch_coro_wrapper`. + + Yields: + tuple: A tuple containing the original query and its resolved value. + + Examples: + >>> import asyncio + >>> async def example_usage(): + ... 
async for result in resolve_batch(['www.evilcorp.com', 'evilcorp.com']): + ... print(result) + ('www.evilcorp.com', {'1.1.1.1'}) + ('evilcorp.com', {'2.2.2.2'}) + """ queries = list(queries) batch_size = 250 @@ -459,7 +666,28 @@ async def resolve_batch(self, queries, **kwargs): def extract_targets(self, record): """ - Extract whatever hostnames/IPs a DNS records points to + Extracts hostnames or IP addresses from a given DNS record. + + This method reads the DNS record's type and based on that, extracts the target + hostnames or IP addresses it points to. The type of DNS record + (e.g., "A", "MX", "CNAME", etc.) determines which fields are used for extraction. + + Args: + record (dns.rdata.Rdata): The DNS record to extract information from. + + Returns: + set: A set of tuples, each containing the DNS record type and the extracted value. + + Examples: + >>> from dns.rrset import from_text + >>> record = from_text('www.example.com', 3600, 'IN', 'A', '192.0.2.1') + >>> extract_targets(record[0]) + {('A', '192.0.2.1')} + + >>> record = from_text('example.com', 3600, 'IN', 'MX', '10 mail.example.com.') + >>> extract_targets(record[0]) + {('MX', 'mail.example.com')} + """ results = set() rdtype = str(record.rdtype.name).upper() @@ -486,11 +714,50 @@ def extract_targets(self, record): @staticmethod def _clean_dns_record(record): + """ + Cleans and formats a given DNS record for further processing. + + This static method converts the DNS record to text format if it's not already a string. + It also removes any trailing dots and converts the record to lowercase. + + Args: + record (str or dns.rdata.Rdata): The DNS record to clean. + + Returns: + str: The cleaned and formatted DNS record. + + Examples: + >>> _clean_dns_record('www.evilcorp.com.') + 'www.evilcorp.com' + + >>> from dns.rrset import from_text + >>> record = from_text('www.evilcorp.com', 3600, 'IN', 'A', '1.2.3.4')[0] + >>> _clean_dns_record(record) + '1.2.3.4' + """ if not isinstance(record, str): record = str(record.to_text()) return str(record).rstrip(".").lower() async def _catch(self, callback, *args, **kwargs): + """ + Asynchronously catches exceptions thrown during DNS resolution and logs them. + + This method wraps around a given asynchronous callback function to handle different + types of DNS exceptions and general exceptions. It logs the exceptions for debugging + and, in some cases, re-raises them. + + Args: + callback (callable): The asynchronous function to be executed. + *args: Positional arguments to pass to the callback. + **kwargs: Keyword arguments to pass to the callback. + + Returns: + Any: The return value of the callback function, or an empty list if an exception is caught. + + Raises: + dns.resolver.NoNameservers: When no nameservers could be reached. + """ try: return await callback(*args, **kwargs) except dns.resolver.NoNameservers: @@ -509,16 +776,33 @@ async def is_wildcard(self, query, ips=None, rdtype=None): """ Use this method to check whether a *host* is a wildcard entry - This can reliably tell the difference between a valid DNS record and a wildcard inside a wildcard domain. + This can reliably tell the difference between a valid DNS record and a wildcard within a wildcard domain. + + If you want to know whether a domain is using wildcard DNS, use `is_wildcard_domain()` instead. + + Args: + query (str): The hostname to check for a wildcard entry. + ips (list, optional): List of IPs to compare against, typically obtained from a previous DNS resolution of the query. 
+ rdtype (str, optional): The DNS record type (e.g., "A", "AAAA") to consider during the check. + + Returns: + dict: A dictionary indicating if the query is a wildcard for each checked DNS record type. + Keys are DNS record types like "A", "AAAA", etc. + Values are tuples where the first element is a boolean indicating if the query is a wildcard, + and the second element is the wildcard parent if it's a wildcard. - If you want to know whether a domain is using wildcard DNS, use is_wildcard_domain() instead. + Raises: + ValueError: If only one of `ips` or `rdtype` is specified or if no valid IPs are specified. - Returns a dictionary in the following format: - {rdtype: (is_wildcard, wildcard_parent)} + Examples: + >>> is_wildcard("www.github.io") + {"A": (True, "github.io"), "AAAA": (True, "github.io")} - is_wildcard("www.github.io") --> {"A": (True, "github.io"), "AAAA": (True, "github.io")} + >>> is_wildcard("www.evilcorp.com", ips=["93.184.216.34"], rdtype="A") + {"A": (False, "evilcorp.com")} - Note that is_wildcard can be True, False, or None (indicating that wildcard detection was inconclusive) + Note: + `is_wildcard` can be True, False, or None (indicating that wildcard detection was inconclusive) """ result = {} @@ -618,12 +902,25 @@ async def is_wildcard(self, query, ips=None, rdtype=None): async def is_wildcard_domain(self, domain, log_info=False): """ - Check whether a domain is using wildcard DNS + Check whether a given host or its children make use of wildcard DNS entries. Wildcard DNS can have + various implications, particularly in subdomain enumeration and subdomain takeovers. - Returns a dictionary containing any DNS record types that are wildcards, and their associated IPs - is_wildcard_domain("github.io") --> {"A": {"1.2.3.4",}, "AAAA": {"dead::beef",}} - """ + Args: + domain (str): The domain to check for wildcard DNS entries. + log_info (bool, optional): Whether to log the result of the check. Defaults to False. + + Returns: + dict: A dictionary where the keys are the parent domains that have wildcard DNS entries, + and the values are another dictionary of DNS record types ("A", "AAAA", etc.) mapped to + sets of their resolved IP addresses. + Examples: + >>> is_wildcard_domain("github.io") + {"github.io": {"A": {"1.2.3.4"}, "AAAA": {"dead::beef"}}} + + >>> is_wildcard_domain("example.com") + {} + """ wildcard_domain_results = {} domain = self._clean_dns_record(domain) @@ -690,7 +987,18 @@ async def is_wildcard_domain(self, domain, log_info=False): async def _connectivity_check(self, interval=5): """ - Used to periodically check whether the scan has an internet connection + Periodically checks for an active internet connection by attempting DNS resolution. + + Args: + interval (int, optional): The time interval, in seconds, at which to perform the check. + Defaults to 5 seconds. + + Returns: + bool: True if there is an active internet connection, False otherwise. + + Examples: + >>> await _connectivity_check() + True """ if self._last_dns_success is not None: if time.time() - self._last_dns_success < interval: diff --git a/bbot/core/helpers/files.py b/bbot/core/helpers/files.py index 27ed71948d..438f741122 100644 --- a/bbot/core/helpers/files.py +++ b/bbot/core/helpers/files.py @@ -12,11 +12,23 @@ def tempfile(self, content, pipe=True): """ - tempfile(["temp", "file", "content"]) --> Path("/home/user/.bbot/temp/pgxml13bov87oqrvjz7a") + Creates a temporary file or named pipe and populates it with content. 
- if "pipe" is True (the default), a named pipe is used instead of - a true file, which allows python data to be piped directly into the - process without taking up disk space + Args: + content (list, set, tuple, str): The content to populate the temporary file with. + pipe (bool, optional): If True, a named pipe is used instead of a true file. + This allows Python data to be piped directly into the process without taking up disk space. + Defaults to True. + + Returns: + str: The filepath of the created temporary file or named pipe. + + Examples: + >>> tempfile(["This", "is", "temp", "content"]) + '/home/user/.bbot/temp/pgxml13bov87oqrvjz7a' + + >>> tempfile(["Another", "temp", "file"], pipe=False) + '/home/user/.bbot/temp/someotherfile' """ filename = self.temp_filename() rm_at_exit(filename) @@ -39,6 +51,19 @@ def tempfile(self, content, pipe=True): def _feed_pipe(self, pipe, content, text=True): + """ + Feeds content into a named pipe or file-like object. + + Args: + pipe (str or file-like object): The named pipe or file-like object to feed the content into. + content (iterable): The content to be written into the pipe or file. + text (bool, optional): If True, the content is decoded using smart_decode function. + If False, smart_encode function is used. Defaults to True. + + Notes: + The method tries to determine if 'pipe' is a file-like object that has a 'write' method. + If so, it writes directly to that object. Otherwise, it opens 'pipe' as a file for writing. + """ try: if text: decode_fn = self.smart_decode @@ -70,13 +95,33 @@ def _feed_pipe(self, pipe, content, text=True): def feed_pipe(self, pipe, content, text=True): + """ + Starts a new thread to feed content into a named pipe or file-like object using _feed_pipe(). + + Args: + pipe (str or file-like object): The named pipe or file-like object to feed the content into. + content (iterable): The content to be written into the pipe or file. + text (bool, optional): If True, the content is decoded using smart_decode function. + If False, smart_encode function is used. Defaults to True. + """ t = threading.Thread(target=self._feed_pipe, args=(pipe, content), kwargs={"text": text}, daemon=True) t.start() def tempfile_tail(self, callback): """ - Create a named pipe and execute a callback on each line + Create a named pipe and execute a callback function on each line that is written to the pipe. + + Useful for ingesting output from a program (e.g. nuclei) directly from a file in real-time as + each line is written. The idea is you create the file with this function and then tell the CLI + program to output to it as a normal output file. We are then able to scoop up the output line + by line as it's written to our "file" (which is actually a named pipe, shhh! ;) + + Args: + callback (Callable): A function that will be invoked with each line written to the pipe as its argument. + + Returns: + str: The filename of the created named pipe. """ filename = self.temp_filename() rm_at_exit(filename) @@ -92,6 +137,18 @@ def tempfile_tail(self, callback): def tail(filename, callback): + """ + Continuously read lines from a file and execute a callback function on each line. + + Args: + filename (str): The path of the file to tail. + callback (Callable): A function to call on each line read from the file. + + Examples: + >>> def print_callback(line): + ... 
print(f"Received: {line}") + >>> tail("/path/to/file", print_callback) + """ try: with open(filename, errors="ignore") as f: for line in f: diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 241ec15d89..ef6d259cc0 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -18,6 +18,14 @@ class ConfigAwareHelper: + """ + A central class designed to provide easy access to helpers. + + Since certain helper functions rely on configuration-specific parameters + (such as dns and http which rely on rate-limits etc.,) it also provides + certain helpers with access to the config and the current BBOT scan instance. + """ + from . import ntlm from . import regexes from . import validators diff --git a/mkdocs.yml b/mkdocs.yml index 1ccedb94c7..2ecbafe366 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -37,6 +37,8 @@ nav: - BaseModule: dev/basemodule.md - Helpers: # dev/helpers/index.md + - Command: dev/helpers/command.md + - DNS: dev/helpers/dns.md - Miscellaneous: dev/helpers/misc.md - Misc: - Release History: release_history.md From e6afb1cc0ec43a9448c796f9e5dac772984a5d0c Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 15 Sep 2023 17:18:34 -0400 Subject: [PATCH 099/123] fix tests, started interact.sh developer docs --- bbot/core/helpers/command.py | 2 +- bbot/core/helpers/helper.py | 45 +++++++++++++++++++++---- bbot/core/helpers/interactsh.py | 59 +++++++++++++++++++++++++++++++++ mkdocs.yml | 1 + 4 files changed, 100 insertions(+), 7 deletions(-) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index 54fc209c79..bc28cbc822 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -127,7 +127,7 @@ async def run_live(self, *command, check=False, text=True, **kwargs): log.warning(f"Stderr for run_live({command_str}):\n\t{stderr}") -def _spawn_proc(self, *command, **kwargs): +async def _spawn_proc(self, *command, **kwargs): """Spawns an asynchronous subprocess. Prepares the command and associated keyword arguments. If the `input` argument is provided, diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index ef6d259cc0..eef59e5d38 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -19,11 +19,29 @@ class ConfigAwareHelper: """ - A central class designed to provide easy access to helpers. - - Since certain helper functions rely on configuration-specific parameters - (such as dns and http which rely on rate-limits etc.,) it also provides - certain helpers with access to the config and the current BBOT scan instance. + Centralized helper class that provides unified access to various helper functions. + + This class serves as a convenient interface for accessing helper methods across different files. + It is designed to be configuration-aware, allowing helper functions to utilize scan-specific + configurations like rate-limits. The class leverages Python's `__getattribute__` magic method + to provide seamless access to helper functions across various namespaces. + + Attributes: + config (dict): Configuration settings for the BBOT scan instance. + _scan (Scan): A BBOT scan instance. + bbot_home (Path): Home directory for BBOT. + cache_dir (Path): Directory for storing cache files. + temp_dir (Path): Directory for storing temporary files. + tools_dir (Path): Directory for storing tools, e.g. compiled binaries. + lib_dir (Path): Directory for storing libraries. + scans_dir (Path): Directory for storing scan results. 
+ wordlist_dir (Path): Directory for storing wordlists. + current_dir (Path): The current working directory. + keep_old_scans (int): The number of old scans to keep. + + Examples: + >>> helper = ConfigAwareHelper(config) + >>> ips = helper.dns.resolve("www.evilcorp.com") """ from . import ntlm @@ -106,7 +124,22 @@ def _make_dummy_module(self, name, _type="scan"): def __getattribute__(self, attr): """ - Allow static functions from sub-helpers to be accessed from the main class + Do not be afraid, the angel said. + + Overrides Python's built-in __getattribute__ to provide convenient access to helper methods. + + This method first attempts to find an attribute within this class itself. If unsuccessful, + it then looks in the 'misc', 'dns', and 'web' helper modules, in that order. If the attribute + is still not found, an AttributeError is raised. + + Args: + attr (str): The attribute name to look for. + + Returns: + Any: The attribute value, if found. + + Raises: + AttributeError: If the attribute is not found in any of the specified places. """ try: # first try self diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 8f81ec0af1..3fb4bdc7b6 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -19,6 +19,65 @@ class Interactsh: + """ + A pure python implementation of ProjectDiscovery's interact.sh. + + *"Interactsh is an open-source tool for detecting out-of-band interactions. It is a tool designed to detect vulnerabilities that cause external interactions."* + + - https://app.interactsh.com + - https://github.com/projectdiscovery/interactsh + + This class facilitates interactions with the interact.sh service for + out-of-band data exfiltration and vulnerability confirmation. It allows + for customization by accepting server and token parameters from the + configuration provided by `parent_helper`. + + Attributes: + parent_helper (ConfigAwareHelper): An instance of a helper class containing configuration data. + server (str): The server to be used. If None (the default), a random server will be chosen from a predetermined list. + correlation_id (str): An identifier to correlate requests and responses. Default is None. + custom_server (str): Optional. A custom interact.sh server. Loaded from configuration. + token (str): Optional. A token for interact.sh API. Loaded from configuration. + _poll_task (AsyncTask): The task responsible for polling the interact.sh server. Default is None. 
+ + Examples: + ```python + # instantiate interact.sh client (no requests are sent yet) + >>> interactsh_client = s.helpers.interactsh() + # register with an interact.sh server + >>> interactsh_domain = await interactsh_client.register() + [INFO] Registering with interact.sh server: oast.me + [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me] + # simulate an out-of-band interaction + >>> await s.helpers.request(f"https://{interactsh_domain}/test") + # wait for out-of-band interaction to be registered + >>> await asyncio.sleep(10) + >>> data_list = await interactsh_client.poll() + >>> print(data_list) + [ + { + "protocol": "dns", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "full-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "q-type": "A", + "raw-request": "...", + "remote-address": "1.2.3.4", + "timestamp": "2023-09-15T21:09:23.187226851Z" + }, + { + "protocol": "http", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "full-id": "rg99x2f860h5466ou3so86i07n1m3013k", + "raw-request": "GET /test HTTP/1.1 ...", + "remote-address": "1.2.3.4", + "timestamp": "2023-09-15T21:09:24.155677967Z" + } + ] + # finally, shut down the client + >>> await interactsh_client.deregister() + ``` + """ + def __init__(self, parent_helper): self.parent_helper = parent_helper self.server = None diff --git a/mkdocs.yml b/mkdocs.yml index 2ecbafe366..1b0510aabf 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -39,6 +39,7 @@ nav: # dev/helpers/index.md - Command: dev/helpers/command.md - DNS: dev/helpers/dns.md + - Interactsh: dev/helpers/interactsh.md - Miscellaneous: dev/helpers/misc.md - Misc: - Release History: release_history.md From bf5fd4166d93759732952a903960bf183f38a1bf Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Sat, 16 Sep 2023 15:12:29 -0400 Subject: [PATCH 100/123] interactsh developer documentation --- bbot/core/helpers/interactsh.py | 102 ++++++++++++++++++++++++++++++-- 1 file changed, 97 insertions(+), 5 deletions(-) diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index 3fb4bdc7b6..695fd62600 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -38,18 +38,18 @@ class Interactsh: correlation_id (str): An identifier to correlate requests and responses. Default is None. custom_server (str): Optional. A custom interact.sh server. Loaded from configuration. token (str): Optional. A token for interact.sh API. Loaded from configuration. - _poll_task (AsyncTask): The task responsible for polling the interact.sh server. Default is None. + _poll_task (AsyncTask): The task responsible for polling the interact.sh server. 
Examples: ```python # instantiate interact.sh client (no requests are sent yet) - >>> interactsh_client = s.helpers.interactsh() + >>> interactsh_client = self.helpers.interactsh() # register with an interact.sh server >>> interactsh_domain = await interactsh_client.register() [INFO] Registering with interact.sh server: oast.me [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me] # simulate an out-of-band interaction - >>> await s.helpers.request(f"https://{interactsh_domain}/test") + >>> await self.helpers.request(f"https://{interactsh_domain}/test") # wait for out-of-band interaction to be registered >>> await asyncio.sleep(10) >>> data_list = await interactsh_client.poll() @@ -87,6 +87,28 @@ def __init__(self, parent_helper): self._poll_task = None async def register(self, callback=None): + """ + Registers the instance with an interact.sh server and sets up polling. + + Generates RSA keys for secure communication, builds a correlation ID, + and sends a POST request to an interact.sh server to register. Optionally, + starts an asynchronous polling task to listen for interactions. + + Args: + callback (callable, optional): A function to be called each time new interactions are received. + + Returns: + str: The registered domain for out-of-band interactions. + + Raises: + InteractshError: If registration with an interact.sh server fails. + + Examples: + >>> interactsh_client = self.helpers.interactsh() + >>> registered_domain = await interactsh_client.register() + [INFO] Registering with interact.sh server: oast.me + [INFO] Successfully registered to interactsh server oast.me with correlation_id rg99x2f860h5466ou3so [rg99x2f860h5466ou3so86i07n1m3013k.oast.me] + """ rsa = RSA.generate(1024) self.public_key = rsa.publickey().exportKey() @@ -143,6 +165,19 @@ async def register(self, callback=None): return self.domain async def deregister(self): + """ + Deregisters the instance from the interact.sh server and cancels the polling task. + + Sends a POST request to the server to deregister, using the correlation ID + and secret key generated during registration. Optionally, if a polling + task was started, it is cancelled. + + Raises: + InteractshError: If required information is missing or if deregistration fails. + + Examples: + >>> await interactsh_client.deregister() + """ if not self.server or not self.correlation_id or not self.secret: raise InteractshError(f"Missing required information to deregister") @@ -163,6 +198,31 @@ async def deregister(self): raise InteractshError(f"Failed to de-register with interactsh server {self.server}") async def poll(self): + """ + Polls the interact.sh server for interactions tied to the current instance. + + Sends a GET request to the server to fetch interactions associated with the + current correlation_id and secret key. Returned interactions are decrypted + using an AES key provided by the server response. + + Raises: + InteractshError: If required information for polling is missing. + + Returns: + list: A list of decrypted interaction data dictionaries. + + Examples: + >>> data_list = await interactsh_client.poll() + >>> print(data_list) + [ + { + "protocol": "dns", + "unique-id": "rg99x2f860h5466ou3so86i07n1m3013k", + ... + }, + ... 
+ ] + """ if not self.server or not self.correlation_id or not self.secret: raise InteractshError(f"Missing required information to poll") @@ -180,11 +240,27 @@ async def poll(self): aes_key = r.json()["aes_key"] for data in data_list: - decrypted_data = self.decrypt(aes_key, data) + decrypted_data = self._decrypt(aes_key, data) ret.append(decrypted_data) return ret async def poll_loop(self, callback): + """ + Starts a polling loop to continuously check for interactions with the interact.sh server. + + Continuously polls the interact.sh server for interactions tied to the current instance, + using the `poll` method. When interactions are received, it executes the given callback + function with each interaction data. + + Parameters: + callback (callable): The function to be called for every interaction received from the server. + + Returns: + awaitable: An awaitable object that executes the internal `_poll_loop` method. + + Examples: + >>> await interactsh_client.poll_loop(my_callback) + """ async with self.parent_helper.scan._acatch(context=self._poll_loop): return await self._poll_loop(callback) @@ -206,7 +282,23 @@ async def _poll_loop(self, callback): if data: await self.parent_helper.execute_sync_or_async(callback, data) - def decrypt(self, aes_key, data): + def _decrypt(self, aes_key, data): + """ + Decrypts and returns the data received from the interact.sh server. + + Uses RSA and AES for decrypting the data. RSA with PKCS1_OAEP and SHA256 is used to decrypt the AES key, + and then AES (CFB mode) is used to decrypt the actual data payload. + + Parameters: + aes_key (str): The AES key for decryption, encrypted with RSA and base64 encoded. + data (str): The data payload to decrypt, which is base64 encoded and AES encrypted. + + Returns: + dict: The decrypted data, loaded as a JSON object. + + Examples: + >>> decrypted_data = self._decrypt(aes_key, data) + """ private_key = RSA.importKey(self.private_key) cipher = PKCS1_OAEP.new(private_key, hashAlgo=SHA256) aes_plain_key = cipher.decrypt(base64.b64decode(aes_key)) From b6160e6bb5cb32dd665d3f5b7963f8d0bea52333 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Fri, 22 Sep 2023 11:53:24 -0400 Subject: [PATCH 101/123] resolve conflicts in helpers/regexes.py --- bbot/core/helpers/modules.py | 115 ++++++++++++++- bbot/core/helpers/ratelimiter.py | 14 ++ bbot/core/helpers/regexes.py | 11 +- bbot/core/helpers/url.py | 138 ++++++++++++++++- bbot/core/helpers/validators.py | 134 ++++++++++++++--- bbot/core/helpers/web.py | 244 +++++++++++++++++++++++++++---- bbot/core/helpers/wordcloud.py | 196 ++++++++++++++++++++++++- mkdocs.yml | 2 + 8 files changed, 799 insertions(+), 55 deletions(-) diff --git a/bbot/core/helpers/modules.py b/bbot/core/helpers/modules.py index 7a6f822214..c6cc52f426 100644 --- a/bbot/core/helpers/modules.py +++ b/bbot/core/helpers/modules.py @@ -11,6 +11,14 @@ class ModuleLoader: + """ + Main class responsible for loading BBOT modules. + + This class is in charge of preloading modules to determine their dependencies. + Once dependencies are identified, they are installed before the actual module is imported. + This ensures that all requisite libraries and components are available for the module to function correctly. 
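+
+    Examples:
+        # illustrative sketch; module_loader is the shared ModuleLoader instance exported by bbot.modules
+        >>> from bbot.modules import module_loader
+        >>> module_loader.preloaded()["massdns"]["flags"]
+        ['subdomain-enum', 'passive', 'slow', 'aggressive']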
+ """ + def __init__(self): self._preloaded = {} self._preloaded_orig = None @@ -24,8 +32,25 @@ def file_filter(self, file): return file.suffix.lower() == ".py" and file.stem not in ["base", "__init__"] def preload(self, module_dir): - """ - Preload modules from a specified directory + """Preloads all modules within a directory. + + This function recursively iterates through each file in the specified directory + and preloads the BBOT module to gather its meta-information and dependencies. + + Args: + module_dir (str or Path): Directory containing BBOT modules to be preloaded. + + Returns: + dict: A dictionary where keys are the names of the preloaded modules and + values are their respective preloaded data. + + Examples: + >>> preload("/path/to/bbot_modules/") + { + "module1": {...}, + "module2": {...}, + ... + } """ module_dir = Path(module_dir) for module_file in list_files(module_dir, filter=self.file_filter): @@ -50,7 +75,7 @@ def preload(self, module_dir): print(f"[CRIT] Error in {module_file.name}") sys.exit(1) - return self.preloaded + return self._preloaded def preloaded(self, type=None): preloaded = {} @@ -77,6 +102,51 @@ def check_type(self, module, type): return self._preloaded[module]["type"] == type def preload_module(self, module_file): + """ + Preloads a BBOT module to gather its meta-information and dependencies. + + This function reads a BBOT module file, extracts its attributes such as + events watched and produced, flags, meta-information, and dependencies. + + Args: + module_file (str): Path to the BBOT module file. + + Returns: + dict: A dictionary containing meta-information and dependencies for the module. + + Examples: + >>> preload_module("bbot/modules/wappalyzer.py") + { + "watched_events": [ + "HTTP_RESPONSE" + ], + "produced_events": [ + "TECHNOLOGY" + ], + "flags": [ + "active", + "safe", + "web-basic", + "web-thorough" + ], + "meta": { + "description": "Extract technologies from web responses" + }, + "config": {}, + "options_desc": {}, + "hash": "d5a88dd3866c876b81939c920bf4959716e2a374", + "deps": { + "pip": [ + "python-Wappalyzer~=0.3.1" + ], + "pip_constraints": [], + "shell": [], + "apt": [], + "ansible": [] + }, + "sudo": false + } + """ watched_events = [] produced_events = [] flags = [] @@ -185,6 +255,22 @@ def load_modules(self, module_names): return modules def load_module(self, module_name): + """Loads a BBOT module by its name. + + Imports the module from its namespace, locates its class, and returns it. + Identifies modules based on the presence of `watched_events` and `produced_events` attributes. + + Args: + module_name (str): The name of the module to load. + + Returns: + object: The loaded module class object. + + Examples: + >>> module = load_module("example_module") + >>> isinstance(module, object) + True + """ namespace = self._preloaded[module_name]["namespace"] import_path = f"{namespace}.{module_name}" module_variables = importlib.import_module(import_path, "bbot") @@ -208,6 +294,8 @@ def load_module(self, module_name): def recommend_dependencies(self, modules): """ Returns a dictionary containing missing dependencies and their suggested resolutions + + Needs work. For this we should probably be building a dependency graph """ resolve_choices = {} # step 1: build a dictionary containing event types and their associated modules @@ -272,6 +360,27 @@ def add_or_create(d, k, *items): d[k] = set(items) def modules_table(self, modules=None, mod_type=None): + """Generates a table of module information. 
+ + Constructs a table to display information such as module name, type, and event details. + + Args: + modules (list, optional): List of module names to include in the table. + mod_type (str, optional): Type of modules to include ('scan', 'output', 'internal'). + + Returns: + str: A formatted table string. + + Examples: + >>> print(modules_table(["nmap"])) + +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ + | Module | Type | Needs API Key | Description | Flags | Consumed Events | Produced Events | + +==========+========+=================+==============================+===============================+======================+===================+ + | nmap | scan | No | Execute port scans with nmap | active, aggressive, portscan, | DNS_NAME, IP_ADDRESS | OPEN_TCP_PORT | + | | | | | web-thorough | | | + +----------+--------+-----------------+------------------------------+-------------------------------+----------------------+-------------------+ + """ + table = [] header = ["Module", "Type", "Needs API Key", "Description", "Flags", "Consumed Events", "Produced Events"] maxcolwidths = [20, 10, 5, 30, 30, 20, 20] diff --git a/bbot/core/helpers/ratelimiter.py b/bbot/core/helpers/ratelimiter.py index a7b37a42e6..482be66e84 100644 --- a/bbot/core/helpers/ratelimiter.py +++ b/bbot/core/helpers/ratelimiter.py @@ -6,6 +6,20 @@ class RateLimiter: + """ + An asynchronous rate limiter class designed to be used as a context manager. + + Args: + rate (int): The number of allowed requests per second. + name (str): The name of the rate limiter, used for logging. + + Examples: + >>> rate_limiter = RateLimiter(100, "web") + >>> async def rate_limited_request(url): + ... async with rate_limiter: + ... return await request(url) + """ + def __init__(self, rate, name): self.rate = rate / 10 self.name = name diff --git a/bbot/core/helpers/regexes.py b/bbot/core/helpers/regexes.py index 3761b09e7f..f4a1223480 100644 --- a/bbot/core/helpers/regexes.py +++ b/bbot/core/helpers/regexes.py @@ -19,14 +19,21 @@ word_regex = re.compile(r"[^\d\W_]+") word_num_regex = re.compile(r"[^\W_]+") num_regex = re.compile(r"\d+") + _ipv6_regex = r"[A-F0-9:]*:[A-F0-9:]*:[A-F0-9:]*" ipv6_regex = re.compile(_ipv6_regex, re.I) + # dns names with periods _dns_name_regex = r"(?:\w(?:[\w-]{0,100}\w)?\.)+(?:[xX][nN]--)?[^\W_]{1,63}\.?" +dns_name_regex = re.compile(_dns_name_regex, re.I) + # dns names without periods _hostname_regex = r"(?!\w*\.\w+)\w(?:[\w-]{0,100}\w)?" +hostname_regex = re.compile(r"^" + _hostname_regex + r"$", re.I) + _email_regex = r"(?:[^\W_][\w\-\.\+]{,100})@" + _dns_name_regex email_regex = re.compile(_email_regex, re.I) + _ptr_regex = r"(?:[0-9]{1,3}[-_\.]){3}[0-9]{1,3}" ptr_regex = re.compile(_ptr_regex) # uuid regex @@ -50,6 +57,7 @@ _double_slash_regex = r"/{2,}" double_slash_regex = re.compile(_double_slash_regex) +# event type regexes, used throughout BBOT for autodetection of event types, validation, and excavation. 
event_type_regexes = OrderedDict( ( (k, tuple(re.compile(r, re.I) for r in regexes)) @@ -78,9 +86,8 @@ ) event_id_regex = re.compile(r"[0-9a-f]{40}:[A-Z0-9_]+") -dns_name_regex = re.compile(_dns_name_regex, re.I) scan_name_regex = re.compile(r"[a-z]{3,20}_[a-z]{3,20}") -hostname_regex = re.compile(r"^" + _hostname_regex + r"$", re.I) + # For use with extract_params_html helper input_tag_regex = re.compile(r"]+?name=[\"\'](\w+)[\"\']") diff --git a/bbot/core/helpers/url.py b/bbot/core/helpers/url.py index e595a99bba..5482e54c51 100644 --- a/bbot/core/helpers/url.py +++ b/bbot/core/helpers/url.py @@ -10,12 +10,49 @@ def parse_url(url): - if type(url) == ParseResult: + """ + Parse the given URL string or ParseResult object and return a ParseResult. + + This function checks if the input is already a ParseResult object. If it is, + it returns the object as-is. Otherwise, it parses the given URL string using + `urlparse`. + + Args: + url (Union[str, ParseResult]): The URL string or ParseResult object to be parsed. + + Returns: + ParseResult: A named 6-tuple that contains the components of a URL. + + Examples: + >>> parse_url('https://www.evilcorp.com') + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='', fragment='') + """ + if isinstance(url, ParseResult): return url return urlparse(url) def add_get_params(url, params): + """ + Add or update query parameters to the given URL. + + This function takes an existing URL and a dictionary of query parameters, + updates or adds these parameters to the URL, and returns a new URL. + + Args: + url (Union[str, ParseResult]): The original URL. + params (Dict[str, Any]): A dictionary containing the query parameters to be added or updated. + + Returns: + ParseResult: A named 6-tuple containing the components of the modified URL. + + Examples: + >>> add_get_params('https://www.evilcorp.com?foo=1', {'bar': 2}) + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=1&bar=2', fragment='') + + >>> add_get_params('https://www.evilcorp.com?foo=1', {'foo': 2}) + ParseResult(scheme='https', netloc='www.evilcorp.com', path='', params='', query='foo=2', fragment='') + """ parsed = parse_url(url) old_params = dict(parse_qs(parsed.query)) old_params.update(params) @@ -23,6 +60,22 @@ def add_get_params(url, params): def get_get_params(url): + """ + Extract the query parameters from the given URL as a dictionary. + + Args: + url (Union[str, ParseResult]): The URL from which to extract query parameters. + + Returns: + Dict[str, List[str]]: A dictionary containing the query parameters and their values. + + Examples: + >>> get_get_params('https://www.evilcorp.com?foo=1&bar=2') + {'foo': ['1'], 'bar': ['2']} + + >>> get_get_params('https://www.evilcorp.com?foo=1&foo=2') + {'foo': ['1', '2']} + """ parsed = parse_url(url) return dict(parse_qs(parsed.query)) @@ -34,6 +87,32 @@ def get_get_params(url): def charset(p): + """ + Determine the character set of the given string based on the types of characters it contains. + + Args: + p (str): The string whose character set is to be determined. + + Returns: + int: A bitmask representing the types of characters present in the string. 
+ - CHAR_LOWER = 1: Lowercase alphabets + - CHAR_UPPER = 2: Uppercase alphabets + - CHAR_DIGIT = 4: Digits + - CHAR_SYMBOL = 8: Symbols/Special characters + + Examples: + >>> charset('abc') + 1 + + >>> charset('abcABC') + 3 + + >>> charset('abc123') + 5 + + >>> charset('!abc123') + 13 + """ ret = 0 for c in p: if c.islower(): @@ -48,6 +127,28 @@ def charset(p): def param_type(p): + """ + Evaluates the type of the given parameter. + + Args: + p (str): The parameter whose type is to be evaluated. + + Returns: + int: An integer representing the type of parameter. + - 1: Integer + - 2: UUID + - 3: Other + + Examples: + >>> param_type('123') + 1 + + >>> param_type('550e8400-e29b-41d4-a716-446655440000') + 2 + + >>> param_type('abc') + 3 + """ try: int(p) return 1 @@ -59,6 +160,25 @@ def param_type(p): def hash_url(url): + """ + Hashes a URL for the purpose of cleaning or collapsing similar URLs. + + Args: + url (str): The URL to be hashed. + + Returns: + int: The hash value of the cleaned URL. + + Examples: + >>> hash_url('https://www.evilcorp.com') + -7448777882396416944 + + >>> hash_url('https://www.evilcorp.com/page/1') + -8101275613229735915 + + >>> hash_url('https://www.evilcorp.com/page/2') + -8101275613229735915 + """ parsed = parse_url(url) parsed = parsed._replace(fragment="", query="") to_hash = [parsed.netloc] @@ -76,6 +196,22 @@ def hash_url(url): def url_depth(url): + """ + Calculate the depth of the given URL based on its path components. + + Args: + url (Union[str, ParseResult]): The URL whose depth is to be calculated. + + Returns: + int: The depth of the URL, based on its path components. + + Examples: + >>> url_depth('https://www.evilcorp.com/foo/bar/') + 2 + + >>> url_depth('https://www.evilcorp.com/foo//bar/baz/') + 3 + """ parsed = parse_url(url) parsed = parsed._replace(path=double_slash_regex.sub("/", parsed.path)) split_path = str(parsed.path).strip("/").split("/") diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index 82d7a38d4f..d85eb05641 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -3,6 +3,7 @@ from contextlib import suppress from bbot.core.helpers import regexes +from bbot.core.errors import ValidationError from bbot.core.helpers.url import parse_url, hash_url from bbot.core.helpers.misc import smart_encode_punycode, split_host_port, make_netloc, is_ip @@ -11,7 +12,18 @@ def validator(func): """ - Decorator for squashing all errors into ValueError + Decorator that squashes all errors raised by the wrapped function into a ValueError. + + Args: + func (Callable): The function to be decorated. + + Returns: + Callable: The wrapped function. + + Examples: + >>> @validator + ... def validate_port(port): + ... return max(1, min(65535, int(str(port)))) """ def validate_wrapper(*args, **kwargs): @@ -25,6 +37,28 @@ def validate_wrapper(*args, **kwargs): @validator def validate_port(port): + """ + Validates and sanitizes a port number by ensuring it falls within the allowed range (1-65535). + + Args: + port (int or str): The port number to validate. + + Returns: + int: The sanitized port number. + + Raises: + ValueError: If the port number cannot be converted to an integer or is out of range. + + Examples: + >>> validate_port(22) + 22 + + >>> validate_port(70000) + 65535 + + >>> validate_port(-123) + 1 + """ return max(1, min(65535, int(str(port)))) @@ -39,6 +73,33 @@ def validate_open_port(open_port): @validator def validate_host(host): + """ + Validates and sanitizes a host string. 
This function handles IPv4, IPv6, and domain names. + + It automatically strips ports, trailing periods, and clinging asterisks and dashes. + + Args: + host (str): The host string to validate. + + Returns: + str: The sanitized host string. + + Raises: + ValidationError: If the host is invalid or does not conform to IPv4, IPv6, or DNS_NAME formats. + + Examples: + >>> validate_host("2001:db8::ff00:42:8329") + '2001:db8::ff00:42:8329' + + >>> validate_host("192.168.0.1:443") + '192.168.0.1' + + >>> validate_host(".*.eViLCoRP.com.") + 'evilcorp.com' + + >>> validate_host("Invalid<>Host") + ValueError: Validation failed for ('Invalid<>Host',), {}: Invalid hostname: "invalid<>host" + """ # stringify, strip and lowercase host = str(host).strip().lower() # handle IPv6 netlocs @@ -62,7 +123,7 @@ def validate_host(host): for r in regexes.event_type_regexes["DNS_NAME"]: if r.match(host): return host - assert False, f'Invalid hostname: "{host}"' + raise ValidationError(f'Invalid hostname: "{host}"') @validator @@ -74,7 +135,7 @@ def validate_url(url): def validate_url_parsed(url): url = str(url).strip() if not any(r.match(url) for r in regexes.event_type_regexes["URL"]): - assert False, f'Invalid URL: "{url}"' + raise ValidationError(f'Invalid URL: "{url}"') return clean_url(url) @@ -91,16 +152,29 @@ def validate_email(email): email = smart_encode_punycode(str(email).strip().lower()) if any(r.match(email) for r in regexes.event_type_regexes["EMAIL_ADDRESS"]): return email - assert False, f'Invalid email: "{email}"' + raise ValidationError(f'Invalid email: "{email}"') def clean_url(url): """ - Remove query string and fragment, lowercase netloc, remove redundant port + Cleans and normalizes a URL. This function removes the query string and fragment, + lowercases the netloc, and removes redundant port numbers. + + Args: + url (str): The URL string to clean. + + Returns: + ParseResult: A ParseResult object containing the cleaned URL. + + Examples: + >>> clean_url("http://evilcorp.com:80") + ParseResult(scheme='http', netloc='evilcorp.com', path='/', params='', query='', fragment='') - http://evilcorp.com:80 --> http://evilcorp.com/ - http://eViLcORp.com/ --> http://evilcorp.com/ - http://evilcorp.com/api?user=bob#place --> http://evilcorp.com/api + >>> clean_url("http://eViLcORp.com/") + ParseResult(scheme='http', netloc='evilcorp.com', path='/', params='', query='', fragment='') + + >>> clean_url("http://evilcorp.com/api?user=bob#place") + ParseResult(scheme='http', netloc='evilcorp.com', path='/api', params='', query='', fragment='') """ parsed = parse_url(url) parsed = parsed._replace(netloc=str(parsed.netloc).lower(), fragment="", query="") @@ -132,14 +206,20 @@ def clean_url(url): def collapse_urls(urls, threshold=10): """ - Smartly dedupe suspiciously-similar URLs like these: - - http://evilcorp.com/user/11111/info - - http://evilcorp.com/user/2222/info - - http://evilcorp.com/user/333/info - - http://evilcorp.com/user/44/info - - http://evilcorp.com/user/5/info - - Useful for cleaning large lists of garbage-riddled URLs from sources like wayback + Collapses a list of URLs by deduping similar URLs based on a hashing mechanism. + Useful for cleaning large lists of noisy URLs, such as those retrieved from wayback. + + Args: + urls (list): The list of URL strings to collapse. + threshold (int): The number of allowed duplicate URLs before collapsing. + + Yields: + str: A deduped URL from the input list. 
+ + Example: + >>> list(collapse_urls(["http://evilcorp.com/user/11111/info", "http://evilcorp.com/user/2222/info"], threshold=1)) + ["http://evilcorp.com/user/11111/info"] + """ url_hashes = {} for url in urls: @@ -163,10 +243,26 @@ def collapse_urls(urls, threshold=10): def soft_validate(s, t): """ - Friendly validation wrapper that returns True/False instead of raising an error + Softly validates a given string against a specified type. This function returns a boolean + instead of raising an error. + + Args: + s (str): The string to validate. + t (str): The type to validate against, e.g., "url" or "host". + + Returns: + bool: True if the string is valid, False otherwise. + + Raises: + ValueError: If no validator for the specified type is found. - is_valid_url = soft_validate("http://evilcorp.com", "url") - is_valid_host = soft_validate("http://evilcorp.com", "host") + Examples: + >>> soft_validate("http://evilcorp.com", "url") + True + >>> soft_validate("evilcorp.com", "url") + False + >>> soft_validate("http://evilcorp", "wrong_type") + ValueError: No validator for type "wrong_type" """ try: validator_fn = globals()[f"validate_{t.strip().lower()}"] diff --git a/bbot/core/helpers/web.py b/bbot/core/helpers/web.py index f997ce1118..5ac1cd82ae 100644 --- a/bbot/core/helpers/web.py +++ b/bbot/core/helpers/web.py @@ -28,6 +28,24 @@ def extract_cookies(self, *args, **kwargs): class BBOTAsyncClient(httpx.AsyncClient): + """ + A subclass of httpx.AsyncClient tailored with BBOT-specific configurations and functionalities. + This class provides rate limiting, logging, configurable timeouts, user-agent customization, custom + headers, and proxy settings. Additionally, it allows the disabling of cookies, making it suitable + for use across an entire scan. + + Attributes: + _bbot_scan (object): BBOT scan object containing configuration details. + _rate_limiter (RateLimiter): A rate limiter object to limit web requests. + _persist_cookies (bool): Flag to determine whether cookies should be persisted across requests. + + Examples: + >>> async with BBOTAsyncClient(_bbot_scan=bbot_scan_object) as client: + >>> response = await client.request("GET", "https://example.com") + >>> print(response.status_code) + 200 + """ + def __init__(self, *args, **kwargs): self._bbot_scan = kwargs.pop("_bbot_scan") web_requests_per_second = self._bbot_scan.config.get("web_requests_per_second", 100) @@ -83,7 +101,26 @@ def _merge_cookies(self, cookies): class WebHelper: """ - For making HTTP requests + Main utility class for managing HTTP operations in BBOT. It serves as a wrapper around the BBOTAsyncClient, + which itself is a subclass of httpx.AsyncClient. The class provides functionalities to make HTTP requests, + download files, and handle cached wordlists. + + Attributes: + parent_helper (object): The parent helper object containing scan configurations. + http_debug (bool): Flag to indicate whether HTTP debugging is enabled. + ssl_verify (bool): Flag to indicate whether SSL verification is enabled. + web_client (BBOTAsyncClient): An instance of BBOTAsyncClient for making HTTP requests. + client_only_options (tuple): A tuple of options only applicable to the web client. 
+ + Examples: + Basic web request: + >>> response = await self.helpers.request("https://www.evilcorp.com") + + Download file: + >>> filename = await self.helpers.download("https://www.evilcorp.com/passwords.docx") + + Download wordlist (cached for 30 days by default): + >>> filename = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt") """ client_only_options = ( @@ -105,6 +142,50 @@ def AsyncClient(self, *args, **kwargs): return BBOTAsyncClient(*args, **kwargs) async def request(self, *args, **kwargs): + """ + Asynchronous function for making HTTP requests, intended to be the most basic web request function + used widely across BBOT and within this helper class. Handles various exceptions and timeouts + that might occur during the request. + + This function automatically respects the scan's global timeout, proxy, headers, etc. + Headers you specify will be merged with the scan's. Your arguments take ultimate precedence, + meaning you can override the scan's values if you want. + + Args: + url (str): The URL to send the request to. + method (str, optional): The HTTP method to use for the request. Defaults to 'GET'. + headers (dict, optional): Dictionary of HTTP headers to send with the request. + params (dict, optional): Dictionary, list of tuples, or bytes to send in the query string. + cookies (dict, optional): Dictionary or CookieJar object containing cookies. + json (Any, optional): A JSON serializable Python object to send in the body. + data (dict, optional): Dictionary, list of tuples, or bytes to send in the body. + files (dict, optional): Dictionary of 'name': file-like-objects for multipart encoding upload. + auth (tuple, optional): Auth tuple to enable Basic/Digest/Custom HTTP auth. + timeout (float, optional): The maximum time to wait for the request to complete. + proxies (dict, optional): Dictionary mapping protocol schemes to proxy URLs. + allow_redirects (bool, optional): Enables or disables redirection. Defaults to None. + stream (bool, optional): Enables or disables response streaming. + raise_error (bool, optional): Whether to raise exceptions for HTTP connect, timeout errors. Defaults to False. + client (httpx.AsyncClient, optional): A specific httpx.AsyncClient to use for the request. Defaults to self.web_client. + cache_for (int, optional): Time in seconds to cache the request. Not used currently. Defaults to None. + + Raises: + httpx.TimeoutException: If the request times out. + httpx.ConnectError: If the connection fails. + httpx.RequestError: For other request-related errors. + + Returns: + httpx.Response or None: The HTTP response object returned by the httpx library. + + Examples: + >>> response = await self.helpers.request("https://www.evilcorp.com") + + >>> response = await self.helpers.request("https://api.evilcorp.com/", method="POST", data="stuff") + + Note: + If the web request fails, it will return None unless `raise_error` is `True`. + """ + raise_error = kwargs.pop("raise_error", False) # TODO: use this cache_for = kwargs.pop("cache_for", None) # noqa @@ -183,10 +264,24 @@ async def request(self, *args, **kwargs): async def download(self, url, **kwargs): """ - Downloads file, returns full path of filename - If download failed, returns None + Asynchronous function for downloading files from a given URL. Supports caching with an optional + time period in hours via the "cache_hrs" keyword argument. In case of successful download, + returns the full path of the saved filename. If the download fails, returns None. 
+ + Args: + url (str): The URL of the file to download. + filename (str, optional): The filename to save the downloaded file as. + If not provided, will generate based on URL. + cache_hrs (float, optional): The number of hours to cache the downloaded file. + A negative value disables caching. Defaults to -1. + method (str, optional): The HTTP method to use for the request, defaults to 'GET'. + **kwargs: Additional keyword arguments to pass to the httpx request. + + Returns: + Path or None: The full path of the downloaded file as a Path object if successful, otherwise None. - Caching supported via "cache_hrs" + Examples: + >>> filepath = await self.helpers.download("https://www.evilcorp.com/passwords.docx", cache_hrs=24) """ success = False filename = kwargs.pop("filename", self.parent_helper.cache_filename(url)) @@ -218,6 +313,32 @@ async def download(self, url, **kwargs): return filename.resolve() async def wordlist(self, path, lines=None, **kwargs): + """ + Asynchronous function for retrieving wordlists, either from a local path or a URL. + Allows for optional line-based truncation and caching. Returns the full path of the wordlist + file or a truncated version of it. + + Args: + path (str): The local or remote path of the wordlist. + lines (int, optional): Number of lines to read from the wordlist. + If specified, will return a truncated wordlist with this many lines. + cache_hrs (float, optional): Number of hours to cache the downloaded wordlist. + Defaults to 720 hours (30 days) for remote wordlists. + **kwargs: Additional keyword arguments to pass to the 'download' function for remote wordlists. + + Returns: + Path: The full path of the wordlist (or its truncated version) as a Path object. + + Raises: + WordlistError: If the path is invalid or the wordlist could not be retrieved or found. + + Examples: + Fetching full wordlist + >>> wordlist_path = await self.helpers.wordlist("https://www.evilcorp.com/wordlist.txt") + + Fetching and truncating to the first 100 lines + >>> wordlist_path = await self.helpers.wordlist("/root/rockyou.txt", lines=100) + """ if not path: raise WordlistError(f"Invalid wordlist: {path}") if not "cache_hrs" in kwargs: @@ -246,37 +367,35 @@ async def wordlist(self, path, lines=None, **kwargs): async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **requests_kwargs): """ - An async generator to fetch and loop through API pages. + An asynchronous generator function for iterating through paginated API data. - This function keeps calling the API with the provided URL, increasing the page number each time, and spits out - the results one page at a time. It's perfect for APIs that split their data across multiple pages. + This function continuously makes requests to a specified API URL, incrementing the page number + or applying a custom pagination function, and yields the received data one page at a time. + It is well-suited for APIs that provide paginated results. Args: - url (str): The API endpoint. May contain placeholders for 'page' and 'page_size'. - page_size (int, optional): How many items you want per page. Defaults to 100. - json (bool, optional): If True, we'll try to convert the response to JSON. Defaults to True. - next_key (callable, optional): If your API has a weird way to get to the next page, give us a function - that takes the response and spits out the new URL. Defaults to None. - **requests_kwargs: Any other stuff you want to pass to the request. + url (str): The initial API URL. 
Can contain placeholders for 'page', 'page_size', and 'offset'. + page_size (int, optional): The number of items per page. Defaults to 100. + json (bool, optional): If True, attempts to deserialize the response content to a JSON object. Defaults to True. + next_key (callable, optional): A function that takes the last page's data and returns the URL for the next page. Defaults to None. + **requests_kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function. Yields: - If 'json' is True, you'll get a dict with the API's response, else you'll get the raw response. + dict or httpx.Response: If 'json' is True, yields a dictionary containing the parsed JSON data. Otherwise, yields the raw HTTP response. Note: - You MUST break out of the loop when you stop getting useful results! Otherwise it will loop forever. + The loop will continue indefinitely unless manually stopped. Make sure to break out of the loop once the last page has been received. Examples: - ``` - agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}') - try: - async for page in agen: - subdomains = json["subdomains"] - self.hugesuccess(subdomains) - if not subdomains: - break - finally: - agen.aclose() - ``` + >>> agen = api_page_iter('https://api.example.com/data?page={page}&page_size={page_size}') + >>> try: + >>> async for page in agen: + >>> subdomains = page["subdomains"] + >>> self.hugesuccess(subdomains) + >>> if not subdomains: + >>> break + >>> finally: + >>> agen.aclose() """ page = 1 offset = 0 @@ -304,6 +423,36 @@ async def api_page_iter(self, url, page_size=100, json=True, next_key=None, **re page += 1 async def curl(self, *args, **kwargs): + """ + An asynchronous function that runs a cURL command with specified arguments and options. + + This function constructs and executes a cURL command based on the provided parameters. + It offers support for various cURL options such as headers, post data, and cookies. + + Args: + *args: Variable length argument list for positional arguments. Unused in this function. + url (str): The URL for the cURL request. Mandatory. + raw_path (bool, optional): If True, activates '--path-as-is' in cURL. Defaults to False. + headers (dict, optional): A dictionary of HTTP headers to include in the request. + ignore_bbot_global_settings (bool, optional): If True, ignores the global settings of BBOT. Defaults to False. + post_data (dict, optional): A dictionary containing data to be sent in the request body. + method (str, optional): The HTTP method to use for the request (e.g., 'GET', 'POST'). + cookies (dict, optional): A dictionary of cookies to include in the request. + path_override (str, optional): Overrides the request-target to use in the HTTP request line. + head_mode (bool, optional): If True, includes '-I' to fetch headers only. Defaults to None. + raw_body (str, optional): Raw string to be sent in the body of the request. + **kwargs: Arbitrary keyword arguments that will be forwarded to the HTTP request function. + + Returns: + str: The output of the cURL command. + + Raises: + CurlError: If 'url' is not supplied. + + Examples: + >>> output = await curl(url="https://example.com", headers={"X-Header": "Wat"}) + >>> print(output) + """ url = kwargs.get("url", "") if not url: @@ -394,7 +543,28 @@ async def curl(self, *args, **kwargs): def is_spider_danger(self, source_event, url): """ - Todo: write tests for this + Determines whether visiting a URL could potentially trigger a web-spider-like happening. 
+
+        This function assesses the depth and distance of a URL in relation to the parent helper's
+        configuration settings for web spidering. If the URL exceeds the specified depth or distance,
+        the function returns True, indicating a possible web-spider risk.
+
+        Args:
+            source_event: The source event object that discovered the URL.
+            url (str): The URL to evaluate for web-spider risk.
+
+        Returns:
+            bool: True if visiting the URL might trigger a web-spider-like event, False otherwise.
+
+        Todo:
+            - Write tests for this function
+
+        Examples:
+            >>> is_spider_danger(source_event_obj, "https://example.com/subpage")
+            True
+
+            >>> is_spider_danger(source_event_obj, "https://example.com/")
+            False
         """
         url_depth = self.parent_helper.url_depth(url)
         web_spider_depth = self.parent_helper.scan.config.get("web_spider_depth", 1)
@@ -410,6 +580,26 @@
 
 
 def is_login_page(html):
+    """
+    Determines if the provided HTML content contains a login page.
+
+    This function parses the HTML to search for forms with input fields typically used for
+    authentication. If it identifies password fields or a combination of username and password
+    fields, it returns True.
+
+    Args:
+        html (str): The HTML content to analyze.
+
+    Returns:
+        bool: True if the HTML contains a login page, otherwise False.
+
+    Examples:
+        >>> is_login_page('<form><input type="text" name="username"><input type="password" name="password"></form>')
+        True
+
+        >>> is_login_page('<form><input type="text" name="search"></form>')
+        False
+    """
     try:
         soup = BeautifulSoup(html, "html.parser")
     except Exception as e:
diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py
index 1d387df812..54e5f7fd27 100644
--- a/bbot/core/helpers/wordcloud.py
+++ b/bbot/core/helpers/wordcloud.py
@@ -13,9 +13,65 @@
 
 
 class WordCloud(dict):
+    """
+    WordCloud is a specialized dictionary-like class for storing and aggregating
+    words extracted from various data sources such as DNS names and URLs. The class
+    is intended to facilitate the generation of target-specific wordlists and mutations.
+
+    The WordCloud class can be accessed and manipulated like a standard Python dictionary.
+    It also offers additional methods for generating mutations based on the words it contains.
+
+    Attributes:
+        parent_helper: The parent helper object that provides necessary utilities.
+        devops_mutations: A set containing common devops-related mutations, loaded from a file.
+        dns_mutator: An instance of the DNSMutator class for generating DNS-based mutations.
+
+    Examples:
+        >>> s = Scanner("www1.evilcorp.com", "www-test.evilcorp.com")
+        >>> s.start_without_generator()
+        >>> print(s.helpers.word_cloud)
+        {
+            "blacklanternsecurity": 1,
+            "security": 1,
+            "bls": 1,
+            "black": 1,
+            "lantern": 1
+        }
+
+        >>> s.helpers.word_cloud.mutations(["word"], cloud=True, numbers=0, devops=False, letters=False)
+        [
+            [
+                "1",
+                "word"
+            ],
+            [
+                "corp",
+                "word"
+            ],
+            [
+                "ec",
+                "word"
+            ],
+            [
+                "evil",
+                "word"
+            ],
+            ...
+        ]
+
+        >>> s.helpers.word_cloud.dns_mutator.mutations("word")
+        [
+            "word",
+            "word-test",
+            "word1",
+            "wordtest",
+            "www-word",
+            "wwwword"
+        ]
+    """
+
     def __init__(self, parent_helper, *args, **kwargs):
         self.parent_helper = parent_helper
-        self.max_backups = 20
 
         devops_filename = self.parent_helper.wordlist_dir / "devops_mutations.txt"
         self.devops_mutations = set(self.parent_helper.read_file(devops_filename))
 
@@ -27,6 +83,23 @@ def __init__(self, parent_helper, *args, **kwargs):
     def mutations(
         self, words, devops=True, cloud=True, letters=True, numbers=5, number_padding=2, substitute_numbers=True
     ):
+        """
+        Generate various mutations for the given list of words based on different criteria.
+
+        Yields tuples of strings which can be joined on the desired delimiter, e.g. "-" or "_".
+
+        Args:
+            words (Union[str, Iterable[str]]): A single word or list of words to mutate.
+            devops (bool): Whether to include devops-related mutations.
+            cloud (bool): Whether to include mutations from the word cloud.
+            letters (bool): Whether to include letter-based mutations.
+            numbers (int): The maximum numeric mutations to include.
+            number_padding (int): Padding for numeric mutations.
+            substitute_numbers (bool): Whether to substitute numbers in mutations.
+
+        Yields:
+            tuple: A tuple containing each of the mutation segments.
+        """
         if isinstance(words, str):
             words = (words,)
         results = set()
@@ -68,6 +141,15 @@ def modifiers(self, devops=True, cloud=True, letters=True, numbers=5, number_pad
         return modifiers
 
     def absorb_event(self, event):
+        """
+        Absorbs an event from a BBOT scan into the word cloud.
+
+        This method updates the word cloud by extracting words from the given event. It aims to avoid including PTR
+        (Pointer) records, as they tend to produce unhelpful mutations in the word cloud.
+
+        Args:
+            event (Event): The event object containing the words to be absorbed into the word cloud.
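+
+        Examples:
+            # illustrative sketch, as called from inside a module; the event type is auto-detected
+            >>> event = self.make_event("www1.evilcorp.com", source=self.scan.root_event)
+            >>> self.helpers.word_cloud.absorb_event(event)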
+ """ for word in event.words: self.add_word(word) if event.scope_distance == 0 and event.type.startswith("DNS_NAME"): @@ -78,13 +160,45 @@ def absorb_event(self, event): def absorb_word(self, word, ninja=True): """ - Use word ninja to smartly split the word, - e.g. "blacklantern" --> "black", "lantern" + Absorbs a word into the word cloud after splitting it using a word extraction algorithm. + + This method splits the input word into smaller meaningful words using word extraction, and then adds each + of them to the word cloud. The splitting is done using a predefined algorithm in the parent helper. + + Args: + word (str): The word to be split and absorbed into the word cloud. + ninja (bool, optional): If True, word extraction is enabled. Defaults to True. + + Examples: + >>> self.helpers.word_cloud.absorb_word("blacklantern") + >>> print(self.helpers.word_cloud) + { + "blacklantern": 1, + "black": 1, + "bl": 1, + "lantern": 1 + } """ for w in self.parent_helper.extract_words(word): self.add_word(w) def add_word(self, word, lowercase=True): + """ + Adds a word to the word cloud. + + This method updates the word cloud by adding a given word. If the word already exists in the cloud, + its frequency count is incremented by 1. Optionally, the word can be converted to lowercase before adding. + + Args: + word (str): The word to be added to the word cloud. + lowercase (bool, optional): If True, the word will be converted to lowercase before adding. Defaults to True. + + Examples: + >>> self.helpers.word_cloud.add_word("Example") + >>> self.helpers.word_cloud.add_word("example") + >>> print(self.helpers.word_cloud) + {'example': 2} + """ if lowercase: word = word.lower() try: @@ -93,6 +207,34 @@ def add_word(self, word, lowercase=True): self[word] = 1 def get_number_mutations(self, base, n=5, padding=2): + """ + Generates mutations of a base string by modifying the numerical parts or appending numbers. + + This method detects existing numbers in the base string and tries incrementing and decrementing them within a + specified range. It also appends numbers at the end or after each word to generate more mutations. + + Args: + base (str): The base string to generate mutations from. + n (int, optional): The range of numbers to use for incrementing/decrementing. Defaults to 5. + padding (int, optional): Zero-pad numbers up to this length. Defaults to 2. + + Returns: + set: A set of mutated strings based on the base input. + + Examples: + >>> self.helpers.word_cloud.get_number_mutations("www2-test", n=2) + { + "www0-test", + "www1-test", + "www2-test", + "www2-test0", + "www2-test00", + "www2-test01", + "www2-test1", + "www3-test", + "www4-test" + } + """ results = set() # detects numbers and increments/decrements them @@ -136,11 +278,37 @@ def get_number_mutations(self, base, n=5, padding=2): return results def truncate(self, limit): + """ + Truncates the word cloud dictionary to retain only the top `limit` entries based on their occurrence frequencies. + + Args: + limit (int): The maximum number of entries to retain in the word cloud. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.truncate(2) + >>> self.helpers.word_cloud + {'cherry': 8, 'apple': 5} + """ new_self = dict(self.json(limit=limit)) self.clear() self.update(new_self) def json(self, limit=None): + """ + Returns the word cloud as a sorted OrderedDict, optionally truncated to the top `limit` entries. 
+ + Args: + limit (int, optional): The maximum number of entries to include in the returned OrderedDict. If None, all entries are included. + + Returns: + OrderedDict: A dictionary sorted by word frequencies, potentially truncated to the top `limit` entries. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.json(limit=2) + OrderedDict([('cherry', 8), ('apple', 5)]) + """ cloud_sorted = sorted(self.items(), key=lambda x: x[-1], reverse=True) if limit is not None: cloud_sorted = cloud_sorted[:limit] @@ -151,6 +319,21 @@ def default_filename(self): return self.parent_helper.scan.home / f"wordcloud.tsv" def save(self, filename=None, limit=None): + """ + Saves the word cloud to a file. The cloud can optionally be truncated to the top `limit` entries. + + Args: + filename (str, optional): The path to the file where the word cloud will be saved. If None, uses a default filename. + limit (int, optional): The maximum number of entries to save to the file. If None, all entries are saved. + + Returns: + tuple: A tuple containing a boolean indicating success or failure, and the resolved filename. + + Examples: + >>> self.helpers.word_cloud.update({"apple": 5, "banana": 2, "cherry": 8}) + >>> self.helpers.word_cloud.save(filename="word_cloud.txt", limit=2) + (True, Path('word_cloud.txt')) + """ if filename is None: filename = self.default_filename else: @@ -177,6 +360,13 @@ def save(self, filename=None, limit=None): return False, filename def load(self, filename=None): + """ + Loads a word cloud from a file. The file can be either a standard wordlist with one entry per line + or a .tsv (tab-separated) file where the first row is the count and the second row is the associated entry. + + Args: + filename (str, optional): The path to the file from which to load the word cloud. If None, uses a default filename. + """ if filename is None: wordcloud_path = self.default_filename else: diff --git a/mkdocs.yml b/mkdocs.yml index 1b0510aabf..4bec84f24d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -41,6 +41,8 @@ nav: - DNS: dev/helpers/dns.md - Interactsh: dev/helpers/interactsh.md - Miscellaneous: dev/helpers/misc.md + - Web: dev/helpers/web.md + - Word Cloud: dev/helpers/wordcloud.md - Misc: - Release History: release_history.md - Troubleshooting: troubleshooting.md From c2dd69912a679a7c2879236b2d54504a1c463347 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 08:50:51 -0400 Subject: [PATCH 102/123] docs for dns mutator, massdns module --- bbot/core/helpers/wordcloud.py | 40 +++++++++++++++++++++++++++++----- bbot/modules/massdns.py | 6 +++++ 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index 54e5f7fd27..a33f3346b2 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -31,11 +31,17 @@ class WordCloud(dict): >>> s.start_without_generator() >>> print(s.helpers.word_cloud) { - "blacklanternsecurity": 1, - "security": 1, - "bls": 1, - "black": 1, - "lantern": 1 + "evilcorp": 2, + "ec": 2, + "www1": 1, + "evil": 2, + "www": 2, + "w1": 1, + "corp": 2, + "1": 1, + "wt": 1, + "test": 1, + "www-test": 1 } >>> s.helpers.word_cloud.mutations(["word"], cloud=True, numbers=0, devops=False, letters=False) @@ -397,6 +403,10 @@ def load(self, filename=None): class Mutator(dict): + """ + Base class for generating mutations from a list of words. + It accumulates words and produces mutations from them. 
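+
+    Examples:
+        # illustrative sketch; DNSMutator (below) is the concrete subclass BBOT actually uses
+        >>> mutator = DNSMutator()
+        >>> mutator.add_word("www-test")
+        >>> mutator.mutations("word")  # returns mutated variants of "word"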
+ """ def mutations(self, words, max_mutations=None): mutations = self.top_mutations(max_mutations) ret = set() @@ -439,6 +449,26 @@ def add_word(self, word): class DNSMutator(Mutator): + """ + DNS-specific mutator used by the `massdns` module to generate target-specific subdomain mutations. + + This class extends the Mutator base class to add DNS-specific logic for generating + subdomain mutations based on input words. It utilizes custom word extraction patterns + and a wordninja model trained on DNS-specific data. + + Examples: + >>> s = Scanner("www1.evilcorp.com", "www-test.evilcorp.com") + >>> s.start_without_generator() + >>> s.helpers.word_cloud.dns_mutator.mutations("word") + [ + "word", + "word-test", + "word1", + "wordtest", + "www-word", + "wwwword" + ] + """ extract_word_regexes = [ re.compile(r, re.I) for r in [ diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index 1dee1580d8..f577532c84 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -7,6 +7,12 @@ class massdns(subdomain_enum): + """ + This is BBOT's flagship subdomain enumeration module. + + It uses massdns to brute-force subdomains. + At the end of a scan, it will leverage BBOT's word cloud to recursively discover target-specific subdomain mutations. + """ flags = ["subdomain-enum", "passive", "slow", "aggressive"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] From 803386fd8a1f6cca11556f94a776c1e1c39e418d Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 18 Sep 2023 08:51:00 -0400 Subject: [PATCH 103/123] blacked --- bbot/core/helpers/wordcloud.py | 2 ++ bbot/modules/massdns.py | 1 + 2 files changed, 3 insertions(+) diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index a33f3346b2..7531a93e2f 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -407,6 +407,7 @@ class Mutator(dict): Base class for generating mutations from a list of words. It accumulates words and produces mutations from them. """ + def mutations(self, words, max_mutations=None): mutations = self.top_mutations(max_mutations) ret = set() @@ -469,6 +470,7 @@ class DNSMutator(Mutator): "wwwword" ] """ + extract_word_regexes = [ re.compile(r, re.I) for r in [ diff --git a/bbot/modules/massdns.py b/bbot/modules/massdns.py index f577532c84..7e4331f5bd 100644 --- a/bbot/modules/massdns.py +++ b/bbot/modules/massdns.py @@ -13,6 +13,7 @@ class massdns(subdomain_enum): It uses massdns to brute-force subdomains. At the end of a scan, it will leverage BBOT's word cloud to recursively discover target-specific subdomain mutations. 
""" + flags = ["subdomain-enum", "passive", "slow", "aggressive"] watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] From 14e78c318d2325a03709891acab182d2a86b4d6b Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Tue, 19 Sep 2023 13:45:24 -0400 Subject: [PATCH 104/123] add per_domain_only module attribute (cleaning up module inheritance) --- bbot/core/helpers/misc.py | 2 + bbot/modules/azure_tenant.py | 5 ++- bbot/modules/base.py | 36 ++++++++++++++++-- bbot/modules/emailformat.py | 5 ++- bbot/modules/templates/root_domains.py | 39 -------------------- bbot/modules/viewdns.py | 12 +----- bbot/test/test_step_1/test_helpers.py | 2 + bbot/test/test_step_1/test_modules_basic.py | 41 +++++++++++++++++++++ 8 files changed, 86 insertions(+), 56 deletions(-) diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index a23d2f4b2a..ecbceaa33e 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -427,6 +427,8 @@ def split_domain(hostname): Notes: - Utilizes the `tldextract` function to first break down the hostname. """ + if is_ip(hostname): + return ("", hostname) parsed = tldextract(hostname) subdomain = parsed.subdomain domain = parsed.registered_domain diff --git a/bbot/modules/azure_tenant.py b/bbot/modules/azure_tenant.py index b9ada3d186..4fcf9d7d92 100644 --- a/bbot/modules/azure_tenant.py +++ b/bbot/modules/azure_tenant.py @@ -1,10 +1,10 @@ import re from contextlib import suppress -from bbot.modules.templates.root_domains import root_domains +from bbot.modules.base import BaseModule -class azure_tenant(root_domains): +class azure_tenant(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["DNS_NAME"] flags = ["affiliates", "subdomain-enum", "cloud-enum", "passive", "safe"] @@ -12,6 +12,7 @@ class azure_tenant(root_domains): base_url = "https://autodiscover-s.outlook.com" in_scope_only = True + per_domain_only = True async def setup(self): self.processed = set() diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 981caf61d6..65731d7fa6 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -33,7 +33,9 @@ class BaseModule: suppress_dupes (bool): Whether to suppress outgoing duplicate events. Default is True. - per_host_only (bool): Limit the module to only scanning once per host. Default is False. + per_host_only (bool): Limit the module to only scanning once per host:port. Default is False. + + per_domain_only (bool): Limit the module to only scanning once per domain. Default is False. scope_distance_modifier (int, None): Modifies scope distance acceptance for events. Default is 0. 
``` @@ -87,6 +89,7 @@ class BaseModule: accept_dupes = False suppress_dupes = True per_host_only = False + per_domain_only = False scope_distance_modifier = 0 target_only = False in_scope_only = False @@ -715,10 +718,18 @@ async def _event_postcheck(self, event): return False, msg if self.per_host_only: - if self.get_per_host_hash(event) in self._per_host_tracker: + _hash = self.get_per_host_hash(event) + if _hash in self._per_host_tracker: return False, "per_host_only enabled and already seen host" else: - self._per_host_tracker.add(self.get_per_host_hash(event)) + self._per_host_tracker.add(_hash) + + if self.per_domain_only: + _hash = self.get_per_domain_hash(event) + if _hash in self._per_host_tracker: + return False, "per_domain_only enabled and already seen domain" + else: + self._per_host_tracker.add(_hash) if self._type == "output" and not event._stats_recorded: event._stats_recorded = True @@ -884,6 +895,25 @@ def get_per_host_hash(self, event): to_hash = f"{parsed.scheme}://{parsed.netloc}/" return hash(to_hash) + def get_per_domain_hash(self, event): + """ + Computes a per-domain hash value for a given event. This method may be optionally overridden in subclasses. + + Events with the same root domain will receive the same hash value. + + Args: + event (Event): The event object containing host, port, or parsed URL information. + + Returns: + int: The hash value computed for the domain. + + Examples: + >>> event = self.make_event("https://www.example.com:8443") + >>> self.get_per_domain_hash(event) + """ + _, domain = self.helpers.split_domain(event.host) + return hash(domain) + @property def name(self): return str(self._name) diff --git a/bbot/modules/emailformat.py b/bbot/modules/emailformat.py index 3817cb3f36..3fd47ee2db 100644 --- a/bbot/modules/emailformat.py +++ b/bbot/modules/emailformat.py @@ -1,12 +1,13 @@ -from bbot.modules.templates.root_domains import root_domains +from bbot.modules.base import BaseModule -class emailformat(root_domains): +class emailformat(BaseModule): watched_events = ["DNS_NAME"] produced_events = ["EMAIL_ADDRESS"] flags = ["passive", "email-enum", "safe"] meta = {"description": "Query email-format.com for email addresses"} in_scope_only = False + per_domain_only = True base_url = "https://www.email-format.com" diff --git a/bbot/modules/templates/root_domains.py b/bbot/modules/templates/root_domains.py index a1eaf8c99c..e852f81a23 100644 --- a/bbot/modules/templates/root_domains.py +++ b/bbot/modules/templates/root_domains.py @@ -28,42 +28,3 @@ async def filter_event(self, event): return False self.processed.add(hash(domain)) return True - - async def handle_event(self, event): - _, query = self.helpers.split_domain(event.data) - for domain, _ in await self.query(query): - self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) - # todo: registrar? 
- - async def query(self, query): - results = set() - url = f"{self.base_url}/reversewhois/?q={query}" - r = await self.helpers.request(url) - status_code = getattr(r, "status_code", 0) - if status_code not in (200,): - self.verbose(f"Error retrieving reverse whois results (status code: {status_code})") - - content = getattr(r, "content", b"") - from bs4 import BeautifulSoup - - html = BeautifulSoup(content, "html.parser") - found = set() - for table_row in html.findAll("tr"): - table_cells = table_row.findAll("td") - # make double-sure we're in the right table by checking the date field - try: - if self.date_regex.match(table_cells[1].text.strip()): - # domain == first cell - domain = table_cells[0].text.strip().lower() - # registrar == last cell - registrar = table_cells[-1].text.strip() - if domain and not domain == query: - result = (domain, registrar) - result_hash = hash(result) - if result_hash not in found: - found.add(result_hash) - results.add(result) - except IndexError: - self.debug(f"Invalid row {str(table_row)[:40]}...") - continue - return results diff --git a/bbot/modules/viewdns.py b/bbot/modules/viewdns.py index 4fbfb08f19..c2a5e44317 100644 --- a/bbot/modules/viewdns.py +++ b/bbot/modules/viewdns.py @@ -5,7 +5,7 @@ class viewdns(BaseModule): """ - Used as a base for modules that only act on root domains and not individual hostnames + Todo: Also retrieve registrar? """ watched_events = ["DNS_NAME"] @@ -16,25 +16,17 @@ class viewdns(BaseModule): } base_url = "https://viewdns.info" in_scope_only = True + per_domain_only = True _qsize = 1 async def setup(self): - self.processed = set() self.date_regex = re.compile(r"\d{4}-\d{2}-\d{2}") return True - async def filter_event(self, event): - _, domain = self.helpers.split_domain(event.data) - if hash(domain) in self.processed: - return False - self.processed.add(hash(domain)) - return True - async def handle_event(self, event): _, query = self.helpers.split_domain(event.data) for domain, _ in await self.query(query): self.emit_event(domain, "DNS_NAME", source=event, tags=["affiliate"]) - # todo: registrar? 
async def query(self, query): results = set() diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 8838491237..492dcb0b5e 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -161,6 +161,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.split_domain("www.test.notreal") == ("www", "test.notreal") assert helpers.split_domain("test.notreal") == ("", "test.notreal") assert helpers.split_domain("notreal") == ("", "notreal") + assert helpers.split_domain("192.168.0.1") == ("", "192.168.0.1") + assert helpers.split_domain("dead::beef") == ("", "dead::beef") assert helpers.split_host_port("https://evilcorp.co.uk") == ("evilcorp.co.uk", 443) assert helpers.split_host_port("http://evilcorp.co.uk:666") == ("evilcorp.co.uk", 666) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index e14f8c4022..77c25a7a15 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -201,6 +201,47 @@ async def test_modules_basic_perhostonly(scan, helpers, events, bbot_config, bbo assert valid_1 == True assert valid_2 == False assert hash("http://evilcorp.com/") in module._per_host_tracker + assert reason_2 == "per_host_only enabled and already seen host" + + else: + assert valid_1 == True + assert valid_2 == True + + +@pytest.mark.asyncio +async def test_modules_basic_perdomainonly(scan, helpers, events, bbot_config, bbot_scanner, httpx_mock, monkeypatch): + per_domain_scan = bbot_scanner( + "evilcorp.com", + modules=list(set(available_modules + available_internal_modules)), + config=bbot_config, + ) + + await per_domain_scan.load_modules() + await per_domain_scan.setup_modules() + per_domain_scan.status = "RUNNING" + + # ensure that multiple events to the same "host" (schema + host) are blocked and check the per host tracker + + for module_name, module in sorted(per_domain_scan.modules.items()): + monkeypatch.setattr(module, "filter_event", BaseModule(per_domain_scan).filter_event) + + if "URL" in module.watched_events: + url_1 = per_domain_scan.make_event( + "http://www.evilcorp.com/1", event_type="URL", source=per_domain_scan.root_event, tags=["status-200"] + ) + url_1.set_scope_distance(0) + url_2 = per_domain_scan.make_event( + "http://mail.evilcorp.com/2", event_type="URL", source=per_domain_scan.root_event, tags=["status-200"] + ) + url_2.set_scope_distance(0) + valid_1, reason_1 = await module._event_postcheck(url_1) + valid_2, reason_2 = await module._event_postcheck(url_2) + + if module.per_domain_only == True: + assert valid_1 == True + assert valid_2 == False + assert hash("evilcorp.com") in module._per_host_tracker + assert reason_2 == "per_domain_only enabled and already seen domain" else: assert valid_1 == True From fefa3b50569f8a3680adb48a8166fe0dcad7e937 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Wed, 20 Sep 2023 10:49:54 -0400 Subject: [PATCH 105/123] remove unused root_domains template --- bbot/modules/templates/root_domains.py | 30 -------------------------- 1 file changed, 30 deletions(-) delete mode 100644 bbot/modules/templates/root_domains.py diff --git a/bbot/modules/templates/root_domains.py b/bbot/modules/templates/root_domains.py deleted file mode 100644 index e852f81a23..0000000000 --- a/bbot/modules/templates/root_domains.py +++ /dev/null @@ -1,30 +0,0 @@ -import re - -from bbot.modules.base import BaseModule - - -class 
From fefa3b50569f8a3680adb48a8166fe0dcad7e937 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 20 Sep 2023 10:49:54 -0400
Subject: [PATCH 105/123] remove unused root_domains template

---
 bbot/modules/templates/root_domains.py | 30 --------------------------
 1 file changed, 30 deletions(-)
 delete mode 100644 bbot/modules/templates/root_domains.py

diff --git a/bbot/modules/templates/root_domains.py b/bbot/modules/templates/root_domains.py
deleted file mode 100644
index e852f81a23..0000000000
--- a/bbot/modules/templates/root_domains.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import re
-
-from bbot.modules.base import BaseModule
-
-
-class root_domains(BaseModule):
-    """
-    Used as a base for modules that only act on root domains and not individual hostnames
-    """
-
-    watched_events = ["DNS_NAME"]
-    produced_events = ["DNS_NAME"]
-    flags = ["passive", "safe"]
-    meta = {
-        "description": "",
-    }
-    in_scope_only = True
-    _qsize = 1
-
-    async def setup(self):
-        self.processed = set()
-        self.date_regex = re.compile(r"\d{4}-\d{2}-\d{2}")
-        return True
-
-    async def filter_event(self, event):
-        _, domain = self.helpers.split_domain(event.data)
-        if hash(domain) in self.processed:
-            return False
-        self.processed.add(hash(domain))
-        return True
From 441caffe6640a8443b37095712de3d4c68019b6a Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Thu, 21 Sep 2023 15:27:54 -0400
Subject: [PATCH 106/123] small update to docs index.md

---
 docs/index.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/index.md b/docs/index.md
index f5ef3b332f..ae590beef8 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -46,9 +46,9 @@ git clone https://github.com/blacklanternsecurity/bbot && cd bbot
 ./bbot-docker.sh --help
 ```
 
-## Examples
+## Example Commands
 
-Below are some common scan examples.
+Below are some examples of common scans.
 
 **Subdomains:**
 
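The docs page above only gestures at the example commands, so here is a hedged sketch of an equivalent scan driven from Python, matching the library usage shown in the README and dev docs later in this series (the module choice is an illustrative assumption):

```python
from bbot.scanner import Scanner

# a sketch of a simple scan via the Python API; "httpx" is shown
# purely as an example module
scan = Scanner("evilcorp.com", modules=["httpx"])
for event in scan.start():
    print(event)
```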
From 99e36eb37a2de6a2c579368e5ed9851a72573f13 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Fri, 22 Sep 2023 14:37:50 -0400
Subject: [PATCH 107/123] Add Discord Bot Example

---
 README.md                 |  2 +
 docs/dev/helpers/index.md | 29 ++++++++++++
 docs/dev/index.md         | 92 +++++++++++++++++++++++++++++++++++++++
 mkdocs.yml                |  3 +-
 4 files changed, 125 insertions(+), 1 deletion(-)
 create mode 100644 docs/dev/helpers/index.md
 create mode 100644 docs/dev/index.md

diff --git a/README.md b/README.md
index 02f6f27cd1..d447be51a3 100644
--- a/README.md
+++ b/README.md
@@ -120,6 +120,8 @@ For details, see [Configuration](https://www.blacklanternsecurity.com/bbot/scann
 
 ## BBOT as a Python library
 
+BBOT exposes a Python API that allows it to be used for all kinds of fun and nefarious purposes, like a [Discord Bot that responds to `/scan evilcorp.com`](https://www.blacklanternsecurity.com/bbot/dev/#bbot-python-library-advanced-usage#discord-bot-example).
+
 **Synchronous**
 
 ```python
diff --git a/docs/dev/helpers/index.md b/docs/dev/helpers/index.md
new file mode 100644
index 0000000000..be7365fe10
--- /dev/null
+++ b/docs/dev/helpers/index.md
@@ -0,0 +1,29 @@
+# BBOT Helpers
+
+In this section are various helper functions that are designed to make your life easier when devving on BBOT. Whether you're extending BBOT by writing a module or working on its core engine, these functions are designed to act as useful machine parts to perform essential tasks, such as making a web request or executing a DNS query.
+
+The vast majority of these helpers can be accessed directly from the `.helpers` attribute of a scan or module, like so:
+
+```python
+class MyModule(BaseModule):
+
+    ...
+
+    async def handle_event(self, event):
+        # Web Request
+        response = await self.helpers.request("https://www.evilcorp.com")
+
+        # DNS query
+        for ip in await self.helpers.resolve("www.evilcorp.com"):
+            self.hugesuccess(str(ip))
+
+        # Execute shell command
+        completed_process = await self.helpers.run("ls", "-l")
+        self.hugesuccess(completed_process.stdout)
+
+        # Split a DNS name into subdomain / domain
+        self.helpers.split_domain("www.internal.evilcorp.co.uk")
+        # ("www.internal", "evilcorp.co.uk")
+```
+
+[Next Up: Command Helpers -->](command.md){ .md-button .md-button--primary }
diff --git a/docs/dev/index.md b/docs/dev/index.md
new file mode 100644
index 0000000000..982b04e05a
--- /dev/null
+++ b/docs/dev/index.md
@@ -0,0 +1,92 @@
+# BBOT Developer Reference
+
+BBOT exposes a convenient API that allows you to create, start, and stop scans using Python code.
+
+Documented in this section are commonly-used classes and functions within BBOT, along with usage examples.
+
+## Discord Bot Example
+
+Below is a simple Discord bot designed to run BBOT scans.
+
+```python
+import asyncio
+import discord
+from discord.ext import commands
+
+from bbot.scanner import Scanner
+from bbot.modules import module_loader
+from bbot.modules.output.discord import Discord
+
+
+# make list of BBOT modules
+bbot_modules = ["excavate", "speculate", "aggregate"]
+for module_name, preloaded in module_loader.preloaded().items():
+    flags = preloaded["flags"]
+    if "subdomain-enum" in flags and "passive" in flags and "slow" not in flags:
+        bbot_modules.append(module_name)
+
+
+class BBOTDiscordBot(commands.Cog):
+    """
+    A simple Discord bot capable of running a BBOT scan.
+
+    To set up:
+    1. Go to Discord Developer Portal (https://discord.com/developers)
+    2. Create a new application
+    3. Create an invite link for the bot, visit the link to invite it to your server
+        - Your Application --> OAuth2 --> URL Generator
+        - For Scopes, select "bot"
+        - For Bot Permissions, select:
+            - Read Messages/View Channels
+            - Send Messages
+    4. Turn on "Message Content Intent"
+        - Your Application --> Bot --> Privileged Gateway Intents --> Message Content Intent
+    5. Copy your Discord Bot Token and put it at the top of this file
+        - Your Application --> Bot --> Reset Token
+    6. Run this script
+
+    To scan evilcorp.com, you would type:
+
+        /scan evilcorp.com
+
+    Results will be output to the same channel.
+    """
+    def __init__(self):
+        self.current_scan = None
+
+    @commands.command(name="scan", description="Scan a target with BBOT.")
+    async def scan(self, ctx, target: str):
+        if self.current_scan is not None:
+            self.current_scan.stop()
+        await ctx.send(f"Starting scan against {target}.")
+
+        self.current_scan = Scanner(target, modules=bbot_modules)
+        discord_module = Discord(self.current_scan)
+
+        seen = set()
+        num_events = 0
+        async for event in self.current_scan.async_start():
+            if hash(event) in seen:
+                continue
+            seen.add(hash(event))
+            await ctx.send(discord_module.format_message(event))
+            num_events += 1
+
+        await ctx.send(f"Finished scan against {target}. {num_events:,} results.")
+        self.current_scan = None
+
+
+if __name__ == "__main__":
+    intents = discord.Intents.default()
+    intents.message_content = True
+    bot = commands.Bot(command_prefix="/", intents=intents)
+
+    @bot.event
+    async def on_ready():
+        print(f"We have logged in as {bot.user}")
+        await bot.add_cog(BBOTDiscordBot())
+
+    bot.run("DISCORD_BOT_TOKEN_HERE")
+```
+
+[Next Up: Scanner -->](scanner.md){ .md-button .md-button--primary }
diff --git a/mkdocs.yml b/mkdocs.yml
index 4bec84f24d..75fabc6e7d 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -31,12 +31,13 @@ nav:
   - Contribution:
     - How to Write a Module: contribution.md
   - Developer Reference:
+    - Overview: dev/index.md
     - Scanner: dev/scanner.md
     - Event: dev/event.md
     - Target: dev/target.md
     - BaseModule: dev/basemodule.md
     - Helpers:
-      # dev/helpers/index.md
+      - Overview: dev/helpers/index.md
       - Command: dev/helpers/command.md
       - DNS: dev/helpers/dns.md
      - Interactsh: dev/helpers/interactsh.md
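Stripped of the Discord plumbing, the async pattern the bot relies on is just the following sketch. The dedup-by-`hash(event)` trick is taken straight from the bot code above, while the module choice is an illustrative assumption:

```python
import asyncio

from bbot.scanner import Scanner


async def main():
    # "httpx" is shown purely as an example module
    scan = Scanner("evilcorp.com", modules=["httpx"])
    seen = set()
    # iterate over events as the scan produces them
    async for event in scan.async_start():
        if hash(event) in seen:  # skip duplicate events, as the bot does
            continue
        seen.add(hash(event))
        print(event)


asyncio.run(main())
```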
From bce133540dad88de31e4f22f97a6af108074ec43 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Fri, 22 Sep 2023 15:02:53 -0400
Subject: [PATCH 108/123] update discord bot docs

---
 docs/dev/index.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/dev/index.md b/docs/dev/index.md
index 982b04e05a..5a9bf88d0c 100644
--- a/docs/dev/index.md
+++ b/docs/dev/index.md
@@ -18,7 +18,7 @@ from bbot.modules import module_loader
 from bbot.modules.output.discord import Discord
 
 
-# make list of BBOT modules
+# make list of BBOT modules to enable for the scan
 bbot_modules = ["excavate", "speculate", "aggregate"]
 for module_name, preloaded in module_loader.preloaded().items():
     flags = preloaded["flags"]
@@ -60,11 +60,13 @@ class BBOTDiscordBot(commands.Cog):
             self.current_scan.stop()
         await ctx.send(f"Starting scan against {target}.")
 
+        # creates scan instance
         self.current_scan = Scanner(target, modules=bbot_modules)
         discord_module = Discord(self.current_scan)
 
         seen = set()
         num_events = 0
+        # start scan and iterate through results
         async for event in self.current_scan.async_start():
             if hash(event) in seen:
                 continue
From 42a3ad28a41549e31515aeb4c6fea2d0b9c529ca Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Fri, 22 Sep 2023 16:45:51 -0400
Subject: [PATCH 109/123] cleaned up README

---
 README.md | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/README.md b/README.md
index d447be51a3..90b4db41fc 100644
--- a/README.md
+++ b/README.md
@@ -18,13 +18,9 @@ BBOT typically outperforms other subdomain enumeration tools by 20-25%. To learn
 
 ## Installation ([pip](https://pypi.org/project/bbot/))
 
-For more installation methods including [Docker](https://hub.docker.com/r/blacklanternsecurity/bbot), see [Installation](https://www.blacklanternsecurity.com/bbot/#installation).
+Note: Requires both Linux and Python 3.9+. For more installation methods including [Docker](https://hub.docker.com/r/blacklanternsecurity/bbot), see [Installation](https://www.blacklanternsecurity.com/bbot/#installation).
 
 ```bash
-# Prerequisites:
-# - Linux (Windows and macOS are *not* supported)
-# - Python 3.9 or newer
-
 # stable version
 pipx install bbot
 
From 8081379e0354e071c1aa8e3d60ea2d18fb200f41 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Fri, 22 Sep 2023 16:46:24 -0400
Subject: [PATCH 110/123] cleaned up README

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 90b4db41fc..8818a53a82 100644
--- a/README.md
+++ b/README.md
@@ -18,7 +18,7 @@ BBOT typically outperforms other subdomain enumeration tools by 20-25%. To learn
 
 ## Installation ([pip](https://pypi.org/project/bbot/))
 
-Note: Requires both Linux and Python 3.9+. For more installation methods including [Docker](https://hub.docker.com/r/blacklanternsecurity/bbot), see [Installation](https://www.blacklanternsecurity.com/bbot/#installation).
+Note: Requires Linux and Python 3.9+. For more installation methods including [Docker](https://hub.docker.com/r/blacklanternsecurity/bbot), see [Installation](https://www.blacklanternsecurity.com/bbot/#installation).
 
 ```bash
 # stable version
From 7ea2311b175bc3856c586284e8cf6c3830a9b27b Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 25 Sep 2023 12:04:19 -0400
Subject: [PATCH 111/123] fixed unhandled websockets error in agent

---
 bbot/agent/agent.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/bbot/agent/agent.py b/bbot/agent/agent.py
index f30fb94d15..09eff2a03b 100644
--- a/bbot/agent/agent.py
+++ b/bbot/agent/agent.py
@@ -51,7 +51,14 @@ async def ws(self, rebuild=False):
             verbs = ("Rebuilding", "Rebuilt")
         url = f"{self.url}/control/"
         log.debug(f"{verbs[0]} websocket connection to {url}")
-        self._ws = await websockets.connect(url, **kwargs)
+        while 1:
+            try:
+                self._ws = await websockets.connect(url, **kwargs)
+                break
+            except Exception as e:
+                log.error(f'Failed to establish websockets connection to URL "{url}": {e}')
+                log.trace(traceback.format_exc())
+                await asyncio.sleep(1)
         log.debug(f"{verbs[1]} websocket connection to {url}")
         return self._ws
 
From 69189788b372876d14a46fedd4d6ba9fd668d966 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 25 Sep 2023 12:08:32 -0400
Subject: [PATCH 112/123] fix 'unknown command: ping' error

---
 bbot/agent/agent.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bbot/agent/agent.py b/bbot/agent/agent.py
index 09eff2a03b..819388274c 100644
--- a/bbot/agent/agent.py
+++ b/bbot/agent/agent.py
@@ -77,6 +77,7 @@ async def start(self):
             if message.command == "ping":
                 if self.scan is None:
                     await self.send({"conversation": str(message.conversation), "message_type": "pong"})
+                continue
 
             command_type = getattr(messages, message.command, None)
             if command_type is None:
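The retry loop added in patch 111 is a generally useful pattern. Factored out of the agent, it amounts to the generic sketch below; this is not the agent's exact code, and the fixed one-second delay (matching the patch) is something a production client might replace with backoff:

```python
import asyncio
import logging

log = logging.getLogger(__name__)


async def connect_with_retry(connect, url, delay=1):
    """Keep attempting an async connection until it succeeds."""
    while 1:
        try:
            # connect is any awaitable factory, e.g. websockets.connect
            return await connect(url)
        except Exception as e:
            log.error(f'Failed to establish connection to URL "{url}": {e}')
            await asyncio.sleep(delay)
```

Usage would look something like `ws = await connect_with_retry(websockets.connect, url)`.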
f"{self.url}/scan/{scan_id}/", "token": self.token}}} ) config = OmegaConf.create(config) config = OmegaConf.merge(self.config, config, output_module_config) From 20edb218eb9dbe93c6840b9d168e06b3c4e5375a Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Mon, 25 Sep 2023 12:41:06 -0400 Subject: [PATCH 114/123] fix agent tests --- bbot/test/test_step_1/test_cli.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index 31b681c177..0ccc94887e 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -53,7 +53,10 @@ async def test_cli(monkeypatch, bbot_config): task = asyncio.create_task(cli._main()) await asyncio.sleep(2) task.cancel() - await task + try: + await task + except asyncio.CancelledError: + pass # no args monkeypatch.setattr("sys.argv", ["bbot"]) From 62d7a1680347d2cf0cea34537d0c19b3d7bee606 Mon Sep 17 00:00:00 2001 From: TheTechromancer Date: Thu, 28 Sep 2023 15:19:00 -0400 Subject: [PATCH 115/123] fix wayback URL validation error --- bbot/core/helpers/validators.py | 5 ++++- bbot/test/test_step_1/test_helpers.py | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index 82d7a38d4f..b26543e470 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -143,7 +143,10 @@ def collapse_urls(urls, threshold=10): """ url_hashes = {} for url in urls: - new_url = clean_url(url) + try: + new_url = clean_url(url) + except ValueError as e: + log.verbose(f"Failed to clean url {url}: {e}") url_hash = hash_url(new_url) try: url_hashes[url_hash].add(new_url) diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index abf09cadc2..b46a9ad2c0 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -39,6 +39,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_config, bbot_https assert helpers.validators.clean_url("http://evilcorp.com/asdf?a=asdf#frag").geturl() == "http://evilcorp.com/asdf" assert helpers.validators.clean_url("http://evilcorp.com//asdf").geturl() == "http://evilcorp.com/asdf" assert helpers.validators.clean_url("http://evilcorp.com.").geturl() == "http://evilcorp.com/" + with pytest.raises(ValueError): + helpers.validators.clean_url("http://evilcorp,com") assert helpers.url_depth("http://evilcorp.com/asdf/user/") == 2 assert helpers.url_depth("http://evilcorp.com/asdf/user") == 2 From b70d41e67dc2a0a82799a173a08442854ca9ddc5 Mon Sep 17 00:00:00 2001 From: BBOT Docs Autopublish Date: Fri, 29 Sep 2023 19:21:11 +0000 Subject: [PATCH 116/123] Refresh module docs --- docs/modules/list_of_modules.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md index cbe682be8d..4b9a21a218 100644 --- a/docs/modules/list_of_modules.md +++ b/docs/modules/list_of_modules.md @@ -65,7 +65,7 @@ | hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED | | ip2location | scan | Yes | Query IP2location.io's API for geolocation information. 
From b70d41e67dc2a0a82799a173a08442854ca9ddc5 Mon Sep 17 00:00:00 2001
From: BBOT Docs Autopublish
Date: Fri, 29 Sep 2023 19:21:11 +0000
Subject: [PATCH 116/123] Refresh module docs

---
 docs/modules/list_of_modules.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/modules/list_of_modules.md b/docs/modules/list_of_modules.md
index cbe682be8d..4b9a21a218 100644
--- a/docs/modules/list_of_modules.md
+++ b/docs/modules/list_of_modules.md
@@ -65,7 +65,7 @@
 | hunterio | scan | Yes | Query hunter.io for emails | email-enum, passive, safe, subdomain-enum | DNS_NAME | DNS_NAME, EMAIL_ADDRESS, URL_UNVERIFIED |
 | ip2location | scan | Yes | Query IP2location.io's API for geolocation information. | passive, safe | IP_ADDRESS | GEOLOCATION |
 | ipneighbor | scan | No | Look beside IPs in their surrounding subnet | aggressive, passive, subdomain-enum | IP_ADDRESS | IP_ADDRESS |
-| ipstack | scan | Yes | Query IPStack's API for GeoIP | passive, safe | IP_ADDRESS | GEOLOCATION |
+| ipstack | scan | Yes | Query IPStack's GeoIP API | passive, safe | IP_ADDRESS | GEOLOCATION |
 | leakix | scan | No | Query leakix.net for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME |
 | massdns | scan | No | Brute-force subdomains with massdns (highly effective) | aggressive, passive, slow, subdomain-enum | DNS_NAME | DNS_NAME |
 | myssl | scan | No | Query myssl.com's API for subdomains | passive, safe, subdomain-enum | DNS_NAME | DNS_NAME |
From 2aa711b7e68904fb585763e83203db8d901a8e79 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 2 Oct 2023 05:35:23 -0400
Subject: [PATCH 117/123] fix docs publishing

---
 .github/workflows/tests.yml |  8 ++++++--
 poetry.lock                 | 37 ++++++++++++++++++++++++++++++++++++-
 pyproject.toml              |  1 +
 3 files changed, 43 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index cafdad9824..eafd199e59 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -4,6 +4,7 @@ on:
     branches:
       - stable
       - dev
+      - fix-docs-publishing
   pull_request:
 
 concurrency:
@@ -93,8 +94,11 @@ jobs:
           path: .cache
           restore-keys: |
             mkdocs-material-
-      - run: pip install mkdocs-material mkdocs-extra-sass-plugin livereload
-      - run: mkdocs gh-deploy --force
+      - name: Install dependencies
+        run: |
+          pip install poetry
+          poetry install --only=docs
+      - run: poetry run mkdocs gh-deploy --force
   publish_code:
     needs: update_docs
     runs-on: ubuntu-latest
diff --git a/poetry.lock b/poetry.lock
index 2f1daa6c83..55c25b105c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -874,6 +874,21 @@ files = [
     {file = "libsass-0.22.0.tar.gz", hash = "sha256:3ab5ad18e47db560f4f0c09e3d28cf3bb1a44711257488ac2adad69f4f7f8425"},
 ]
 
+[[package]]
+name = "livereload"
+version = "2.6.3"
+description = "Python LiveReload is an awesome tool for web developers"
+optional = false
+python-versions = "*"
+files = [
+    {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"},
+    {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
+]
+
+[package.dependencies]
+six = "*"
+tornado = {version = "*", markers = "python_version > \"2.7\""}
+
 [[package]]
 name = "lockfile"
 version = "0.12.2"
@@ -2148,6 +2163,26 @@ files = [
     {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"},
 ]
 
+[[package]]
+name = "tornado"
+version = "6.3.3"
+description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
+optional = false
+python-versions = ">= 3.8"
+files = [
+    {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"},
+    {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"},
+    {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"},
+    {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"},
+    {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"},
+    {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"},
+    {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"},
+    {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"},
+    {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"},
+    {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"},
+    {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"},
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.8.0"
@@ -2398,4 +2433,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "fefc413a0132045bbdb1665144f272f9ca328dfc0db48926c2585a8927dd0af1"
+content-hash = "2215f588e30cd553c593079522ce8b98beb344554688e62d2409237fd245f21d"
diff --git a/pyproject.toml b/pyproject.toml
index f8436f3bc0..ee2a01c948 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -71,6 +71,7 @@ mkdocs-material = "^9.2.5"
 mkdocs-material-extensions = "^1.1.1"
 mkdocstrings = "^0.22.0"
 mkdocstrings-python = "^1.6.0"
+livereload = "^2.6.3"
 
 [tool.pytest.ini_options]
 env = [
From 85ff49a9316fd85e464572a6c00407d8c94a3f3e Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 2 Oct 2023 07:23:46 -0400
Subject: [PATCH 118/123] update for current branch

---
 .github/workflows/tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index eafd199e59..c3f7183b03 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -81,7 +81,7 @@ jobs:
   publish_docs:
     needs: update_docs
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' && github.ref == 'refs/heads/dev'
+    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/fix-docs-publishing')
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
From 065ae62f3e94197dd4da1ecaf701cf5c496ec24b Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 2 Oct 2023 10:04:45 -0400
Subject: [PATCH 119/123] fix tests

---
 .github/workflows/tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index c3f7183b03..5a8f7249a1 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -54,7 +54,7 @@ jobs:
   update_docs:
     needs: test
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable')
+    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable || github.ref == 'refs/heads/fix-docs-publishing')
     steps:
       - uses: actions/checkout@v3
         with:
From 70b029e0183fabd31b0c2bce4beafbed88efa6ed Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Mon, 2 Oct 2023 10:09:14 -0400
Subject: [PATCH 120/123] tests troubleshooting

---
 .github/workflows/tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 5a8f7249a1..15da938371 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -54,7 +54,7 @@ jobs:
   update_docs:
     needs: test
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable || github.ref == 'refs/heads/fix-docs-publishing')
+    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/fix-docs-publishing')
     steps:
       - uses: actions/checkout@v3
         with:
From 2e8d17eaa520eed5197d53e46ec5671c2aa6ad26 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Tue, 3 Oct 2023 04:37:19 -0400
Subject: [PATCH 121/123] fix test dedup concurrency

---
 .github/workflows/tests.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 15da938371..308d4f12a8 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -7,8 +7,8 @@ on:
       - fix-docs-publishing
   pull_request:
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }}
   cancel-in-progress: true
 
 jobs:
From e2d887c775a65b464986ba49824e2b2d19da28c9 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Tue, 3 Oct 2023 05:01:54 -0400
Subject: [PATCH 122/123] restore branch criteria

---
 .github/workflows/tests.yml | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 308d4f12a8..7e2c395f71 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -4,11 +4,10 @@ on:
     branches:
       - stable
       - dev
-      - fix-docs-publishing
   pull_request:
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.head.sha || github.sha }}
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true
 
 jobs:
@@ -53,7 +53,7 @@ jobs:
   update_docs:
     needs: test
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable' || github.ref == 'refs/heads/fix-docs-publishing')
+    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/stable')
     steps:
       - uses: actions/checkout@v3
         with:
@@ -81,7 +81,7 @@ jobs:
   publish_docs:
     needs: update_docs
     runs-on: ubuntu-latest
-    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/fix-docs-publishing')
+    if: github.event_name == 'push' && (github.ref == 'refs/heads/dev')
     steps:
      - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
From 2728157f081e2a97c3dbcca5e6fc30e3c0e71584 Mon Sep 17 00:00:00 2001
From: TheTechromancer
Date: Wed, 11 Oct 2023 13:38:10 -0400
Subject: [PATCH 123/123] fix py2neo situation

---
 bbot/modules/output/neo4j.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bbot/modules/output/neo4j.py b/bbot/modules/output/neo4j.py
index 477bdd373b..18bda8bad8 100644
--- a/bbot/modules/output/neo4j.py
+++ b/bbot/modules/output/neo4j.py
@@ -15,7 +15,7 @@ class neo4j(BaseOutputModule):
         "username": "Neo4j username",
         "password": "Neo4j password",
     }
-    deps_pip = ["py2neo~=2021.2.3"]
+    deps_pip = ["git+https://github.com/blacklanternsecurity/py2neo"]
     batch_size = 50
 
     async def setup(self):
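Patch 123 works because entries in a module's `deps_pip` list are passed to pip as ordinary requirement specifiers, so anything pip understands, including a direct git URL, is fair game. A hedged sketch of a custom module leaning on the same mechanism (the class and both dependencies below are made up for illustration):

```python
from bbot.modules.base import BaseModule


class mymodule(BaseModule):
    watched_events = ["DNS_NAME"]
    produced_events = ["DNS_NAME"]
    # pip requirement specifiers and direct git URLs are both accepted
    deps_pip = ["requests~=2.31.0", "git+https://github.com/example/somepackage"]

    async def handle_event(self, event):
        pass
```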