Skip to content

Commit

Permalink
commenting base module
Browse files Browse the repository at this point in the history
  • Loading branch information
TheTechromancer committed Sep 13, 2023
1 parent d0a6ef9 commit e6d5db1
Show file tree
Hide file tree
Showing 8 changed files with 665 additions and 111 deletions.
12 changes: 12 additions & 0 deletions bbot/core/helpers/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,6 +393,18 @@ async def curl(self, *args, **kwargs):
output = (await self.parent_helper.run(curl_command)).stdout
return output

def is_spider_danger(self, source_event, url):
    """Return whether spidering *url* would exceed the scan's web-spider limits.

    A URL is "spider danger" when either its path depth exceeds the
    ``web_spider_depth`` config setting, or the chain of spidered links
    leading to it (the source event's ``web_spider_distance`` plus this hop)
    exceeds the ``web_spider_distance`` config setting.

    Args:
        source_event: The event the URL was discovered from. Its
            ``web_spider_distance`` attribute (defaulting to 0 when absent)
            counts how many spider hops preceded it.
        url (str): The URL being considered for spidering.

    Returns:
        bool: True if spidering the URL would exceed either limit, False otherwise.
    """
    url_depth = self.parent_helper.url_depth(url)
    web_spider_depth = self.parent_helper.scan.config.get("web_spider_depth", 1)
    # +1 because following this URL would be one hop beyond the source event
    spider_distance = getattr(source_event, "web_spider_distance", 0) + 1
    web_spider_distance = self.parent_helper.scan.config.get("web_spider_distance", 0)
    return (url_depth > web_spider_depth) or (spider_distance > web_spider_distance)


user_keywords = [re.compile(r, re.I) for r in ["user", "login", "email"]]
pass_keywords = [re.compile(r, re.I) for r in ["pass"]]
Expand Down
751 changes: 646 additions & 105 deletions bbot/modules/base.py

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion bbot/modules/gowitness.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ async def handle_batch(self, *events):
_id = row["url_id"]
source_url = self.screenshots_taken[_id]
source_event = events[source_url]
if self.is_spider_danger(source_event, url):
if self.helpers.is_spider_danger(source_event, url):
tags.append("spider-danger")
if url and url.startswith("http"):
self.emit_event(url, "URL_UNVERIFIED", source=source_event, tags=tags)
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/internal/excavate.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ async def search(self, content, event, **kwargs):
url_event = self.report(result, name, event, **kwargs)
if url_event is not None:
url_in_scope = self.excavate.scan.in_scope(url_event)
is_spider_danger = self.excavate.is_spider_danger(event, result)
is_spider_danger = self.excavate.helpers.is_spider_danger(event, result)
if (
(
urls_found >= self.web_spider_links_per_page and url_in_scope
Expand Down
2 changes: 1 addition & 1 deletion bbot/modules/robots.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,6 @@ async def handle_event(self, event):
continue

tags = []
if self.is_spider_danger(event, unverified_url):
if self.helpers.is_spider_danger(event, unverified_url):
tags.append("spider-danger")
self.emit_event(unverified_url, "URL_UNVERIFIED", source=event, tags=tags)
2 changes: 1 addition & 1 deletion bbot/scanner/scanner.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,7 +345,7 @@ async def async_start(self):
break

if "python" in self.modules:
events, finish = await self.modules["python"].events_waiting()
events, finish = await self.modules["python"]._events_waiting()
for e in events:
yield e

Expand Down
2 changes: 0 additions & 2 deletions docs/dev/scanner.md
Original file line number Diff line number Diff line change
@@ -1,3 +1 @@
# `bbot.scanner.Scanner()`

::: bbot.scanner.Scanner
3 changes: 3 additions & 0 deletions mkdocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ nav:
- How to Write a Module: contribution.md
- Developer Reference:
- Scanner: dev/scanner.md
- Target: dev/target.md
- BaseModule: dev/basemodule.md
- Helpers:
# dev/helpers/index.md
- Miscellaneous: dev/helpers/misc.md
Expand Down Expand Up @@ -60,6 +62,7 @@ plugins:
handlers:
python:
options:
heading_level: 1
show_signature_annotations: true
show_root_toc_entry: false
show_root_heading: true
Expand Down

0 comments on commit e6d5db1

Please sign in to comment.