
ruff check && ruff format
cclauss committed Nov 22, 2024
1 parent 785fd62 commit f0e9f50
Showing 44 changed files with 3,389 additions and 543 deletions.
17 changes: 4 additions & 13 deletions .github/workflows/tests.yml
@@ -15,19 +15,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: psf/black@stable
-        with:
-          options: "--check"
-      - name: Install Python 3
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.x"
-      - name: Install dependencies
-        run: |
-          pip install flake8
-      - name: flake8
-        run: |
-          flake8
+      - run: |
+          pipx install ruff
+          ruff check
+          ruff format
   test:
     needs: lint
     runs-on: ubuntu-latest
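
For reference, the lint job collapses to a single step after this change. A minimal sketch of the resulting job, reconstructed from the hunk above rather than copied from the repository (workflow name, triggers, and the test job are omitted), would look roughly like:

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: |
          pipx install ruff
          ruff check
          ruff format

One caveat on the design: ruff check exits non-zero on lint violations, but ruff format without --check or --diff rewrites files in place and exits zero, so as written the job enforces linting while formatting drift alone would not necessarily fail the build.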
4 changes: 0 additions & 4 deletions bbot/cli.py
@@ -29,7 +29,6 @@


async def _main():

import asyncio
import traceback
from contextlib import suppress
@@ -45,7 +44,6 @@ async def _main():
global scan_name

try:

# start by creating a default scan preset
preset = Preset(_log=True, name="bbot_cli_main")
# parse command line arguments and merge into preset
@@ -81,7 +79,6 @@

# if we're listing modules or their options
if options.list_modules or options.list_module_options:

# if no modules or flags are specified, enable everything
if not (options.modules or options.output_modules or options.flags):
for module, preloaded in preset.module_loader.preloaded().items():
@@ -172,7 +169,6 @@ async def _main():
log.trace(f"Command: {' '.join(sys.argv)}")

if sys.stdin.isatty():

# warn if any targets belong directly to a cloud provider
for event in scan.target.seeds.events:
if event.type == "DNS_NAME":
1 change: 0 additions & 1 deletion bbot/core/config/files.py
@@ -10,7 +10,6 @@


class BBOTConfigFiles:

config_dir = (Path.home() / ".config" / "bbot").resolve()
defaults_filename = (bbot_code_dir / "defaults.yml").resolve()
config_filename = (config_dir / "bbot.yml").resolve()
3 changes: 0 additions & 3 deletions bbot/core/event/base.py
@@ -1180,7 +1180,6 @@ def __init__(self, *args, **kwargs):
self.num_redirects = getattr(self.parent, "num_redirects", 0)

def _data_id(self):

data = super()._data_id()

# remove the querystring for URL/URL_UNVERIFIED events, because we will conditionally add it back in (based on settings)
@@ -1267,7 +1266,6 @@ def http_status(self):


class URL(URL_UNVERIFIED):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

@@ -1309,7 +1307,6 @@ class URL_HINT(URL_UNVERIFIED):


class WEB_PARAMETER(DictHostEvent):

def _data_id(self):
# dedupe by url:name:param_type
url = self.data.get("url", "")
1 change: 0 additions & 1 deletion bbot/core/helpers/dns/dns.py
@@ -16,7 +16,6 @@


class DNSHelper(EngineClient):

SERVER_CLASS = DNSEngine
ERROR_CLASS = DNSError

2 changes: 0 additions & 2 deletions bbot/core/helpers/dns/engine.py
@@ -24,7 +24,6 @@


class DNSEngine(EngineServer):

CMDS = {
0: "resolve",
1: "resolve_raw",
@@ -476,7 +475,6 @@ async def is_wildcard(self, query, rdtypes, raw_dns_records=None):
# for every parent domain, starting with the shortest
parents = list(domain_parents(query))
for parent in parents[::-1]:

# check if the parent domain is set up with wildcards
wildcard_results = await self.is_wildcard_domain(parent, rdtypes_to_check)

1 change: 0 additions & 1 deletion bbot/core/helpers/dns/mock.py
@@ -5,7 +5,6 @@


class MockResolver:

def __init__(self, mock_data=None, custom_lookup_fn=None):
self.mock_data = mock_data if mock_data else {}
self._custom_lookup_fn = custom_lookup_fn
1 change: 0 additions & 1 deletion bbot/core/helpers/libmagic.py
@@ -2,7 +2,6 @@


def get_magic_info(file):

magic_detections = puremagic.magic_file(file)
if magic_detections:
magic_detections.sort(key=lambda x: x.confidence, reverse=True)
2 changes: 0 additions & 2 deletions bbot/core/helpers/process.py
@@ -7,7 +7,6 @@


class BBOTThread(threading.Thread):

default_name = "default bbot thread"

def __init__(self, *args, **kwargs):
@@ -24,7 +23,6 @@ def run(self):


class BBOTProcess(SpawnProcess):

default_name = "bbot process pool"

def __init__(self, *args, **kwargs):
1 change: 0 additions & 1 deletion bbot/core/helpers/validators.py
@@ -299,7 +299,6 @@ def is_email(email):


class Validators:

def __init__(self, parent_helper):
self.parent_helper = parent_helper

1 change: 0 additions & 1 deletion bbot/core/helpers/web/engine.py
@@ -14,7 +14,6 @@


class HTTPEngine(EngineServer):

CMDS = {
0: "request",
1: "request_batch",
1 change: 0 additions & 1 deletion bbot/core/helpers/web/web.py
@@ -19,7 +19,6 @@


class WebHelper(EngineClient):

SERVER_CLASS = HTTPEngine
ERROR_CLASS = WebError

1 change: 0 additions & 1 deletion bbot/core/modules.py
@@ -337,7 +337,6 @@ def preload_module(self, module_file):
# look for classes
if type(root_element) == ast.ClassDef:
for class_attr in root_element.body:

# class attributes that are dictionaries
if type(class_attr) == ast.Assign and type(class_attr.value) == ast.Dict:
# module options
1 change: 0 additions & 1 deletion bbot/db/sql/models.py
@@ -69,7 +69,6 @@ def __eq__(self, other):


class Event(BBOTBaseModel, table=True):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
data = self._get_data(self.data, self.type)
2 changes: 0 additions & 2 deletions bbot/modules/baddns.py
@@ -55,7 +55,6 @@ async def setup(self):
return True

async def handle_event(self, event):

tasks = []
for ModuleClass in self.select_modules():
kwargs = {
@@ -75,7 +74,6 @@ async def handle_event(self, event):
tasks.append((module_instance, task))

async for completed_task in self.helpers.as_completed([task for _, task in tasks]):

module_instance = next((m for m, t in tasks if t == completed_task), None)
try:
task_result = await completed_task
1 change: 0 additions & 1 deletion bbot/modules/baddns_direct.py
@@ -51,7 +51,6 @@ async def handle_event(self, event):

CNAME_direct_instance = CNAME_direct_module(event.host, **kwargs)
if await CNAME_direct_instance.dispatch():

results = CNAME_direct_instance.analyze()
if results and len(results) > 0:
for r in results:
2 changes: 0 additions & 2 deletions bbot/modules/dotnetnuke.py
@@ -32,7 +32,6 @@ async def setup(self):
self.interactsh_instance = None

if self.scan.config.get("interactsh_disable", False) == False:

try:
self.interactsh_instance = self.helpers.interactsh()
self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback)
@@ -114,7 +113,6 @@ async def handle_event(self, event):
)

if "endpoint" not in event.tags:

# NewsArticlesSlider ImageHandler.ashx File Read
result = await self.helpers.request(
f'{event.data["url"]}/DesktopModules/dnnUI_NewsArticlesSlider/ImageHandler.ashx?img=~/web.config'
39 changes: 18 additions & 21 deletions bbot/modules/internal/excavate.py
@@ -62,7 +62,6 @@ def _exclude_key(original_dict, key_to_exclude):


def extract_params_url(parsed_url):

params = parse_qs(parsed_url.query)
flat_params = {k: v[0] for k, v in params.items()}

@@ -94,7 +93,6 @@ def extract_params_location(location_header_value, original_parsed_url):


class YaraRuleSettings:

def __init__(self, description, tags, emit_match):
self.description = description
self.tags = tags
@@ -263,7 +261,6 @@ async def report(


class CustomExtractor(ExcavateRule):

def __init__(self, excavate):
super().__init__(excavate)

@@ -358,7 +355,6 @@ def url_unparse(self, param_type, parsed_url):
)

class ParameterExtractor(ExcavateRule):

yara_rules = {}

class ParameterExtractorRule:
@@ -372,7 +368,6 @@ def __init__(self, excavate, result):
self.result = result

class GetJquery(ParameterExtractorRule):

name = "GET jquery"
discovery_regex = r"/\$.get\([^\)].+\)/ nocase"
extraction_regex = re.compile(r"\$.get\([\'\"](.+)[\'\"].+(\{.+\})\)")
@@ -393,8 +388,12 @@ def extract(self):
for action, extracted_parameters in extracted_results:
extracted_parameters_dict = self.convert_to_dict(extracted_parameters)
for parameter_name, original_value in extracted_parameters_dict.items():
yield self.output_type, parameter_name, original_value, action, _exclude_key(
extracted_parameters_dict, parameter_name
yield (
self.output_type,
parameter_name,
original_value,
action,
_exclude_key(extracted_parameters_dict, parameter_name),
)

class PostJquery(GetJquery):
@@ -418,8 +417,12 @@ def extract(self):
k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in query_strings.items()
}
for parameter_name, original_value in query_strings_dict.items():
yield self.output_type, parameter_name, original_value, url, _exclude_key(
query_strings_dict, parameter_name
yield (
self.output_type,
parameter_name,
original_value,
url,
_exclude_key(query_strings_dict, parameter_name),
)

class GetForm(ParameterExtractorRule):
@@ -444,8 +447,12 @@ def extract(self):
form_parameters[parameter_name] = original_value

for parameter_name, original_value in form_parameters.items():
yield self.output_type, parameter_name, original_value, form_action, _exclude_key(
form_parameters, parameter_name
yield (
self.output_type,
parameter_name,
original_value,
form_action,
_exclude_key(form_parameters, parameter_name),
)

class PostForm(GetForm):
Expand Down Expand Up @@ -485,7 +492,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
endpoint,
additional_params,
) in extracted_params:

self.excavate.debug(
f"Found Parameter [{parameter_name}] in [{parameterExtractorSubModule.name}] ParameterExtractor Submodule"
)
@@ -497,7 +503,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
)

if self.excavate.helpers.validate_parameter(parameter_name, parameter_type):

if self.excavate.in_bl(parameter_name) == False:
parsed_url = urlparse(url)
description = f"HTTP Extracted Parameter [{parameter_name}] ({parameterExtractorSubModule.name} Submodule)"
@@ -532,7 +537,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
await self.report(domain, event, yara_rule_settings, discovery_context, event_type="DNS_NAME")

class EmailExtractor(ExcavateRule):

yara_rules = {
"email": 'rule email { meta: description = "contains email address" strings: $email = /[^\\W_][\\w\\-\\.\\+\']{0,100}@[a-zA-Z0-9\\-]{1,100}(\\.[a-zA-Z0-9\\-]{1,100})*\\.[a-zA-Z]{2,63}/ nocase fullword condition: $email }',
}
@@ -551,7 +555,6 @@ class JWTExtractor(ExcavateRule):
}

class ErrorExtractor(ExcavateRule):

signatures = {
"PHP_1": r"/\.php on line [0-9]+/",
"PHP_2": r"/\.php<\/b> on line <b>[0-9]+/",
@@ -589,7 +592,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING")

class SerializationExtractor(ExcavateRule):

regexes = {
"Java": re.compile(r"[^a-zA-Z0-9\/+]rO0[a-zA-Z0-9+\/]+={0,2}"),
"DOTNET": re.compile(r"[^a-zA-Z0-9\/+]AAEAAAD\/\/[a-zA-Z0-9\/+]+={0,2}"),
@@ -619,7 +621,6 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte
await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING")

class FunctionalityExtractor(ExcavateRule):

yara_rules = {
"File_Upload_Functionality": r'rule File_Upload_Functionality { meta: description = "contains file upload functionality" strings: $fileuploadfunc = /<input[^>]+type=["\']?file["\']?[^>]+>/ nocase condition: $fileuploadfunc }',
"Web_Service_WSDL": r'rule Web_Service_WSDL { meta: emit_match = "True" description = "contains a web service WSDL URL" strings: $wsdl = /https?:\/\/[^\s]*\.(wsdl)/ nocase condition: $wsdl }',
@@ -704,7 +705,6 @@ class URLExtractor(ExcavateRule):
tag_attribute_regex = bbot_regexes.tag_attribute_regex

async def process(self, yara_results, event, yara_rule_settings, discovery_context):

for identifier, results in yara_results.items():
urls_found = 0
final_url = ""
@@ -897,7 +897,6 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon
decoded_data = await self.helpers.re.recursive_decode(data)

if self.parameter_extraction:

content_type_lower = content_type.lower() if content_type else ""
extraction_map = {
"json": self.helpers.extract_params_json,
@@ -934,7 +933,6 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon
self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules")

async def handle_event(self, event):

if event.type == "HTTP_RESPONSE":
# Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled
if (
@@ -1023,7 +1021,6 @@ async def handle_event(self, event):

# Try to extract parameters from the redirect URL
if self.parameter_extraction:

for (
method,
parsed_url,