Skip to content

Commit

Permalink
Fixing WEB_PARAMETER events appearing when no modules are listening
Browse files Browse the repository at this point in the history
  • Loading branch information
liquidsec committed Aug 24, 2024
1 parent 0e1c426 commit 40b034b
Showing 1 changed file with 30 additions and 28 deletions.
58 changes: 30 additions & 28 deletions bbot/modules/internal/excavate.py
Original file line number Diff line number Diff line change
Expand Up @@ -857,33 +857,35 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon

decoded_data = await self.helpers.re.recursive_decode(data)

content_type_lower = content_type.lower() if content_type else ""
extraction_map = {
"json": self.helpers.extract_params_json,
"xml": self.helpers.extract_params_xml,
}

for source_type, extract_func in extraction_map.items():
if source_type in content_type_lower:
results = extract_func(data)
if results:
for parameter_name, original_value in results:
description = (
f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]"
)
data = {
"host": str(event.host),
"type": "SPECULATIVE",
"name": parameter_name,
"original_value": original_value,
"url": str(event.data["url"]),
"additional_params": {},
"assigned_cookies": self.assigned_cookies,
"description": description,
}
context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}"
await self.emit_event(data, "WEB_PARAMETER", event, context=context)
return
if self.parameter_extraction:

content_type_lower = content_type.lower() if content_type else ""
extraction_map = {
"json": self.helpers.extract_params_json,
"xml": self.helpers.extract_params_xml,
}

for source_type, extract_func in extraction_map.items():
if source_type in content_type_lower:
results = extract_func(data)
if results:
for parameter_name, original_value in results:
description = (
f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]"
)
data = {
"host": str(event.host),
"type": "SPECULATIVE",
"name": parameter_name,
"original_value": original_value,
"url": str(event.data["url"]),
"additional_params": {},
"assigned_cookies": self.assigned_cookies,
"description": description,
}
context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}"
await self.emit_event(data, "WEB_PARAMETER", event, context=context)
return

for result in self.yara_rules.match(data=f"{data}\n{decoded_data}"):
rule_name = result.rule
Expand Down Expand Up @@ -938,7 +940,7 @@ async def handle_event(self, event):

for header, header_values in headers.items():
for header_value in header_values:
if header.lower() == "set-cookie":
if header.lower() == "set-cookie" and self.parameter_extraction:
if "=" not in header_value:
self.debug(f"Cookie found without '=': {header_value}")
continue
Expand Down

0 comments on commit 40b034b

Please sign in to comment.