
Commit 56c661e: more tests
github-actions committed Nov 7, 2024
1 parent 15dbc3e commit 56c661e
Showing 13 changed files with 35 additions and 16 deletions.
3 changes: 1 addition & 2 deletions bbot/modules/bufferoverrun.py
@@ -44,5 +44,4 @@ async def parse_results(self, r, query):
subdomain = parts[4].strip()
if subdomain and subdomain.endswith(f".{query}"):
subdomains_set.add(subdomain)
- for subdomain in subdomains_set:
-     yield subdomain
+ return subdomains_set
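
Note on the pattern: every parse_results touched in this commit stops yielding subdomains one at a time and instead collects them into a set that is returned to the caller. A minimal, self-contained sketch of the before/after shape (illustrative only; the record layout below is invented, and the real modules parse provider-specific API responses):

import asyncio

async def parse_results_old(records, query):
    # Before: an async generator; callers had to iterate it to drain results.
    for record in records:
        subdomain = record.get("subdomain", "")
        if subdomain and subdomain.endswith(f".{query}"):
            yield subdomain

async def parse_results_new(records, query):
    # After: a plain coroutine that returns a complete set,
    # so callers can await it once and call len() on the result.
    results = set()
    for record in records:
        subdomain = record.get("subdomain", "")
        if subdomain and subdomain.endswith(f".{query}"):
            results.add(subdomain)
    return results

records = [{"subdomain": "www.example.com"}, {"subdomain": "mail.example.com"}]
print(asyncio.run(parse_results_new(records, "example.com")))
# {'www.example.com', 'mail.example.com'}
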
4 changes: 2 additions & 2 deletions bbot/modules/builtwith.py
@@ -62,7 +62,7 @@ async def request_redirects(self, query):
url = f"{self.base_url}/redirect1/api.json?KEY={{api_key}}&LOOKUP={query}"
return await self.api_request(url)

- def parse_domains(self, r, query):
+ async def parse_domains(self, r, query):
"""
This method returns a set of subdomains.
Each subdomain is an "FQDN" that was reported in the "Detailed Technology Profile" page on builtwith.com
@@ -92,7 +92,7 @@ def parse_domains(self, r, query):
self.verbose(f"No results for {query}: {error}")
return results_set

- def parse_redirects(self, r, query):
+ async def parse_redirects(self, r, query):
"""
This method creates a set.
Each entry in the set is either an Inbound or Outbound Redirect reported in the "Redirect Profile" page on builtwith.com
4 changes: 3 additions & 1 deletion bbot/modules/c99.py
@@ -27,11 +27,13 @@ async def request_url(self, query):
return await self.api_request(url)

async def parse_results(self, r, query):
+ results = set()
j = r.json()
if isinstance(j, dict):
subdomains = j.get("subdomains", [])
if subdomains:
for s in subdomains:
subdomain = s.get("subdomain", "")
if subdomain:
- yield subdomain
+ results.add(subdomain)
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/certspotter.py
@@ -18,8 +18,10 @@ def request_url(self, query):
return self.api_request(url, timeout=self.http_timeout + 30)

async def parse_results(self, r, query):
+ results = set()
json = r.json()
if json:
for r in json:
for dns_name in r.get("dns_names", []):
- yield dns_name.lstrip(".*").rstrip(".")
+ results.add(dns_name.lstrip(".*").rstrip("."))
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/chaos.py
@@ -27,6 +27,7 @@ async def request_url(self, query):
return await self.api_request(url)

async def parse_results(self, r, query):
+ results = set()
j = r.json()
subdomains_set = set()
if isinstance(j, dict):
@@ -39,4 +40,5 @@ async def parse_results(self, r, query):
for s in subdomains_set:
full_subdomain = f"{s}.{domain}"
if full_subdomain and full_subdomain.endswith(f".{query}"):
- yield full_subdomain
+ results.add(full_subdomain)
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/crt.py
@@ -24,6 +24,7 @@ async def request_url(self, query):
return await self.api_request(url, timeout=self.http_timeout + 30)

async def parse_results(self, r, query):
+ results = set()
j = r.json()
for cert_info in j:
if not type(cert_info) == dict:
@@ -35,4 +36,5 @@ async def parse_results(self, r, query):
domain = cert_info.get("name_value")
if domain:
for d in domain.splitlines():
- yield d.lower()
+ results.add(d.lower())
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/hackertarget.py
@@ -19,11 +19,13 @@ async def request_url(self, query):
return response

async def parse_results(self, r, query):
+ results = set()
for line in r.text.splitlines():
host = line.split(",")[0]
try:
self.helpers.validators.validate_host(host)
- yield host
+ results.add(host)
except ValueError:
self.debug(f"Error validating API result: {line}")
continue
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/leakix.py
@@ -37,9 +37,11 @@ async def request_url(self, query):
return response

async def parse_results(self, r, query=None):
+ results = set()
json = r.json()
if json:
for entry in json:
subdomain = entry.get("subdomain", "")
if subdomain:
- yield subdomain
+ results.add(subdomain)
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/otx.py
@@ -18,9 +18,11 @@ def request_url(self, query):
return self.api_request(url)

async def parse_results(self, r, query):
+ results = set()
j = r.json()
if isinstance(j, dict):
for entry in j.get("passive_dns", []):
subdomain = entry.get("hostname", "")
if subdomain:
- yield subdomain
+ results.add(subdomain)
+ return results
4 changes: 3 additions & 1 deletion bbot/modules/passivetotal.py
@@ -38,8 +38,10 @@ async def request_url(self, query):
return await self.api_request(url, auth=self.auth)

async def parse_results(self, r, query):
+ results = set()
for subdomain in r.json().get("subdomains", []):
yield f"{subdomain}.{query}"
results.add(f"{subdomain}.{query}")
return results

@property
def auth_secret(self):
4 changes: 3 additions & 1 deletion bbot/modules/securitytrails.py
@@ -27,7 +27,9 @@ async def request_url(self, query):
return response

async def parse_results(self, r, query):
+ results = set()
j = r.json()
if isinstance(j, dict):
for host in j.get("subdomains", []):
yield f"{host}.{query}"
results.add(f"{host}.{query}")
return results
2 changes: 1 addition & 1 deletion bbot/modules/templates/subdomain_enum.py
@@ -144,7 +144,7 @@ async def query_paginated(self, query):
agen = self.api_page_iter(url, page_size=self.page_size, **self.api_page_iter_kwargs)
try:
async for response in agen:
- subdomains = self.parse_results(response, query)
+ subdomains = await self.parse_results(response, query)
self.verbose(f'Got {len(subdomains):,} subdomains for "{query}"')
if not subdomains:
break
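
Because parse_results is now an awaitable coroutine rather than an async generator, the template must await it before using the result; only then does len(subdomains) operate on a concrete set. A simplified sketch of that calling shape, with a stub standing in for a real module's parse_results (not the actual template code, which also drives api_page_iter):

import asyncio

async def parse_results(response, query):
    # Stub: returns a set, mirroring the refactored modules in this commit.
    return {f"www.{query}", f"mail.{query}"}

async def query_paginated(query):
    subdomains = await parse_results(None, query)  # must be awaited now
    print(f'Got {len(subdomains):,} subdomains for "{query}"')
    return subdomains

asyncio.run(query_paginated("example.com"))
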
6 changes: 4 additions & 2 deletions bbot/modules/zoomeye.py
@@ -60,7 +60,7 @@ async def query(self, query):
agen = self.api_page_iter(url)
try:
async for j in agen:
- r = list(self.parse_results(j))
+ r = list(await self.parse_results(j))
if r:
results.update(set(r))
if not r or i >= (self.max_pages - 1):
@@ -71,5 +71,7 @@ async def query(self, query):
return results

async def parse_results(self, r):
+ results = set()
for entry in r.get("list", []):
yield entry["name"]
results.add(entry["name"])
return results
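
The commit message points at test coverage, though no test files appear in this diff; a hypothetical illustration of how the new return-a-set contract could be asserted in isolation (FakeResponse and the payload shape are invented for this sketch):

import asyncio

class FakeResponse:
    # Invented stand-in for an HTTP response object; only .json() is needed here.
    def __init__(self, payload):
        self._payload = payload

    def json(self):
        return self._payload

async def parse_results(r, query):
    # Same shape as the refactored modules: collect matches into a set and return it.
    results = set()
    for host in r.json().get("subdomains", []):
        results.add(f"{host}.{query}")
    return results

def test_parse_results_returns_set():
    r = FakeResponse({"subdomains": ["www", "mail"]})
    results = asyncio.run(parse_results(r, "example.com"))
    assert results == {"www.example.com", "mail.example.com"}

test_parse_results_returns_set()
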
