Commit
successful crawling of postman workspaces
domwhewell-sage committed Dec 14, 2023
1 parent 24bd855 commit 98623db
Showing 2 changed files with 148 additions and 247 deletions.
54 changes: 43 additions & 11 deletions bbot/modules/postman.py
@@ -1,7 +1,7 @@
-from bbot.modules.templates.subdomain_enum import subdomain_enum_apikey
+from bbot.modules.base import BaseModule


-class postman(subdomain_enum_apikey):
+class postman(BaseModule):
     watched_events = ["DNS_NAME"]
     produced_events = ["URL_UNVERIFIED"]
     flags = ["passive", "subdomain-enum", "safe"]
@@ -18,7 +18,10 @@ class postman(subdomain_enum_apikey):
     }

     async def handle_event(self, event):
-        query = self.make_query(event)
+        if "target" in event.tags:
+            query = str(event.data)
+        else:
+            query = self.helpers.parent_domain(event.data).lower()
         self.verbose(f"Search for any postman workspaces, collections, requests belonging to {query}")
         for url in await self.query(query):
             self.emit_event(url, "URL_UNVERIFIED", source=event)
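The rewritten handle_event searches events tagged as targets verbatim and rolls other discovered names up one DNS label before querying. A minimal sketch of that derivation, assuming helpers.parent_domain() drops the left-most label (the standalone functions below are illustrative, not BBOT code):

    # Illustrative mirror of the branch added to handle_event above.
    def derive_query(data: str, is_target: bool) -> str:
        if is_target:
            return str(data)  # e.g. "evilcorp.com" is searched as-is
        return parent_domain(data).lower()  # e.g. "www.Api.EVILCORP.com" -> "api.evilcorp.com"

    def parent_domain(domain: str) -> str:
        # Assumed semantics of BBOT's helpers.parent_domain(): drop the left-most DNS label.
        return domain.split(".", 1)[-1]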
@@ -49,7 +52,7 @@ async def query(self, query):
"domain": "public",
},
}
r = await self.helpers.request(url, json, headers=self.headers)
r = await self.helpers.request(url, method="POST", json=json, headers=self.headers)
if r is None:
return interesting_urls
status_code = getattr(r, "status_code", 0)
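The one-line fix above matters more than it looks: the old call passed the JSON payload positionally, so the search body was never sent as a POST body. Assuming helpers.request() wraps an httpx-style async client, the corrected call amounts to roughly this (names below are illustrative):

    import httpx

    # Rough plain-httpx equivalent of request(url, method="POST", json=json, headers=...).
    async def post_search(url: str, payload: dict, headers: dict) -> httpx.Response | None:
        async with httpx.AsyncClient() as client:
            try:
                return await client.post(url, json=payload, headers=headers)
            except httpx.HTTPError:
                return None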
@@ -59,21 +62,39 @@
self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
return interesting_urls
workspaces = []
for item in json["data"]:
for item in json.get("data", {}):
for workspace in item.get("document", {}).get("workspaces", []):
if workspace not in workspaces:
workspaces.append(workspace)
for workspace in workspaces:
for item in workspaces:
id = item.get("id", "")
interesting_urls.append(f"{self.base_url}/workspace/{id}")
environments, collections = await self.search_workspace(id)
interesting_urls.append(f"{self.base_url}/workspace/{id}/globals")
for c_id in workspace["dependencies"]["collections"]:
interesting_urls.append(f"https://www.postman.com/_api/collection/{c_id}")
requests = await self.search_collections(r_id)
for e_id in environments:
interesting_urls.append(f"{self.base_url}/environment/{e_id}")
for c_id in collections:
interesting_urls.append(f"{self.base_url}/collection/{c_id}")
requests = await self.search_collections(id)
for r_id in requests:
interesting_urls.append(f"{self.base_url}/request/{r_id}")
return interesting_urls

async def search_workspace(self, id):
url = f"{self.base_url}/workspace/{id}"
r = await self.helpers.request(url)
if r is None:
return [], []
status_code = getattr(r, "status_code", 0)
try:
json = r.json()
except Exception as e:
self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
return [], []
environments = json.get("data", {}).get("dependencies", {}).get("environments", [])
collections = json.get("data", {}).get("dependencies", {}).get("collections", [])
return environments, collections

async def search_collections(self, id):
request_ids = []
url = f"{self.base_url}/list/collection?workspace={id}"
@@ -86,6 +107,17 @@ async def search_collections(self, id):
         except Exception as e:
             self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}")
             return request_ids
-        for collection in json["data"]:
-            request_ids.append(collection["requests"])
+        for item in json.get("data", {}):
+            request_ids.extend(await self.parse_collection(item))
         return request_ids

+    async def parse_collection(self, json):
+        request_ids = []
+        folders = json.get("folders", [])
+        requests = json.get("requests", [])
+        for folder in folders:
+            request_ids.extend(await self.parse_collection(folder))
+        for request in requests:
+            r_id = request.get("id", "")
+            request_ids.append(r_id)
+        return request_ids
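The new parse_collection walks nested folders depth-first before collecting the current level's request IDs. A standalone synchronous sketch of the same traversal over an invented payload (structure inferred from the code, not from Postman's docs):

    # Synchronous mirror of parse_collection, for illustration only.
    def parse_collection(node: dict) -> list:
        request_ids = []
        for folder in node.get("folders", []):  # recurse into sub-folders first
            request_ids.extend(parse_collection(folder))
        for request in node.get("requests", []):  # then collect this level's requests
            request_ids.append(request.get("id", ""))
        return request_ids

    sample = {
        "requests": [{"id": "r1"}],
        "folders": [{"folders": [], "requests": [{"id": "r2"}]}],
    }
    assert parse_collection(sample) == ["r2", "r1"]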
