Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

File Tree Feature #23

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion man_spider/lib/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,13 @@ class RemoteFile():
Passed from a spiderling up to its parent spide
r '''

def __init__(self, name, share, target, size=0):
def __init__(self, name, share, target, size=0, file_tree_node=None):

self.share = share
self.target = target
self.name = name
self.size = size
self.file_tree_node = file_tree_node
self.smb_client = None

file_suffix = Path(name).suffix.lower()
Expand Down
22 changes: 13 additions & 9 deletions man_spider/lib/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,13 +88,17 @@ def _monitor(self):
### LOG TO FILE ###

log_queue = Queue()
listener = CustomQueueListener(log_queue, console)
sender = QueueHandler(log_queue)
logging.getLogger('manspider').handlers = [sender]

logdir = Path.home() / '.manspider' / 'logs'
logdir.mkdir(parents=True, exist_ok=True)
logfile = f'manspider_{datetime.now().strftime("%m-%d-%Y")}.log'
handler = logging.FileHandler(str(logdir / logfile))
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logging.getLogger('manspider').addHandler(handler)
listener = CustomQueueListener(log_queue, console)

def setup_file_logging(logdir=None):
    '''
    Attach file logging to the 'manspider' logger.

    Replaces the logger's handlers with the queue-based sender (so records
    flow through the listener thread), then adds a date-stamped FileHandler.

    logdir: optional directory for log files; any empty/None value falls
            back to ~/.manspider/logs. The directory is created if missing.
    '''
    logging.getLogger('manspider').handlers = [sender]
    # truthiness covers both None and '' (the argparse default)
    logdir = Path(logdir) if logdir else Path.home() / '.manspider' / 'logs'
    logdir.mkdir(parents=True, exist_ok=True)
    logfile = f'manspider_{datetime.now().strftime("%m-%d-%Y")}.log'
    handler = logging.FileHandler(str(logdir / logfile))
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logging.getLogger('manspider').addHandler(handler)
19 changes: 17 additions & 2 deletions man_spider/lib/smb.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import string
import random
import ntpath
import struct
import logging
Expand Down Expand Up @@ -37,8 +39,9 @@ def shares(self):
resp = self.conn.listShares()
for i in range(len(resp)):
sharename = resp[i]['shi1_netname'][:-1]
remark = resp[i]['shi1_remark'][:-1]
log.debug(f'{self.server}: Found share: {sharename}')
yield sharename
yield (sharename, remark)

except Exception as e:
e = handle_impacket_error(e, self)
Expand Down Expand Up @@ -139,7 +142,19 @@ def ls(self, share, path):
e = handle_impacket_error(e, self)
raise FileListError(f'{e.args}: Error listing files at "{share}{nt_path}"')


def check_write_access(self, share):
    '''
    Check if the share is writeable by creating and deleting a
    randomly-named directory at the share root.

    Returns True if the directory could be created (and deleted),
    False on any SMB SessionError (e.g. STATUS_ACCESS_DENIED).
    '''
    temp_dir = ntpath.normpath('\\' + ''.join(random.sample(string.ascii_letters, 10)))
    try:
        self.conn.createDirectory(share, temp_dir)
        self.conn.deleteDirectory(share, temp_dir)
    except SessionError as e:
        # original code re-raised here, which made the return False
        # unreachable and crashed the share-enumeration loop that
        # consumes this as a boolean; report non-writeable instead
        log.debug(f'{self.server}: {share}: not writeable ({e})')
        return False
    log.info(f'{self.server}: {share}: is writeable!')
    return True

def rebuild(self, error=''):
'''
Expand Down
11 changes: 11 additions & 0 deletions man_spider/lib/spider.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def __init__(self, options):
self.dir_blacklist = options.exclude_dirnames

self.no_download = options.no_download
self.check_write_access = options.check_write

# applies "or" logic instead of "and"
# e.g. file is downloaded if filename OR extension OR content match
Expand Down Expand Up @@ -73,6 +74,16 @@ def __init__(self, options):
if not options.no_download:
log.info(f'Matching files will be downloaded to {self.loot_dir}')

# file tree option and the directory to store its results
self.file_tree = options.file_tree
if options.file_tree_dir:
self.file_tree_dir=Path(options.file_tree_dir)
else:
self.file_tree_dir = Path.home() / '.manspider' / 'file_tree'
if self.file_tree:
log.info(f'Generating file trees of spidered shares in {self.file_tree_dir}')
self.file_tree_dir.mkdir(parents=True, exist_ok=True)


def start(self):

Expand Down
109 changes: 93 additions & 16 deletions man_spider/lib/spiderling.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import string
import logging
import pathlib
import time
import json
from .smb import *
from .file import *
from .util import *
Expand Down Expand Up @@ -114,7 +116,7 @@ def go(self):
else:
# remote files
for file in self.files:

# if content searching is enabled, parse the file
if self.parent.parser.content_filters:
try:
Expand All @@ -129,7 +131,10 @@ def go(self):
log.info(f'{self.target}: {file.share}\\{file.name} ({bytes_to_human(file.size)})')
if not self.parent.no_download:
self.save_file(file)


if self.parent.file_tree:
self.save_file_tree()

log.info(f'Finished spidering {self.target}')


Expand All @@ -155,14 +160,24 @@ def files(self):
log.debug(f'Skipping {file}: does not match filename/extension filters')

else:
for share in self.shares:
for remote_file in self.list_files(share):
self.file_tree = {self.target: {}}
for share_name, share_remark in self.shares:
file_tree_share_node = None
if self.parent.check_write_access:
is_write_access = self.smb_client.check_write_access(share_name)
share_permission = ('read', 'write') if is_write_access else ('read',)
else:
share_permission = ('read', '?')
if self.parent.file_tree:
file_tree_share_node = self.file_tree[self.target][share_name] = {"_meta": {"remark": share_remark, "permission": share_permission}}
for remote_file in self.list_files(share_name, file_tree_node=file_tree_share_node):
if not self.parent.no_download or self.parent.parser.content_filters:
self.get_file(remote_file)
yield remote_file




def parse_file(self, file):
'''
Simple wrapper around self.parent.parser.parse_file()
Expand Down Expand Up @@ -199,20 +214,16 @@ def shares(self):
Lists all shares on single target
'''

for share in self.smb_client.shares:
if self.share_match(share):
yield share
for share_name, share_remarks in self.smb_client.shares:
if self.share_match(share_name):
yield (share_name, share_remarks)



def list_files(self, share, path='', depth=0, tries=2):
def list_files(self, share, path='', depth=0, tries=2, file_tree_node=None):
'''
List files inside a specific directory
Only yield files which conform to all filters (except content)
'''

if depth < self.parent.maxdepth and self.dir_match(path):

files = []
while tries > 0:
try:
Expand All @@ -224,20 +235,24 @@ def list_files(self, share, path='', depth=0, tries=2):
break
else:
tries -= 1

if files:
log.debug(f'{self.target}: {share}{path}: contains {len(files):,} items')

for f in files:
name = f.get_longname()
full_path = f'{path}\\{name}'

if file_tree_node is not None:
file_tree_node[name] = {"_meta": {"size": f.get_filesize(), "created_at": time.ctime(float(f.get_ctime_epoch())), "modified_at": time.ctime(float(f.get_mtime_epoch())), "looted": False}}

# if it's a directory, go deeper
if f.is_directory():
for file in self.list_files(share, full_path, (depth+1)):
file_tree_node[name]["_meta"]["smbclient_cmd"] = self.get_smbclient_command(share, f'{path}/{name}')
for file in self.list_files(share, full_path, (depth+1), file_tree_node=file_tree_node[name] if file_tree_node is not None else None):
yield file

else:

# skip the file if it didn't match extension filters
if self.extension_blacklisted(name):
log.debug(f'{self.target}: Skipping {share}{full_path}: extension is blacklisted')
Expand Down Expand Up @@ -265,7 +280,7 @@ def list_files(self, share, path='', depth=0, tries=2):

# make the RemoteFile object (the file won't be read yet)
full_path_fixed = full_path.lstrip('\\')
remote_file = RemoteFile(full_path_fixed, share, self.target, size=filesize)
remote_file = RemoteFile(full_path_fixed, share, self.target, size=filesize, file_tree_node=file_tree_node[name] if file_tree_node is not None else None)

# if it's a non-empty file that's smaller than the size limit
if filesize > 0 and filesize < self.parent.max_filesize:
Expand All @@ -285,6 +300,9 @@ def list_files(self, share, path='', depth=0, tries=2):

else:
log.debug(f'{self.target}: {full_path} is either empty or too large')
else:
if file_tree_node is not None:
file_tree_node["[...]"] = None


def path_match(self, file):
Expand Down Expand Up @@ -440,6 +458,8 @@ def save_file(self, remote_file):
move(str(remote_file.tmp_filename), str(loot_dest))
except Exception:
log.warning(f'Error saving {remote_file}')
if remote_file.file_tree_node is not None:
remote_file.file_tree_node['_meta']['looted'] = True


def get_file(self, remote_file):
Expand All @@ -457,3 +477,60 @@ def get_file(self, remote_file):

return False


def get_smbclient_command(self, share, path):
    '''
    Build an smbclient command line that opens *path* on *share* of this
    target, reusing this spiderling's credentials, for further manual
    inspection.
    '''
    parts = ['smbclient']

    if self.smb_client.domain and self.smb_client.username:
        parts.append(f"-U '{self.smb_client.domain}/{self.smb_client.username}'")
    elif self.smb_client.username:
        parts.append(f"-U '{self.smb_client.username}'")
    if self.smb_client.username not in ('Guest', ''):
        # quote the secret: passwords/hashes may contain spaces or shell
        # metacharacters (username and path are already quoted above/below)
        parts.append(f"--password='{self.smb_client.password or self.smb_client.nthash}'")
    if not self.smb_client.password and self.smb_client.nthash:
        # tell smbclient the supplied secret is an NT hash, not a password
        parts.append("--pw-nt-hash")
    parts.append(f"-D '{path}' '//{self.target}/{share}/'")
    return ' '.join(parts)

def save_file_tree(self):
'''
Generate and save the file tree for this spiderling
'''
file_tree_path = self.parent.file_tree_dir / f"{self.target}.json"
with open(file_tree_path, "w") as file_tree_json_file:
json.dump(self.file_tree, file_tree_json_file)

def write_node(node, level=0):
for key,value in node.items():
if key == '_meta':
continue
parts = [' ' *level, key]
try:
if len(value["_meta"]["remark"]) > 0:
parts.append(f" - {value['_meta']['remark']}")
except:
pass
try:
if "write" in value["_meta"]["permission"]:
parts.append(" - [WRITE]")
except:
pass
try:
if value["_meta"]["looted"]:
parts.append(" - [LOOTED]")
except:
pass
parts.append("\n")
file_tree_txt_file.write(''.join(parts))
if isinstance(value, dict):
write_node(value, level+1)

file_tree_path = self.parent.file_tree_dir / f"{self.target}.txt"
with open(file_tree_path, "w") as file_tree_txt_file:
write_node(self.file_tree)



9 changes: 7 additions & 2 deletions man_spider/manspider.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ def main():
parser.add_argument('-u', '--username', default='', help='username for authentication')
parser.add_argument('-p', '--password', default='', help='password for authentication')
parser.add_argument('-d', '--domain', default='', help='domain for authentication')
parser.add_argument('-l','--loot-dir', default='', help='loot directory (default ~/.manspider/)')
parser.add_argument('-l','--loot-dir', default='', help='loot directory (default ~/.manspider/loot/)')
parser.add_argument('-m', '--maxdepth', type=int, default=10, help='maximum depth to spider (default: 10)')
parser.add_argument('-H', '--hash', default='', help='NTLM hash for authentication')
parser.add_argument('-t', '--threads', type=int, default=5, help='concurrent threads (default: 5)')
Expand All @@ -95,6 +95,10 @@ def main():
parser.add_argument('--exclude-sharenames', nargs='*', default=['IPC$', 'C$', 'ADMIN$', 'PRINT$'],help='don\'t search shares with these names (multiple supported)', metavar='SHARE')
parser.add_argument('--dirnames', nargs='+', default=[], help='only search directories containing these strings (multiple supported)', metavar='DIR')
parser.add_argument('--exclude-dirnames', nargs='+', default=[], help='don\'t search directories containing these strings (multiple supported)', metavar='DIR')
parser.add_argument('--file-tree', action='store_true', default=False, help='Generate an file tree of each spidered host (json and txt)')
parser.add_argument('--file-tree-dir', default='', help='file tree directory (default ~/.manspider/file_tree/)')
parser.add_argument('--check-write', action='store_true', help='Check if it is possible to write to the share')
parser.add_argument('--log-dir', default='', help='log directory (default ~/.manspider/logs/)')
parser.add_argument('-q', '--quiet', action='store_true', help='don\'t display matching file content')
parser.add_argument('-n', '--no-download', action='store_true', help='don\'t download matching files')
parser.add_argument('-mfail', '--max-failed-logons', type=int, help='limit failed logons', metavar='INT')
Expand All @@ -114,6 +118,7 @@ def main():

if options.verbose:
log.setLevel('DEBUG')
setup_file_logging(options.log_dir)

# make sure extension formats are valid
for i, extension in enumerate(options.extensions):
Expand Down Expand Up @@ -178,4 +183,4 @@ def main():


if __name__ == '__main__':
main()
main()