This repository has been archived by the owner on Apr 20, 2022. It is now read-only.

Commit

Format and lint
cugu committed Nov 25, 2018
1 parent e1c0374 commit e173c50
Showing 12 changed files with 669 additions and 152 deletions.
488 changes: 488 additions & 0 deletions .pylintrc

Large diffs are not rendered by default.

34 changes: 14 additions & 20 deletions afro/__init__.py
@@ -13,10 +13,11 @@
import io
import argparse
import logging
import sys

from kaitaistruct import KaitaiStream, BytesIO

from . import item_store, log, parse, carve, process
from . import item_store, log, parse, carve, process, libapfs

LOGO = """ `-+yhddhy+-`
.sNMMMMMMMMMMms.
@@ -31,6 +32,7 @@
`.::::.`.sh/
.so"""


class OffsetBufferedReader(io.BufferedReader):
"""docstring for OffsetBytesIO"""

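The class body is collapsed in this view. As a rough sketch only (not the committed implementation), an offset-translating reader along these lines would let the image_io.seek(0) call below land on the first byte of the APFS container:

    import io

    class OffsetBufferedReader(io.BufferedReader):
        """Expose the underlying stream as if it began `offset` bytes in."""

        def __init__(self, raw, offset):
            self._offset = offset
            super().__init__(raw)
            super().seek(offset)

        def seek(self, pos, whence=io.SEEK_SET):
            if whence == io.SEEK_SET:
                pos += self._offset  # translate logical position to physical
            return super().seek(pos, whence) - self._offset

        def tell(self):
            return super().tell() - self._offset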
@@ -58,7 +60,7 @@ def extract(args):

export = args.export
if not export:
export = ['bodyfile']
export = ['bodyfile', 'gtf', 'files']

with open(args.image, 'rb') as image_io:
image_io = OffsetBufferedReader(image_io, args.offset * 512)
@@ -69,53 +71,45 @@ def extract(args):
image_io.seek(0)

if args.method == 'parse':
file_entries = parse.parse(image_io, args.image)
file_entries = parse.parse(image_io)
elif args.method == 'carve':
if args.carver == 'nxsb':
file_entries = carve.nxsb(image_io, block_size, args.image)
file_entries = carve.nxsb(image_io, block_size)
elif args.carver == 'apsb':
file_entries = carve.apsb(image_io, block_size, args.image)
file_entries = carve.apsb(image_io, block_size)
elif args.carver == 'nodes':
file_entries = carve.nodes(image_io, block_size, args.image)
file_entries = carve.nodes(image_io, block_size)
else:
print('Carving method unknown')
sys.exit(1)
else:
print('Extraction method unknown')
sys.exit(2)


method = 'parse'
if args.method == 'carve':
method = 'carve_%s' % args.carver

# process file entries
item_store = process.process_file_entries(
file_entries,
apfs,
block_size,
image_io,
args.image,
method
)
store = process.process_file_entries(file_entries, apfs, block_size, image_io)

if 'bodyfile' in export:
item_store.save_bodyfile("%s.%s.bodyfile" % (args.image, method))
store.save_bodyfile("%s.%s.bodyfile" % (args.image, method))
if 'gtf' in export:
item_store.save_gtf("%s.%s.gtf" % (args.image, method))
store.save_gtf("%s.%s.gtf" % (args.image, method))
if 'files' in export:
item_store.save_files("%s.%s.extracted" % (args.image, method), block_size, image_io)
store.save_files("%s.%s.extracted" % (args.image, method), block_size, image_io)


def main():
""" Parse arguments and execure correct extraction method """
parser = argparse.ArgumentParser(description='Recover files from an APFS image')
parser.add_argument('-o', '--offset', type=int, default=0, help='offset to file system')
parser.add_argument('-l', '--log', default='INFO', help='set log level')
parser.add_argument('-e', '--export', action='append', default=['bodyfile', 'gtf', 'files'], choices=['bodyfile', 'gtf', 'files'], help='set outputs')
parser.add_argument('-e', '--export', action='append', choices=['bodyfile', 'gtf', 'files'], help='set outputs')
parser.add_argument('-m', '--method', default="carve", choices=['parse', 'carve'], help='set extraction method')
parser.add_argument('-c', '--carver', default="apsb", choices=['nxsb', 'apsb', 'nodes'], help='set carving method')


parser.add_argument('image', help='path to the image')
args = parser.parse_args()
extract(args)
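Given this parser, a typical carving run over the test image used below might look like afro -m carve -c apsb -o 40 test/wsdf.dmg, assuming the package installs an afro console script; the sector offset is multiplied by 512 inside extract.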
24 changes: 16 additions & 8 deletions afro/afro_test.py
@@ -1,21 +1,29 @@
import argparse

from . import main, extract
from . import extract

def wc(path):
with open(path) as f:
for i, l in enumerate(f):

def line_count(path):
i = 0
with open(path) as lines:
for i, _ in enumerate(lines):
pass
return i + 1


def test_parse():
extract(argparse.Namespace(carver='apsb', export=['bodyfile', 'files'], image='test/wsdf.dmg', log='INFO', method='parse', offset=40))
extract(
argparse.Namespace(
carver='apsb', export=['bodyfile', 'files'], image='test/wsdf.dmg', log='INFO', method='parse', offset=40))

# should extract 290 items
assert 26 == wc('test/wsdf.dmg.parse.bodyfile')
assert line_count('test/wsdf.dmg.parse.bodyfile') == 26


def test_carve():
extract(argparse.Namespace(carver='apsb', export=['bodyfile', 'files'], image='test/wsdf.dmg', log='INFO', method='carve', offset=40))
extract(
argparse.Namespace(
carver='apsb', export=['bodyfile', 'files'], image='test/wsdf.dmg', log='INFO', method='carve', offset=40))

# should extract 290 items
assert 290 == wc('test/wsdf.dmg.carve_apsb.bodyfile')
assert line_count('test/wsdf.dmg.carve_apsb.bodyfile') == 290
4 changes: 1 addition & 3 deletions afro/block.py
@@ -1,6 +1,4 @@


def get_block(idx, block_size, file_io):
""" Get data of a single block """
file_io.seek(idx * block_size)
return file_io.read(block_size)
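A small usage sketch (hypothetical image path, assuming the common 4096-byte APFS block size): block 0 of a container holds the superblock, whose NXSB magic sits at offset 32:

    with open('apfs.dmg', 'rb') as image_io:        # hypothetical path
        superblock = get_block(0, 4096, image_io)   # container superblock lives in block 0
        print(superblock[32:36] == b'NXSB')         # True for an APFS container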
35 changes: 21 additions & 14 deletions afro/carve.py
@@ -13,35 +13,42 @@
-> parse from 9.
"""
import logging
import os

from kaitaistruct import KaitaiStream, BytesIO

from . import libapfs, parse, checksum, block

LOGGER = logging.getLogger(__name__)

def nxsb(image_io, blocksize, image_name):
return carve(image_io, blocksize, 'nxsb', match_magic_func(b'NXSB'), parse.parse_nxsb, image_name)

def apsb(image_io, blocksize, image_name):
return carve(image_io, blocksize, 'apsb', match_magic_func(b'APSB'), parse.parse_apsb, image_name)
def nxsb(image_io, blocksize):
return carve(image_io, blocksize, 'nxsb', match_magic_func(b'NXSB'), parse.parse_nxsb)


def apsb(image_io, blocksize):
return carve(image_io, blocksize, 'apsb', match_magic_func(b'APSB'), parse.parse_apsb)


def nodes(image_io, blocksize):
return carve(image_io, blocksize, 'nodes', match_nodes, parse.parse_node)

def nodes(image_io, blocksize, image_name):
return carve(image_io, blocksize, 'nodes', match_nodes, parse.parse_node, image_name)

def match_magic_func(magic):

def match_magic(data):
return data[32:36] == magic and checksum.check_checksum(data)

return match_magic


def match_nodes(data):
obj_type = int.from_bytes(data[24:26], byteorder='little')
subtype = int.from_bytes(data[28:30], byteorder='little')

return (obj_type == 2 or obj_type == 3) and subtype == 14 and checksum.check_checksum(data)
return (obj_type in (2, 3)) and subtype == 14 and checksum.check_checksum(data)
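For orientation: the 32-byte APFS object header is checksum (bytes 0-8), oid (8-16), xid (16-24), a 32-bit type whose low half is read at 24:26 (2 = b-tree root node, 3 = b-tree node), and a 32-bit subtype whose low half is read at 28:30 (14 = file-system tree). A worked sketch with hypothetical oid/xid values:

    import struct

    # checksum (faked as 0), oid, xid, type = 3, subtype = 14
    header = struct.pack('<QQQII', 0, 1234, 5, 3, 14)
    obj_type = int.from_bytes(header[24:26], byteorder='little')  # -> 3
    subtype = int.from_bytes(header[28:30], byteorder='little')   # -> 14
    print(obj_type in (2, 3) and subtype == 14)                   # True (checksum check aside)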


def carve(image_io, blocksize, name, magic, get_file_entries_func, image_name):
def carve(image_io, blocksize, name, magic, get_file_entries_func):
""" parse image and print files """

# get file entries
@@ -53,15 +60,15 @@ def carve(image_io, blocksize, name, magic, get_file_entries_func, image_name):
if not data:
break
elif magic(data):
LOGGER.info('Found %s in block %i' % (name, i))
LOGGER.info('Found %s in block %i', name, i)
try:
obj = apfs.Obj(KaitaiStream(BytesIO(data)), apfs, apfs)
fe = get_file_entries_func(obj, apfs)
for xid in fe:
carved_file_entries = get_file_entries_func(obj, apfs)
for xid in carved_file_entries:
file_entries.setdefault(xid, dict())
for volume in fe[xid]:
for volume in carved_file_entries[xid]:
file_entries[xid].setdefault(volume, list())
file_entries[xid][volume] += fe[xid][volume]
file_entries[xid][volume] += carved_file_entries[xid][volume]
except Exception as err:
LOGGER.info(err)
i += 1
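The setdefault calls merge results from every matching block, so file_entries ends up keyed by transaction, then by volume (hypothetical shape):

    file_entries = {
        22: {                            # xid (transaction id)
            some_volume: [entry_a],      # per-volume list of recovered entries
        },
    }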
29 changes: 15 additions & 14 deletions afro/checksum.py
@@ -1,32 +1,33 @@
import numpy as np


def create_checksum(data):
sum1 = np.uint64(0)
sum2 = np.uint64(0)

modValue = np.uint64(4294967295) # 2<<31 - 1
mod_value = np.uint64(4294967295) # 2<<31 - 1

for i in range(int(len(data)/4)):
dt = np.dtype(np.uint32)
dt = dt.newbyteorder('L')
d = np.frombuffer(data[i*4:(i+1)*4], dtype=dt)
for i in range(int(len(data) / 4)):
dtype = np.dtype(np.uint32)
dtype = dtype.newbyteorder('L')
data = np.frombuffer(data[i * 4:(i + 1) * 4], dtype=dtype)

sum1 = (sum1 + np.uint64(d)) % modValue
sum2 = (sum2 + sum1) % modValue
sum1 = (sum1 + np.uint64(data)) % mod_value
sum2 = (sum2 + sum1) % mod_value

check1 = modValue - ((sum1 + sum2) % modValue)
check2 = modValue - ((sum1 + check1) % modValue)
check1 = mod_value - ((sum1 + sum2) % mod_value)
check2 = mod_value - ((sum1 + check1) % mod_value)

return (check2 << 32) | check1


def check_checksum(data):
dt = np.dtype(np.uint64)
dt = dt.newbyteorder('L')
return (np.frombuffer(data[:8], dtype=dt) == create_checksum(data[8:]))[0]
dtype = np.dtype(np.uint64)
dtype = dtype.newbyteorder('L')
return (np.frombuffer(data[:8], dtype=dtype) == create_checksum(data[8:]))[0]


if __name__ == '__main__':

with open('test.blk', 'rb') as io:
data = io.read()
print(check_checksum(data))
print(check_checksum(io.read()))
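This is the Fletcher-64 variant APFS uses: sums run modulo 2**32 - 1 over little-endian 32-bit words, and the stored value is chosen so that the whole block verifies. Note that the linted loop above rebinds data inside create_checksum, which looks like it would clobber the input buffer after the first word; a standalone plain-Python sketch of the same algorithm keeps the names separate:

    def fletcher64(payload):
        """APFS-style checksum over a block body (everything after the first 8 bytes)."""
        mod = 0xFFFFFFFF  # 2**32 - 1
        sum1 = sum2 = 0
        for i in range(0, len(payload), 4):
            sum1 = (sum1 + int.from_bytes(payload[i:i + 4], 'little')) % mod
            sum2 = (sum2 + sum1) % mod
        check1 = mod - ((sum1 + sum2) % mod)
        check2 = mod - ((sum1 + check1) % mod)
        return (check2 << 32) | check1

    def verify_block(block):
        return int.from_bytes(block[:8], 'little') == fletcher64(block[8:])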
51 changes: 38 additions & 13 deletions afro/item_store.py
@@ -8,6 +8,7 @@

LOGGER = logging.getLogger(__name__)


class ItemStore:
def __init__(self):
self.items = []
@@ -17,17 +18,33 @@ def reset(self):
self.items = []
self.seen = set()

def add_item(self, item_type, xid, parent_id, item_id, status, volume, path, name, accesstime, modificationtime, creationtime, size, md5, extents=[]):
def add_item(self,
item_type,
xid,
parent_id,
item_id,
status,
volume,
path,
name,
atime,
mtime,
ctime,
size,
md5,
extents=None):
if extents is None:
extents = []
if status == "exists" and (name is None or name == ""):
print("name not found (%d, %d, %d)" % (xid, parent_id, item_id))
new_item = {
'id': item_id,
'xid': xid,
'parent_id': parent_id,
'name': name,
'atime': accesstime,
'mtime': modificationtime,
'crtime': creationtime,
'atime': atime,
'mtime': mtime,
'crtime': ctime,
'size': size,
'md5': md5,
'type': item_type,
@@ -39,7 +56,7 @@ def add_item(self, item_type, xid, parent_id, item_id, status, volume, path, nam
if tuple(new_item) not in self.seen:
self.items.append(new_item)
hitem = copy.deepcopy(new_item)
del hitem['extents'] # TODO list is not hashable
del hitem['extents'] # TODO list is not hashable
self.seen.add(tuple(hitem.items()))

def save_files(self, name, blocksize, image_file_io):
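The extents=None guard above replaces the mutable default extents=[] from the old signature; with a mutable default, the same list object is shared by every call that omits the argument. A minimal standalone demonstration of the pitfall:

    def bad(items=[]):   # the default list is created once, at function definition time
        items.append(1)
        return items

    print(bad())  # [1]
    print(bad())  # [1, 1] -- state leaks between calls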
@@ -63,16 +80,15 @@ def save_files(self, name, blocksize, image_file_io):
with open(file_path, 'bw+') as file_io:
remaining = (item['size'] or 0)
for extent in item['extents']:
for b in range(int(extent['length'] / blocksize)):
data = block.get_block(extent['start'] + b, blocksize, image_file_io)
for block_part in range(int(extent['length'] / blocksize)):
data = block.get_block(extent['start'] + block_part, blocksize, image_file_io)
if remaining < blocksize:
chunk_size = remaining
else:
chunk_size = blocksize
remaining -= chunk_size
file_io.write(data[:chunk_size])


def save_bodyfile(self, name):
# add suffix if file exists
basename = name
@@ -83,15 +99,24 @@ def save_bodyfile(self, name):

with open(name, 'w+') as csvfile:
fieldnames = [
'md5', 'name', 'id', 'mode', 'uid', 'gid',
'size', 'atime', 'mtime', 'ctime', 'crtime',
'md5',
'name',
'id',
'mode',
'uid',
'gid',
'size',
'atime',
'mtime',
'ctime',
'crtime',
]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames, delimiter='|', extrasaction='ignore')
writer.writeheader()
for item in self.items:
item = copy.deepcopy(item)
xid = item['xid'] if item['xid'] is not None else 0
item['id'] = "%s-%s-%s" % (0, xid, item['id']) # TODO: add volume number
item['id'] = "%s-%s-%s" % (0, xid, item['id']) # TODO: add volume number
item['name'] = posixpath.join(item['path'], item['name'])
item['mode'] = 'f' if item['type'] == 'file' else 'd'
item['uid'] = 0
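The columns follow The Sleuth Kit body file layout (MD5|name|inode|mode|UID|GID|size|atime|mtime|ctime|crtime), with the synthesized volume-xid-id triple standing in for the inode, so a written row might look like this (hypothetical values):

    0|/Documents/report.txt|0-22-17|f|0|0|5120|1543104000|1543104000|1543104000|1543104000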
@@ -108,8 +133,8 @@ def save_gtf(self, name):

with open(name, 'w+') as csvfile:
fieldnames = [
'type', 'xid', 'parent_id', 'id', 'status', 'volume',
'path', 'name', 'atime', 'mtime', 'crtime', 'size', 'md5'
'type', 'xid', 'parent_id', 'id', 'status', 'volume', 'path', 'name', 'atime', 'mtime', 'crtime',
'size', 'md5'
]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore')
writer.writeheader()
