From aa4951ec69368b0e0d6b87e434b7fd0119bc9cee Mon Sep 17 00:00:00 2001
From: genomewalker
Date: Sat, 19 Nov 2022 07:30:05 +0100
Subject: [PATCH] Minor

---
 bam_filter/__main__.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/bam_filter/__main__.py b/bam_filter/__main__.py
index 474d31c..1951a01 100644
--- a/bam_filter/__main__.py
+++ b/bam_filter/__main__.py
@@ -105,12 +105,14 @@ def main():
     logging.info("Reducing results to a single dataframe")
     data = list(filter(None, data))
     data_df = [x[0] for x in data]
+    data_df = list(filter(None, data_df))
     # data_df = fast_flatten(list(filter(None, data_df)))
     data_df = concat_df(data_df)

     if args.read_length_freqs:
         logging.info("Calculating read length frequencies...")
         lens = [x[1] for x in data]
+        lens = list(filter(None, lens))
         lens = json.dumps(lens, default=obj_dict, ensure_ascii=False, indent=4)
         with open(out_files["read_length_freqs"], "w", encoding="utf-8") as outfile:
             print(lens, file=outfile)
@@ -118,7 +120,7 @@ def main():
     if args.read_hits_count:
         logging.info("Calculating read hits counts...")
         hits = [x[2] for x in data]
-
+        hits = list(filter(None, hits))
         # merge dicts and sum values
         hits = reduce(lambda x, y: x.update(y) or x, (Counter(dict(x)) for x in hits))
         # hits = sum(map(Counter, hits), Counter())
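
Note (not part of the patch): the change filters empty entries out of each column of the per-reference results before they are concatenated or merged, so a worker that returned None for the dataframe, read-length, or hit-count slot no longer breaks the downstream steps. A minimal sketch of that pattern, using hypothetical sample data in place of the real worker output:

    # Illustration only; `data` below is made-up stand-in output, not bam_filter's.
    from collections import Counter
    from functools import reduce

    data = [
        ({"ref": "A", "n_reads": 10}, {"A": [100, 150]}, {"read1": 1, "read2": 2}),
        None,                                  # reference with no results at all
        ({"ref": "B", "n_reads": 5}, None, {"read2": 1, "read3": 4}),
    ]

    data = list(filter(None, data))            # drop empty worker results

    # filter each slot separately, as the patch does for data_df, lens and hits
    data_df = list(filter(None, (x[0] for x in data)))
    lens = list(filter(None, (x[1] for x in data)))
    hits = list(filter(None, (x[2] for x in data)))

    # merge the per-reference hit dicts and sum their values, as in the patch
    hits = reduce(lambda x, y: x.update(y) or x, (Counter(dict(x)) for x in hits))
    print(hits)  # Counter({'read3': 4, 'read2': 3, 'read1': 1})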