Merge pull request #1011 from griffithlab/pvacfuse_unprocessable_2
Better handling when none of the fusions are processable by pVACfuse
susannasiebert authored Aug 7, 2023
2 parents f93fbd9 + aa6d89b commit 868f34f
Showing 2 changed files with 7 additions and 6 deletions.
pvactools/lib/pipeline.py (2 changes: 0 additions, 2 deletions)
@@ -367,7 +367,6 @@ def parse_outputs(self, chunks):
             for a in self.alleles:
                 for epl in self.epitope_lengths:
                     split_iedb_output_files = []
-                    status_message("Parsing binding predictions for Allele %s and Epitope Length %s - Entries %s" % (a, epl, fasta_chunk))
                     for method in self.prediction_algorithms:
                         prediction_class = globals()[method]
                         prediction = prediction_class()
@@ -673,7 +672,6 @@ def parse_outputs(self, chunks, length):
             fasta_chunk = "%d-%d" % (split_start*2-1, split_end*2)
             for a in self.alleles:
                 split_iedb_output_files = []
-                status_message("Parsing binding predictions for Allele %s and Epitope Length %s - Entries %s" % (a, length, fasta_chunk))
                 for method in self.prediction_algorithms:
                     prediction_class = globals()[method]
                     prediction = prediction_class()
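The two deletions above drop a per-allele, per-epitope-length, per-chunk progress message from both parse_outputs implementations, which cuts down log noise considerably on large inputs. As a rough illustration only (the allele, length, and chunk values below are made up; the format string is the one from the removed calls), each removed status_message produced a line of this shape:

# Hypothetical stand-in values; pVACtools fills these in from the current
# allele loop, epitope length, and FASTA chunk being parsed.
a, epl, fasta_chunk = "HLA-A*02:01", 9, "1-48"

# Same format string as the removed status_message() calls, so one such line
# was emitted per allele / epitope length / chunk combination.
print("Parsing binding predictions for Allele %s and Epitope Length %s - Entries %s" % (a, epl, fasta_chunk))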
pvactools/tools/pvacfuse/run.py (11 changes: 7 additions, 4 deletions)
@@ -220,9 +220,10 @@ def main(args_input = sys.argv[1:]):
                 pipeline = PvacbindPipeline(**run_arguments)
                 pipeline.execute()
                 intermediate_output_file = os.path.join(per_epitope_output_dir, "{}.all_epitopes.tsv".format(args.sample_name))
-                output_file = os.path.join(per_epitope_output_dir, "{}.all_epitopes.final.tsv".format(args.sample_name))
-                append_columns(intermediate_output_file, "{}.tsv".format(input_file), output_file)
-                output_files.append(output_file)
+                if os.path.exists(intermediate_output_file):
+                    output_file = os.path.join(per_epitope_output_dir, "{}.all_epitopes.final.tsv".format(args.sample_name))
+                    append_columns(intermediate_output_file, "{}.tsv".format(input_file), output_file)
+                    output_files.append(output_file)
                 if epitope_length == max(epitope_lengths):
                     # copy fasta to output dir
                     fasta_file = os.path.join(output_dir, "{}.fasta".format(args.sample_name))
@@ -239,10 +240,12 @@ def main(args_input = sys.argv[1:]):
                 #!!! make below call to create_net_class_report
                 #create_combined_reports(output_files, all_epitopes_file, filtered_file, True, args)
                 create_net_class_report(output_files, all_epitopes_file, filtered_file, args, run_arguments)
+            else:
+                print("\nNo processable fusions found. Aborting.\n")
         elif len(prediction_algorithms) == 0:
             print("No MHC class {} prediction algorithms chosen. Skipping MHC class I predictions.".format(mhc_class))
         elif len(alleles) == 0:
-            print("No MHC class{} alleles chosen. Skipping MHC class I predictions.".format(mhc_class))
+            print("No MHC class {} alleles chosen. Skipping MHC class II predictions.".format(mhc_class))

     if len(class_i_prediction_algorithms) > 0 and len(class_i_alleles) > 0 and len(class_ii_prediction_algorithms) > 0 and len(class_ii_alleles) > 0:
         print("Creating combined reports")
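Taken together, the run.py changes mean a per-epitope-length run whose pipeline produced no all_epitopes.tsv (because none of the fusions were processable) is skipped, and when no epitope length produced output the tool prints the abort message instead of failing on a missing file. Below is a minimal, self-contained sketch of that guard-and-abort pattern; the helper, paths, and sample values are stand-ins for illustration, not pVACfuse's actual code:

import os
import shutil
import tempfile

def finalize_epitope_outputs(per_epitope_output_dir, sample_name, output_files):
    # Mirrors the guarded block above: only build the ".final" file when the
    # pipeline actually wrote an all_epitopes.tsv for this epitope length.
    intermediate_output_file = os.path.join(per_epitope_output_dir, "{}.all_epitopes.tsv".format(sample_name))
    if os.path.exists(intermediate_output_file):
        output_file = os.path.join(per_epitope_output_dir, "{}.all_epitopes.final.tsv".format(sample_name))
        # Stand-in for append_columns(); a plain copy keeps the sketch runnable.
        shutil.copyfile(intermediate_output_file, output_file)
        output_files.append(output_file)

output_files = []
with tempfile.TemporaryDirectory() as tmp_dir:
    # Nothing was written to tmp_dir, mimicking a run with no processable fusions.
    finalize_epitope_outputs(tmp_dir, "sample", output_files)

if len(output_files) > 0:
    print("Creating reports for {} file(s)".format(len(output_files)))
else:
    print("\nNo processable fusions found. Aborting.\n")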
