Commit

fix: add more mem to junction_filter
shihabdider committed Jul 5, 2024
1 parent 386ded5 commit 7347141
Showing 4 changed files with 30 additions and 26 deletions.
42 changes: 21 additions & 21 deletions assets/schema_input.json
@@ -162,36 +162,35 @@
                 "format": "file-path",
                 "exists": true
             },
-            "ploidy": {
-                "errorMessage": "purityploidy file must have extension '.rds' or be a number",
+            "cov": {
+                "errorMessage": "COV file cannot contain spaces and must have extension '.rds' or '.txt'",
                 "anyOf": [
                     {
                         "type": "string",
-                        "pattern": "^\\S+\\.rds$"
+                        "pattern": "^\\S+\\.(rds|txt)$"
                     },
                     {
                         "type": "string",
-                        "pattern": "^NA$"
-                    },
+                        "maxLength": 0
+                    }
+                ],
+                "format": "file-path",
+                "exists": true
+            },
+            "hets": {
+                "errorMessage": "Pileups file and must have extension '.txt'",
+                "anyOf": [
                     {
                         "type": "string",
-                        "pattern": "^/dev/null$"
+                        "pattern": "^\\S+\\.txt$"
                     },
                     {
                         "type": "string",
-                        "maxLength": 0
+                        "pattern": "^NA$"
                     },
                     {
-                        "type": "number"
-                    }
-                ],
-            },
-            "cov": {
-                "errorMessage": "COV file cannot contain spaces and must have extension '.rds' or '.txt'",
-                "anyOf": [
-                    {
                         "type": "string",
-                        "pattern": "^\\S+\\.(rds|txt)$"
+                        "pattern": "^/dev/null$"
                     },
                     {
                         "type": "string",
@@ -201,12 +200,12 @@
                 "format": "file-path",
                 "exists": true
             },
-            "hets": {
-                "errorMessage": "Pileups file and must have extension '.txt'",
+            "ploidy": {
+                "errorMessage": "purityploidy file must have extension '.rds' or be a number",
                 "anyOf": [
                     {
                         "type": "string",
-                        "pattern": "^\\S+\\.txt$"
+                        "pattern": "^\\S+\\.rds$"
                     },
                     {
                         "type": "string",
@@ -219,10 +218,11 @@
                     {
                         "type": "string",
                         "maxLength": 0
+                    },
+                    {
+                        "type": "number"
                     }
                 ],
-                "format": "file-path",
-                "exists": true
             },
             "vcf": {
                 "errorMessage": "VCF file for reads 1 cannot contain spaces and must have extension '.vcf' or '.vcf.gz'",
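Note: the schema moves the cov/hets/ploidy properties so their order matches the reordered samplesheet columns in workflows/nfjabba.nf below. Each of these optional columns accepts either a real file path with the expected extension or a placeholder (an empty value, and for some columns the literal NA or /dev/null; ploidy may also be a plain number). The regexes that appear in this diff can be sanity-checked with a throwaway Groovy snippet; this is illustrative only and not part of the pipeline:

    // Illustrative Groovy check of the regexes shown in the schema diff above.
    // ==~ requires a full-string match, which mirrors the JSON Schema "pattern"
    // behaviour here because every pattern is anchored with ^ and $.
    assert 'tumor.cov.rds'   ==~ /^\S+\.(rds|txt)$/   // coverage file: .rds or .txt, no spaces
    assert 'sample.hets.txt' ==~ /^\S+\.txt$/         // pileups file: .txt
    assert 'NA'              ==~ /^NA$/               // explicit "not provided" placeholder
    assert '/dev/null'       ==~ /^\/dev\/null$/      // null-file placeholder
    assert !('my cov.rds'    ==~ /^\S+\.(rds|txt)$/)  // spaces are rejected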
5 changes: 5 additions & 0 deletions conf/base.config
@@ -116,6 +116,11 @@ process {
         memory = { check_max( 48.GB * task.attempt, 'memory' ) }
         time = { check_max( 8.h * task.attempt, 'time' ) }
     }
+    withName: 'JUNCTION_FILTER' {
+        cpus = { check_max( 2 * task.attempt, 'cpus' ) }
+        memory = { check_max( 128.GB * task.attempt, 'memory' ) }
+        time = { check_max( 16.h * task.attempt, 'time' ) }
+    }
     withName: 'HETPILEUPS' {
         cpus = { check_max( 4 * task.attempt, 'cpus' ) }
         memory = { check_max( 8.GB * task.attempt, 'memory' ) }
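Note: the new JUNCTION_FILTER block follows the usual nf-core resource pattern: the request scales with task.attempt on retry and is clamped by check_max(), so the 128 GB base request grows on each retry only up to --max_memory. For reference, a trimmed sketch of the check_max() helper that nf-core template pipelines define in nextflow.config is shown below; the copy in this repository may differ slightly.

    // Trimmed sketch of the nf-core check_max() helper (normally defined in nextflow.config).
    // It caps each requested resource at the corresponding --max_* parameter, so
    // 128.GB * task.attempt (128 GB, 256 GB, ...) never exceeds params.max_memory.
    def check_max(obj, type) {
        if (type == 'memory') {
            try {
                if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1)
                    return params.max_memory as nextflow.util.MemoryUnit
                return obj
            } catch (all) {
                println "   ### ERROR ###   Max memory '${params.max_memory}' is not valid! Using default value: $obj"
                return obj
            }
        } else if (type == 'time') {
            try {
                if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1)
                    return params.max_time as nextflow.util.Duration
                return obj
            } catch (all) {
                println "   ### ERROR ###   Max time '${params.max_time}' is not valid! Using default value: $obj"
                return obj
            }
        } else if (type == 'cpus') {
            try {
                return Math.min(obj as int, params.max_cpus as int)
            } catch (all) {
                println "   ### ERROR ###   Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
                return obj
            }
        }
    }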
2 changes: 1 addition & 1 deletion conf/modules/snv_calling.config
@@ -32,7 +32,7 @@ process {
             pattern: "*{vcf.gz,txt.gz,vcf*,.command.*}"
         ]
     }
-    withName: '.*SAGE_SOMATIC' {
+    withName: '.*SAGE_SOMATIC|SAGE_PASS_FILTER' {
         ext.when = { params.tools && params.tools.split(',').contains('sage') }
         publishDir = [
             mode: params.publish_dir_mode,
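Note: Nextflow withName selectors are regular expressions, so extending the pattern with an alternation is enough to apply the same ext.when gate and publishDir settings to SAGE_PASS_FILTER as well as SAGE_SOMATIC. A minimal sketch of the resulting block, with a placeholder output path rather than this repo's real layout:

    // Minimal sketch: one regex selector shared by both SAGE processes.
    // The path below is a placeholder for illustration only.
    process {
        withName: '.*SAGE_SOMATIC|SAGE_PASS_FILTER' {
            ext.when   = { params.tools && params.tools.split(',').contains('sage') }
            publishDir = [
                mode: params.publish_dir_mode,
                path: { "${params.outdir}/snv_calling/sage/" },   // placeholder path
                pattern: "*{vcf.gz,txt.gz,vcf*,.command.*}"
            ]
        }
    }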
7 changes: 3 additions & 4 deletions workflows/nfjabba.nf
@@ -130,9 +130,9 @@ def handleError(step, dataType) {
 }

 input_sample = ch_from_samplesheet
-    .map{ meta, fastq_1, fastq_2, table, cram, crai, bam, bai, ploidy, cov, hets, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic, snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller ->
+    .map{ meta, fastq_1, fastq_2, table, cram, crai, bam, bai, cov, hets, ploidy, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic, snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller ->
         // generate patient_sample key to group lanes together
-        [ meta.patient + meta.sample, [meta, fastq_1, fastq_2, table, cram, crai, bam, bai, ploidy, cov, hets, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic, snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller] ]
+        [ meta.patient + meta.sample, [meta, fastq_1, fastq_2, table, cram, crai, bam, bai, cov, hets, ploidy, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic, snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller] ]
     }
     .tap{ ch_with_patient_sample } // save the channel
     .groupTuple() //group by patient_sample to get all lanes
@@ -143,7 +143,7 @@ input_sample = ch_from_samplesheet
     .combine(ch_with_patient_sample, by: 0) // for each entry add numLanes
     .map { patient_sample, num_lanes, ch_items ->

-        (meta, fastq_1, fastq_2, table, cram, crai, bam, bai, ploidy, cov, hets, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic, snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller) = ch_items
+        (meta, fastq_1, fastq_2, table, cram, crai, bam, bai, cov, hets, ploidy, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic, snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller) = ch_items
         if (meta.lane && fastq_2) {
             meta = meta + [id: "${meta.sample}-${meta.lane}".toString()]
             def CN = params.seq_center ? "CN:${params.seq_center}\\t" : ''
@@ -1835,7 +1835,6 @@ workflow NFJABBA {
                     []
                 ]
             }
-            jabba_inputs.dump(tag: 'trace2')
         } else {
             // join all previous outputs to be used as input for jabba
             // excluding svs since they can come from either svaba or gridss
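Note: the destructuring order in these .map closures has to line up with the order in which the samplesheet channel emits its columns, which follows the property order in assets/schema_input.json (assuming ch_from_samplesheet comes from nf-validation's fromSamplesheet, as in the standard nf-core layout); that is why the schema and the workflow are reordered in the same commit. A compressed sketch of that ordering contract, with the column names taken from the diff above:

    // Sketch of the ordering contract (the real tuple has 21 elements).
    // schema_input.json property order after this commit: ... bam, bai, cov, hets, ploidy, vcf, ...
    input_sample = ch_from_samplesheet            // e.g. Channel.fromSamplesheet("input") -- assumed
        .map { meta, fastq_1, fastq_2, table, cram, crai, bam, bai,
               cov, hets, ploidy,                 // <- was: ploidy, cov, hets
               vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic,
               snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller ->
            [ meta.patient + meta.sample,
              [ meta, fastq_1, fastq_2, table, cram, crai, bam, bai,
                cov, hets, ploidy, vcf, vcf2, snv_vcf_somatic, snv_tbi_somatic,
                snv_vcf_germline, snv_tbi_germline, seg, nseg, ggraph, variantcaller ] ]
        }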
