Merge pull request #463 from LilyAnderssonLee/update-validation
Update nf-validation and turn off kaiju2krona output
LilyAnderssonLee authored Mar 21, 2024
2 parents 9213006 + 7f82975 commit d697e6a
Showing 3 changed files with 30 additions and 18 deletions.
12 changes: 11 additions & 1 deletion .github/workflows/download_pipeline.yml
@@ -65,8 +65,18 @@ jobs:
       - name: Inspect download
         run: tree ./${{ env.REPOTITLE_LOWERCASE }}
 
-      - name: Run the downloaded pipeline
+      - name: Run the downloaded pipeline (stub)
+        id: stub_run_pipeline
+        continue-on-error: true
         env:
           NXF_SINGULARITY_CACHEDIR: ./
           NXF_SINGULARITY_HOME_MOUNT: true
         run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
+
+      - name: Run the downloaded pipeline (stub run not supported)
+        id: run_pipeline
+        if: ${{ job.steps.stub_run_pipeline.status == failure() }}
+        env:
+          NXF_SINGULARITY_CACHEDIR: ./
+          NXF_SINGULARITY_HOME_MOUNT: true
+        run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results{% endraw %}
4 changes: 4 additions & 0 deletions conf/modules.config
@@ -633,6 +633,10 @@ process {
     withName: KAIJU_KAIJU2KRONA {
         tag = {"${meta.db_name}|${meta.id}"}
         ext.args = '-v -u'
+        publishDir = [
+            path: { "${params.outdir}/kaiju/" },
+            enabled: false
+        ]
     }
 
     withName: DIAMOND_BLASTX {
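Note: the publishDir block added above only stops the kaiju2krona text files from being copied into the results directory; the process itself still runs, so downstream steps that consume its output are unaffected. As a rough sketch (not part of this commit), a user who wants those files published again could override the selector in a custom config passed with -c; the file name custom.config and the publish_dir_mode line are illustrative assumptions:

// custom.config -- hypothetical user override; re-enables publishing for this one module
process {
    withName: KAIJU_KAIJU2KRONA {
        publishDir = [
            path: { "${params.outdir}/kaiju/" },
            mode: params.publish_dir_mode,   // nf-core convention; assumed to be defined by the pipeline
            enabled: true
        ]
    }
}

// illustrative usage: nextflow run nf-core/taxprofiler -profile <profiles> -c custom.config --outdir ./results ...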
32 changes: 15 additions & 17 deletions workflows/taxprofiler.nf
@@ -110,8 +110,8 @@ workflow TAXPROFILER {
     ch_multiqc_files = Channel.empty()
 
     // Validate input files and create separate channels for FASTQ, FASTA, and Nanopore data
-    samplesheet
-        .branch { meta, run_accession, instrument_platform, fastq_1, fastq_2, fasta ->
+    ch_input = samplesheet
+        .map { meta, run_accession, instrument_platform, fastq_1, fastq_2, fasta ->
             meta.run_accession = run_accession
             meta.instrument_platform = instrument_platform
 
@@ -130,23 +130,21 @@
             if ( meta.single_end && fastq_2 ) {
                 error("Error: Please check input samplesheet: for single-end reads entry `fastq_2` should be empty")
             }
-            fastq_se: meta.single_end
-                return [ meta, [ fastq_1 ] ]
+            return [ meta, run_accession, instrument_platform, fastq_1, fastq_2, fasta ]
+        }
+        .branch { meta, run_accession, instrument_platform, fastq_1, fastq_2, fasta ->
+            fastq: meta.single_end || fastq_2
+                return [ meta, fastq_2 ? [ fastq_1, fastq_2 ] : [ fastq_1 ] ]
             nanopore: instrument_platform == 'OXFORD_NANOPORE'
                 meta.single_end = true
                 return [ meta, [ fastq_1 ] ]
-            fastq_pe: fastq_2
-                return [ meta, [ fastq_1, fastq_2 ] ]
-            ch_fasta: meta.is_fasta
+            fasta: meta.is_fasta
                 meta.single_end = true
-                return [ meta, [fasta] ]
+                return [ meta, [ fasta ] ]
         }
-        .set { ch_input }
 
-    // Merge ch_input.fastq_pe and ch_input.fastq_se into a single channel
-    def ch_fastq = ch_input.fastq_pe.mix( ch_input.fastq_se )
-    // Merge ch_fastq and ch_input.nanopore into a single channel
-    def ch_input_for_fastqc = ch_fastq.mix( ch_input.nanopore )
+    // Merge ch_input.fastq and ch_input.nanopore into a single channel
+    def ch_input_for_fastqc = ch_input.fastq.mix( ch_input.nanopore )
 
     // Validate and decompress databases
     ch_dbs_for_untar = databases
@@ -190,10 +188,10 @@
     */
 
     if ( params.perform_shortread_qc ) {
-        ch_shortreads_preprocessed = SHORTREAD_PREPROCESSING ( ch_fastq, adapterlist ).reads
+        ch_shortreads_preprocessed = SHORTREAD_PREPROCESSING ( ch_input.fastq, adapterlist ).reads
         ch_versions = ch_versions.mix( SHORTREAD_PREPROCESSING.out.versions )
     } else {
-        ch_shortreads_preprocessed = ch_fastq
+        ch_shortreads_preprocessed = ch_input.fastq
     }
 
     if ( params.perform_longread_qc ) {
@@ -262,13 +260,13 @@
                 meta, reads ->
                 [ meta, [ reads ].flatten() ]
             }
-            .mix( ch_input.ch_fasta )
+            .mix( ch_input.fasta )
 
         ch_versions = ch_versions.mix(MERGE_RUNS.out.versions)
 
     } else {
         ch_reads_runmerged = ch_shortreads_hostremoved
-            .mix( ch_longreads_hostremoved, ch_input.ch_fasta )
+            .mix( ch_longreads_hostremoved, ch_input.fasta )
     }
 
     /*
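The workflow change above replaces a single multi-way branch (fastq_se / fastq_pe / nanopore / ch_fasta) with a map step that decorates the meta map, followed by one branch that emits fastq, nanopore, and fasta channels. A minimal, self-contained sketch of that map-then-branch pattern is shown below; the sample values, file names, and branch order are illustrative assumptions and do not reproduce the pipeline's full samplesheet handling:

// sketch.nf -- illustrative only; run with: nextflow run sketch.nf
workflow {
    ch_input = Channel.of(
            [ [id:'s1', single_end:true,  is_fasta:false], 'ILLUMINA',        's1_R1.fastq.gz', null,             null ],
            [ [id:'s2', single_end:false, is_fasta:false], 'ILLUMINA',        's2_R1.fastq.gz', 's2_R2.fastq.gz', null ],
            [ [id:'s3', single_end:false, is_fasta:false], 'OXFORD_NANOPORE', 's3.fastq.gz',    null,             null ],
            [ [id:'s4', single_end:true,  is_fasta:true ], 'ILLUMINA',        null,             null,             's4.fasta' ]
        )
        .map { meta, platform, fastq_1, fastq_2, fasta ->
            // decorate the meta map once, before any branching
            meta.instrument_platform = platform
            [ meta, platform, fastq_1, fastq_2, fasta ]
        }
        .branch { meta, platform, fastq_1, fastq_2, fasta ->
            // conditions are evaluated top to bottom; the first match wins
            nanopore: platform == 'OXFORD_NANOPORE'
                return [ meta, [ fastq_1 ] ]
            fasta: meta.is_fasta
                return [ meta, [ fasta ] ]
            fastq: true
                return [ meta, fastq_2 ? [ fastq_1, fastq_2 ] : [ fastq_1 ] ]
        }

    // each branch is addressed by name on the result, as in ch_input.fastq above
    ch_input.fastq.view    { "fastq:    ${it}" }
    ch_input.nanopore.view { "nanopore: ${it}" }
    ch_input.fasta.view    { "fasta:    ${it}" }
}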
