Commit: add alternative IDs, update kraken module and readme [ci skip]
Showing 10 changed files with 388 additions and 21 deletions.
@@ -0,0 +1,8 @@
name: kraken2_kraken2
channels:
  - conda-forge
  - bioconda
  - defaults
dependencies:
  - bioconda::kraken2=2.1.2
  - conda-forge::pigz=2.6
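
If the module is run with Conda instead of the container images further down, Nextflow has to be told to honour per-module environment files like this one. A minimal nextflow.config sketch, assuming a local Conda installation; the cache directory path is illustrative and not part of this commit:

// nextflow.config — minimal sketch, assumes Conda is installed locally
conda {
    enabled  = true                 // honour each module's `conda "${moduleDir}/environment.yml"` directive
    cacheDir = "$HOME/.nf-conda"    // illustrative cache location
}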
@@ -0,0 +1,58 @@
process KRAKEN2_KRAKEN2 {
    tag "$meta.id"
    label 'process_high'

    conda "${moduleDir}/environment.yml"
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://depot.galaxyproject.org/singularity/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:87fc08d11968d081f3e8a37131c1f1f6715b6542-0' :
        'biocontainers/mulled-v2-5799ab18b5fc681e75923b2450abaa969907ec98:87fc08d11968d081f3e8a37131c1f1f6715b6542-0' }"

    input:
    tuple val(meta), path(reads)
    path db
    val save_output_fastqs
    val save_reads_assignment

    output:
    tuple val(meta), path('*.classified{.,_}*')   , optional:true, emit: classified_reads_fastq
    tuple val(meta), path('*.unclassified{.,_}*') , optional:true, emit: unclassified_reads_fastq
    tuple val(meta), path('*classifiedreads.txt') , optional:true, emit: classified_reads_assignment
    tuple val(meta), path('*report.txt')          , emit: report
    path "versions.yml"                           , emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def args = task.ext.args ?: ''
    def prefix = task.ext.prefix ?: "${meta.id}"
    def paired       = meta.single_end ? "" : "--paired"
    def classified   = meta.single_end ? "${prefix}.classified.fastq"   : "${prefix}.classified#.fastq"
    def unclassified = meta.single_end ? "${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq"
    def classified_option = save_output_fastqs ? "--classified-out ${classified}" : ""
    def unclassified_option = save_output_fastqs ? "--unclassified-out ${unclassified}" : ""
    def readclassification_option = save_reads_assignment ? "--output ${prefix}.kraken2.classifiedreads.txt" : "--output /dev/null"
    def compress_reads_command = save_output_fastqs ? "pigz -p $task.cpus *.fastq" : ""

    """
    kraken2 \\
        --db $db \\
        --threads $task.cpus \\
        --report ${prefix}.kraken2.report.txt \\
        --gzip-compressed \\
        $unclassified_option \\
        $classified_option \\
        $readclassification_option \\
        $paired \\
        $args \\
        $reads

    $compress_reads_command

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        kraken2: \$(echo \$(kraken2 --version 2>&1) | sed 's/^.*Kraken version //; s/ .*\$//')
        pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' )
    END_VERSIONS
    """
}
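
For orientation, a minimal workflow sketch of how this process might be invoked with the four inputs declared above. The include path, channel contents, and params.kraken2_db are illustrative assumptions, not part of this commit:

// Hypothetical usage sketch — paths, params and sample names are assumptions
include { KRAKEN2_KRAKEN2 } from './modules/nf-core/kraken2/kraken2/main'

workflow {
    // [ meta, reads ] tuple for one paired-end sample (illustrative)
    ch_reads = Channel.of([
        [ id:'sample1', single_end:false ],
        [ file('sample1_R1.fastq.gz'), file('sample1_R2.fastq.gz') ]
    ])
    ch_db = Channel.value(file(params.kraken2_db))   // untarred Kraken2 database directory

    KRAKEN2_KRAKEN2(
        ch_reads,
        ch_db,
        true,    // save_output_fastqs
        false    // save_reads_assignment
    )

    KRAKEN2_KRAKEN2.out.report.view()   // per-sample Kraken2 report path
}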
@@ -0,0 +1,78 @@
name: kraken2_kraken2
description: Classifies metagenomic sequence data
keywords:
  - classify
  - metagenomics
  - fastq
  - db
tools:
  - kraken2:
      description: |
        Kraken2 is a taxonomic sequence classifier that assigns taxonomic labels to sequence reads
      homepage: https://ccb.jhu.edu/software/kraken2/
      documentation: https://github.com/DerrickWood/kraken2/wiki/Manual
      doi: 10.1186/s13059-019-1891-0
      licence: ["MIT"]
input:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - reads:
      type: file
      description: |
        List of input FastQ files of size 1 and 2 for single-end and paired-end data,
        respectively.
  - db:
      type: directory
      description: Kraken2 database
  - save_output_fastqs:
      type: string
      description: |
        If true, optional commands are added to save classified and unclassified reads
        as fastq files
  - save_reads_assignment:
      type: string
      description: |
        If true, an optional command is added to save a file reporting the taxonomic
        classification of each input read
output:
  - meta:
      type: map
      description: |
        Groovy Map containing sample information
        e.g. [ id:'test', single_end:false ]
  - classified_reads_fastq:
      type: file
      description: |
        Reads classified as belonging to any of the taxa
        in the Kraken2 database.
      pattern: "*{fastq.gz}"
  - unclassified_reads_fastq:
      type: file
      description: |
        Reads not classified to any of the taxa
        in the Kraken2 database.
      pattern: "*{fastq.gz}"
  - classified_reads_assignment:
      type: file
      description: |
        Kraken2 output file indicating the taxonomic assignment of
        each input read
  - report:
      type: file
      description: |
        Kraken2 report containing stats about classified
        and unclassified reads.
      pattern: "*.{report.txt}"
  - versions:
      type: file
      description: File containing software versions
      pattern: "versions.yml"
authors:
  - "@joseespinosa"
  - "@drpatelh"
maintainers:
  - "@joseespinosa"
  - "@drpatelh"
@@ -0,0 +1,143 @@
nextflow_process {

    name "Test Process KRAKEN2_KRAKEN2"
    script "../main.nf"
    process "KRAKEN2_KRAKEN2"
    tag "modules"
    tag "modules_nfcore"
    tag "untar"
    tag "kraken2"
    tag "kraken2/kraken2"

    setup {
        run("UNTAR") {
            script "modules/nf-core/untar/main.nf"
            process {
                """
                input[0] = Channel.of([
                    [],
                    file(
                        params.test_data['sarscov2']['genome']['kraken2_tar_gz'],
                        checkIfExists: true
                    )
                ])
                """
            }
        }
    }

    test("sarscov2 illumina single end [fastq]") {
        when {
            process {
                """
                input[0] = [
                    [ id:'test', single_end:true ], // meta map
                    [ file(
                        params.test_data['sarscov2']['illumina']['test_1_fastq_gz'],
                        checkIfExists: true
                    ) ]
                ]
                input[1] = UNTAR.out.untar.map{ it[1] }
                input[2] = true
                input[3] = false
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert snapshot(
                    process.out.report,
                    process.out.versions,
                    ).match()
                },
                { assert process.out.classified_reads_fastq.get(0).get(1) ==~ ".*/test.classified.fastq.gz" },
                { assert process.out.unclassified_reads_fastq.get(0).get(1) ==~ ".*/test.unclassified.fastq.gz" },
            )
        }
    }

    test("sarscov2 illumina paired end [fastq]") {
        when {
            params {
                outdir = "$outputDir"
            }

            process {
                """
                input[0] = [
                    [ id:'test', single_end:false ], // meta map
                    [
                        file(
                            params.test_data['sarscov2']['illumina']['test_1_fastq_gz'],
                            checkIfExists: true
                        ),
                        file(
                            params.test_data['sarscov2']['illumina']['test_2_fastq_gz'],
                            checkIfExists: true
                        )
                    ]
                ]
                input[1] = UNTAR.out.untar.map{ it[1] }
                input[2] = true
                input[3] = false
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert snapshot(
                    process.out.report,
                    process.out.versions,
                    ).match()
                },
                { assert process.out.classified_reads_fastq.get(0).get(1).get(0)
                    ==~ ".*/test.classified_1.fastq.gz" },
                { assert process.out.classified_reads_fastq.get(0).get(1).get(1)
                    ==~ ".*/test.classified_2.fastq.gz" },
                { assert process.out.unclassified_reads_fastq.get(0).get(1).get(0)
                    ==~ ".*/test.unclassified_1.fastq.gz" },
                { assert process.out.unclassified_reads_fastq.get(0).get(1).get(1)
                    ==~ ".*/test.unclassified_2.fastq.gz" },
            )
        }
    }

    test("sarscov2 illumina single end [fastq] + save_reads_assignment") {
        when {
            params {
                outdir = "$outputDir"
            }

            process {
                """
                input[0] = [
                    [ id:'test', single_end:true ], // meta map
                    [ file(
                        params.test_data['sarscov2']['illumina']['test_1_fastq_gz'],
                        checkIfExists: true
                    ) ]
                ]
                input[1] = UNTAR.out.untar.map{ it[1] }
                input[2] = false
                input[3] = true
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert snapshot(
                    process.out.report,
                    process.out.classified_reads_assignment,
                    process.out.versions,
                    ).match()
                },
            )
        }
    }
}