
Commit a405ede: moving to nf-core compatible

erinyoung committed Jan 26, 2024
1 parent ff0c46a, commit a405ede
Showing 57 changed files with 1,083 additions and 1,144 deletions.
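
The modules move from modules/ into modules/local/, mirroring the nf-core layout in which pipeline-specific processes live under modules/local/. The include statements in grandeur.nf are not visible in this diff, but against the new paths they would look roughly like the sketch below (which names grandeur.nf actually imports is an assumption):

// Illustrative includes for the relocated modules; the paths come from this
// commit, the selection and grouping of names is assumed.
include { amrfinderplus }                                    from './modules/local/amrfinderplus'
include { bbduk }                                            from './modules/local/bbduk'
include { blastn }                                           from './modules/local/blast'
include { blobtools_create; blobtools_view; blobtools_plot } from './modules/local/blobtools'
include { datasets_summary; datasets_download }              from './modules/local/datasets'
include { download_sra }                                     from './modules/local/download_sra'
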
2 changes: 1 addition & 1 deletion .github/workflows/legionella.yml
@@ -42,7 +42,7 @@ jobs:
       - name: Check Legionella file
         run: |
-          for file in grandeur/legsta/legsta_summary.csv
+          for file in grandeur/elgato/elgato_summary.csv
           do
             head $file
             wc -l $file
Binary file removed assets/fastani_refs.tar.gz
Binary file removed configs/fastani_refs.tar.gz
591 changes: 177 additions & 414 deletions grandeur.nf

Large diffs are not rendered by default.

93 changes: 0 additions & 93 deletions modules/bbmap.nf

This file was deleted.

47 changes: 0 additions & 47 deletions modules/fastp.nf

This file was deleted.

46 changes: 0 additions & 46 deletions modules/kraken2.nf

This file was deleted.

10 changes: 8 additions & 2 deletions modules/amrfinderplus.nf → modules/local/amrfinderplus.nf
@@ -1,6 +1,6 @@
 process amrfinderplus {
-  tag "${sample}"
-  label "medcpus"
+  tag "$meta.id"
+  label "process_medium"
   publishDir params.outdir, mode: 'copy'
   container 'staphb/ncbi-amrfinderplus:3.11.26-2023-11-15.1'
   maxForks 10
@@ -16,8 +16,14 @@ process amrfinderplus {
   output:
   path "ncbi-AMRFinderplus/${sample}_amrfinder_plus.txt"        , emit: collect
   path "logs/${task.process}/${sample}.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                            , emit: versions

+  when:
+  task.ext.when == null || task.ext.when
+
   shell:
+  def args = task.ext.args ?: ''
+  def prefix = task.ext.prefix ?: "${meta.id}"
   '''
     mkdir -p ncbi-AMRFinderplus logs/!{task.process}
     log_file=logs/!{task.process}/!{sample}.!{workflow.sessionId}.log
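
The new task.ext.args and task.ext.prefix fallbacks follow the nf-core convention of steering a module from configuration rather than hard-coded flags. A minimal sketch of how those values could be supplied, assuming a conf/modules.config file that is not part of this commit (the example options are assumptions, not taken from the pipeline):

// Hypothetical conf/modules.config: values picked up by task.ext.args and
// task.ext.prefix inside the converted modules.
process {
  withName: 'amrfinderplus' {
    ext.args   = '--plus'              // example AMRFinderPlus option, assumed
    ext.prefix = { "${meta.id}" }      // output names keyed on the sample id
  }
  withName: 'blastn' {
    ext.args = '-max_target_seqs 5'    // would override the in-module default shown below
  }
}
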
45 changes: 45 additions & 0 deletions modules/local/bbduk.nf
@@ -0,0 +1,45 @@
process bbduk {
  tag "$meta.id"
  label "process_medium"
  publishDir params.outdir, mode: 'copy'
  container 'staphb/bbtools:39.01'
  //errorStrategy { task.attempt < 2 ? 'retry' : 'ignore'}
  time '10m'

  input:
  tuple val(meta), file(reads)

  output:
  tuple val(meta), file("bbduk/*_rmphix_R{1,2}.fastq.gz"), emit: fastq
  path "bbduk/*"                                         , emit: files
  path "bbduk/*.phix.stats.txt"                          , emit: stats
  path "logs/${task.process}/*.log"                      , emit: log
  path "versions.yml"                                    , emit: versions

  when:
  task.ext.when == null || task.ext.when

  shell:
  def args = task.ext.args ?: 'k=31 hdist=1'
  def prefix = task.ext.prefix ?: "${meta.id}"
  """
  mkdir -p bbduk logs/${task.process}
  log_file=logs/${task.process}/${prefix}.${workflow.sessionId}.log

  bbduk.sh ${args} \
    in1=${reads[0]} \
    in2=${reads[1]} \
    out1=bbduk/${prefix}_rmphix_R1.fastq.gz \
    out2=bbduk/${prefix}_rmphix_R2.fastq.gz \
    outm=bbduk/${prefix}.matched_phix.fq \
    ref=/opt/bbmap/resources/phix174_ill.ref.fa.gz \
    stats=bbduk/${prefix}.phix.stats.txt \
    threads=${task.cpus} \
    | tee -a \$log_file

  cat <<-END_VERSIONS > versions.yml
  "${task.process}":
    bbmap: \$(bbduk.sh --version 2>&1 | grep -v java | grep version)
  END_VERSIONS
  """
}
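
Each converted module now emits a versions.yml file. On the workflow side these are typically gathered into a single report, along the lines of the following sketch; the channel names and the storeDir path are assumptions, not taken from grandeur.nf:

// Hypothetical workflow excerpt: merge per-process version reports.
ch_versions = Channel.empty()

bbduk(ch_reads)                                        // ch_reads: tuple val(meta), file(reads)
ch_versions = ch_versions.mix(bbduk.out.versions)

amrfinderplus(ch_contigs)                              // input channel name assumed
ch_versions = ch_versions.mix(amrfinderplus.out.versions)

ch_versions
  .unique()
  .collectFile(name: 'collated_versions.yml', storeDir: "${params.outdir}/pipeline_info")
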
10 changes: 8 additions & 2 deletions modules/blast.nf → modules/local/blast.nf
@@ -1,6 +1,6 @@
 process blastn {
-  tag "${sample}"
-  label "medcpus"
+  tag "$meta.id"
+  label "process_medium"
   publishDir params.outdir, mode: 'copy'
   container 'staphb/blast:2.15.0'
   maxForks 10
@@ -16,8 +16,14 @@ process blastn {
   output:
   tuple val(sample), file("blastn/${sample}.tsv")                , emit: blastn
   path "logs/${task.process}/${sample}.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                            , emit: versions

+  when:
+  task.ext.when == null || task.ext.when
+
   shell:
+  def args = task.ext.args ?: '-max_target_seqs 10 -max_hsps 1 -evalue 1e-25'
+  def prefix = task.ext.prefix ?: "${meta.id}"
   '''
     mkdir -p blastn logs/!{task.process}
     log_file=logs/!{task.process}/!{sample}.!{workflow.sessionId}.log
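
The fallback args (-max_target_seqs 10 -max_hsps 1 -evalue 1e-25) now sit in the module header, where an ext.args entry in configuration can override them. The shell body itself is truncated in this diff; a blastn call wired to those options would look roughly like the sketch below, in which the database path, query variable, and output columns are illustrative assumptions, not the module's actual body:

// Illustrative only; not the contents of modules/local/blast.nf.
script:
def args   = task.ext.args   ?: '-max_target_seqs 10 -max_hsps 1 -evalue 1e-25'
def prefix = task.ext.prefix ?: "${meta.id}"
"""
blastn -query ${contigs} \
  -db ${params.blast_db}/nt \
  -num_threads ${task.cpus} \
  -outfmt '6 qseqid staxids bitscore std' \
  ${args} \
  -out blastn/${prefix}.tsv
"""
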
11 changes: 10 additions & 1 deletion modules/blobtools.nf → modules/local/blobtools.nf
@@ -1,5 +1,6 @@
 process blobtools_create {
-  tag "${sample}"
+  tag "$meta.id"
+  label "process_medium"
   publishDir params.outdir, mode: 'copy'
   container 'chrishah/blobtools:v1.1.1'
   maxForks 10
@@ -16,8 +17,14 @@ process blobtools_create {
   tuple val(sample), file("blobtools/${sample}.blobDB.json")     , emit: json
   path "blobtools/${sample}.${sample}*.bam.cov"                  , emit: files
   path "logs/${task.process}/${sample}.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                            , emit: versions

+  when:
+  task.ext.when == null || task.ext.when
+
   shell:
+  def args = task.ext.args ?: ''
+  def prefix = task.ext.prefix ?: "${meta.id}"
   '''
     mkdir -p blobtools logs/!{task.process}
     log_file=logs/!{task.process}/!{sample}.!{workflow.sessionId}.log
@@ -55,6 +62,7 @@ process blobtools_view {
   output:
   tuple val(sample), file("blobtools/${sample}.blobDB.table.txt"), emit: file
   path "logs/${task.process}/${sample}.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                            , emit: versions

   shell:
   '''
@@ -94,6 +102,7 @@ process blobtools_plot {
   tuple val(sample), file("blobtools/${sample}_blobtools.txt")   , emit: results
   path "blobtools/${sample}_summary.txt"                         , emit: collect
   path "logs/${task.process}/${sample}.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                            , emit: versions

   shell:
   '''
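
The switch from tag "${sample}" to tag "$meta.id" reflects the nf-core meta-map pattern, in which each sample travels through channels as a [ meta, files ] tuple. A minimal sketch of building such a channel from a sample sheet; the sheet layout and column names are assumptions:

// Hypothetical samplesheet (sample,fastq_1,fastq_2) turned into [ meta, reads ]
// tuples for the meta.id-tagged modules.
ch_reads = Channel
  .fromPath(params.sample_sheet)
  .splitCsv(header: true)
  .map { row ->
    def meta = [id: row.sample]
    tuple(meta, [file(row.fastq_1), file(row.fastq_2)])
  }
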
11 changes: 11 additions & 0 deletions modules/datasets.nf → modules/local/datasets.nf
@@ -1,5 +1,6 @@
 process datasets_summary {
   tag "${taxon}"
+  label "process_single"
   publishDir params.outdir, mode: 'copy'
   container 'quay.io/uphl/datasets:15.34.0-2023-12-19'
   maxForks 10
@@ -8,6 +9,7 @@ process datasets_summary {
   //#UPHLICA memory 1.GB
   //#UPHLICA cpus 3
   //#UPHLICA time '10m'
+  path "versions.yml" , emit: versions

   input:
   tuple val(taxon), file(script)
@@ -16,7 +18,12 @@ process datasets_summary {
   path "datasets/*_genomes.csv"                          , emit: genomes, optional: true
   path "logs/${task.process}/*.${workflow.sessionId}.log", emit: log

+  when:
+  task.ext.when == null || task.ext.when
+
   shell:
+  def args = task.ext.args ?: ''
+  def prefix = task.ext.prefix ?: "${meta.id}"
   '''
     mkdir -p datasets logs/!{task.process}
     log_file=logs/!{task.process}/!{taxon}.!{workflow.sessionId}.log
@@ -53,6 +60,10 @@ process datasets_download {
   output:
   path "genomes/*"                                                       , emit: genomes, optional: true
   path "logs/${task.process}/datasets_download.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                                    , emit: versions

+  when:
+  task.ext.when == null || task.ext.when
+
   shell:
   '''
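
The when: task.ext.when == null || task.ext.when guard lets a process be switched off from configuration instead of with flags threaded through the workflow. A minimal sketch, assuming a params.skip_datasets option and a config file that are not part of this commit:

// Hypothetical configuration: disable the datasets processes on demand.
process {
  withName: 'datasets_summary' {
    ext.when = { !params.skip_datasets }
  }
  withName: 'datasets_download' {
    ext.when = { !params.skip_datasets }
  }
}
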
7 changes: 7 additions & 0 deletions modules/download_sra.nf → modules/local/download_sra.nf
@@ -1,5 +1,6 @@
 process download_sra {
   tag "${SRR}"
+  label "process_single"
   publishDir params.outdir, mode: 'copy'
   container 'quay.io/biocontainers/sra-tools:2.11.0--pl5321ha49a11a_3'
   maxForks 10
@@ -15,8 +16,14 @@ process download_sra {
   output:
   tuple val(SRR), file("reads/${SRR}_{1,2}.fastq.gz")         , emit: fastq
   path "logs/${task.process}/${SRR}.${workflow.sessionId}.log", emit: log
+  path "versions.yml"                                          , emit: versions

+  when:
+  task.ext.when == null || task.ext.when
+
   shell:
+  def args = task.ext.args ?: ''
+  def prefix = task.ext.prefix ?: "${meta.id}"
   '''
     mkdir -p reads logs/!{task.process}
     log_file=logs/!{task.process}/!{SRR}.!{workflow.sessionId}.log
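
For context, the accessions consumed by this process would typically come from a pipeline parameter. A sketch of that wiring, where the params.sra_accessions name and its comma-separated format are assumptions:

// Hypothetical wiring: turn a comma-separated accession string into the
// val(SRR) channel consumed by download_sra.
ch_sra = params.sra_accessions
  ? Channel.fromList(params.sra_accessions.tokenize(','))
  : Channel.empty()

download_sra(ch_sra)
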