Commit
Merge pull request #46 from karinlag/master
Fixes Issue 45 regarding reuse of databases with multiple datasets.
karinlag authored Aug 13, 2020
2 parents 76727fa + 4f25156 commit 59264df
Showing 2 changed files with 12 additions and 11 deletions.
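
The underlying problem: Channel.fromPath(...) creates a queue channel that emits the database path once, so only the first dataset's task receives it and the remaining datasets are left without a database. Channel.value(...) creates a value channel, which can be read any number of times, so every task sees the same database directory. A minimal DSL1 sketch of the difference (channel names here are illustrative; the params come from the config below):

// Minimal DSL1 sketch (illustrative channel names; params as in the config file below).

// Queue channel: emits the path once, so it is consumed by the first task
// and later datasets never receive a database.
db_once = Channel.fromPath(params.mlst_db)

// Value channel: holds a single value that every task can read,
// so the same database directory is reused for each dataset.
db_reusable = Channel.value(params.mlst_db)
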
4 changes: 2 additions & 2 deletions conf/specific_genes_template.config
@@ -19,20 +19,20 @@ params.savemode = "copy"

params.threads = 1

// Databases need to be preloaded into the directories specified below.

// Ariba mlst params
params.do_mlst = "yes"
//params.mlst_scheme = "Escherichia coli#1"
params.mlst_db = "/cluster/projects/nn9305k/db_flatfiles/specific_genes_bifrost/mlst/Escherichia_coli_1_db"
params.mlst_results = "mlst_results"

// Ariba AMR params
// Check the ariba webpage for legal values
params.do_amr = "yes"
params.amr_db = "/cluster/projects/nn9305k/db_flatfiles/specific_genes_bifrost/amr/card_db"
params.amr_results = "amr_results"

// Ariba virulence params
// Check the ariba webpage for legal values
params.do_vir = "yes"
params.vir_db = "/cluster/projects/nn9305k/db_flatfiles/specific_genes_bifrost/vir/virulencefinder_db"
params.vir_results = "vir_results"
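
The comment above notes that the databases must be preloaded into the listed directories. A hedged sketch of one way to build them with ARIBA's own commands follows; the output directory names mirror the config but are only illustrative, and the ARIBA documentation should be consulted for the supported reference sets and exact options.

# Illustrative only: one way to preload the ARIBA databases expected by the config above.

# MLST scheme; creates a directory containing ref_db/, which is what the pipeline points ariba run at
ariba pubmlstget "Escherichia coli#1" Escherichia_coli_1_db

# AMR: download CARD and build a prepareref directory
ariba getref card card.out
ariba prepareref -f card.out.fa -m card.out.tsv card_db

# Virulence: download VirulenceFinder and build a prepareref directory
ariba getref virulencefinder vir.out
ariba prepareref -f vir.out.fa -m vir.out.tsv virulencefinder_db
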
19 changes: 10 additions & 9 deletions specific_genes.nf
@@ -25,19 +25,20 @@ log.info "================================================="
log.info ""

// First, define the input data that go into input channels
+// The databases are input as value channels to enable reuse
Channel
.fromFilePairs( params.reads, size:params.setsize )
.ifEmpty { error "Cannot find any reads matching: ${params.reads}" }
.set{read_pairs}

mlst_db = Channel
-    .fromPath(params.mlst_db)
+    .value(params.mlst_db)

amr_db = Channel
-    .fromPath(params.amr_db)
+    .value(params.amr_db)

vir_db = Channel
-    .fromPath(params.vir_db)
+    .value(params.vir_db)


// if there are more than two data files, we need to cat them together
@@ -75,7 +76,7 @@ process run_ariba_mlst_pred {

input:
set pair_id, file(reads) from read_pairs_mlst
file "mlst_db" from mlst_db
path mlst_db from mlst_db

output:
file "${pair_id}_mlst_report.tsv" into pair_id_mlst_tsv
@@ -85,7 +86,7 @@ process run_ariba_mlst_pred {
params.do_mlst == "yes"

"""
-    ariba run --threads $task.cpus mlst_db/ref_db ${pair_id}_R*_concat.fq.gz ${pair_id}_ariba &> ariba.out
+    ariba run --threads $task.cpus ${mlst_db}/ref_db ${pair_id}_R*_concat.fq.gz ${pair_id}_ariba &> ariba.out
echo -e "header\t" \$(head -1 ${pair_id}_ariba/mlst_report.tsv) > ${pair_id}_mlst_report.tsv
echo -e "${pair_id}\t" \$(tail -1 ${pair_id}_ariba/mlst_report.tsv) >> ${pair_id}_mlst_report.tsv
"""
@@ -122,7 +123,7 @@ process run_ariba_amr_pred {

input:
set pair_id, file(reads) from read_pairs_amr
file "db_amr_prepareref" from amr_db
path db_amr_prepareref from amr_db

output:
file "${pair_id}_amr_report.tsv" into pair_id_amr_tsv
@@ -133,7 +134,7 @@ process run_ariba_amr_pred {


"""
-    ariba run --threads $task.cpus db_amr_prepareref ${pair_id}_R*_concat.fq.gz ${pair_id}_ariba &> ariba.out
+    ariba run --threads $task.cpus ${db_amr_prepareref} ${pair_id}_R*_concat.fq.gz ${pair_id}_ariba &> ariba.out
cp ${pair_id}_ariba/report.tsv ${pair_id}_amr_report.tsv
"""
@@ -166,7 +167,7 @@ process run_ariba_vir_pred {

input:
set pair_id, file(reads) from read_pairs_vir
file "db_vir_prepareref" from vir_db
path db_vir_prepareref from vir_db

output:
file "${pair_id}_vir_report.tsv" into pair_id_vir_tsv
@@ -176,7 +177,7 @@ process run_ariba_vir_pred {
params.do_vir == "yes"

"""
-    ariba run --threads $task.cpus db_vir_prepareref ${pair_id}_R*_concat.fq.gz \
+    ariba run --threads $task.cpus ${db_vir_prepareref} ${pair_id}_R*_concat.fq.gz \
${pair_id}_ariba &> ariba.out
cp ${pair_id}_ariba/report.tsv ${pair_id}_vir_report.tsv
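
Put together, each of the three ARIBA processes now follows the same shape: the database path is carried by a value channel and staged through a path input for every read pair. A minimal DSL1 sketch of that shape (process, channel, and output names here are illustrative, not from the pipeline; the real processes also concatenate reads and rename reports as shown above):

// Illustrative DSL1 sketch of the pattern after this merge.
Channel
    .fromFilePairs( params.reads, size:params.setsize )
    .set { read_pairs_demo }

db_ch = Channel.value(params.mlst_db)                // value channel: reusable by every task

process run_ariba_demo {
    input:
    set pair_id, file(reads) from read_pairs_demo    // one task per dataset
    path db from db_ch                               // staged from the value channel each time

    output:
    file "${pair_id}_report.tsv" into demo_reports

    """
    ariba run --threads $task.cpus ${db}/ref_db ${reads} ${pair_id}_ariba &> ariba.out
    cp ${pair_id}_ariba/report.tsv ${pair_id}_report.tsv
    """
}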
