Merge pull request #247 from danilodileo/review-more-updates
changes
danilodileo authored Jan 24, 2024
2 parents 05cc258 + f04fe3d commit 45881d3
Showing 4 changed files with 14 additions and 12 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -42,7 +42,7 @@ jobs:
profiles:
name: Run workflow profile
# Only run on push if this is the nf-core dev branch (merged PRs)
- if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/metadenovo') }}
+ if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/metatdenovo') }}
runs-on: ubuntu-latest
strategy:
matrix:
2 changes: 1 addition & 1 deletion modules/local/eggnog/mapper.nf
@@ -27,7 +27,7 @@ process EGGNOG_MAPPER {
script:
def args = task.ext.args ?: ''
prefix = task.ext.prefix ?: "${meta.id}"
- input = fasta =~ /\.gz$/ ? fasta.name.take(fasta.name.lastIndexOf('.')) : fasta
+ input = fasta.name.endsWith(".gz") ? fasta.baseName : fasta
gunzip = fasta =~ /\.gz$/ ? "gunzip -c ${fasta} > ${input}" : ""

"""
2 changes: 2 additions & 0 deletions subworkflows/local/eukulele.nf
@@ -19,13 +19,15 @@ workflow SUB_EUKULELE {
ch_versions = ch_versions.mix ( EUKULELE_SEARCH.out.versions )

FORMAT_TAX( EUKULELE_SEARCH.out.taxonomy_estimation.map { [ it[0], it[1] ] } )
+ ch_versions = ch_versions.mix ( FORMAT_TAX.out.versions )

FORMAT_TAX.out.tax
.join(eukulele)
.map { [ it[0], it[3], it[1] ] }
.set { ch_sum_taxonomy }

SUM_TAXONOMY ( ch_sum_taxonomy, feature_counts )
+ ch_versions = ch_versions.mix ( SUM_TAXONOMY.out.versions )

emit:
taxonomy_summary = SUM_TAXONOMY.out.taxonomy_summary
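
The two added ch_versions lines follow the standard nf-core pattern: each module's versions output is mixed into a single channel that the subworkflow emits, so FORMAT_TAX and SUM_TAXONOMY now contribute to the software-versions report as well. A minimal sketch of the pattern, with a hypothetical module standing in for any process that emits a versions file:

    ch_versions = Channel.empty()
    SOME_MODULE ( ch_input )                                    // hypothetical module call
    ch_versions = ch_versions.mix ( SOME_MODULE.out.versions )  // one mix per module
    // the emitted ch_versions is later passed to CUSTOM_DUMPSOFTWAREVERSIONS for the report
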
20 changes: 10 additions & 10 deletions workflows/metatdenovo.nf
@@ -29,9 +29,9 @@ if ( params.assembly ) {
if ( params.gff && params.protein_fasta ) {
orf_caller = 'user_orfs'
} else if ( params.gff && ! params.protein_fasta ) {
- exit 1, 'When supplying ORFs, both --gff and --protein_fasta must be specified, --protein_fasta file is missing!'
+ error 'When supplying ORFs, both --gff and --protein_fasta must be specified, --protein_fasta file is missing!'
} else if ( params.protein_fasta && ! params.gff ) {
- exit 1, 'When supplying ORFs, both --gff and --protein_fasta must be specified, --gff file is missing!'
+ error 'When supplying ORFs, both --gff and --protein_fasta must be specified, --gff file is missing!'
} else {
orf_caller = params.orf_caller
}
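
Replacing exit 1, '...' with error '...' switches to Nextflow's built-in error function, which aborts the run and prints the given message; the bare exit idiom is discouraged in current Nextflow. A minimal sketch of the same guard, reusing the parameter names from the hunk above:

    if ( params.gff && ! params.protein_fasta ) {
        // stops the pipeline immediately and reports the message
        error 'When supplying ORFs, both --gff and --protein_fasta must be specified, --protein_fasta file is missing!'
    }
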
@@ -173,7 +173,7 @@ workflow METATDENOVO {
.mix(ch_fastq.single)
.set { ch_cat_fastq }

- ch_versions = ch_versions.mix(CAT_FASTQ.out.versions.first().ifEmpty(null))
+ ch_versions = ch_versions.mix(CAT_FASTQ.out.versions.first())

//
// SUBWORKFLOW: Read QC and trim adapters
@@ -215,7 +215,7 @@ workflow METATDENOVO {
ch_collect_stats
.combine(ch_bbduk_logs)
.set {ch_collect_stats}
- ch_multiqc_files = ch_multiqc_files.mix(BBMAP_BBDUK.out.log.collect{it[1]}.ifEmpty([]))
+ ch_multiqc_files = ch_multiqc_files.mix(BBMAP_BBDUK.out.log.collect{it[1]})
} else {
ch_clean_reads = FASTQC_TRIMGALORE.out.reads
ch_bbduk_logs = Channel.empty()
@@ -302,7 +302,7 @@ workflow METATDENOVO {
.map { [ [ id: 'megahit' ], it ] }
.set { ch_assembly_contigs }
ch_versions = ch_versions.mix(MEGAHIT_INTERLEAVED.out.versions)
- } else { exit 1, 'Assembler not specified!' }
+ } else { error 'Assembler not specified!' }

// If the user asked for length filtering, perform that with SEQTK_SEQ (the actual length parameter is used in modules.config)
if ( params.min_contig_length > 0 ) {
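
The comment above refers to the usual nf-core arrangement in which the actual length threshold is passed to the module via ext.args in conf/modules.config rather than hard-coded in the workflow. A hedged sketch of what such an entry could look like (the selector and flag are assumptions, not copied from this repository; seqtk seq -L drops sequences shorter than the given length):

    // hypothetical conf/modules.config entry
    process {
        withName: 'SEQTK_SEQ' {
            ext.args = { "-L ${params.min_contig_length}" }
        }
    }
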
@@ -325,7 +325,7 @@
ch_versions = ch_versions.mix(PROKKA_SUBSETS.out.versions)
ch_gff = UNPIGZ_GFF.out.unzipped
ch_protein = PROKKA_SUBSETS.out.faa
- ch_multiqc_files = ch_multiqc_files.mix(PROKKA_SUBSETS.out.prokka_log.collect{it[1]}.ifEmpty([]))
+ ch_multiqc_files = ch_multiqc_files.mix(PROKKA_SUBSETS.out.prokka_log.collect{it[1]})
}

//
@@ -522,10 +522,10 @@ workflow METATDENOVO {
ch_methods_description = Channel.value(methods_description)

ch_multiqc_files = ch_multiqc_files.mix(CUSTOM_DUMPSOFTWAREVERSIONS.out.mqc_yml.collect())
- ch_multiqc_files = ch_multiqc_files.mix(FASTQC_TRIMGALORE.out.trim_zip.collect{it[1]}.ifEmpty([]))
- ch_multiqc_files = ch_multiqc_files.mix(TRANSRATE.out.assembly_qc.collect{it[1]}.ifEmpty([]))
- ch_multiqc_files = ch_multiqc_files.mix(BAM_SORT_STATS_SAMTOOLS.out.idxstats.collect{it[1]}.ifEmpty([]))
- ch_multiqc_files = ch_multiqc_files.mix(FEATURECOUNTS_CDS.out.summary.collect{it[1]}.ifEmpty([]))
+ ch_multiqc_files = ch_multiqc_files.mix(FASTQC_TRIMGALORE.out.trim_zip.collect{it[1]})
+ ch_multiqc_files = ch_multiqc_files.mix(TRANSRATE.out.assembly_qc.collect{it[1]})
+ ch_multiqc_files = ch_multiqc_files.mix(BAM_SORT_STATS_SAMTOOLS.out.idxstats.collect{it[1]})
+ ch_multiqc_files = ch_multiqc_files.mix(FEATURECOUNTS_CDS.out.summary.collect{it[1]})


MULTIQC (
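
A recurring change in this file is dropping the .ifEmpty(null) / .ifEmpty([]) guards in front of mix. Mixing an empty channel is already a no-op, whereas ifEmpty injects a spurious null or empty-list element into the merged channel. A small standalone illustration with made-up channel contents:

    Channel.of('fastqc.zip')
        .mix( Channel.empty() )                   // a skipped step contributes nothing
        .view()                                   // prints only: fastqc.zip

    Channel.of('fastqc.zip')
        .mix( Channel.empty().ifEmpty([]) )       // the old guard
        .view()                                   // prints fastqc.zip and a spurious []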
