diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 4ecfbfe3..4a9bc5c7 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -18,11 +18,11 @@
"python.linting.flake8Path": "/opt/conda/bin/flake8",
"python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle",
"python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle",
- "python.linting.pylintPath": "/opt/conda/bin/pylint"
+ "python.linting.pylintPath": "/opt/conda/bin/pylint",
},
// Add the IDs of extensions you want installed when the container is created.
- "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"]
- }
- }
+ "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"],
+ },
+ },
}
diff --git a/.nf-core.yml b/.nf-core.yml
index 51a92309..793cb80d 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -9,4 +9,6 @@ lint:
- lib/nfcore_external_java_deps.jar
files_unchanged:
- .gitignore
+ - assets/email_template.html
+ - assets/email_template.txt
- assets/sendmail_template.txt
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 04264a5f..d512d75c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,8 +9,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
Special thanks to the following for their contributions to the release:
+- [Adam Talbot](https://github.com/adamrtalbot)
- [Alexandru Mizeranschi](https://github.com/nicolae06)
- [Alexander Blaessle](https://github.com/alexblaessle)
+- [Maxime Garcia](https://github.com/maxulysse)
- [Sebastian Uhrig](https://github.com/suhrig)
Thank you to everyone else that has contributed by reporting bugs, enhancements or in any other way, shape or form.
@@ -18,9 +20,10 @@ Thank you to everyone else that has contributed by reporting bugs, enhancements
### Enhancements & fixes
- [PR #238](https://github.com/nf-core/fetchngs/pull/238) - Resolved bug when prefetching large studies ([#236](https://github.com/nf-core/fetchngs/issues/236))
-- [PR #241](https://github.com/nf-core/fetchngs/pull/241) - Download of big files time out and don't resume ([#169](https://github.com/nf-core/fetchngs/issues/169))
+- [PR #241](https://github.com/nf-core/fetchngs/pull/241) - Use wget instead of curl to download files from FTP ([#169](https://github.com/nf-core/fetchngs/issues/169), [#194](https://github.com/nf-core/fetchngs/issues/194))
- [PR #242](https://github.com/nf-core/fetchngs/pull/242) - Template update for nf-core/tools v2.11
- [PR #243](https://github.com/nf-core/fetchngs/pull/243) - Fixes for [PR #238](https://github.com/nf-core/fetchngs/pull/238)
+- [PR #245](https://github.com/nf-core/fetchngs/pull/245) - Refactor nf-test CI and test and other pre-release fixes ([#233](https://github.com/nf-core/fetchngs/issues/233))
- [PR #246](https://github.com/nf-core/fetchngs/pull/246) - Handle dark/light mode for logo in GitHub README properly
- [PR #248](https://github.com/nf-core/fetchngs/pull/248) - Update pipeline level test data path to use mirror on s3
- [PR #249](https://github.com/nf-core/fetchngs/pull/249) - Update modules which includes absolute paths for test data, making module level test compatible within the pipeline.
diff --git a/assets/email_template.html b/assets/email_template.html
index 1848021d..6e593a53 100644
--- a/assets/email_template.html
+++ b/assets/email_template.html
@@ -12,7 +12,7 @@
-<h1>nf-core/fetchngs v${version}</h1>
+<h1>nf-core/fetchngs ${version}</h1>
Run Name: $runName
<% if (!success){
diff --git a/assets/email_template.txt b/assets/email_template.txt
index f9393aa8..e9f8bcc6 100644
--- a/assets/email_template.txt
+++ b/assets/email_template.txt
@@ -4,7 +4,7 @@
|\\ | |__ __ / ` / \\ |__) |__ } {
| \\| | \\__, \\__/ | \\ |___ \\`-._,-`-,
`._,._,'
- nf-core/fetchngs v${version}
+ nf-core/fetchngs ${version}
----------------------------------------------------
Run Name: $runName
diff --git a/assets/schema_input.json b/assets/schema_input.json
index d451f9e9..13044b1b 100644
--- a/assets/schema_input.json
+++ b/assets/schema_input.json
@@ -7,7 +7,7 @@
"items": {
"type": "object",
"properties": {
- "id": {
+ "": {
"type": "string",
"pattern": "^(((SR|ER|DR)[APRSX])|(SAM(N|EA|D))|(PRJ(NA|EB|DB))|(GS[EM])|(syn))(\\d+)$",
"errorMessage": "Please provide a valid SRA, ENA, DDBJ or GEO identifier"
diff --git a/main.nf b/main.nf
index 2fe0061e..d3779d16 100644
--- a/main.nf
+++ b/main.nf
@@ -30,25 +30,19 @@ workflow NFCORE_FETCHNGS {
main:
- ch_versions = Channel.empty()
-
//
// WORKFLOW: Download FastQ files for SRA / ENA / GEO / DDBJ ids
//
if (params.input_type == 'sra') {
SRA ( ids )
- ch_versions = SRA.out.versions
//
// WORKFLOW: Download FastQ files for Synapse ids
//
} else if (params.input_type == 'synapse') {
SYNAPSE ( ids )
- ch_versions = SYNAPSE.out.versions
}
- emit:
- versions = ch_versions
}
/*
@@ -68,7 +62,16 @@ workflow {
//
// SUBWORKFLOW: Run initialisation tasks
//
- PIPELINE_INITIALISATION ()
+ PIPELINE_INITIALISATION (
+ params.version,
+ params.help,
+ params.validate_params,
+ params.monochrome_logs,
+ params.outdir,
+ params.input,
+ params.input_type,
+ params.ena_metadata_fields
+ )
//
// WORKFLOW: Run primary workflows for the pipeline
@@ -81,12 +84,13 @@ workflow {
// SUBWORKFLOW: Run completion tasks
//
PIPELINE_COMPLETION (
- NFCORE_FETCHNGS.out.versions,
params.input_type,
params.email,
params.email_on_fail,
- params.hook_url,
- PIPELINE_INITIALISATION.out.summary_params
+ params.plaintext_email,
+ params.outdir,
+ params.monochrome_logs,
+ params.hook_url
)
}
diff --git a/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf b/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf
index b36c6a24..21249dd7 100644
--- a/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf
+++ b/subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf
@@ -8,17 +8,17 @@
========================================================================================
*/
-include { UTILS_NEXTFLOW_PIPELINE; getWorkflowVersion } from '../../nf-core/utils_nextflow_pipeline'
-include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin'
-include {
- UTILS_NFCORE_PIPELINE;
- workflowCitation;
- nfCoreLogo;
- dashedLine;
- completionEmail;
- completionSummary;
- imNotification
-} from '../../nf-core/utils_nfcore_pipeline'
+include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin'
+include { fromSamplesheet } from 'plugin/nf-validation'
+include { paramsSummaryMap } from 'plugin/nf-validation'
+include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline'
+include { completionEmail } from '../../nf-core/utils_nfcore_pipeline'
+include { completionSummary } from '../../nf-core/utils_nfcore_pipeline'
+include { dashedLine } from '../../nf-core/utils_nfcore_pipeline'
+include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline'
+include { imNotification } from '../../nf-core/utils_nfcore_pipeline'
+include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline'
+include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline'
/*
========================================================================================
@@ -28,56 +28,64 @@ include {
workflow PIPELINE_INITIALISATION {
+ take:
+ version // boolean: Display version and exit
+ help // boolean: Display help text
+ validate_params // boolean: Boolean whether to validate parameters against the schema at runtime
+ monochrome_logs // boolean: Do not use coloured log outputs
+ outdir // string: The output directory where the results will be saved
+ input // string: File containing SRA/ENA/GEO/DDBJ identifiers one per line to download their associated metadata and FastQ files
+ input_type // string: Specifies the type of identifier provided via `--input` - available options are 'sra' and 'synapse'
+ ena_metadata_fields // string: Comma-separated list of ENA metadata fields to fetch before downloading data
+
main:
//
// Print version and exit if required and dump pipeline parameters to JSON file
//
UTILS_NEXTFLOW_PIPELINE (
- params.version,
+ version,
true,
- params.outdir,
+ outdir,
workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1
)
//
// Validate parameters and generate parameter summary to stdout
//
- def pre_help_text = nfCoreLogo(getWorkflowVersion())
- def post_help_text = '\n' + workflowCitation() + '\n' + dashedLine()
+ def pre_help_text = nfCoreLogo(monochrome_logs)
+ def post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs)
def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input ids.csv --outdir "
UTILS_NFVALIDATION_PLUGIN (
- params.help,
+ help,
workflow_command,
pre_help_text,
post_help_text,
- params.validate_params,
+ validate_params,
"nextflow_schema.json"
)
//
// Check config provided to the pipeline
//
- UTILS_NFCORE_PIPELINE (
- params.monochrome_logs
- )
+ UTILS_NFCORE_PIPELINE ()
//
// Auto-detect input id type
//
- ch_input = file(params.input)
- def input_type = ''
+ ch_input = file(input)
+ def inferred_input_type = ''
if (isSraId(ch_input)) {
- input_type = 'sra'
- sraCheckENAMetadataFields()
+ inferred_input_type = 'sra'
+ sraCheckENAMetadataFields(ena_metadata_fields)
} else if (isSynapseId(ch_input)) {
- input_type = 'synapse'
+ inferred_input_type = 'synapse'
} else {
error('Ids provided via --input not recognised please make sure they are either SRA / ENA / GEO / DDBJ or Synapse ids!')
}
- if (params.input_type != input_type) {
- error("Ids auto-detected as ${input_type}. Please provide '--input_type ${input_type}' as a parameter to the pipeline!")
+ if (input_type != inferred_input_type) {
+ error("Ids auto-detected as ${inferred_input_type}. Please provide '--input_type ${inferred_input_type}' as a parameter to the pipeline!")
}
// Read in ids from --input file
@@ -89,8 +97,7 @@ workflow PIPELINE_INITIALISATION {
.set { ch_ids }
emit:
- ids = ch_ids
- summary_params = UTILS_NFVALIDATION_PLUGIN.out.summary_params
+ ids = ch_ids
}
/*
@@ -102,38 +109,30 @@ workflow PIPELINE_INITIALISATION {
workflow PIPELINE_COMPLETION {
take:
- versions // channel: software tools versions
- input_type // string: 'sra' or 'synapse'
- email // string: email address
- email_on_fail // string: email address sent on pipeline failure
- hook_url // string: hook URL for notifications
- summary_params // map: Groovy map of the parameters used in the pipeline
+ input_type // string: 'sra' or 'synapse'
+ email // string: email address
+ email_on_fail // string: email address sent on pipeline failure
+ plaintext_email // boolean: Send plain-text email instead of HTML
+ outdir // path: Path to output directory where results will be published
+ monochrome_logs // boolean: Disable ANSI colour codes in log output
+ hook_url // string: hook URL for notifications
main:
- //
- // MODULE: Dump software versions for all tools used in the workflow
- //
- pipeline_version_info = Channel.of("""\"workflow\":
- nextflow: ${workflow.nextflow.version}
- ${workflow.manifest.name}: ${workflow.manifest.version}
- """.stripIndent())
-
- versions = versions.mix(pipeline_version_info)
- versions.collectFile(name: 'fetchngs_mqc_versions.yml', storeDir: "${params.outdir}/pipeline_info")
+ summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
//
// Completion email and summary
//
workflow.onComplete {
if (email || email_on_fail) {
- completionEmail(summary_params)
+ completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs)
}
- completionSummary()
+ completionSummary(monochrome_logs)
if (hook_url) {
- imNotification(summary_params)
+ imNotification(summary_params, hook_url)
}
if (input_type == 'sra') {
@@ -205,12 +204,12 @@ def isSynapseId(input) {
//
// Check and validate parameters
//
-def sraCheckENAMetadataFields() {
+def sraCheckENAMetadataFields(ena_metadata_fields) {
// Check minimal ENA fields are provided to download FastQ files
def valid_ena_metadata_fields = ['run_accession', 'experiment_accession', 'library_layout', 'fastq_ftp', 'fastq_md5']
- def ena_metadata_fields = params.ena_metadata_fields ? params.ena_metadata_fields.split(',').collect{ it.trim().toLowerCase() } : valid_ena_metadata_fields
- if (!ena_metadata_fields.containsAll(valid_ena_metadata_fields)) {
- error("Invalid option: '${params.ena_metadata_fields}'. Minimally required fields for '--ena_metadata_fields': '${valid_ena_metadata_fields.join(',')}'")
+ def actual_ena_metadata_fields = ena_metadata_fields ? ena_metadata_fields.split(',').collect{ it.trim().toLowerCase() } : valid_ena_metadata_fields
+ if (!actual_ena_metadata_fields.containsAll(valid_ena_metadata_fields)) {
+ error("Invalid option: '${ena_metadata_fields}'. Minimally required fields for '--ena_metadata_fields': '${valid_ena_metadata_fields.join(',')}'")
}
}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
index 9dbb86c7..e89a57ab 100644
--- a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -4,6 +4,7 @@
import org.yaml.snakeyaml.Yaml
import groovy.json.JsonOutput
+import nextflow.extension.FilesEx
/*
========================================================================================
@@ -16,7 +17,7 @@ workflow UTILS_NEXTFLOW_PIPELINE {
take:
print_version // bool
dump_parameters // bool
- output_directory // path: base directory used to publish pipeline results
+ outdir // path: base directory used to publish pipeline results
check_conda_channels // bool
main:
@@ -32,8 +33,8 @@ workflow UTILS_NEXTFLOW_PIPELINE {
//
// Dump pipeline parameters to a JSON file
//
- if (dump_parameters && output_directory) {
- dumpParametersToJSON(output_directory)
+ if (dump_parameters && outdir) {
+ dumpParametersToJSON(outdir)
}
//
@@ -42,9 +43,6 @@ workflow UTILS_NEXTFLOW_PIPELINE {
if (check_conda_channels) {
checkCondaChannels()
}
-
- emit:
- out = true
}
/*
@@ -74,16 +72,15 @@ def getWorkflowVersion() {
//
// Dump pipeline parameters to a JSON file
//
-def dumpParametersToJSON(output_directory) {
- def output_d = new File("${output_directory}/pipeline_info/")
- if (!output_d.exists()) {
- output_d.mkdirs()
- }
-
+def dumpParametersToJSON(outdir) {
def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
- def output_pf = new File(output_d, "params_${timestamp}.json")
+ def filename = "params_${timestamp}.json"
+    def temp_pf = new File(workflow.launchDir.toString(), ".${filename}")
def jsonStr = JsonOutput.toJson(params)
- output_pf.text = JsonOutput.prettyPrint(jsonStr)
+ temp_pf.text = JsonOutput.prettyPrint(jsonStr)
+
+ FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json")
+ temp_pf.delete()
}
//
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.functions.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.functions.nf.test
index 05f54930..9b18004b 100644
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.functions.nf.test
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.functions.nf.test
@@ -12,14 +12,6 @@ nextflow_function {
function "getWorkflowVersion"
- when {
- function {
- """
- // no inputs
- """
- }
- }
-
then {
assert function.success
assert snapshot(function.result).match()
@@ -43,8 +35,6 @@ nextflow_function {
then {
assert function.success
- assert function.result =~ /publish_dir_mode/
- assert function.result =~ /copy/
}
}
@@ -53,14 +43,6 @@ nextflow_function {
function "checkCondaChannels"
- when {
- function {
- """
- // no inputs
- """
- }
- }
-
then {
assert function.success
assert snapshot(function.result).match()
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
index 583f6a84..e2fa5b6c 100644
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
@@ -26,7 +26,6 @@ nextflow_workflow {
then {
assert workflow.success
- assert snapshot(workflow.out).match()
}
}
@@ -73,7 +72,6 @@ nextflow_workflow {
then {
assert workflow.success
- assert snapshot(workflow.out).match()
}
}
@@ -96,7 +94,6 @@ nextflow_workflow {
then {
assert workflow.success
- assert snapshot(workflow.out, path(params.outdir).list()).match()
}
}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test.snap b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test.snap
deleted file mode 100644
index 08040fd4..00000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test.snap
+++ /dev/null
@@ -1,44 +0,0 @@
-{
- "Should not create params JSON if no output directory": {
- "content": [
- {
- "0": [
- true
- ],
- "out": [
- true
- ]
- },
- [
-
- ]
- ],
- "timestamp": "2023-10-13T11:55:54.576748"
- },
- "Should dump params": {
- "content": [
- {
- "0": [
- true
- ],
- "out": [
- true
- ]
- }
- ],
- "timestamp": "2023-10-13T11:55:52.711329"
- },
- "Should run no inputs": {
- "content": [
- {
- "0": [
- true
- ],
- "out": [
- true
- ]
- }
- ],
- "timestamp": "2023-10-13T11:55:49.048315"
- }
-}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
index 5716198c..7735ec89 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
@@ -2,6 +2,9 @@
// Subworkflow with utility functions specific to the nf-core pipeline template
//
+import org.yaml.snakeyaml.Yaml
+import nextflow.extension.FilesEx
+
/*
========================================================================================
SUBWORKFLOW DEFINITION
@@ -10,15 +13,8 @@
workflow UTILS_NFCORE_PIPELINE {
- take:
- monochrome_logs // boolean: Disable ANSI colour codes in log output
-
main:
checkConfigProvided()
-
- emit:
- success = true
-
}
/*
@@ -54,10 +50,86 @@ def workflowCitation() {
" https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
}
+//
+// Generate workflow version string
+//
+def getWorkflowVersion() {
+ String version_string = ""
+ if (workflow.manifest.version) {
+ def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : ''
+ version_string += "${prefix_v}${workflow.manifest.version}"
+ }
+
+ if (workflow.commitId) {
+ def git_shortsha = workflow.commitId.substring(0, 7)
+ version_string += "-g${git_shortsha}"
+ }
+
+ return version_string
+}
+
+//
+// Get software versions for pipeline
+//
+def processVersionsFromYAML(yaml_file) {
+ Yaml yaml = new Yaml()
+ versions = yaml.load(yaml_file).collectEntries { k,v -> [ k.tokenize(':')[-1], v ] }
+ return yaml.dumpAsMap(versions).trim()
+}
+
+//
+// Get workflow version for pipeline
+//
+def workflowVersionToYAML() {
+ return """
+ Workflow:
+ $workflow.manifest.name: ${getWorkflowVersion()}
+ Nextflow: $workflow.nextflow.version
+ """.stripIndent().trim()
+}
+
+//
+// Get channel of software versions used in pipeline in YAML format
+//
+def softwareVersionsToYAML(ch_versions) {
+ return ch_versions
+ .unique()
+ .map { processVersionsFromYAML(it) }
+ .mix(Channel.of(workflowVersionToYAML()))
+}
+
+//
+// Get workflow summary for MultiQC
+//
+def paramsSummaryMultiqc(summary_params) {
+ def summary_section = ''
+ for (group in summary_params.keySet()) {
+ def group_params = summary_params.get(group) // This gets the parameters of that particular group
+ if (group_params) {
+            summary_section += "    <p style=\"font-size:110%\"><b>$group</b></p>\n"
+            summary_section += "    <dl class=\"dl-horizontal\">\n"
+            for (param in group_params.keySet()) {
+                summary_section += "        <dt>$param</dt><dd><samp>${group_params.get(param) ?: '<span style=\"color:#999999;\">N/A</a>'}</samp></dd>\n"
+            }
+            summary_section += "    </dl>\n"
+ }
+ }
+
+ String yaml_file_text = "id: '${workflow.manifest.name.replace('/','-')}-summary'\n"
+ yaml_file_text += "description: ' - this information is collected when the pipeline is started.'\n"
+ yaml_file_text += "section_name: '${workflow.manifest.name} Workflow Summary'\n"
+ yaml_file_text += "section_href: 'https://github.com/${workflow.manifest.name}'\n"
+ yaml_file_text += "plot_type: 'html'\n"
+ yaml_file_text += "data: |\n"
+ yaml_file_text += "${summary_section}"
+
+ return yaml_file_text
+}
+
//
// nf-core logo
//
-def nfCoreLogo(workflow_version, monochrome_logs = true) {
+def nfCoreLogo(monochrome_logs=true) {
Map colors = logColours(monochrome_logs)
String.format(
"""\n
@@ -67,7 +139,7 @@ def nfCoreLogo(workflow_version, monochrome_logs = true) {
${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset}
${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset}
${colors.green}`._,._,\'${colors.reset}
- ${colors.purple} ${workflow.manifest.name} ${workflow_version}${colors.reset}
+ ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset}
${dashedLine(monochrome_logs)}
""".stripIndent()
)
@@ -76,7 +148,7 @@ def nfCoreLogo(workflow_version, monochrome_logs = true) {
//
// Return dashed line
//
-def dashedLine(monochrome_logs = true) {
+def dashedLine(monochrome_logs=true) {
Map colors = logColours(monochrome_logs)
return "-${colors.dim}----------------------------------------------------${colors.reset}-"
}
@@ -84,7 +156,7 @@ def dashedLine(monochrome_logs = true) {
//
// ANSII colours used for terminal logging
//
-def logColours(monochrome_logs = true) {
+def logColours(monochrome_logs=true) {
Map colorcodes = [:]
// Reset / Meta
@@ -152,7 +224,7 @@ def logColours(monochrome_logs = true) {
//
// Construct and send completion email
//
-def completionEmail(summary_params, monochrome_logs = true) {
+def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs=true) {
// Set up the e-mail variables
def subject = "[$workflow.manifest.name] Successful: $workflow.runName"
@@ -178,7 +250,7 @@ def completionEmail(summary_params, monochrome_logs = true) {
misc_fields['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp
def email_fields = [:]
- email_fields['version'] = NfcoreTemplate.version(workflow)
+ email_fields['version'] = getWorkflowVersion()
email_fields['runName'] = workflow.runName
email_fields['success'] = workflow.success
email_fields['dateComplete'] = workflow.complete
@@ -191,9 +263,9 @@ def completionEmail(summary_params, monochrome_logs = true) {
email_fields['summary'] = summary << misc_fields
// Check if we are only sending emails on failure
- def email_address = params.email
- if (!params.email && params.email_on_fail && !workflow.success) {
- email_address = params.email_on_fail
+ def email_address = email
+ if (!email && email_on_fail && !workflow.success) {
+ email_address = email_on_fail
}
// Render the TXT template
@@ -217,8 +289,10 @@ def completionEmail(summary_params, monochrome_logs = true) {
Map colors = logColours(monochrome_logs)
if (email_address) {
try {
- if (params.plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') }
+ if (plaintext_email) { throw GroovyException('Send plaintext e-mail, not HTML') }
// Try to send HTML e-mail using sendmail
+ def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
+ sendmail_tf.withWriter { w -> w << sendmail_html }
[ 'sendmail', '-t' ].execute() << sendmail_html
log.info "-${colors.purple}[$workflow.manifest.name]${colors.green} Sent summary e-mail to $email_address (sendmail)-"
} catch (all) {
@@ -230,20 +304,22 @@ def completionEmail(summary_params, monochrome_logs = true) {
}
// Write summary e-mail HTML to a file
- def output_d = new File("${params.outdir}/pipeline_info/")
- if (!output_d.exists()) {
- output_d.mkdirs()
- }
- def output_hf = new File(output_d, "pipeline_report.html")
+ def output_hf = new File(workflow.launchDir.toString(), ".pipeline_report.html")
output_hf.withWriter { w -> w << email_html }
- def output_tf = new File(output_d, "pipeline_report.txt")
+ FilesEx.copyTo(output_hf.toPath(), "${outdir}/pipeline_info/pipeline_report.html");
+ output_hf.delete()
+
+ // Write summary e-mail TXT to a file
+ def output_tf = new File(workflow.launchDir.toString(), ".pipeline_report.txt")
output_tf.withWriter { w -> w << email_txt }
+ FilesEx.copyTo(output_tf.toPath(), "${outdir}/pipeline_info/pipeline_report.txt");
+ output_tf.delete()
}
//
// Print pipeline summary on completion
//
-def completionSummary(monochrome_logs = true) {
+def completionSummary(monochrome_logs=true) {
Map colors = logColours(monochrome_logs)
if (workflow.success) {
if (workflow.stats.ignoredCount == 0) {
@@ -259,9 +335,7 @@ def completionSummary(monochrome_logs = true) {
//
// Construct and send a notification to a web server as JSON e.g. Microsoft Teams and Slack
//
-def imNotification(summary_params) {
- def hook_url = params.hook_url
-
+def imNotification(summary_params, hook_url) {
def summary = [:]
for (group in summary_params.keySet()) {
summary << summary_params[group]
@@ -280,7 +354,7 @@ def imNotification(summary_params) {
misc_fields['nxf_timestamp'] = workflow.nextflow.timestamp
def msg_fields = [:]
- msg_fields['version'] = NfcoreTemplate.version(workflow)
+ msg_fields['version'] = getWorkflowVersion()
msg_fields['runName'] = workflow.runName
msg_fields['success'] = workflow.success
msg_fields['dateComplete'] = workflow.complete
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
index c4f71aeb..f57980c7 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
@@ -13,14 +13,6 @@ nextflow_function {
function "checkConfigProvided"
- when {
- function {
- """
- // input[0] = 1
- """
- }
- }
-
then {
assertAll (
{ assert function.success },
@@ -34,14 +26,6 @@ nextflow_function {
function "workflowCitation"
- when {
- function {
- """
- // input[0] = 1
- """
- }
- }
-
then {
assertAll (
{ assert function.success },
@@ -58,8 +42,7 @@ nextflow_function {
when {
function {
"""
- input[0] = "9.9.9"
- input[1] = false
+ input[0] = false
"""
}
}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
index bdf71d41..e95e2eec 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
@@ -1,19 +1,19 @@
{
"Test Function checkConfigProvided": {
"content": null,
- "timestamp": "2023-10-16T14:27:01.415849"
+ "timestamp": "2024-01-17T20:34:16.220739"
},
"Test Function nfCoreLogo": {
"content": [
- "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow 9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n"
+ "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n"
],
- "timestamp": "2023-10-16T14:27:51.566211"
+ "timestamp": "2024-01-17T20:34:23.89764"
},
"Test Function workflowCitation": {
"content": [
"If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md"
],
- "timestamp": "2023-10-16T14:27:03.505737"
+ "timestamp": "2024-01-17T20:34:20.063846"
},
"Test Function without logColours": {
"content": [
@@ -67,13 +67,13 @@
"biwhite": ""
}
],
- "timestamp": "2023-12-20T10:04:29.095368"
+ "timestamp": "2024-01-17T20:34:31.622874"
},
"Test Function dashedLine": {
"content": [
"-\u001b[2m----------------------------------------------------\u001b[0m-"
],
- "timestamp": "2023-10-16T14:27:07.721916"
+ "timestamp": "2024-01-17T20:34:27.729247"
},
"Test Function with logColours": {
"content": [
@@ -127,6 +127,6 @@
"biwhite": "\u001b[1;97m"
}
],
- "timestamp": "2023-12-20T10:04:31.258856"
+ "timestamp": "2024-01-17T20:34:35.549447"
}
}
\ No newline at end of file
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
index 3b1d4c93..b16445da 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test
@@ -10,19 +10,6 @@ nextflow_workflow {
tag "subworkflows/utils_nfcore_pipeline"
test("Should run without failures") {
-
- when {
- params {
- // define parameters here. Example:
- // outdir = "tests/results"
- }
- workflow {
- """
- input[0] = true
- """
- }
- }
-
then {
assert workflow.success
}
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
index 43c57187..9ec7226f 100644
--- a/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/main.nf
@@ -8,7 +8,9 @@
========================================================================================
*/
-include { paramsHelp; paramsSummaryLog; paramsSummaryMap; validateParameters } from 'plugin/nf-validation'
+include { paramsHelp } from 'plugin/nf-validation'
+include { paramsSummaryLog } from 'plugin/nf-validation'
+include { validateParameters } from 'plugin/nf-validation'
/*
========================================================================================
@@ -19,44 +21,39 @@ include { paramsHelp; paramsSummaryLog; paramsSummaryMap; validateParameters } f
workflow UTILS_NFVALIDATION_PLUGIN {
take:
- print_help // bool
- workflow_command // string: default commmand used to run pipeline
- pre_help_text // string: string to be printed before help text and summary log
- post_help_text // string: string to be printed after help text and summary log
- validate_params // bool: Validate parameters
- schema_filename // path: JSON schema file, null to use default value
+ print_help // bool
+ workflow_command // string: default commmand used to run pipeline
+ pre_help_text // string: string to be printed before help text and summary log
+ post_help_text // string: string to be printed after help text and summary log
+ validate_params // bool: Validate parameters
+ schema_filename // path: JSON schema file, null to use default value
main:
- log.debug "Using schema file: ${schema_filename}"
-
- // Default values for strings
- pre_help_text = pre_help_text ?: ''
- post_help_text = post_help_text ?: ''
- workflow_command = workflow_command ?: ''
-
- //
- // Print help message if needed
- //
- if (print_help) {
- log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text
- System.exit(0)
- }
-
- //
- // Print parameter summary to stdout
- //
- log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text
-
- //
- // Validate parameters relative to the parameter JSON schema
- //
- if (validate_params){
- validateParameters(parameters_schema: schema_filename)
- }
-
- summary_params = paramsSummaryMap(workflow, parameters_schema: schema_filename)
-
- emit:
- summary_params = summary_params
+ log.debug "Using schema file: ${schema_filename}"
+
+ // Default values for strings
+ pre_help_text = pre_help_text ?: ''
+ post_help_text = post_help_text ?: ''
+ workflow_command = workflow_command ?: ''
+
+ //
+ // Print help message if needed
+ //
+ if (print_help) {
+ log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text
+ System.exit(0)
+ }
+
+ //
+ // Print parameter summary to stdout
+ //
+ log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text
+
+ //
+ // Validate parameters relative to the parameter JSON schema
+ //
+ if (validate_params){
+ validateParameters(parameters_schema: schema_filename)
+ }
}
diff --git a/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml b/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
index 80438ccf..1ec5f98e 100644
--- a/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
+++ b/subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml
@@ -32,14 +32,6 @@ input:
type: string
description: |
The filename of the schema to validate against.
-
-output:
- - summary_params:
- type: file
- description: |
- A Groovy map constructed from parameters.
- Structure: [ val(meta), path(bam) ]
- pattern: "*.bam"
authors:
- "@adamrtalbot"
maintainers:
diff --git a/tests/main.nf.test b/tests/main.nf.test
index 1ab2f73a..3c36cd06 100644
--- a/tests/main.nf.test
+++ b/tests/main.nf.test
@@ -72,7 +72,7 @@ nextflow_pipeline {
{ assert new File("$outputDir/metadata/SRR14593545.runinfo_ftp.tsv").exists() },
{ assert new File("$outputDir/metadata/SRR14709033.runinfo_ftp.tsv").exists() },
{ assert new File("$outputDir/metadata/SRR9984183.runinfo_ftp.tsv").exists() },
- { assert new File("$outputDir/pipeline_info/fetchngs_mqc_versions.yml").exists() },
+ { assert new File("$outputDir/pipeline_info/nf_core_fetchngs_software_mqc_versions.yml").exists() },
{ assert new File("$outputDir/samplesheet/id_mappings.csv").exists() },
{ assert new File("$outputDir/samplesheet/multiqc_config.yml").exists() },
{ assert new File("$outputDir/samplesheet/samplesheet.csv").exists() }
diff --git a/workflows/sra/main.nf b/workflows/sra/main.nf
index 3d487431..62d8c577 100644
--- a/workflows/sra/main.nf
+++ b/workflows/sra/main.nf
@@ -9,6 +9,7 @@ include { SRA_FASTQ_FTP } from '../../modules/local/sra_fastq_ftp'
include { SRA_IDS_TO_RUNINFO } from '../../modules/local/sra_ids_to_runinfo'
include { SRA_RUNINFO_TO_FTP } from '../../modules/local/sra_runinfo_to_ftp'
include { SRA_TO_SAMPLESHEET } from '../../modules/local/sra_to_samplesheet'
+include { softwareVersionsToYAML } from '../../subworkflows/nf-core/utils_nfcore_pipeline'
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -150,6 +151,13 @@ workflow SRA {
ch_sample_mappings_yml = MULTIQC_MAPPINGS_CONFIG.out.yml
}
+ //
+ // Collate and save software versions
+ //
+ softwareVersionsToYAML(ch_versions)
+ .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_fetchngs_software_mqc_versions.yml', sort: true, newLine: true)
+
+
emit:
samplesheet = ch_samplesheet
mappings = ch_mappings
diff --git a/workflows/synapse/main.nf b/workflows/synapse/main.nf
index 6b5ea02c..38071b60 100644
--- a/workflows/synapse/main.nf
+++ b/workflows/synapse/main.nf
@@ -9,6 +9,7 @@ include { SYNAPSE_SHOW } from '../../modules/local/synapse_show'
include { SYNAPSE_GET } from '../../modules/local/synapse_get'
include { SYNAPSE_TO_SAMPLESHEET } from '../../modules/local/synapse_to_samplesheet'
include { SYNAPSE_MERGE_SAMPLESHEET } from '../../modules/local/synapse_merge_samplesheet'
+include { softwareVersionsToYAML } from '../../subworkflows/nf-core/utils_nfcore_pipeline'
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -105,6 +106,12 @@ workflow SYNAPSE {
)
ch_versions = ch_versions.mix(SYNAPSE_MERGE_SAMPLESHEET.out.versions)
+ //
+ // Collate and save software versions
+ //
+ softwareVersionsToYAML(ch_versions)
+ .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_fetchngs_software_mqc_versions.yml', sort: true, newLine: true)
+
emit:
fastq = ch_fastq
samplesheet = SYNAPSE_MERGE_SAMPLESHEET.out.samplesheet