From 97fd33586d3f67d211809f109b54797071a21a53 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 20 Jul 2023 15:41:31 -0500 Subject: [PATCH 001/128] Fix Treat HTTP headers as case insensitive (#4116) [ci fast] Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- .../main/nextflow/util/InsensitiveMap.groovy | 50 ++++++++++++++++ .../nextflow/util/InsensitiveMapTest.groovy | 60 +++++++++++++++++++ .../file/http/XFileSystemProvider.groovy | 10 ++-- .../file/http/XFileSystemProviderTest.groovy | 4 +- 4 files changed, 118 insertions(+), 6 deletions(-) create mode 100644 modules/nf-commons/src/main/nextflow/util/InsensitiveMap.groovy create mode 100644 modules/nf-commons/src/test/nextflow/util/InsensitiveMapTest.groovy diff --git a/modules/nf-commons/src/main/nextflow/util/InsensitiveMap.groovy b/modules/nf-commons/src/main/nextflow/util/InsensitiveMap.groovy new file mode 100644 index 0000000000..0be4ef9059 --- /dev/null +++ b/modules/nf-commons/src/main/nextflow/util/InsensitiveMap.groovy @@ -0,0 +1,50 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package nextflow.util + +import groovy.transform.CompileStatic + +/** + * A {@link Map} that handles keys in a case insensitive manner + * + * @author Paolo Di Tommaso + */ +@CompileStatic +class InsensitiveMap implements Map { + + @Delegate + private Map target + + private InsensitiveMap(Map map) { + this.target = map + } + + @Override + boolean containsKey(Object key) { + target.any( it -> key?.toString()?.toLowerCase() == it.key?.toString()?.toLowerCase()) + } + + @Override + V get(Object key) { + target.find(it -> key?.toString()?.toLowerCase() == it.key?.toString()?.toLowerCase())?.value + } + + static Map of(Map target) { + new InsensitiveMap(target) + } +} diff --git a/modules/nf-commons/src/test/nextflow/util/InsensitiveMapTest.groovy b/modules/nf-commons/src/test/nextflow/util/InsensitiveMapTest.groovy new file mode 100644 index 0000000000..99a3bb394c --- /dev/null +++ b/modules/nf-commons/src/test/nextflow/util/InsensitiveMapTest.groovy @@ -0,0 +1,60 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package nextflow.util + +import spock.lang.Specification + +/** + * + * @author Paolo Di Tommaso + */ +class InsensitiveMapTest extends Specification { + + def 'should get value by case insensitive keys' () { + given: + def map = InsensitiveMap.of([alpha: 1, BETA: 2]) + + expect: + map.alpha == 1 + map.ALPHA == 1 + map.Alpha == 1 + and: + map.get('alpha') == 1 + map.get('ALPHA') == 1 + map.get('Alpha') == 1 + and: + map.beta == 2 + map.BETA == 2 + map.Beta == 2 + and: + map.get('beta') == 2 + map.get('BETA') == 2 + map.get('Beta') == 2 + and: + map.foo == null + and: + map.containsKey('alpha') + map.containsKey('ALPHA') + and: + map.containsKey('beta') + map.containsKey('BETA') + and: + !map.containsKey('foo') + } + +} diff --git a/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy b/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy index 67e7f9bde0..1fa43da22f 100644 --- a/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy +++ b/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy @@ -44,6 +44,7 @@ import groovy.transform.PackageScope import groovy.util.logging.Slf4j import nextflow.SysEnv import nextflow.extension.FilesEx +import nextflow.util.InsensitiveMap import sun.net.www.protocol.ftp.FtpURLConnection import static XFileSystemConfig.* @@ -191,7 +192,7 @@ abstract class XFileSystemProvider extends FileSystemProvider { XAuthRegistry.instance.authorize(conn) } if ( conn instanceof HttpURLConnection && conn.getResponseCode() in [307, 308] && attempt < MAX_REDIRECT_HOPS ) { - def header = conn.getHeaderFields() + final header = InsensitiveMap.of(conn.getHeaderFields()) String location = header.get("Location")?.get(0) URL newPath = new URI(location).toURL() log.debug "Remote redirect URL: $newPath" @@ -454,15 +455,16 @@ abstract class XFileSystemProvider extends FileSystemProvider { return new XFileAttributes(null,-1) } if ( conn instanceof HttpURLConnection && 
conn.getResponseCode() in [200, 301, 302, 307, 308]) { - def header = conn.getHeaderFields() + final header = conn.getHeaderFields() return readHttpAttributes(header) } return null } protected XFileAttributes readHttpAttributes(Map> header) { - def lastMod = header.get("Last-Modified")?.get(0) - long contentLen = header.get("Content-Length")?.get(0)?.toLong() ?: -1 + final header0 = InsensitiveMap.>of(header) + def lastMod = header0.get("Last-Modified")?.get(0) + long contentLen = header0.get("Content-Length")?.get(0)?.toLong() ?: -1 def dateFormat = new SimpleDateFormat('E, dd MMM yyyy HH:mm:ss Z', Locale.ENGLISH) // <-- make sure date parse is not language dependent (for the week day) def modTime = lastMod ? FileTime.from(dateFormat.parse(lastMod).time, TimeUnit.MILLISECONDS) : (FileTime)null new XFileAttributes(modTime, contentLen) diff --git a/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy b/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy index 85c4c9479c..9bbabfa06e 100644 --- a/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy +++ b/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy @@ -63,7 +63,7 @@ class XFileSystemProviderTest extends Specification { def "should read file attributes from map"() { given: def fs = new HttpFileSystemProvider() - def attrMap = ['Last-Modified': ['Fri, 04 Nov 2016 21:50:34 GMT'], 'Content-Length': ['21729']] + def attrMap = ['last-modified': ['Fri, 04 Nov 2016 21:50:34 GMT'], 'content-length': ['21729']] when: def attrs = fs.readHttpAttributes(attrMap) @@ -85,7 +85,7 @@ class XFileSystemProviderTest extends Specification { def GERMAN = new Locale.Builder().setLanguage("de").setRegion("DE").build() Locale.setDefault(Locale.Category.FORMAT, GERMAN) def fs = new HttpFileSystemProvider() - def attrMap = ['Last-Modified': ['Fri, 04 Nov 2016 21:50:34 GMT'], 'Content-Length': ['21729']] + def attrMap = ['last-modified': 
['Fri, 04 Nov 2016 21:50:34 GMT'], 'content-length': ['21729']] when: def attrs = fs.readHttpAttributes(attrMap) From 903e4d906b9e7d3c51797e6db4a777e1913b7424 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Fri, 21 Jul 2023 12:56:33 +0200 Subject: [PATCH 002/128] Increase wave timeout to 60 mins Signed-off-by: Paolo Di Tommaso --- .../src/main/io/seqera/wave/plugin/config/HttpOpts.groovy | 2 +- .../test/io/seqera/wave/plugin/config/HttpOptsTest.groovy | 6 +++--- .../test/io/seqera/wave/plugin/config/WaveConfigTest.groovy | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy index 8d70ec60f8..20d72ca9d9 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy @@ -32,7 +32,7 @@ class HttpOpts { final private Duration connectTimeout HttpOpts(Map opts) { - connectTimeout = opts.connectTimeout as Duration ?: Duration.of('30s') + connectTimeout = opts.connectTimeout as Duration ?: Duration.of('60s') } java.time.Duration connectTimeout() { diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy index 8f17add048..53908327cd 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy @@ -30,12 +30,12 @@ class HttpOptsTest extends Specification { when: def opts = new HttpOpts([:]) then: - opts.connectTimeout() == Duration.ofSeconds(30) + opts.connectTimeout() == Duration.ofSeconds(60) when: - opts = new HttpOpts([connectTimeout:'50s']) + opts = new HttpOpts([connectTimeout:'120s']) then: - opts.connectTimeout() == Duration.ofSeconds(50) + opts.connectTimeout() == Duration.ofSeconds(120) } } diff --git 
a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy index 1e141e8a3b..8b7353f472 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy @@ -186,7 +186,7 @@ class WaveConfigTest extends Specification { when: def opts = new WaveConfig([:]) then: - opts.httpOpts().connectTimeout() == java.time.Duration.ofSeconds(30) + opts.httpOpts().connectTimeout() == java.time.Duration.ofSeconds(60) when: opts = new WaveConfig([httpClient: [connectTimeout: '90s']]) From 9fc1d3bd951b1d9d75ba8e64a5aa497cc087f62c Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Fri, 21 Jul 2023 13:01:11 +0200 Subject: [PATCH 003/128] Add retry logic to wave image await [ci fast] Signed-off-by: Paolo Di Tommaso --- .../nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index e512f90fe2..339283f9d5 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -490,11 +490,11 @@ class WaveClient { final req = HttpRequest.newBuilder() .uri(manifest) .headers(REQUEST_HEADERS) - .timeout(Duration.ofSeconds(15 * 60 + 10)) + .timeout(Duration.ofMinutes(5)) .GET() .build() final begin = System.currentTimeMillis() - final resp = httpClient.send(req, HttpResponse.BodyHandlers.ofString()) + final resp = httpSend(req) final code = resp.statusCode() if( code>=200 && code<400 ) { log.debug "Wave container available in ${nextflow.util.Duration.of(System.currentTimeMillis()-begin)}: [$code] ${resp.body()}" From ef9d3cf02c63da09a04df31452e8dc37ffc421df Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sat, 
22 Jul 2023 09:28:20 -0500 Subject: [PATCH 004/128] Improve "Unexpected input: '{'" error message (#4122) [ci fast] --- .../src/main/groovy/nextflow/script/ScriptParser.groovy | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy index 88046d723e..4feb2446c9 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy @@ -184,6 +184,9 @@ class ScriptParser { msg = msg != 'startup failed' ? msg : header msg = msg.replaceAll(/startup failed:\n/,'') msg = msg.replaceAll(~/$clazzName(: \d+:\b*)?/, header+'\n- cause:') + if( msg.contains "Unexpected input: '{'" ) { + msg += "\nNOTE: If this is the beginning of a process or workflow, there may be a syntax error in the body, such as a missing or extra comma, for which a more specific error message could not be produced." + } throw new ScriptCompilationException(msg, e) } } From 8975734d1219efc9eb068055cfa53d8189e0bf82 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sat, 22 Jul 2023 09:29:12 -0500 Subject: [PATCH 005/128] Improve description of channels and channel types (#4120) Signed-off-by: Ben Sherman --- docs/channel.md | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/docs/channel.md b/docs/channel.md index fcab491dca..754d48c4ad 100644 --- a/docs/channel.md +++ b/docs/channel.md @@ -2,12 +2,12 @@ # Channels -Nextflow is based on the Dataflow programming model in which processes communicate through channels. +Nextflow is based on the dataflow programming model in which processes communicate through channels. A channel has two major properties: -1. Sending a message is an *asynchronous* operation which completes immediately, without having to wait for the receiving process. -2. 
Receiving data is a blocking operation which stops the receiving process until the message has arrived. +1. Sending a message is an *asynchronous* (i.e. non-blocking) operation, which means the sender doesn't have to wait for the receiving process. +2. Receiving a message is a *synchronous* (i.e. blocking) operation, which means the receiving process must wait until a message has arrived. (channel-types)= @@ -27,12 +27,10 @@ A queue channel can be created by factory methods ([of](#of), [fromPath](#frompa ### Value channel -A *value channel* a.k.a. *singleton channel* is bound to a single value and can be read any number of times without being consumed. +A *value channel* contains a single value and can be consumed any number of times by a process or operator. A value channel can be created with the [value](#value) factory method or by any operator that produces a single value ({ref}`operator-first`, {ref}`operator-collect`, {ref}`operator-reduce`, etc). Additionally, a process will emit value channels if it is invoked with all value channels, including simple values which are implicitly wrapped in a value channel. -A value channel is implicitly created by a process when it is invoked with a simple value. Furthermore, a value channel is also implicitly created as output for a process whose inputs are all value channels. - For example: ```groovy @@ -54,7 +52,7 @@ workflow { } ``` -In the above example, since the `foo` process is invoked with a simple value instead of a channel, the input is implicitly converted to a value channel, and the output is also emitted as a value channel. +In the above example, since the `foo` process is invoked with a simple value instead of a channel, the input is implicitly wrapped in a value channel, and the output is also emitted as a value channel. See also: {ref}`process-multiple-input-channels`. 
From 9bcb0fc53e877868dd99703b374be5154f0c0fa6 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 16:43:13 +0200 Subject: [PATCH 006/128] Revert "Increase wave timeout to 60 mins" This reverts commit 903e4d906b9e7d3c51797e6db4a777e1913b7424. --- .../src/main/io/seqera/wave/plugin/config/HttpOpts.groovy | 2 +- .../test/io/seqera/wave/plugin/config/HttpOptsTest.groovy | 6 +++--- .../test/io/seqera/wave/plugin/config/WaveConfigTest.groovy | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy index 20d72ca9d9..8d70ec60f8 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/HttpOpts.groovy @@ -32,7 +32,7 @@ class HttpOpts { final private Duration connectTimeout HttpOpts(Map opts) { - connectTimeout = opts.connectTimeout as Duration ?: Duration.of('60s') + connectTimeout = opts.connectTimeout as Duration ?: Duration.of('30s') } java.time.Duration connectTimeout() { diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy index 53908327cd..8f17add048 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/HttpOptsTest.groovy @@ -30,12 +30,12 @@ class HttpOptsTest extends Specification { when: def opts = new HttpOpts([:]) then: - opts.connectTimeout() == Duration.ofSeconds(60) + opts.connectTimeout() == Duration.ofSeconds(30) when: - opts = new HttpOpts([connectTimeout:'120s']) + opts = new HttpOpts([connectTimeout:'50s']) then: - opts.connectTimeout() == Duration.ofSeconds(120) + opts.connectTimeout() == Duration.ofSeconds(50) } } diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy 
b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy index 8b7353f472..1e141e8a3b 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy @@ -186,7 +186,7 @@ class WaveConfigTest extends Specification { when: def opts = new WaveConfig([:]) then: - opts.httpOpts().connectTimeout() == java.time.Duration.ofSeconds(60) + opts.httpOpts().connectTimeout() == java.time.Duration.ofSeconds(30) when: opts = new WaveConfig([httpClient: [connectTimeout: '90s']]) From 9be64c17bf1e02f3c6d294c8765f93ab8b626eaf Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 16:47:39 +0200 Subject: [PATCH 007/128] Fix comment Signed-off-by: Paolo Di Tommaso --- .../io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy index 08a5369917..875cdfe338 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy @@ -52,7 +52,7 @@ class SubmitContainerTokenRequest { String towerEndpoint /** - * The ID of the executed container + * The ID of the workflow that submitted this container request */ String workflowId From cb6242c4ce6e6c80d7e1b6fe3f896d01208c8dd4 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:17:06 +0200 Subject: [PATCH 008/128] Bump nf-cloudcache@0.1.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-cloudcache/changelog.txt | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 plugins/nf-cloudcache/changelog.txt diff --git a/plugins/nf-cloudcache/changelog.txt b/plugins/nf-cloudcache/changelog.txt new file mode 100644 index 0000000000..0906ab5bb8 --- 
/dev/null +++ b/plugins/nf-cloudcache/changelog.txt @@ -0,0 +1,4 @@ +nf-cloudcache changelog +======================= +0.1.0 - 22 Jul 2023 +- Initial version From 57464746b6b0320dd5bbd99d7f313a79c01508f8 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:19:11 +0200 Subject: [PATCH 009/128] Bump nf-amazon@2.1.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-amazon/changelog.txt | 6 ++++++ plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/plugins/nf-amazon/changelog.txt b/plugins/nf-amazon/changelog.txt index bbd5bfe8a5..4472d2b1cb 100644 --- a/plugins/nf-amazon/changelog.txt +++ b/plugins/nf-amazon/changelog.txt @@ -1,5 +1,11 @@ nf-amazon changelog =================== +2.1.0 - 22 Jul 2023 +- Add support for AWS SSO credentials provider (#4045) [53e33cde] +- Wait for all child processes in nxf_parallel (#4050) [60a5f1a7] +- Ignore accelerator type for AWS Batch (#4043) [263ecca8] +- Bump Groovy 3.0.18 [207eb535] + 2.0.1 - 14 Jun 2023 - Add support for AWS SES as mail sending provider [df85d443] - Fix access to public S3 bucket when no creds are provided (#3992) [cf8ba466] diff --git a/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF b/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF index 862abca814..6c9c9e82fd 100644 --- a/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.aws.AmazonPlugin Plugin-Id: nf-amazon -Plugin-Version: 2.0.1 +Plugin-Version: 2.1.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 1895efc4ee28cd30b6b2b0accbd8eeff33a33844 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:19:24 +0200 Subject: [PATCH 010/128] Bump nf-azure@1.1.4 Signed-off-by: Paolo Di Tommaso --- plugins/nf-azure/changelog.txt | 7 +++++++ plugins/nf-azure/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files 
changed, 8 insertions(+), 1 deletion(-) diff --git a/plugins/nf-azure/changelog.txt b/plugins/nf-azure/changelog.txt index 121c4d7392..bcfce7cfe9 100644 --- a/plugins/nf-azure/changelog.txt +++ b/plugins/nf-azure/changelog.txt @@ -1,5 +1,12 @@ nf-azure changelog =================== +1.1.4 - 22 Jul 2023 +- Fix failing test [9a52f848] +- Fix Improve error message for invalid Azure URI [0f4d8867] +- Fix invalid detection of hierarchical namespace stub blobs as files (#4046) [ce06c877] +- Wait for all child processes in nxf_parallel (#4050) [60a5f1a7] +- Bump Groovy 3.0.18 [207eb535] + 1.1.3 - 19 Jum 2023 - Increase Azure min retry delay to 250ms [2e77e5e4] - Fix AzFileSystem retry policy (2) [c2f3cc96] diff --git a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF index 1f0f5822e4..72c440d1a9 100644 --- a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.azure.AzurePlugin Plugin-Id: nf-azure -Plugin-Version: 1.1.3 +Plugin-Version: 1.1.4 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.01.0-edge From f98feb7797f4427df79f5bf2f5e289a28b1e9696 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:19:39 +0200 Subject: [PATCH 011/128] Bump nf-ga4gh@1.1.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-ga4gh/src/resources/META-INF/MANIFEST.MF | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/nf-ga4gh/src/resources/META-INF/MANIFEST.MF b/plugins/nf-ga4gh/src/resources/META-INF/MANIFEST.MF index a5c9623cda..2835d79c84 100644 --- a/plugins/nf-ga4gh/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-ga4gh/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.ga4gh.Ga4ghPlugin Plugin-Id: nf-ga4gh -Plugin-Version: 1.0.6 +Plugin-Version: 1.1.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 
6aede7c0d10c7ccc6801364167f065f664f2a012 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:19:55 +0200 Subject: [PATCH 012/128] Bump nf-google@1.8.1 Signed-off-by: Paolo Di Tommaso --- plugins/nf-google/changelog.txt | 4 ++++ plugins/nf-google/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/nf-google/changelog.txt b/plugins/nf-google/changelog.txt index 0b6a190eff..ff439bc729 100644 --- a/plugins/nf-google/changelog.txt +++ b/plugins/nf-google/changelog.txt @@ -1,5 +1,9 @@ nf-google changelog =================== +1.8.1 - 22 Jul 2023 +- Wait for all child processes in nxf_parallel (#4050) [60a5f1a7] +- Bump Groovy 3.0.18 [207eb535] + 1.8.0 - 14 Jun 2023 - Add httpConnectTimeout and httpReadTimeout to Google options (#3974) [49fa15f7] - Add disk resource with type option for Google Batch (#3861) [166b3638] diff --git a/plugins/nf-google/src/resources/META-INF/MANIFEST.MF b/plugins/nf-google/src/resources/META-INF/MANIFEST.MF index bdd904e78e..92e475ff8d 100644 --- a/plugins/nf-google/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-google/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.google.GoogleCloudPlugin Plugin-Id: nf-google -Plugin-Version: 1.8.0 +Plugin-Version: 1.8.1 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.02.0-edge From 3278d798bece82431d37aa1ca9c0613c162430de Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:20:16 +0200 Subject: [PATCH 013/128] Bump nf-tower@1.5.15 Signed-off-by: Paolo Di Tommaso --- plugins/nf-tower/changelog.txt | 3 +++ plugins/nf-tower/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/plugins/nf-tower/changelog.txt b/plugins/nf-tower/changelog.txt index 3fc20448ba..dd3e42551a 100644 --- a/plugins/nf-tower/changelog.txt +++ b/plugins/nf-tower/changelog.txt @@ -1,5 +1,8 @@ nf-tower changelog =================== 
+1.5.15 - 22 Jul 2023 +- Bump Groovy 3.0.18 [207eb535] + 1.5.14 - 14 Jun 2023 - Update workflow.revision max length to match the one in Tower (#4010) [1433a903] diff --git a/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF b/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF index 096e8fa201..6fc78a5d49 100644 --- a/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.tower.plugin.TowerPlugin Plugin-Id: nf-tower -Plugin-Version: 1.5.14 +Plugin-Version: 1.5.15 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 2998db5d576f3db7154e95fe1055e225e66740d6 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:20:33 +0200 Subject: [PATCH 014/128] Bump nf-wave@0.11.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/changelog.txt | 11 +++++++++++ plugins/nf-wave/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/plugins/nf-wave/changelog.txt b/plugins/nf-wave/changelog.txt index bc30e545af..e0b6743fe7 100644 --- a/plugins/nf-wave/changelog.txt +++ b/plugins/nf-wave/changelog.txt @@ -1,5 +1,16 @@ nf-wave changelog ================== +0.11.0 - 22 Jul 2023 +- Add support legacy wave retry [ci fast] [73a1e7d4] +- Add support for Wave container freeze [9a5903e6] +- Add retry logic to wave image await [ci fast] [9fc1d3bd] +- Add missing header to wave container await [ci fast] [d39866e6] +- Allow disabling the Wave requirement when Fusion is enabled [9180d633] +- Improve handling Wave server errors [84f7a61a] +- Bump micromamba 1.4.9 [6307f9b5] +- Remove default arch from wave request [ci fast] [f0e5c0c1] +- Bump Groovy 3.0.18 [207eb535] + 0.10.0 - 14 Jun 2023 - Add retry policy to Wave http client [1daebeef] - Add support for arch auto-detection to Wave [7b5fdaf0] diff --git a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF 
b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF index ece0b6c6e2..d30a66bedb 100644 --- a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.wave.plugin.WavePlugin Plugin-Id: nf-wave -Plugin-Version: 0.10.0 +Plugin-Version: 0.11.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 5754bf8265c54def527e0aa8a5c4d8cfbaea99a3 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:38:37 +0200 Subject: [PATCH 015/128] Update changelog Signed-off-by: Paolo Di Tommaso --- changelog.txt | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/changelog.txt b/changelog.txt index 2518538584..62d965f729 100644 --- a/changelog.txt +++ b/changelog.txt @@ -26,6 +26,8 @@ NEXTFLOW CHANGE-LOG - Fix packing all including Wave [735b8d9c] - Fix static compiler errors [f48a473c] - Improve documentation on Nextflow scripts (#3953) [80050c03] +- Improve description of channels and channel types (#4120) [8975734d] +- Improve "Unexpected input: '{'" error message (#4122) [ci fast] [ef9d3cf0] - Minor change in Wave config [4da0442a] - Prevent null exit code when Google batch is unable to acces exit status [f68a39ec] - Refactor Conda and Spack support for Wave to Java [36b9e226] @@ -33,11 +35,13 @@ NEXTFLOW CHANGE-LOG - Update workflow.revision max length to match the one in Tower (#4010) [1433a903] - Bump amazocorretto:17.0.7 [c8aa1214] - Bump azure-storage-blob:12.22.1 [2a36fa77] -- Bump nf-wave@0.10.0 [0872fba5] -- Bump nf-tower@1.5.14 [64bf3115] -- Bump nf-google@1.8.0 [7ca7d808] -- Bump nf-azure@1.1.1 [afd368c4] -- Bump nf-amazon@2.0.1 [58c332a1] +- Bump nf-wave@0.11.0 [2998db5d] +- Bump nf-tower@1.5.15 [3278d798] +- Bump nf-google@1.8.1 [6aede7c0] +- Bump nf-ga4gh@1.1.0 [f98feb77] +- Bump nf-azure@1.1.4 [1895efc4] +- Bump nf-amazon@2.1.0 [57464746] +- Bump nf-cloudcache@0.1.0 [cb6242c4] 23.05.0-edge - 15 May 
2023 - Add support for custom custom root directory to resolve relative paths (#3942) [f06bb1f7] From fd4ff04f827ebca0f25efd3b079a68f390a82079 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 17:46:29 +0200 Subject: [PATCH 016/128] [release 23.07.0-edge] Update timestamp and build number [ci fast] Signed-off-by: Paolo Di Tommaso --- VERSION | 2 +- docs/conf.py | 4 ++-- .../src/main/resources/META-INF/plugins-info.txt | 13 +++++++------ modules/nf-commons/src/main/nextflow/Const.groovy | 6 +++--- nextflow | 2 +- nextflow.md5 | 2 +- nextflow.sha1 | 2 +- nextflow.sha256 | 2 +- 8 files changed, 17 insertions(+), 16 deletions(-) diff --git a/VERSION b/VERSION index 28d5238c54..59200001a7 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -23.06.0-edge \ No newline at end of file +23.07.0-edge \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 1104ea6aac..be318c7325 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -58,9 +58,9 @@ # built documents. # # The short X.Y version. -version = '23.06' +version = '23.07' # The full version, including alpha/beta/rc tags. -release = '23.06.0-edge' +release = '23.07.0-edge' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt index 12241cd1e6..604d402fab 100644 --- a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt +++ b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt @@ -1,8 +1,9 @@ -nf-amazon@2.0.1 -nf-azure@1.1.3 +nf-amazon@2.1.0 +nf-azure@1.1.4 +nf-cloudcache@0.1.0 nf-codecommit@0.1.5 nf-console@1.0.6 -nf-ga4gh@1.0.6 -nf-google@1.8.0 -nf-tower@1.5.14 -nf-wave@0.10.0 \ No newline at end of file +nf-ga4gh@1.1.0 +nf-google@1.8.1 +nf-tower@1.5.15 +nf-wave@0.11.0 \ No newline at end of file diff --git a/modules/nf-commons/src/main/nextflow/Const.groovy b/modules/nf-commons/src/main/nextflow/Const.groovy index ce4af4bc56..f2652b923b 100644 --- a/modules/nf-commons/src/main/nextflow/Const.groovy +++ b/modules/nf-commons/src/main/nextflow/Const.groovy @@ -52,17 +52,17 @@ class Const { /** * The application version */ - static public final String APP_VER = "23.06.0-edge" + static public final String APP_VER = "23.07.0-edge" /** * The app build time as linux/unix timestamp */ - static public final long APP_TIMESTAMP = 1686776566745 + static public final long APP_TIMESTAMP = 1690040653541 /** * The app build number */ - static public final int APP_BUILDNUM = 5864 + static public final int APP_BUILDNUM = 5870 /** * The app build time string relative to UTC timezone diff --git a/nextflow b/nextflow index 07f27a1a36..61160e818f 100755 --- a/nextflow +++ b/nextflow @@ -15,7 +15,7 @@ # limitations under the License. 
[[ "$NXF_DEBUG" == 'x' ]] && set -x -NXF_VER=${NXF_VER:-'23.06.0-edge'} +NXF_VER=${NXF_VER:-'23.07.0-edge'} NXF_ORG=${NXF_ORG:-'nextflow-io'} NXF_HOME=${NXF_HOME:-$HOME/.nextflow} NXF_PROT=${NXF_PROT:-'https'} diff --git a/nextflow.md5 b/nextflow.md5 index d5f6e0ece0..6bfd0cfe97 100644 --- a/nextflow.md5 +++ b/nextflow.md5 @@ -1 +1 @@ -424fe74b9ce5d22dc33024cc11e2f3e2 +d7cff3459c0394236a4f1781843f4655 diff --git a/nextflow.sha1 b/nextflow.sha1 index 1e93fd2666..c056fda67a 100644 --- a/nextflow.sha1 +++ b/nextflow.sha1 @@ -1 +1 @@ -8efb680493dbf8ecff34d3915fc80b1597b952b0 +a45925a853a3e516e5343b46ae291d66e93a047b diff --git a/nextflow.sha256 b/nextflow.sha256 index 06db20c470..7e2e60eb3a 100644 --- a/nextflow.sha256 +++ b/nextflow.sha256 @@ -1 +1 @@ -e40f11caadbe721fac44e6db4b58ff2c151d95c3a88cb5d7362bb379c0d7dbbf +185b41f6b7cdf8061f1fb93621fd5a9e648d6eb152ce66044a9ff6c98506b608 From 00eb145cc758db038bcbe11bc53315877cdf3514 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 22 Jul 2023 18:24:29 +0200 Subject: [PATCH 017/128] Bump amazoncorretto:17.0.8 Signed-off-by: Paolo Di Tommaso --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 26e49b8ed5..5fbe9209ed 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM amazoncorretto:17.0.7 +FROM amazoncorretto:17.0.8 RUN yum install -y procps-ng shadow-utils ENV NXF_HOME=/.nextflow From 547fad62749111d14085bd504b66862a32792ce7 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 24 Jul 2023 10:21:32 +0200 Subject: [PATCH 018/128] Improve wave config logging Signed-off-by: Paolo Di Tommaso --- .../nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index 339283f9d5..c7b3554590 100644 --- 
a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -116,7 +116,7 @@ class WaveClient { this.tower = new TowerConfig(session.config.tower as Map ?: Collections.emptyMap(), SysEnv.get()) this.endpoint = config.endpoint() this.condaChannels = session.getCondaConfig()?.getChannels() ?: DEFAULT_CONDA_CHANNELS - log.debug "Wave server endpoint: ${endpoint}" + log.debug "Wave endpoint: ${endpoint}; config: $config" this.packer = new Packer() this.waveRegistry = new URI(endpoint).getAuthority() // create cache From 9a33e93651b4b628f9fba70f0d595e8b286efbcb Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 24 Jul 2023 12:37:04 +0200 Subject: [PATCH 019/128] Fix missing changelog for version 23.07.0-edge [ci fast] Signed-off-by: Paolo Di Tommaso --- changelog.txt | 44 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/changelog.txt b/changelog.txt index 62d965f729..ecd6880229 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,5 +1,47 @@ NEXTFLOW CHANGE-LOG =================== +23.07.0-edge - 23 Jul 2023 +- Add CPU model name to trace files and traceRecord (#3946) [e0d91bf7] +- Add ability to disable CLI params type detection [9a1c584d] +- Add cloudcache plugin (#4097) [ac90cc26] +- Add missing header to Wave container await [d39866e6] +- Add remote bin support for TES in a workdir (#3990) [8a22168a] +- Add retry logic to wave image await [9fc1d3bd] +- Add rule to build Nextflow docker image for ARM (#4020) [705d55f5] +- Add support for AWS SSO credentials provider (#4045) [53e33cde] +- Add support for Wave container freeze [9a5903e6] +- Add support legacy Wave retry [73a1e7d4] +- Allow SLURM executor option `--mem-per-cpu` (#4023) [96c04e3b] +- Allow disabling the Wave requirement when Fusion is enabled [9180d633] +- Disable Singularity and Apptainer home mount by default (#4056) [a0ee4657] +- Document 
`NXF_WRAPPER_STAGE_FILE_THRESHOLD` environment variable (#4113) [ci skip] [bda47567] +- Fix AzFileSystem retry policy [ba9b6d18] [c2f3cc96] +- Fix Improve error message for invalid Azure URI [0f4d8867] +- Fix Treat HTTP headers as case insensitive (#4116) [97fd3358] +- Fix invalid detection of hierarchical namespace stub blobs as files (#4046) [ce06c877] +- Fix stage script in Fusion script launcher (#4109) [0933f47e] +- Ignore accelerator type for AWS Batch (#4043) [263ecca8] +- Implement Weblog feature as an external plugin [f9f2c338] +- Improve "Unexpected input: '{'" error message (#4122) [ef9d3cf0] +- Improve Azure retry logging [de58697a] +- Improve description of channels and channel types (#4120) [8975734d] +- Improve handling Wave server errors [84f7a61a] +- Increase Azure min retry delay to 250ms [2e77e5e4] +- Remove default arch from wave request [f0e5c0c1] +- Remove logging of report JSON data (#4098) [099e5039] +- Wait for all child processes in nxf_parallel (#4050) [60a5f1a7] +- Bump Groovy 3.0.18 [207eb535] +- Bump micromamba 1.4.9 [6307f9b5] +- Bump nf-amazon@2.1.0 [57464746] +- Bump nf-azure@1.1.2 [e1512f6a] +- Bump nf-azure@1.1.3 [27e7f663] +- Bump nf-azure@1.1.4 [1895efc4] +- Bump nf-cloudcache@0.1.0 [cb6242c4] +- Bump nf-ga4gh@1.1.0 [f98feb77] +- Bump nf-google@1.8.1 [6aede7c0] +- Bump nf-tower@1.5.15 [3278d798] +- Bump nf-wave@0.11.0 [2998db5d] + 23.06.0-edge - 14 Jun 2023 - Add AWS Kms integration test [19449bf4] - Add Wave containers reports (preview) [9d9e2758] @@ -27,7 +69,7 @@ NEXTFLOW CHANGE-LOG - Fix static compiler errors [f48a473c] - Improve documentation on Nextflow scripts (#3953) [80050c03] - Improve description of channels and channel types (#4120) [8975734d] -- Improve "Unexpected input: '{'" error message (#4122) [ci fast] [ef9d3cf0] +- Improve "Unexpected input: '{'" error message (#4122) [ef9d3cf0] - Minor change in Wave config [4da0442a] - Prevent null exit code when Google batch is unable to acces exit status [f68a39ec] - 
Refactor Conda and Spack support for Wave to Java [36b9e226] From 22638d460ae61d308103bf7291f22c9586d10f87 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 25 Jul 2023 03:15:39 -0500 Subject: [PATCH 020/128] Improve plugin docs (#3957) Signed-off-by: Ben Sherman Signed-off-by: Phil Ewels Signed-off-by: Paolo Di Tommaso Co-authored-by: Phil Ewels Co-authored-by: Paolo Di Tommaso --- docs/cli.md | 22 +++ docs/config.md | 5 + docs/plugins.md | 358 +++++++++++++++++++++++++++++++++++++++++------- docs/sharing.md | 4 + 4 files changed, 342 insertions(+), 47 deletions(-) diff --git a/docs/cli.md b/docs/cli.md index 164ca94a04..9e707d294f 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -469,6 +469,8 @@ profiles { } ``` +(cli-console)= + ### console Launch the Nextflow interactive console. @@ -886,6 +888,26 @@ $ nextflow log tiny_leavitt -F 'process =~ /splitLetters/' work/1f/f1ea9158fb23b53d5083953121d6b6 ``` +(cli-plugin)= + +### plugin + +Manage plugins and run plugin-specific commands. + +```console +$ nextflow plugin [options] +``` + +The `plugin` command provides several subcommands for managing and using plugins: + +`install ` + +: Install a plugin. Multiple plugins can be specified as a comma-separated list. Each plugin id consists of a name and optional version separated by a `@`. + +`: [options]` + +: Execute a plugin-specific command. + ### pull Download or update a project. diff --git a/docs/config.md b/docs/config.md index cf68380b31..f28cf22659 100644 --- a/docs/config.md +++ b/docs/config.md @@ -1594,6 +1594,11 @@ The following environment variables control the configuration of the Nextflow ru `NXF_PID_FILE` : Name of the file where the process PID is saved when Nextflow is launched in background. +`NXF_PLUGINS_TEST_REPOSITORY` +: :::{versionadded} 23.04.0 + ::: +: Defines a custom plugin registry or plugin release URL for testing plugins outside of the main registry. See {ref}`testing-plugins` for more information. 
+ `NXF_SCM_FILE` : :::{versionadded} 20.10.0 ::: diff --git a/docs/plugins.md b/docs/plugins.md index eb46489260..f7f1c22d69 100644 --- a/docs/plugins.md +++ b/docs/plugins.md @@ -2,89 +2,205 @@ # Plugins -## Main concepts +Nextflow has a plugin system that allows the use of extensible components that are downloaded and installed at runtime. -Nextflow is based on a plugins system that allows extending core functionalities via pluggable components that are download and installed at runtime. +## Core plugins -Currently the following functionalities are implemented as plugin components and they make part of the Nextflow *default* plugins: +The following functionalities are provided via plugin components, and they make part of the Nextflow *core* plugins: - `nf-amazon`: Support for Amazon cloud. - `nf-azure`: Support for Azure cloud. - `nf-console`: Implement Nextflow [REPL console](https://www.nextflow.io/blog/2015/introducing-nextflow-console.html). - `nf-ga4gh`: Support [GA4GH APIs](https://www.ga4gh.org/). - `nf-google`: Support for Google cloud. -- `nf-cloudcache`: Support for Nextflow cache in object storage. -- `nf-tower`: Support for [Nextflow Tower](https://tower.nf) platform. +- `nf-tower`: Support for [Tower](https://tower.nf) cloud platform. +- `nf-wave`: Support for [Wave containers](https://seqera.io/wave/) service. -## Configuration -Nextflow *default* plugins do not require any configuration. They are automatically installed when the corresponding feature is requested by a Nextflow pipeline. +## Using plugins -To use **non-default** plugins in your pipeline execution, you must declare them in the Nextflow configuration file, listing each plugin as shown below: +The core plugins do not require any configuration. They are automatically installed when the corresponding feature is requested by a Nextflow pipeline. You can still specify them as described below, e.g. 
if you want to pin the version of a plugin, however if you try to use a plugin version that isn't compatible with your Nextflow version, Nextflow will fail. + +You can enable a plugin by declaring it in your Nextflow configuration: ```groovy plugins { - id 'nf-hello@0.1.0' + id 'nf-hello@0.1.0' } ``` -The plugin identifier consists of the plugin name and plugin version separated by a `@`. - -Alternatively, plugins can be required using the `-plugins` command line option: +Or you can use the `-plugins` command line option: ```bash -nextflow run -plugins nf-hello@0.1.0 +nextflow run -plugins nf-hello@0.1.0 ``` -Multiple plugins can be specified by separating them with a comma. When specifying plugins via the command line, any plugin declarations in the Nextflow config file are ignored. +The plugin identifier consists of the plugin name and plugin version separated by a `@`. Multiple plugins can be specified +in the configuration with multiple `id` declarations, or on the command line as a comma-separated list. When specifying +plugins via the command line, any plugin declarations in the configuration file are ignored. -## Index +The default plugins are documented in this documentation. For all other plugins, please refer to the plugin's code repository +for documentation and support. -Nextflow resolves plugins download location through the [Plugins index](https://github.com/nextflow-io/plugins/). The index stores for each plugin the available version, the creation date, checksum and the link from where the plugin file is downloaded. +## Writing plugins -To add a new plugin to the Index, create a pull request including the request plugin metadata. The [nf-hello](https://github.com/nextflow-io/nf-hello) repository provides a minimal code example for the implementation of a Nextflow plugin. 
+To get started with your own plugin, refer to the [nf-hello](https://github.com/nextflow-io/nf-hello) repository, +which provides a minimal plugin implementation with several examples of different extension points, as well as instructions +for building, testing, and publishing. -## Import operators from plugin +Nextflow's plugin system exposes a variety of extension points for plugins. The following sections describe how to use +these extension points when writing a plugin, as well as how they are used in a pipeline. -:::{versionadded} 22.04.0 +:::{note} +If you would like to implement something in a plugin that isn't covered by any of the following sections, feel free to +create an issue on GitHub and describe your use case. In general, any class in the Nextflow codebase that implements +`ExtensionPoint` can be extended by a plugin, and existing plugins are a great source of examples when writing new plugins. ::: -Nextflow supports the inclusion of custom operators from Nextflow plugins. +:::{note} +Plugin extension points must be added to `extensions.idx` in the plugin repository to make them discoverable. +See the `nf-hello` plugin for an example. +::: -For example: +### Commands + +Plugins can define custom CLI commands that can be executed with the `nextflow plugin` command. + +To implement a plugin-specific command, implement the `PluginExecAware` interface in your plugin entrypoint +(the class that extends `BasePlugin`). Alternatively, you can implement the `PluginAbstractExec` trait, which +provides an abstract implementation with some boilerplate code. 
This trait requires you to implement two methods, +`getCommands()` and `exec()`: ```groovy -include { sqlInsert; fromQuery as selectFromTable } from 'plugin/nf-sqldb' +import nextflow.cli.PluginAbstractExec +import nextflow.plugin.BasePlugin + +class MyPlugin extends BasePlugin implements PluginAbstractExec { + @Override + List getCommands() { + [ 'hello' ] + } + + @Override + int exec(String cmd, List args) { + if( cmd == 'hello' ) { + println "Hello! You gave me these arguments: ${args.join(' ')}" + return 0 + } + else { + System.err.println "Invalid command: ${cmd}" + return 1 + } + } +} +``` -def sql = "select * from FOO" -channel - .selectFromTable(sql, db: "test", emitColumns:true) - .sqlInsert(into:"BAR", columns:'id', db:"test") +You can then execute this command using the `nextflow plugin` command: + +```bash +nextflow plugin my-plugin:hello --foo --bar ``` -The above snippet includes the operators `sqlInsert` and `fromQuery` from the [nf-sqldb](https://github.com/nextflow-io/nf-sqldb) plugin. The latter will be accessible using the `selectFromTable` alias in the script. +See the {ref}`cli-plugin` CLI command for usage information. -:::{note} -The prefix `plugin/` must precede the plugin name in the include `from` statement. +### Configuration + +Plugins can access the resolved Nextflow configuration through the session object using `session.config.navigate()`. +Several extension points provide the session object for this reason. This method allows you to query any configuration +option in a safe manner -- if the option isn't defined, it will return `null`. A common practice is to define any +configuration for your plugin in a custom config scope. 
+ +Here is an example of querying a config option in a trace observer hook: + +```groovy +import nextflow.Session +import nextflow.trace.TraceObserver + +class MyObserver implements TraceObserver { + + @Override + void onFlowCreate(Session session) { + final message = session.config.navigate('myplugin.create.message') + println message + } +} +``` + +You can then set this option in your config file: + +```groovy +// dot syntax +myplugin.create.message = "I'm alive!" + +// closure syntax +myplugin { + create { + message = "I'm alive!" + } +} +``` + +### Executors + +Plugins can define custom executors that can then be used with the `executor` process directive. + +To implement an executor, create a class in your plugin that extends the [`Executor`](https://github.com/nextflow-io/nextflow/blob/master/modules/nextflow/src/main/groovy/nextflow/executor/Executor.groovy) class and implements the `ExtensionPoint` interface. Add the `@ServiceName` annotation to your class with the name of your executor: + +```groovy +import nextflow.executor.Executor +import nextflow.util.ServiceName +import org.pf4j.ExtensionPoint + +@ServiceName('my-executor') +class MyExecutor extends Executor implements ExtensionPoint { + + // ... + +} +``` + +You can then use this executor in your pipeline: + +```groovy +process foo { + executor 'my-executor' +} +``` + +:::{tip} +Refer to the source code of Nextflow's built-in executors to see how to implement the various components of an executor. +You might be able to implement most of your executor by simply reusing existing code. ::: -## Import custom functions from plugin +### Functions :::{versionadded} 22.09.0-edge ::: -Nextflow supports the inclusion of custom functions from Nextflow plugins. +Plugins can define custom Groovy functions, which can then be included into Nextflow pipelines. 
-For example, a plugin can export a util function to reverse a String: +To implement a custom function, create a class in your plugin that extends the `PluginExtensionPoint` class, and implement +your function with the `Function` annotation: ```groovy -@nextflow.plugin.extension.Function -String reverseString( String origin ){ - origin.reverse() +import nextflow.Session +import nextflow.plugin.extension.Function +import nextflow.plugin.extension.PluginExtensionPoint + +class MyExtension implements PluginExtensionPoint { + + @Override + void init(Session session) {} + + @Function + String reverseString(String origin) { + origin.reverse() + } + } ``` -And this function can be used by the pipeline: +You can then use this function in your pipeline: ```groovy include { reverseString } from 'plugin/my-plugin' @@ -92,29 +208,177 @@ include { reverseString } from 'plugin/my-plugin' channel.of( reverseString('hi') ) ``` -The above snippet includes a function from the plugin and allows the channel to call it directly. - -In the same way as operators, functions can be aliased: +You can also use an alias: ```groovy include { reverseString as anotherReverseMethod } from 'plugin/my-plugin' ``` -## Testing custom plugins +### Operators -To make a plugin available to Nextflow, it needs to be included in the [plugins repository index](https://github.com/nextflow-io/plugins). +:::{versionadded} 22.04.0 +::: + +Plugins can define custom channel factories and operators, which can then be included into Nextflow pipelines. 
+ +To implement a custom factory or operator, create a class in your plugin that extends the `PluginExtensionPoint` class, +and implement your function with the `Factory` or `Operator` annotation: + +```groovy +import groovyx.gpars.dataflow.DataflowReadChannel +import groovyx.gpars.dataflow.DataflowWriteChannel +import nextflow.Session +import nextflow.plugin.extension.Factory +import nextflow.plugin.extension.Operator +import nextflow.plugin.extension.PluginExtensionPoint + +class MyExtension implements PluginExtensionPoint { + + @Override + void init(Session session) {} -However, in order to validate a plugin before it's published in the repository index, it is possible to use the environment -variable `NXF_PLUGINS_TEST_REPOSITORY` to specify the URI of a custom index JSON file or the plugin JSON meta file. + @Factory + DataflowWriteChannel fromQuery(Map opts, String query) { + // ... + } + + @Operator + DataflowWriteChannel sqlInsert(DataflowReadChannel source, Map opts) { + // ... + } + +} +``` + +You can then use them in your pipeline: + +```groovy +include { sqlInsert; fromQuery as fromTable } from 'plugin/nf-sqldb' + +def sql = 'select * from FOO' +channel + .fromTable(sql, db: 'test', emitColumns: true) + .sqlInsert(into: 'BAR', columns: 'id', db: 'test') +``` + +The above snippet is based on the [nf-sqldb](https://github.com/nextflow-io/nf-sqldb) plugin. The `fromQuery` factory +is included under the alias `fromTable`. + +### Trace observers + +A *trace observer* in Nextflow is an entity that can listen and react to workflow events, such as when a workflow starts, +a task completes, a file is published, etc. Several components in Nextflow, such as the execution report and DAG visualization, +are implemented as trace observers. + +Plugins can define custom trace observers that react to workflow events with custom behavior. 
To implement a trace observer, +create a class that implements the `TraceObserver` trait and another class that implements the `TraceObserverFactory` interface. +Implement any of the hooks defined in `TraceObserver`, and implement the `create()` method in your observer factory: + +```groovy +// MyObserverFactory.groovy +import nextflow.Session +import nextflow.trace.TraceObserver +import nextflow.trace.TraceObserverFactory + +class MyObserverFactory implements TraceObserverFactory { + + @Override + Collection create(Session session) { + final enabled = session.config.navigate('myplugin.enabled') + return enabled ? [ new MyObserver() ] : [] + } +} + +// MyObserver.groovy +import java.nio.file.Path + +import nextflow.processor.TaskHandler +import nextflow.trace.TraceObserver +import nextflow.trace.TraceRecord + +class MyObserver implements TraceObserver { + + @Override + void onFlowBegin() { + println "Okay, let's begin!" + } + + @Override + void onProcessComplete(TaskHandler handler, TraceRecord trace) { + println "I completed a task! It's name is '${handler.task.name}'" + } + + @Override + void onProcessCached(TaskHandler handler, TraceRecord trace) { + println "I found a task in the cache! It's name is '${handler.task.name}'" + } + + @Override + void onFilePublish(Path destination, Path source) { + println "I published a file! It's located at ${path.toUriString()}" + } + + @Override + void onFlowError(TaskHandler handler, TraceRecord trace) { + println "Uh oh, something went wrong..." + } + + @Override + void onFlowComplete() { + println 'All done!' + } +} +``` + +You can then use your trace observer by simply enabling the plugin in your pipeline. 
In the above example, the observer +must also be enabled with a config option: + +```groovy +myplugin.enabled = true +``` + +Refer to the `TraceObserver` [source code](https://github.com/nextflow-io/nextflow/blob/master/modules/nextflow/src/main/groovy/nextflow/trace/TraceObserver.groovy) for descriptions of the available workflow events. + +## Plugin registry + +Nextflow resolves plugins through a plugin registry, which stores metadata for each plugin version, including the publishing date, +checksum, and download URL for the plugin binary. The default registry is located on GitHub at [nextflow-io/plugins](https://github.com/nextflow-io/plugins/). + +To publish a plugin release to the main registry, simply create a pull request with the requested plugin metadata. + +(testing-plugins)= + +### Testing plugins + +:::{versionadded} 23.04.0 +::: + +You can also use a different plugin registry with the `NXF_PLUGINS_TEST_REPOSITORY` environment variable. This setting +is useful for testing a plugin release before publishing it to the main registry. It can refer to the JSON file for a +custom registry or a plugin release. For example: ```bash +# custom registry at https://github.com/my-org/plugins +export NXF_PLUGINS_TEST_REPOSITORY="https://raw.githubusercontent.com/my-org/plugins/main/plugins.json" + +# custom plugin release export NXF_PLUGINS_TEST_REPOSITORY="https://github.com/nextflow-io/nf-hello/releases/download/0.3.0/nf-hello-0.3.0-meta.json" + +nextflow run -plugins nf-hello ``` -Then run Nextflow with the expected plugin: +## Offline usage -```bash -nextflow rub -plugins nf-hello -``` +To use Nextflow plugins in an offline environment: + +1. Download the {ref}`"all" release ` of Nextflow, which comes with the following default plugins: `nf-amazon`, `nf-google`, `nf-tower`. + +2. Download any additional plugins by running `nextflow plugin install `. Alternatively, simply run your pipeline once and Nextflow will download all of the plugins that it needs. + +3. 
Copy the `nextflow` binary and `$HOME/.nextflow` folder to your offline environment. + +4. In your Nextflow configuration file, specify each plugin that you downloaded, both name and version, including default plugins. This will prevent Nextflow from trying to download newer versions of plugins. + +Nextflow caches the plugins that it downloads, so as long as you keep using the same Nextflow version and pin your plugin versions in your config file, Nextflow will use the locally installed plugins and won't try to download them from the Internet. diff --git a/docs/sharing.md b/docs/sharing.md index aac7341d37..f1291381cc 100644 --- a/docs/sharing.md +++ b/docs/sharing.md @@ -195,6 +195,8 @@ The following configuration properties are supported for each provider configura : *Required only for private SCM servers* : SCM API `endpoint` URL e.g. `https://api.github.com` (default: the same as `providers..server`). +## SCM providers + ### BitBucket credentials Create a `bitbucket` entry in the [SCM configuration file](#scm-configuration-file) specifying your user name and app password, as shown below: @@ -307,6 +309,8 @@ providers { The Personal access token can be generated in the repository `Clone Repository` dialog. ::: +(aws-codecommit)= + ### AWS CodeCommit credentials :::{versionadded} 22.06.0-edge From b14674dc6fb2e228a71976f93660354d68555359 Mon Sep 17 00:00:00 2001 From: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Date: Tue, 25 Jul 2023 09:20:16 +0100 Subject: [PATCH 021/128] Add deleteTasksOnCompletion to Azure Batch configuration (#4114) Deleting Azure Tasks was checking the configuration object deleteJobsOnCompletion which was incorrect since a task belongs to a job. This adds the equivalent configuration for tasks which is checked before deleting the tasks. 
Signed-off-by: Adam Talbot Signed-off-by: Ben Sherman Signed-off-by: Adam Talbot <12817534+adamrtalbot@users.noreply.github.com> Co-authored-by: Ben Sherman --- docs/config.md | 21 +++++++++--- .../cloud/azure/batch/AzBatchService.groovy | 32 ++++++++----------- .../azure/batch/AzBatchTaskHandler.groovy | 4 +-- .../cloud/azure/config/AzBatchOpts.groovy | 4 ++- .../azure/batch/AzBatchServiceTest.groovy | 24 ++++++++++---- .../cloud/azure/config/AzureConfigTest.groovy | 11 +++++-- 6 files changed, 61 insertions(+), 35 deletions(-) diff --git a/docs/config.md b/docs/config.md index f28cf22659..ed1c11cb4f 100644 --- a/docs/config.md +++ b/docs/config.md @@ -330,14 +330,20 @@ The following settings are available: `azure.batch.copyToolInstallMode` : Specify where the `azcopy` tool used by Nextflow. When `node` is specified it's copied once during the pool creation. When `task` is provider, it's installed for each task execution (default: `node`). -`azure.batch.terminateJobsOnCompletion` -: Enables the Batch Job to automatically terminate a job once all tasks have completed (default: `true`). - `azure.batch.deleteJobsOnCompletion` -: Enable the automatic deletion of jobs created by the pipeline execution (default: `true`). +: Delete all jobs when the workflow completes (default: `false`). +: :::{versionchanged} 23.08.0-edge + Default value was changed from `true` to `false`. + ::: `azure.batch.deletePoolsOnCompletion` -: Enable the automatic deletion of compute node pools upon pipeline completion (default: `false`). +: Delete all compute node pools when the workflow completes (default: `false`). + +`azure.batch.deleteTasksOnCompletion` +: :::{versionadded} 23.08.0-edge + ::: +: Delete each task when it completes (default: `true`). +: Although this setting is enabled by default, failed tasks will not be deleted unless it is explicitly enabled. This way, the default behavior is that successful tasks are deleted while failed tasks are preserved for debugging purposes. 
`azure.batch.endpoint` : The batch service endpoint e.g. `https://nfbatch1.westeurope.batch.azure.com`. @@ -345,6 +351,11 @@ The following settings are available: `azure.batch.location` : The name of the batch service region, e.g. `westeurope` or `eastus2`. This is not needed when the endpoint is specified. +`azure.batch.terminateJobsOnCompletion` +: :::{versionadded} 23.05.0-edge + ::: +: When the workflow completes, set all jobs to terminate on task completion. (default: `true`). + `azure.batch.pools..autoScale` : Enable autoscaling feature for the pool identified with ``. diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy index 38d10b0c06..bff0b013d1 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy +++ b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy @@ -785,17 +785,13 @@ class AzBatchService implements Closeable { apply(() -> client.taskOperations().deleteTask(key.jobId, key.taskId)) } + /** + * Set all jobs to terminate on completion. + */ protected void terminateJobs() { - /* - We set the job to terminate when all tasks are complete rather than directly terminating, this allows Azure Batch to handle the termination for us. 
- */ - - for( Map.Entry entry : allJobIds ) { - final proc = entry.key - final jobId = entry.value - + for( String jobId : allJobIds.values() ) { try { - log.trace "Terminating Azure job ${jobId}" + log.trace "Setting Azure job ${jobId} to terminate on completion" CloudJob job = apply(() -> client.jobOperations().getJob(jobId)) final poolInfo = job.poolInfo() @@ -813,10 +809,7 @@ class AzBatchService implements Closeable { } protected void cleanupJobs() { - for( Map.Entry entry : allJobIds ) { - final proc = entry.key - final jobId = entry.value - + for( String jobId : allJobIds.values() ) { try { log.trace "Deleting Azure job ${jobId}" apply(() -> client.jobOperations().deleteJob(jobId)) @@ -828,7 +821,7 @@ class AzBatchService implements Closeable { } protected void cleanupPools() { - for( String poolId : allPools.keySet()) { + for( String poolId : allPools.keySet() ) { try { apply(() -> client.poolOperations().deletePool(poolId)) } @@ -849,17 +842,20 @@ class AzBatchService implements Closeable { } return identity } + @Override void close() { - // Terminate existing jobs to prevent them occupying quota - if( config.batch().terminateJobsOnCompletion!=Boolean.FALSE ) { + // terminate all jobs to prevent them from occupying quota + if( config.batch().terminateJobsOnCompletion ) { terminateJobs() } - // cleanup app successful jobs - if( config.batch().deleteJobsOnCompletion!=Boolean.FALSE ) { + // delete all jobs + if( config.batch().deleteJobsOnCompletion ) { cleanupJobs() } + + // delete all autopools if( config.batch().canCreatePool() && config.batch().deletePoolsOnCompletion ) { cleanupPools() } diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchTaskHandler.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchTaskHandler.groovy index 0559958870..5326c34598 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchTaskHandler.groovy +++ 
b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchTaskHandler.groovy @@ -130,7 +130,7 @@ class AzBatchTaskHandler extends TaskHandler implements FusionAwareTask { } private Boolean shouldDelete() { - executor.config.batch().deleteJobsOnCompletion + executor.config.batch().deleteTasksOnCompletion } protected void deleteTask(AzTaskKey taskKey, TaskRun task) { @@ -138,7 +138,7 @@ class AzBatchTaskHandler extends TaskHandler implements FusionAwareTask { return if( !task.isSuccess() && shouldDelete()==null ) { - // do not delete successfully executed pods for debugging purpose + // preserve failed tasks for debugging purposes, unless deletion is explicitly enabled return } diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/config/AzBatchOpts.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/config/AzBatchOpts.groovy index bd6f38d517..7bafefbfe7 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/config/AzBatchOpts.groovy +++ b/plugins/nf-azure/src/main/nextflow/cloud/azure/config/AzBatchOpts.groovy @@ -50,6 +50,7 @@ class AzBatchOpts implements CloudTransferOptions { Boolean terminateJobsOnCompletion Boolean deleteJobsOnCompletion Boolean deletePoolsOnCompletion + Boolean deleteTasksOnCompletion CopyToolInstallMode copyToolInstallMode Map pools @@ -63,9 +64,10 @@ class AzBatchOpts implements CloudTransferOptions { location = config.location autoPoolMode = config.autoPoolMode allowPoolCreation = config.allowPoolCreation - terminateJobsOnCompletion = config.terminateJobsOnCompletion + terminateJobsOnCompletion = config.terminateJobsOnCompletion != Boolean.FALSE deleteJobsOnCompletion = config.deleteJobsOnCompletion deletePoolsOnCompletion = config.deletePoolsOnCompletion + deleteTasksOnCompletion = config.deleteTasksOnCompletion pools = parsePools(config.pools instanceof Map ? config.pools as Map : Collections.emptyMap()) maxParallelTransfers = config.maxParallelTransfers ? 
config.maxParallelTransfers as int : MAX_TRANSFER maxTransferAttempts = config.maxTransferAttempts ? config.maxTransferAttempts as int : MAX_TRANSFER_ATTEMPTS diff --git a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy index c7566abcc8..055a795b27 100644 --- a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy +++ b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy @@ -350,8 +350,7 @@ class AzBatchServiceTest extends Specification { } - - def 'should cleanup jobs by default' () { + def 'should set jobs to automatically terminate by default' () { given: def CONFIG = [:] def exec = Mock(AzBatchExecutor) {getConfig() >> new AzConfig(CONFIG) } @@ -359,12 +358,12 @@ class AzBatchServiceTest extends Specification { when: svc.close() then: - 1 * svc.cleanupJobs() >> null + 1 * svc.terminateJobs() >> null } - def 'should cleanup jobs no cleanup jobs' () { + def 'should not cleanup jobs by default' () { given: - def CONFIG = [batch:[deleteJobsOnCompletion: false]] + def CONFIG = [:] def exec = Mock(AzBatchExecutor) {getConfig() >> new AzConfig(CONFIG) } AzBatchService svc = Spy(AzBatchService, constructorArgs:[exec]) when: @@ -373,7 +372,18 @@ class AzBatchServiceTest extends Specification { 0 * svc.cleanupJobs() >> null } - def 'should cleanup not cleanup pools by default' () { + def 'should cleanup jobs if specified' () { + given: + def CONFIG = [batch:[deleteJobsOnCompletion: true]] + def exec = Mock(AzBatchExecutor) {getConfig() >> new AzConfig(CONFIG) } + AzBatchService svc = Spy(AzBatchService, constructorArgs:[exec]) + when: + svc.close() + then: + 1 * svc.cleanupJobs() >> null + } + + def 'should not cleanup pools by default' () { given: def CONFIG = [:] def exec = Mock(AzBatchExecutor) {getConfig() >> new AzConfig(CONFIG) } @@ -395,7 +405,7 @@ class AzBatchServiceTest extends Specification { 1 * 
svc.cleanupPools() >> null } - def 'should cleanup cleanup pools with allowPoolCreation' () { + def 'should cleanup pools with allowPoolCreation' () { given: def CONFIG = [batch:[allowPoolCreation: true, deletePoolsOnCompletion: true]] def exec = Mock(AzBatchExecutor) {getConfig() >> new AzConfig(CONFIG) } diff --git a/plugins/nf-azure/src/test/nextflow/cloud/azure/config/AzureConfigTest.groovy b/plugins/nf-azure/src/test/nextflow/cloud/azure/config/AzureConfigTest.groovy index 4ba18c1a8d..71cd83dc5e 100644 --- a/plugins/nf-azure/src/test/nextflow/cloud/azure/config/AzureConfigTest.groovy +++ b/plugins/nf-azure/src/test/nextflow/cloud/azure/config/AzureConfigTest.groovy @@ -70,8 +70,10 @@ class AzureConfigTest extends Specification { and: cfg.batch().endpoint == null + cfg.batch().terminateJobsOnCompletion == true cfg.batch().deleteJobsOnCompletion == null cfg.batch().deletePoolsOnCompletion == null + cfg.batch().deleteTasksOnCompletion == null cfg.batch().location == null cfg.batch().autoPoolMode == null cfg.batch().allowPoolCreation == null @@ -99,8 +101,11 @@ class AzureConfigTest extends Specification { endpoint: ENDPOINT, location: LOCATION, autoPoolMode: true, - allowPoolCreation: true, deleteJobsOnCompletion: false, + allowPoolCreation: true, + terminateJobsOnCompletion: false, + deleteJobsOnCompletion: true, deletePoolsOnCompletion: true, + deleteTasksOnCompletion: false, pools: [ myPool: [ vmType: 'Foo_A1', autoScale: true, @@ -124,8 +129,10 @@ class AzureConfigTest extends Specification { cfg.batch().location == LOCATION cfg.batch().autoPoolMode == true cfg.batch().allowPoolCreation == true - cfg.batch().deleteJobsOnCompletion == false + cfg.batch().terminateJobsOnCompletion == false + cfg.batch().deleteJobsOnCompletion == true cfg.batch().deletePoolsOnCompletion == true + cfg.batch().deleteTasksOnCompletion == false cfg.batch().canCreatePool() and: cfg.batch().pool('myPool').vmType == 'Foo_A1' From 46e828e189b32146d1151f8a35f21e45dd4100e2 Mon Sep 17 
00:00:00 2001 From: Ben Sherman Date: Tue, 25 Jul 2023 04:20:11 -0500 Subject: [PATCH 022/128] Disable cache backup/restore if cloudcache is used (#4125) Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- .../seqera/tower/plugin/CacheCommand.groovy | 4 +++- .../seqera/tower/plugin/CacheManager.groovy | 11 ++++++--- .../tower/plugin/CacheManagerTest.groovy | 24 +++++++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy index 7c009ec3b6..4c19948769 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy @@ -46,7 +46,9 @@ class CacheCommand implements PluginAbstractExec { protected void cacheBackup() { log.debug "Running Nextflow cache backup" - new CacheManager(System.getenv()).saveCacheFiles() + final manager = new CacheManager(System.getenv()) + manager.saveCacheFiles() + manager.saveMiscFiles() } protected void archiveLogs(Session sess) { diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy index 5c890f7752..f273ee3ad9 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy @@ -65,7 +65,10 @@ class CacheManager { if( !sessionUuid ) throw new AbortOperationException("Missing target uuid - cache sync cannot be performed") - this.localCachePath = Paths.get(".nextflow/cache/${sessionUuid}") + // ignore the `localCachePath` when the `NXF_CLOUDCACHE_PATH` variable is set because + // the nextflow cache metadata is going to be managed (and stored) via the nf-cloudcache plugin + if( !env.containsKey('NXF_CLOUDCACHE_PATH') ) + this.localCachePath = 
Paths.get(".nextflow/cache/${sessionUuid}") if( env.NXF_OUT_FILE ) localOutFile = Paths.get(env.NXF_OUT_FILE) @@ -80,7 +83,7 @@ class CacheManager { } protected void restoreCacheFiles() { - if( !remoteWorkDir || !sessionUuid ) + if( !remoteWorkDir || !sessionUuid || !localCachePath ) return if(!Files.exists(remoteCachePath)) { @@ -100,7 +103,7 @@ class CacheManager { } protected void saveCacheFiles() { - if( !remoteWorkDir || !sessionUuid ) + if( !remoteWorkDir || !sessionUuid || !localCachePath ) return if( !Files.exists(localCachePath) ) { @@ -118,7 +121,9 @@ class CacheManager { catch (Throwable e) { log.warn "Failed to backup resume metadata to remote store path: ${remoteCachePath.toUriString()} — cause: ${e}", e } + } + protected void saveMiscFiles() { // — upload out file try { if( localOutFile?.exists() ) diff --git a/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy b/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy index 70e4579a19..67ce4d7d75 100644 --- a/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy +++ b/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy @@ -78,6 +78,7 @@ class CacheManagerTest extends Specification { tower.localCachePath.resolve('db/yyy').text = 'data yyy' and: tower.saveCacheFiles() + tower.saveMiscFiles() then: tower.remoteCachePath.resolve('index-foo').text == 'index foo' tower.remoteCachePath.resolve('db/xxx').text == 'data xxx' @@ -99,6 +100,7 @@ class CacheManagerTest extends Specification { tower.localCachePath.resolve('db/delta').text = 'data delta' and: tower.saveCacheFiles() + tower.saveMiscFiles() then: tower.remoteCachePath.resolve('index-bar').text == 'index bar' tower.remoteCachePath.resolve('db/alpha').text == 'data alpha' @@ -154,4 +156,26 @@ class CacheManagerTest extends Specification { cleanup: folder?.deleteDir() } + + def 'should not backup/restore cache if cloudcache is enabled' () { + given: + def ENV = [ + NXF_UUID: 
'uuid', + NXF_WORK: '/work', + NXF_CLOUDCACHE_PATH: 's3://my-bucket/cache' + ] + and: + def tower = new CacheManager(ENV) + + when: + tower.saveCacheFiles() + then: + 0 * tower.getRemoteCachePath() + + when: + tower.restoreCacheFiles() + then: + 0 * tower.getRemoteCachePath() + + } } From 8b7e3d4892e27735c4c3f80bf71a726c9f88c1bd Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Wed, 26 Jul 2023 04:26:53 -0500 Subject: [PATCH 023/128] Document behavior of withName selector with included aliases (#4129) Signed-off-by: Ben Sherman --- docs/config.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/config.md b/docs/config.md index ed1c11cb4f..76d5fa2b99 100644 --- a/docs/config.md +++ b/docs/config.md @@ -1173,6 +1173,10 @@ process { } ``` +:::{note} +The `withName` selector applies to a process even when it is included from a module under an alias. For example, `withName: hello` will apply to any process originally defined as `hello`, regardless of whether it is included under an alias. Similarly, it will not apply to any process not originally defined as `hello`, even if it is included under the alias `hello`. +::: + :::{tip} Label and process names do not need to be enclosed with quotes, provided the name does not include special characters (`-`, `!`, etc) and is not a keyword or a built-in type identifier. When in doubt, you can enclose the label name or process name with single or double quotes. 
::: From 077ed5dd04240f86e1aff8178938891d7d70adf8 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Wed, 26 Jul 2023 21:23:00 +0200 Subject: [PATCH 024/128] Logging improvements Signed-off-by: Paolo Di Tommaso --- .../groovy/nextflow/processor/TaskPollingMonitor.groovy | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy index 173579de39..17c2694c46 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy @@ -271,7 +271,7 @@ class TaskPollingMonitor implements TaskMonitor { */ @Override TaskMonitor start() { - log.trace ">>> barrier register (monitor: ${this.name})" + log.debug ">>> barrier register (monitor: ${this.name})" session.barrier.register(this) this.taskCompleteLock = new ReentrantLock() @@ -292,8 +292,11 @@ class TaskPollingMonitor implements TaskMonitor { try { pollLoop() } + catch (Throwable e) { + log.debug "Unexpected error in tasks monitor pool loop", e + } finally { - log.trace "<<< barrier arrives (monitor: ${this.name})" + log.debug "<<< barrier arrives (monitor: ${this.name}) - terminating tasks monitor poll loop" session.barrier.arrive(this) } } @@ -434,7 +437,7 @@ class TaskPollingMonitor implements TaskMonitor { try { def pending = runningQueue.size() if( !pending ) { - log.debug "No more task to compute -- ${session.dumpNetworkStatus() ?: 'Execution may be stalled'}" + log.debug "!! 
executor $name > No more task to compute -- ${session.dumpNetworkStatus() ?: 'Execution may be stalled'}" return } From dd32f80aef1479750e2a001828473f2116538905 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Wed, 26 Jul 2023 21:45:13 +0200 Subject: [PATCH 025/128] Allow use virtual threads in Wave client Signed-off-by: Paolo Di Tommaso --- .../main/io/seqera/wave/plugin/WaveClient.groovy | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index c7b3554590..bf91dcc053 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -27,6 +27,7 @@ import java.time.Duration import java.time.OffsetDateTime import java.time.temporal.ChronoUnit import java.util.concurrent.Callable +import java.util.concurrent.Executors import java.util.concurrent.TimeUnit import java.util.function.Predicate @@ -56,6 +57,7 @@ import nextflow.processor.Architecture import nextflow.processor.TaskRun import nextflow.script.bundle.ResourcesBundle import nextflow.util.SysHelper +import nextflow.util.Threads import org.slf4j.Logger import org.slf4j.LoggerFactory /** @@ -127,12 +129,20 @@ class WaveClient { // the cookie manager cookieManager = new CookieManager() // create http client - this.httpClient = HttpClient.newBuilder() + this.httpClient = newHttpClient() + } + + protected HttpClient newHttpClient() { + final builder = HttpClient.newBuilder() .version(HttpClient.Version.HTTP_1_1) .followRedirects(HttpClient.Redirect.NEVER) .cookieHandler(cookieManager) .connectTimeout(config.httpOpts().connectTimeout()) - .build() + // use virtual threads executor if enabled + if( Threads.useVirtual() ) + builder.executor(Executors.newVirtualThreadPerTaskExecutor()) + // build and return the new client + return builder.build() } WaveConfig config() { 
return config } From fcdeec02e4ade47683dfd39724c49e5fe6ad6937 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 29 Jul 2023 11:22:51 +0200 Subject: [PATCH 026/128] Fix Redirection http redirection across different hosts Signed-off-by: Paolo Di Tommaso --- .../file/http/XFileSystemProvider.groovy | 38 +++++++++++---- .../nextflow/file/http/HttpFilesTests.groovy | 2 +- .../file/http/XFileSystemProviderTest.groovy | 48 ++++++++++++------- 3 files changed, 60 insertions(+), 28 deletions(-) diff --git a/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy b/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy index 1fa43da22f..2f00717c2c 100644 --- a/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy +++ b/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy @@ -16,6 +16,8 @@ package nextflow.file.http +import static nextflow.file.http.XFileSystemConfig.* + import java.nio.ByteBuffer import java.nio.channels.SeekableByteChannel import java.nio.file.AccessDeniedException @@ -44,13 +46,9 @@ import groovy.transform.PackageScope import groovy.util.logging.Slf4j import nextflow.SysEnv import nextflow.extension.FilesEx +import nextflow.file.FileHelper import nextflow.util.InsensitiveMap import sun.net.www.protocol.ftp.FtpURLConnection - -import static XFileSystemConfig.* - -import static nextflow.file.http.XFileSystemConfig.config - /** * Implements a read-only JSR-203 compliant file system provider for http/ftp protocols * @@ -64,6 +62,7 @@ abstract class XFileSystemProvider extends FileSystemProvider { private Map fileSystemMap = new LinkedHashMap<>(20) + private static final int[] REDIRECT_CODES = [301, 302, 307, 308] protected static String config(String name, def defValue) { return SysEnv.containsKey(name) ? 
SysEnv.get(name) : defValue.toString() @@ -185,18 +184,25 @@ abstract class XFileSystemProvider extends FileSystemProvider { protected URLConnection toConnection0(URL url, int attempt) { final conn = url.openConnection() conn.setRequestProperty("User-Agent", 'Nextflow/httpfs') + if( conn instanceof HttpURLConnection ) { + // by default HttpURLConnection does redirect only within the same host + // disable the built-in to implement custom redirection logic (see below) + conn.setInstanceFollowRedirects(false) + } if( url.userInfo ) { conn.setRequestProperty("Authorization", auth(url.userInfo)); } else { XAuthRegistry.instance.authorize(conn) } - if ( conn instanceof HttpURLConnection && conn.getResponseCode() in [307, 308] && attempt < MAX_REDIRECT_HOPS ) { + if ( conn instanceof HttpURLConnection && conn.getResponseCode() in REDIRECT_CODES && attempt < MAX_REDIRECT_HOPS ) { final header = InsensitiveMap.of(conn.getHeaderFields()) - String location = header.get("Location")?.get(0) - URL newPath = new URI(location).toURL() - log.debug "Remote redirect URL: $newPath" - return toConnection0(newPath, attempt+1) + final location = header.get("Location")?.get(0) + log.debug "Remote redirect location: $location" + final newUrl = new URI(absLocation(location,url)).toURL() + if( url.protocol=='https' && newUrl.protocol=='http' ) + throw new IOException("Refuse to follow redirection from HTTPS to HTTP (unsafe) URL - origin: $url - target: $newUrl") + return toConnection0(newUrl, attempt+1) } else if( conn instanceof HttpURLConnection && conn.getResponseCode() in config().retryCodes() && attempt < config().maxAttempts() ) { final delay = (Math.pow(config().backOffBase(), attempt) as long) * config().backOffDelay() @@ -212,6 +218,18 @@ abstract class XFileSystemProvider extends FileSystemProvider { return conn } + protected String absLocation(String location, URL target) { + assert location, "Missing location argument" + assert target, "Missing target URL argument" + + final 
base = FileHelper.baseUrl(location) + if( base ) + return location + if( !location.startsWith('/') ) + location = '/' + location + return "${target.protocol}://${target.authority}$location" + } + @Override SeekableByteChannel newByteChannel(Path path, Set options, FileAttribute... attrs) throws IOException { diff --git a/modules/nf-httpfs/src/test/nextflow/file/http/HttpFilesTests.groovy b/modules/nf-httpfs/src/test/nextflow/file/http/HttpFilesTests.groovy index 3090144fc5..041f1f2f3e 100644 --- a/modules/nf-httpfs/src/test/nextflow/file/http/HttpFilesTests.groovy +++ b/modules/nf-httpfs/src/test/nextflow/file/http/HttpFilesTests.groovy @@ -112,7 +112,7 @@ class HttpFilesTests extends Specification { def lines = Files.readAllLines(path, Charset.forName('UTF-8')) then: lines.size()>0 - lines[0] == '' + lines[0] == '' } diff --git a/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy b/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy index 9bbabfa06e..8d6e93463c 100644 --- a/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy +++ b/modules/nf-httpfs/src/test/nextflow/file/http/XFileSystemProviderTest.groovy @@ -21,7 +21,6 @@ import java.nio.file.Path import com.github.tomakehurst.wiremock.junit.WireMockRule import com.github.tomjankes.wiremock.WireMockGroovy -import nextflow.SysEnv import org.junit.Rule import spock.lang.Specification import spock.lang.Unroll @@ -106,7 +105,7 @@ class XFileSystemProviderTest extends Specification { when: def attrs = fsp.readHttpAttributes(path) then: - attrs.lastModifiedTime() == null + attrs.lastModifiedTime() attrs.size() > 0 } @@ -157,6 +156,7 @@ class XFileSystemProviderTest extends Specification { @Rule WireMockRule wireMockRule = new WireMockRule(18080) + @Unroll def 'should follow a redirect when read a http file '() { given: def wireMock = new WireMockGroovy(18080) @@ -180,14 +180,14 @@ class XFileSystemProviderTest extends Specification { response 
{ status HTTP_CODE headers { - "Location" "http://localhost:18080/redirected.html" + "Location" "http://localhost:18080/target.html" } } } wireMock.stub { request { method "GET" - url "/redirected.html" + url "/target.html" } response { status 200 @@ -212,22 +212,36 @@ class XFileSystemProviderTest extends Specification { Files.size(path) == EXPECTED where: - HTTP_CODE | REDIRECT_TO | EXPECTED - 300 | "/redirected.html" | 10 - 300 | "/index2.html" | 10 - - 301 | "/redirected.html" | 10 - 301 | "/index2.html" | 10 + HTTP_CODE | REDIRECT_TO | EXPECTED + 301 | "/target.html" | 10 + 301 | "/index2.html" | 10 - 302 | "/redirected.html" | 10 - 302 | "/index2.html" | 10 + 302 | "/target.html" | 10 + 302 | "/index2.html" | 10 - 307 | "/redirected.html" | 10 - 307 | "/index2.html" | 10 + 307 | "/target.html" | 10 + 307 | "/index2.html" | 10 - 308 | "/redirected.html" | 10 - 308 | "/index2.html" | 10 + 308 | "/target.html" | 10 + 308 | "/index2.html" | 10 //infinite redirect to himself - 308 | "/index.html" | -1 + 308 | "/index.html" | -1 + } + + def 'should normalize location' () { + given: + def provider = Spy(XFileSystemProvider) + + expect: + provider.absLocation(LOCATION, new URL(TARGET)) == EXPECTED + + where: + LOCATION | TARGET | EXPECTED + 'https://this/that' | 'http://foo.com:123' | 'https://this/that' + '/' | 'http://foo.com:123' | 'http://foo.com:123/' + '/this/that' | 'http://foo.com:123' | 'http://foo.com:123/this/that' + '/this/that' | 'http://foo.com:123/abc' | 'http://foo.com:123/this/that' + 'this/that' | 'http://foo.com:123/abc' | 'http://foo.com:123/this/that' + } } From 71dfecc2fba482981fb403643219b9079b046699 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 29 Jul 2023 17:15:51 +0200 Subject: [PATCH 027/128] Add tower logs checkpoint (#4132) Signed-off-by: Paolo Di Tommaso --- .../seqera/tower/plugin/CacheCommand.groovy | 22 +-- .../seqera/tower/plugin/LogsCheckpoint.groovy | 90 +++++++++ ...CacheManager.groovy => LogsHandler.groovy} | 76 
++------ .../seqera/tower/plugin/TowerFactory.groovy | 24 ++- .../tower/plugin/CacheManagerTest.groovy | 181 ------------------ .../tower/plugin/LogsCheckpointTest.groovy | 84 ++++++++ .../tower/plugin/LogsHandlerTest.groovy | 84 ++++++++ tests/cache-bak.nf | 8 - tests/checks/cache-bak.nf/.checks | 42 ---- 9 files changed, 291 insertions(+), 320 deletions(-) create mode 100644 plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy rename plugins/nf-tower/src/main/io/seqera/tower/plugin/{CacheManager.groovy => LogsHandler.groovy} (59%) delete mode 100644 plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy create mode 100644 plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsCheckpointTest.groovy create mode 100644 plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsHandlerTest.groovy delete mode 100644 tests/cache-bak.nf delete mode 100644 tests/checks/cache-bak.nf/.checks diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy index 4c19948769..2abf12e94e 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy @@ -19,7 +19,6 @@ package io.seqera.tower.plugin import groovy.transform.CompileStatic import groovy.util.logging.Slf4j -import nextflow.Session import nextflow.cli.PluginAbstractExec /** * Implements nextflow cache and restore commands @@ -37,7 +36,6 @@ class CacheCommand implements PluginAbstractExec { if( cmd == 'cache-backup') { cacheBackup() - archiveLogs(session) } if( cmd == 'cache-restore' ) cacheRestore() @@ -46,26 +44,12 @@ class CacheCommand implements PluginAbstractExec { protected void cacheBackup() { log.debug "Running Nextflow cache backup" - final manager = new CacheManager(System.getenv()) - manager.saveCacheFiles() - manager.saveMiscFiles() - } - - protected void archiveLogs(Session sess) { - // archive logs 
- final archiver = TowerArchiver.create(sess, System.getenv()) - if( archiver ) try { - log.debug "Running Nextflow logs archiver" - archiver.archiveLogs() - } - finally { - archiver.shutdown(sess) - } + new LogsHandler(getSession(), System.getenv()).saveFiles() } protected void cacheRestore() { - log.debug "Running Nextflow cache restore" - new CacheManager(System.getenv()).restoreCacheFiles() + log.debug "Running Nextflow cache restore - DO NOTHING" + // this command is only kept for backward compatibility } } diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy new file mode 100644 index 0000000000..2b7da86ec3 --- /dev/null +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy @@ -0,0 +1,90 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package io.seqera.tower.plugin + +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import nextflow.Session +import nextflow.SysEnv +import nextflow.trace.TraceObserver +import nextflow.util.Duration +import nextflow.util.Threads +/** + * Implements a nextflow observer that periodically checkpoint + * log, report and timeline files + * + * @author Paolo Di Tommaso + */ +@Slf4j +@CompileStatic +class LogsCheckpoint implements TraceObserver { + + private Session session + private Map config + private Thread thread + private Duration interval + private LogsHandler handler + + @Override + void onFlowCreate(Session session) { + this.session = session + this.config = session.config + this.handler = new LogsHandler(session, SysEnv.get()) + this.interval = config.navigate('tower.logs.checkpoint.interval', defaultInterval()) as Duration + } + + private String defaultInterval() { + SysEnv.get('TOWER_LOGS_CHECKPOINT_INTERVAL','90s') + } + + @Override + void onFlowBegin() { + thread = Threads.start('tower-logs-checkpoint', this.&run) + } + + @Override + void onFlowComplete() { + thread.interrupt() + thread.join() + } + + protected void run() { + log.debug "Starting logs checkpoint thread - interval: ${interval}" + try { + while( !thread.isInterrupted() ) { + // just wait the declared delay + await(interval) + // checkpoint the logs + handler.saveFiles() + } + } + finally { + log.debug "Terminating logs checkpoint thread" + } + } + + protected void await(Duration interval) { + try { + Thread.sleep(interval.toMillis()) + } + catch (InterruptedException e) { + log.trace "Interrupted logs checkpoint thread" + Thread.currentThread().interrupt() + } + } +} diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsHandler.groovy similarity index 59% rename from plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy rename to 
plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsHandler.groovy index f273ee3ad9..4964859efa 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsHandler.groovy @@ -19,28 +19,25 @@ package io.seqera.tower.plugin import static java.nio.file.StandardCopyOption.* -import java.nio.file.Files -import java.nio.file.NoSuchFileException +import java.nio.file.FileSystems import java.nio.file.Path import java.nio.file.Paths import groovy.transform.CompileStatic import groovy.transform.PackageScope import groovy.util.logging.Slf4j +import nextflow.Session import nextflow.exception.AbortOperationException -import nextflow.extension.FilesEx import nextflow.file.FileHelper /** - * Back and restore Nextflow cache + * Backup Nextflow logs, timeline and reports files * * @author Paolo Di Tommaso */ @Slf4j @CompileStatic -class CacheManager { +class LogsHandler { - @PackageScope String sessionUuid - @PackageScope Path localCachePath @PackageScope Path localOutFile @PackageScope Path localLogFile @PackageScope Path localTimelineFile @@ -48,27 +45,18 @@ class CacheManager { @PackageScope Path localTowerReports @PackageScope Path remoteWorkDir - @PackageScope Path getRemoteCachePath() { remoteWorkDir.resolve(".nextflow/cache/${sessionUuid}") } @PackageScope Path getRemoteOutFile() { remoteWorkDir.resolve(localOutFile.getName()) } @PackageScope Path getRemoteLogFile() { remoteWorkDir.resolve(localLogFile.getName()) } @PackageScope Path getRemoteTimelineFile() { remoteWorkDir.resolve(localTimelineFile.getName()) } @PackageScope Path getRemoteTowerConfig() { remoteWorkDir.resolve(localTowerConfig.getName()) } @PackageScope Path getRemoteTowerReports() { remoteWorkDir.resolve(localTowerReports.getName()) } - CacheManager(Map env) { - final work = env.get('NXF_WORK') ?: env.get('NXF_TEST_WORK') - if( !work ) - throw new AbortOperationException("Missing target work dir - cache sync cannot be 
performed") - this.remoteWorkDir = FileHelper.asPath(work) - - this.sessionUuid = env.get('NXF_UUID') - if( !sessionUuid ) - throw new AbortOperationException("Missing target uuid - cache sync cannot be performed") - - // ignore the `localCachePath` when the `NXF_CLOUDCACHE_PATH` variable is set because - // the nextflow cache metadata is going to be managed (and stored) via the nf-cloudcache plugin - if( !env.containsKey('NXF_CLOUDCACHE_PATH') ) - this.localCachePath = Paths.get(".nextflow/cache/${sessionUuid}") + LogsHandler(Session session, Map env) { + if( !session.workDir ) + throw new AbortOperationException("Missing workflow work directory") + if( session.workDir.fileSystem == FileSystems.default ) + throw new AbortOperationException("Logs handler is only meant to be used with a remote workflow work directory") + this.remoteWorkDir = session.workDir if( env.NXF_OUT_FILE ) localOutFile = Paths.get(env.NXF_OUT_FILE) @@ -82,48 +70,8 @@ class CacheManager { localTowerReports = Paths.get(env.TOWER_REPORTS_FILE) } - protected void restoreCacheFiles() { - if( !remoteWorkDir || !sessionUuid || !localCachePath ) - return - - if(!Files.exists(remoteCachePath)) { - log.debug "Remote cache path does not exist: $remoteCachePath - skipping cache restore" - return - } - - try { - log.info "Restoring cache: ${remoteCachePath.toUriString()} => ${localCachePath.toUriString()}" - localCachePath.deleteDir() - localCachePath.parent.mkdirs() - FileHelper.copyPath(remoteCachePath, localCachePath, REPLACE_EXISTING) - } - catch (NoSuchFileException e) { - log.info "Remote cache restore ignored — reason: ${e.message ?: e}" - } - } - - protected void saveCacheFiles() { - if( !remoteWorkDir || !sessionUuid || !localCachePath ) - return - - if( !Files.exists(localCachePath) ) { - log.debug "Local cache path does not exist: $localCachePath — skipping cache backup" - return - } - - // upload nextflow cache metadata - try { - log.info "Saving cache: ${localCachePath.toUriString()} => 
${remoteCachePath.toUriString()}" - remoteCachePath.deleteDir() - remoteCachePath.parent.mkdirs() - FilesEx.copyTo(localCachePath, remoteCachePath) - } - catch (Throwable e) { - log.warn "Failed to backup resume metadata to remote store path: ${remoteCachePath.toUriString()} — cause: ${e}", e - } - } - - protected void saveMiscFiles() { + void saveFiles() { + log.trace "Checkpointing logs, timeline and report files" // — upload out file try { if( localOutFile?.exists() ) diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerFactory.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerFactory.groovy index 81e9aa0dff..d3e93df3bb 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerFactory.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerFactory.groovy @@ -39,13 +39,25 @@ class TowerFactory implements TraceObserverFactory { Collection create(Session session) { final config = session.config Boolean isEnabled = config.navigate('tower.enabled') as Boolean || env.get('TOWER_WORKFLOW_ID') - String endpoint = config.navigate('tower.endpoint') as String - Duration requestInterval = config.navigate('tower.requestInterval') as Duration - Duration aliveInterval = config.navigate('tower.aliveInterval') as Duration if( !isEnabled ) return Collections.emptyList() + final result = new ArrayList(1) + // create the tower client + final tower = createTowerClient(session, config) + result.add(tower) + // create the logs checkpoint + if( env.containsKey('NXF_CLOUDCACHE_PATH') ) + result.add( new LogsCheckpoint() ) + return result + } + + protected TowerClient createTowerClient(Session session, Map config) { + String endpoint = config.navigate('tower.endpoint') as String + Duration requestInterval = config.navigate('tower.requestInterval') as Duration + Duration aliveInterval = config.navigate('tower.aliveInterval') as Duration + if ( !endpoint || endpoint=='-' ) endpoint = env.get('TOWER_API_ENDPOINT') ?: 
TowerClient.DEF_ENDPOINT_URL @@ -64,13 +76,12 @@ class TowerFactory implements TraceObserverFactory { tower.workspaceId = env.get('TOWER_WORKFLOW_ID') ? env.get('TOWER_WORKSPACE_ID') : config.navigate('tower.workspaceId', env.get('TOWER_WORKSPACE_ID')) - final result = new ArrayList(1) - result.add(tower) + // register auth provider // note: this is needed to authorize access to resources via XFileSystemProvider used by NF // it's not needed by the tower client logic XAuthRegistry.instance.register(provider(tower.endpoint, tower.accessToken)) - return result + return tower } protected XAuthProvider provider(String endpoint, String accessToken) { @@ -79,4 +90,5 @@ class TowerFactory implements TraceObserverFactory { final refreshToken = env.get('TOWER_REFRESH_TOKEN') return new TowerXAuth(endpoint, accessToken, refreshToken) } + } diff --git a/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy b/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy deleted file mode 100644 index 67ce4d7d75..0000000000 --- a/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright (c) 2020-2021. Seqera Labs, S.L. 
- * - * All Rights reserved - * - */ - -package io.seqera.tower.plugin - -import java.nio.file.Files -import java.nio.file.Paths - -import nextflow.exception.AbortOperationException -import spock.lang.Specification -/** - * - * @author Paolo Di Tommaso - */ -class CacheManagerTest extends Specification { - - def 'should init empty files' () { - when: - new CacheManager([:]) - then: - thrown(AbortOperationException) - } - - def 'should upload cache files' () { - given: - def folder = Files.createTempDirectory('test') - def remote = folder.resolve('remote'); remote.mkdir() - def local = folder.resolve('local'); local.mkdir() - def outFile = local.resolve('nf-out.txt'); outFile.text = 'out file' - def logFile = local.resolve('nf-log.txt'); logFile.text = 'log file' - def tmlFile = local.resolve('nf-tml.txt'); tmlFile.text = 'tml file' - def cfgFile = local.resolve('tw-config.txt'); cfgFile.text = 'config file' - def repFile = local.resolve('tw-report.txt'); repFile.text = 'report file' - and: - def uuid = UUID.randomUUID().toString() - and: - def ENV = [ - NXF_UUID:uuid, - NXF_WORK: remote.toString(), - NXF_OUT_FILE: outFile.toString(), - NXF_LOG_FILE: logFile.toString(), - NXF_TML_FILE: tmlFile.toString(), - TOWER_CONFIG_FILE: cfgFile.toString(), - TOWER_REPORTS_FILE: repFile.toString(), - ] - - when: - def tower = new CacheManager(ENV) - then: - tower.sessionUuid == uuid - tower.localCachePath == Paths.get(".nextflow/cache/$uuid") - tower.localOutFile == outFile - tower.localLogFile == logFile - tower.localTimelineFile == tmlFile - tower.localTowerConfig == cfgFile - tower.localTowerReports == repFile - and: - tower.remoteWorkDir == remote - and: - tower.remoteCachePath == remote.resolve(".nextflow/cache/$uuid") - tower.remoteOutFile == remote.resolve( outFile.name ) - tower.remoteLogFile == remote.resolve( logFile.name ) - tower.remoteTimelineFile == remote.resolve( tmlFile.name ) - tower.remoteTowerConfig == remote.resolve( cfgFile.name ) - 
tower.remoteTowerReports == remote.resolve( repFile.name ) - - when: - // create local cache fake data - tower.localCachePath = local.resolve(".nextflow/cache/$uuid"); - tower.localCachePath.mkdirs() - tower.localCachePath.resolve('index-foo').text = 'index foo' - tower.localCachePath.resolve('db').mkdir() - tower.localCachePath.resolve('db/xxx').text = 'data xxx' - tower.localCachePath.resolve('db/yyy').text = 'data yyy' - and: - tower.saveCacheFiles() - tower.saveMiscFiles() - then: - tower.remoteCachePath.resolve('index-foo').text == 'index foo' - tower.remoteCachePath.resolve('db/xxx').text == 'data xxx' - tower.remoteCachePath.resolve('db/yyy').text == 'data yyy' - and: - tower.remoteOutFile.text == outFile.text - tower.remoteLogFile.text == logFile.text - tower.remoteTimelineFile.text == tmlFile.text - tower.remoteTowerConfig.text == cfgFile.text - tower.remoteTowerReports.text == repFile.text - - // simulate a 2nd run with different data - when: - tower.localCachePath.deleteDir() - tower.localCachePath.mkdirs() - tower.localCachePath.resolve('index-bar').text = 'index bar' - tower.localCachePath.resolve('db').mkdir() - tower.localCachePath.resolve('db/alpha').text = 'data alpha' - tower.localCachePath.resolve('db/delta').text = 'data delta' - and: - tower.saveCacheFiles() - tower.saveMiscFiles() - then: - tower.remoteCachePath.resolve('index-bar').text == 'index bar' - tower.remoteCachePath.resolve('db/alpha').text == 'data alpha' - tower.remoteCachePath.resolve('db/delta').text == 'data delta' - and: - !tower.remoteCachePath.resolve('index-foo').exists() - !tower.remoteCachePath.resolve('db/xxx').exists() - !tower.remoteCachePath.resolve('db/yyy').exists() - and: - tower.remoteOutFile.text == outFile.text - tower.remoteLogFile.text == logFile.text - tower.remoteTimelineFile.text == tmlFile.text - tower.remoteTowerConfig.text == cfgFile.text - tower.remoteTowerReports.text == repFile.text - - cleanup: - folder?.deleteDir() - } - - def 'should download cache 
files' () { - given: - def uuid = UUID.randomUUID().toString() - def folder = Files.createTempDirectory('test') - def local = folder.resolve('local'); local.mkdir() - def outFile = local.resolve('nf-out.txt'); - def logFile = local.resolve('nf-log.txt') - def tmlFile = local.resolve('nf-tml.txt') - def cfgFile = local.resolve('tw-config.txt') - def repFile = local.resolve('tw-report.txt') - and: - def remote = folder.resolve('remote'); remote.mkdir() - remote.resolve('nf-out.txt').text = 'the out file' - remote.resolve('nf-log.txt').text = 'the log file' - remote.resolve('nf-tml.txt').text = 'the timeline file' - remote.resolve('nf-config.txt').text = 'the config file' - remote.resolve('nf-report.txt').text = 'the report file' - and: - remote.resolve(".nextflow/cache/$uuid").mkdirs() - remote.resolve(".nextflow/cache/$uuid").resolve('index-bar').text = 'index bar' - remote.resolve(".nextflow/cache/$uuid").resolve('db').mkdirs() - remote.resolve(".nextflow/cache/$uuid").resolve('db/alpha').text = 'data alpha' - remote.resolve(".nextflow/cache/$uuid").resolve('db/delta').text = 'data delta' - and: - def tower = new CacheManager([NXF_UUID: uuid, NXF_WORK: remote.toString()]) - - when: - tower.restoreCacheFiles() - then: - tower.localCachePath.resolve('index-bar').text == 'index bar' - tower.localCachePath.resolve('db/alpha').text == 'data alpha' - tower.localCachePath.resolve('db/delta').text == 'data delta' - - cleanup: - folder?.deleteDir() - } - - def 'should not backup/restore cache if cloudcache is enabled' () { - given: - def ENV = [ - NXF_UUID: 'uuid', - NXF_WORK: '/work', - NXF_CLOUDCACHE_PATH: 's3://my-bucket/cache' - ] - and: - def tower = new CacheManager(ENV) - - when: - tower.saveCacheFiles() - then: - 0 * tower.getRemoteCachePath() - - when: - tower.restoreCacheFiles() - then: - 0 * tower.getRemoteCachePath() - - } -} diff --git a/plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsCheckpointTest.groovy 
b/plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsCheckpointTest.groovy new file mode 100644 index 0000000000..1852c1c114 --- /dev/null +++ b/plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsCheckpointTest.groovy @@ -0,0 +1,84 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package io.seqera.tower.plugin + +import nextflow.Session +import nextflow.SysEnv +import nextflow.util.Duration +import spock.lang.Specification +import test.TestHelper + +/** + * + * @author Paolo Di Tommaso + */ +class LogsCheckpointTest extends Specification { + + def 'should configure default delay' () { + given: + def session = Mock(Session) { + getWorkDir() >> TestHelper.createInMemTempDir() + getConfig() >> [:] + } + and: + def checkpoint = new LogsCheckpoint() + + when: + checkpoint.onFlowCreate(session) + then: + checkpoint.@interval == Duration.of('90s') + } + + def 'should configure delay via env var' () { + given: + SysEnv.push(TOWER_LOGS_CHECKPOINT_INTERVAL: '200s') + def session = Mock(Session) { + getWorkDir() >> TestHelper.createInMemTempDir() + getConfig() >> [:] + } + and: + def checkpoint = new LogsCheckpoint() + + when: + checkpoint.onFlowCreate(session) + then: + checkpoint.@interval == Duration.of('200s') + + cleanup: + SysEnv.pop() + } + + def 'should configure delay via config file' () { + given: + SysEnv.push(NXF_WORK: '/some/path', TOWER_LOGS_CHECKPOINT_INTERVAL: '200s') + def session = 
Mock(Session) { + getConfig()>>[tower:[logs:[checkpoint:[interval: '500s']]]] + getWorkDir() >> TestHelper.createInMemTempDir() + } + and: + def checkpoint = new LogsCheckpoint() + + when: + checkpoint.onFlowCreate(session) + then: + checkpoint.@interval == Duration.of('500s') + + cleanup: + SysEnv.pop() + } +} diff --git a/plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsHandlerTest.groovy b/plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsHandlerTest.groovy new file mode 100644 index 0000000000..5ff927a077 --- /dev/null +++ b/plugins/nf-tower/src/test/io/seqera/tower/plugin/LogsHandlerTest.groovy @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2020-2021. Seqera Labs, S.L. + * + * All Rights reserved + * + */ + +package io.seqera.tower.plugin + +import java.nio.file.Files + +import nextflow.Session +import nextflow.exception.AbortOperationException +import spock.lang.Specification +import test.TestHelper + +/** + * + * @author Paolo Di Tommaso + */ +class LogsHandlerTest extends Specification { + + def 'should init empty files' () { + when: + new LogsHandler(Mock(Session), [:]) + then: + thrown(AbortOperationException) + } + + def 'should upload cache files' () { + given: + def folder = Files.createTempDirectory('test') + def remote = TestHelper.createInMemTempDir() + def local = folder.resolve('local'); local.mkdir() + def outFile = local.resolve('nf-out.txt'); outFile.text = 'out file' + def logFile = local.resolve('nf-log.txt'); logFile.text = 'log file' + def tmlFile = local.resolve('nf-tml.txt'); tmlFile.text = 'tml file' + def cfgFile = local.resolve('tw-config.txt'); cfgFile.text = 'config file' + def repFile = local.resolve('tw-report.txt'); repFile.text = 'report file' + and: + def uuid = UUID.randomUUID().toString() + and: + def session = Mock(Session) {getWorkDir() >> remote } + def ENV = [ + NXF_UUID:uuid, + NXF_OUT_FILE: outFile.toString(), + NXF_LOG_FILE: logFile.toString(), + NXF_TML_FILE: tmlFile.toString(), + TOWER_CONFIG_FILE: cfgFile.toString(), 
+ TOWER_REPORTS_FILE: repFile.toString(), + ] + + when: + def tower = new LogsHandler(session, ENV) + then: + tower.localOutFile == outFile + tower.localLogFile == logFile + tower.localTimelineFile == tmlFile + tower.localTowerConfig == cfgFile + tower.localTowerReports == repFile + and: + tower.remoteWorkDir == remote + and: + tower.remoteOutFile == remote.resolve( outFile.name ) + tower.remoteLogFile == remote.resolve( logFile.name ) + tower.remoteTimelineFile == remote.resolve( tmlFile.name ) + tower.remoteTowerConfig == remote.resolve( cfgFile.name ) + tower.remoteTowerReports == remote.resolve( repFile.name ) + + when: + // create local cache fake data + tower.saveFiles() + then: + tower.remoteOutFile.text == outFile.text + tower.remoteLogFile.text == logFile.text + tower.remoteTimelineFile.text == tmlFile.text + tower.remoteTowerConfig.text == cfgFile.text + tower.remoteTowerReports.text == repFile.text + + cleanup: + folder?.deleteDir() + } + +} diff --git a/tests/cache-bak.nf b/tests/cache-bak.nf deleted file mode 100644 index 4dee2f21bb..0000000000 --- a/tests/cache-bak.nf +++ /dev/null @@ -1,8 +0,0 @@ -workflow { - foo() -} - -process foo { - debug true - /echo Hello world/ -} diff --git a/tests/checks/cache-bak.nf/.checks b/tests/checks/cache-bak.nf/.checks deleted file mode 100644 index b1d280b514..0000000000 --- a/tests/checks/cache-bak.nf/.checks +++ /dev/null @@ -1,42 +0,0 @@ -set -e - -# Skip test if AWS keys are missing -if [[ ! 
$AWS_ACCESS_KEY_ID ]]; then - echo "Skip cache-bak test since AWS keys are not available" - exit 0 -fi - -# -# setup env -# -export NXF_IGNORE_RESUME_HISTORY=true -export NXF_UUID=$(uuidgen | tr [:upper:] [:lower:]) - -# -# run normal mode -# -$NXF_RUN -name test_1 | tee .stdout -[[ `grep -c 'Submitted process > foo' .nextflow.log` == 1 ]] || false - -# -# backup cache -# -NXF_WORK=s3://nextflow-ci/cache-test \ - $NXF_CMD -log .nextflow-backup.log plugin nf-tower:cache-backup - -# -# remove it -# -rm -rf .nextflow - -# -# restore cache -# -NXF_WORK=s3://nextflow-ci/cache-test \ - $NXF_CMD -log .nextflow-restore.log plugin nf-tower:cache-restore - -# -# run resume mode -# -$NXF_RUN -name test_2 -resume $NXF_UUID | tee .stdout -[[ `grep -c 'Cached process > foo' .nextflow.log` == 1 ]] || false From 51f5c842dd9f4326b6781002155d65c576fd5806 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sat, 29 Jul 2023 10:56:43 -0500 Subject: [PATCH 028/128] Allow workflow entry from module import (#4128) Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- .../groovy/nextflow/script/BaseScript.groovy | 12 +-- .../groovy/nextflow/script/ScriptMeta.groovy | 24 +++++- .../nextflow/script/BaseScriptTest.groovy | 77 +++++++++++++++++-- .../nextflow/script/ProcessDefTest.groovy | 2 +- .../nextflow/script/ScriptMetaTest.groovy | 7 +- 5 files changed, 103 insertions(+), 19 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy b/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy index ef7e64b42a..5da185edd6 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy @@ -108,15 +108,14 @@ abstract class BaseScript extends Script implements ExecutionContext { protected workflow(Closure workflowBody) { // launch the execution final workflow = new WorkflowDef(this, workflowBody) - if( !binding.entryName ) - 
this.entryFlow = workflow + // capture the main (unnamed) workflow definition + this.entryFlow = workflow + // add it to the list of workflow definitions meta.addDefinition(workflow) } protected workflow(String name, Closure workflowDef) { final workflow = new WorkflowDef(this,workflowDef,name) - if( binding.entryName==name ) - this.entryFlow = workflow meta.addDefinition(workflow) } @@ -147,9 +146,10 @@ abstract class BaseScript extends Script implements ExecutionContext { return result } - if( binding.entryName && !entryFlow ) { + // if an `entryName` was specified via the command line, override the `entryFlow` to be executed + if( binding.entryName && !(entryFlow=meta.getWorkflow(binding.entryName) ) ) { def msg = "Unknown workflow entry name: ${binding.entryName}" - final allNames = meta.getLocalWorkflowNames() + final allNames = meta.getWorkflowNames() final guess = allNames.closest(binding.entryName) if( guess ) msg += " -- Did you mean?\n" + guess.collect { " $it"}.join('\n') diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy index 7b23ae6329..804bcd499f 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy @@ -237,15 +237,18 @@ class ScriptMeta { } WorkflowDef getWorkflow(String name) { - (WorkflowDef)getComponent(name) + final result = getComponent(name) + return result instanceof WorkflowDef ? result : null } ProcessDef getProcess(String name) { - (ProcessDef)getComponent(name) + final result = getComponent(name) + return result instanceof ProcessDef ? result : null } FunctionDef getFunction(String name) { - (FunctionDef)getComponent(name) + final result = getComponent(name) + return result instanceof FunctionDef ? 
result : null } Set getAllNames() { @@ -263,6 +266,21 @@ class ScriptMeta { return result } + Set getWorkflowNames() { + final result = new HashSet(definitions.size() + imports.size()) + // local definitions + for( def item : definitions.values() ) { + if( item instanceof WorkflowDef ) + result.add(item.name) + } + // processes from imports + for( def item: imports.values() ) { + if( item instanceof WorkflowDef ) + result.add(item.name) + } + return result + } + Set getProcessNames() { if( NF.dsl1 ) return new HashSet(getDsl1ProcessNames()) diff --git a/modules/nextflow/src/test/groovy/nextflow/script/BaseScriptTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/BaseScriptTest.groovy index 958656cc8f..fee79ffae9 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/BaseScriptTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/BaseScriptTest.groovy @@ -21,30 +21,29 @@ import java.nio.file.Paths import nextflow.NextflowMeta import nextflow.Session -import spock.lang.Specification +import test.Dsl2Spec +import test.TestHelper /** * * @author Paolo Di Tommaso */ -class BaseScriptTest extends Specification { +class BaseScriptTest extends Dsl2Spec { def 'should define implicit variables' () { given: def script = Files.createTempFile('test',null) - + and: def WORKFLOW = Mock(WorkflowMetadata) def WORK_DIR = Paths.get('/work/dir') def PROJECT_DIR = Paths.get('/some/base') - and: def session = Mock(Session) { getBaseDir() >> PROJECT_DIR getWorkDir() >> WORK_DIR getWorkflowMetadata() >> WORKFLOW } - def binding = new ScriptBinding([:]) def parser = new ScriptParser(session) @@ -63,9 +62,8 @@ class BaseScriptTest extends Specification { parser.setBinding(binding) parser.runScript(script) - then: - binding.result.baseDir ==PROJECT_DIR + binding.result.baseDir == PROJECT_DIR binding.result.projectDir == PROJECT_DIR binding.result.workDir == WORK_DIR binding.result.launchDir == Paths.get('.').toRealPath() @@ -77,5 +75,70 @@ class BaseScriptTest 
extends Specification { script?.delete() } + def 'should use custom entry workflow' () { + + given: + def script = Files.createTempFile('test',null) + and: + def session = Mock(Session) + def binding = new ScriptBinding([:]) + def parser = new ScriptParser(session) + + when: + script.text = ''' + workflow foo { + } + + workflow { + error 'you were supposed to run foo!' + } + ''' + + parser.setBinding(binding) + parser.setEntryName('foo') + parser.runScript(script) + + then: + noExceptionThrown() + + cleanup: + script?.delete() + } + + def 'should use entry workflow from module' () { + + given: + def folder = TestHelper.createInMemTempDir() + def module = folder.resolve('module.nf') + def script = folder.resolve('main.nf') + and: + def session = Mock(Session) + def binding = new ScriptBinding([:]) + def parser = new ScriptParser(session) + + when: + module.text = ''' + workflow foo { + } + ''' + + script.text = ''' + include { foo } from './module.nf' + + workflow { + error 'you were supposed to run foo!' 
+ } + ''' + + parser.setBinding(binding) + parser.setEntryName('foo') + parser.runScript(script) + + then: + noExceptionThrown() + + cleanup: + folder?.delete() + } } diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ProcessDefTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ProcessDefTest.groovy index 855a4de864..d008e29491 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ProcessDefTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ProcessDefTest.groovy @@ -44,7 +44,7 @@ class ProcessDefTest extends Specification { def script = (BaseScript)new GroovyShell(binding,config).parse(SCRIPT) then: - true + ScriptMeta.get(script).getProcessNames() == ['foo','bar'] as Set } diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy index e3a3b866ed..2e317af7f1 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy @@ -104,12 +104,15 @@ class ScriptMetaTest extends Dsl2Spec { meta1.getComponent('xxx') == null meta1.getComponent('func3') == null meta1.getComponent('proc3') == null + and: meta1.getComponent('work3') == work3 meta1.getComponent('my_process') instanceof ProcessDef meta1.getComponent('my_process').name == 'my_process' -// then: -// meta1.getProcessNames() == ['proc1','proc2','my_process'] as Set + then: + meta1.getProcessNames() == ['proc1','proc2','my_process'] as Set + and: + meta1.getWorkflowNames() == ['work1', 'work2', 'work3'] as Set } From a59af39f4c8712ebff6d52ab43cde6d5a763be66 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sat, 29 Jul 2023 11:35:53 -0500 Subject: [PATCH 029/128] Fix incorrect error message on missing comma (#4085) [ci fast] Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- .../nextflow/script/params/BaseParam.groovy | 28 
++++++++++++++++++- .../script/params/ParamsInTest.groovy | 23 +++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/params/BaseParam.groovy b/modules/nextflow/src/main/groovy/nextflow/script/params/BaseParam.groovy index fcf218ebbf..4a5165f8df 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/params/BaseParam.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/params/BaseParam.groovy @@ -17,7 +17,8 @@ package nextflow.script.params import groovy.util.logging.Slf4j - +import nextflow.exception.ScriptRuntimeException +import nextflow.script.TokenVar /** * Base class for input/output parameters * @@ -141,4 +142,29 @@ abstract class BaseParam implements Cloneable { return mapIndex >= 0 } + /** + * Report missing method calls as possible syntax errors. + */ + def methodMissing( String name, def args ) { + throw new ScriptRuntimeException("Invalid function call `${name}(${argsToString0(args)})` -- possible syntax error") + } + + private String argsToString0(args) { + if( args instanceof Object[] ) + args = Arrays.asList(args) + if( args instanceof List ) { + final result = new ArrayList() + for( def it : args ) + result.add(argsToString1(it)) + return result.join(',') + } + return argsToString1(args) + } + + private String argsToString1(arg) { + if( arg instanceof TokenVar ) + return arg.name + else + return String.valueOf((Object)arg) + } } diff --git a/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy index e2ef5258ce..60dba152bc 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy @@ -23,6 +23,7 @@ import java.nio.file.Paths import groovyx.gpars.dataflow.DataflowQueue import groovyx.gpars.dataflow.DataflowVariable import nextflow.Channel +import 
nextflow.exception.ScriptRuntimeException import nextflow.processor.TaskProcessor import spock.lang.Timeout import test.Dsl2Spec @@ -985,4 +986,26 @@ class ParamsInTest extends Dsl2Spec { (in1.inner[1] as FileInParam).isNestedParam() } + def 'should throw error on missing comma' () { + setup: + def text = ''' + process hola { + input: + tuple val(x) val(y) + + /command/ + } + + workflow { + hola(['x', 'y']) + } + ''' + when: + parseAndReturnProcess(text) + + then: + def e = thrown(ScriptRuntimeException) + e.message == 'Invalid function call `val(y)` -- possible syntax error' + } + } From a6d08c04a57e522c0d52cc95ea9e223adf399199 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 29 Jul 2023 17:31:46 +0200 Subject: [PATCH 030/128] Remove module all components import Signed-off-by: Paolo Di Tommaso --- .../groovy/nextflow/script/ScriptMeta.groovy | 16 ++++++---------- .../groovy/nextflow/script/ScriptMetaTest.groovy | 4 +++- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy index 804bcd499f..ffecebc534 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy @@ -327,16 +327,12 @@ class ScriptMeta { void addModule(ScriptMeta script, String name, String alias) { assert script - if( name ) { - // include a specific - def item = script.getComponent(name) - if( !item ) - throw new MissingModuleComponentException(script, name) - addModule0(item, alias) - } - else for( def item : script.getDefinitions() ) { - addModule0(item) - } + assert name + // include a specific + def item = script.getComponent(name) + if( !item ) + throw new MissingModuleComponentException(script, name) + addModule0(item, alias) } protected void addModule0(ComponentDef component, String alias=null) { diff --git 
a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy index 2e317af7f1..fac10444f1 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy @@ -83,7 +83,9 @@ class ScriptMetaTest extends Dsl2Spec { meta3.addDefinition(proc3, func3, work3) when: - meta1.addModule(meta2, null, null) + meta1.addModule(meta2, 'func2', null) + meta1.addModule(meta2, 'proc2', null) + meta1.addModule(meta2, 'work2', null) meta1.addModule(meta3, 'proc3', 'my_process') meta1.addModule(meta3, 'work3', null) From f2a2ea353c0a8598c9f7d5eef31e9eca14893e3b Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 29 Jul 2023 19:21:00 +0200 Subject: [PATCH 031/128] Fix Option fixOwnership traverse parent directories Signed-off-by: Paolo Di Tommaso --- .../src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy | 2 +- .../test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy | 2 +- .../nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index 218c462048..25b21626ec 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -319,7 +319,7 @@ class BashWrapperBuilder { binding.after_script = afterScript ? "# 'afterScript' directive\n$afterScript" : null // patch root ownership problem on files created with docker - binding.fix_ownership = fixOwnership() ? "[ \${NXF_OWNER:=''} ] && chown -fR --from root \$NXF_OWNER ${workDir}/{*,.*} || true" : null + binding.fix_ownership = fixOwnership() ? 
"[ \${NXF_OWNER:=''} ] && (shopt -s extglob; GLOBIGNORE='..'; chown -fR --from root \$NXF_OWNER ${workDir}/{*,.*}) || true" : null binding.trace_script = isTraceRequired() ? getTraceScript(binding) : null diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy index cda97f5bc6..20339d9118 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy @@ -1052,7 +1052,7 @@ class BashWrapperBuilderTest extends Specification { def binding = builder.makeBinding() then: builder.fixOwnership() >> true - binding.fix_ownership == '[ ${NXF_OWNER:=\'\'} ] && chown -fR --from root $NXF_OWNER /work/dir/{*,.*} || true' + binding.fix_ownership == '[ ${NXF_OWNER:=\'\'} ] && (shopt -s extglob; GLOBIGNORE=\'..\'; chown -fR --from root $NXF_OWNER /work/dir/{*,.*}) || true' when: diff --git a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy index 1e36120d09..d2620c2767 100644 --- a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy +++ b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy @@ -627,7 +627,7 @@ class AwsBatchScriptLauncherTest extends Specification { def binding = builder.makeBinding() then: builder.fixOwnership() >> true - binding.fix_ownership == '[ ${NXF_OWNER:=\'\'} ] && chown -fR --from root $NXF_OWNER /work/dir/{*,.*} || true' + binding.fix_ownership == '[ ${NXF_OWNER:=\'\'} ] && (shopt -s extglob; GLOBIGNORE=\'..\'; chown -fR --from root $NXF_OWNER /work/dir/{*,.*}) || true' } From 19a72c402495e72df0140c6af05d299330fa5ba9 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 31 Jul 2023 14:58:59 +0200 Subject: [PATCH 032/128] Fix 
glob resolution for remove files Signed-off-by: Paolo Di Tommaso --- .../nextflow/src/main/groovy/nextflow/Nextflow.groovy | 4 +++- .../test/nextflow/file/FilePatternSplitterTest.groovy | 3 ++- .../nf-amazon/src/test/nextflow/S3NextflowTest.groovy | 11 +++++++++++ 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/Nextflow.groovy b/modules/nextflow/src/main/groovy/nextflow/Nextflow.groovy index 24012743ac..dd1b03f8e8 100644 --- a/modules/nextflow/src/main/groovy/nextflow/Nextflow.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/Nextflow.groovy @@ -18,6 +18,7 @@ package nextflow import static nextflow.file.FileHelper.* +import java.nio.file.FileSystem import java.nio.file.Files import java.nio.file.NoSuchFileException import java.nio.file.Path @@ -60,7 +61,8 @@ class Nextflow { static private fileNamePattern( FilePatternSplitter splitter, Map opts ) { final scheme = splitter.scheme - final folder = toCanonicalPath(splitter.parent) + final target = scheme ? "$scheme://$splitter.parent" : splitter.parent + final folder = toCanonicalPath(target) final pattern = splitter.fileName if( opts == null ) opts = [:] diff --git a/modules/nf-commons/src/test/nextflow/file/FilePatternSplitterTest.groovy b/modules/nf-commons/src/test/nextflow/file/FilePatternSplitterTest.groovy index 002e517cbf..10173f9804 100644 --- a/modules/nf-commons/src/test/nextflow/file/FilePatternSplitterTest.groovy +++ b/modules/nf-commons/src/test/nextflow/file/FilePatternSplitterTest.groovy @@ -112,7 +112,8 @@ class FilePatternSplitterTest extends Specification { 'test/data/file[a-b]' | 'test/data/' | 'file[a-b]' | null 'test/data[a-b]/file' | 'test/' | 'data[a-b]/file' | null '/some/path\\[a-b\\]/data{a,b}/file\\?' | '/some/path[a-b]/'| 'data{a,b}/file\\?' 
| null - + 's3://foo/bar/*' | 'foo/bar/' | '*' | 's3' + 's3://foo/bar/file.txt' | 'foo/bar/' | 'file.txt' | 's3' } def 'should strip glob escape chars' () { diff --git a/plugins/nf-amazon/src/test/nextflow/S3NextflowTest.groovy b/plugins/nf-amazon/src/test/nextflow/S3NextflowTest.groovy index ce62bd1140..dddfd6be8c 100644 --- a/plugins/nf-amazon/src/test/nextflow/S3NextflowTest.groovy +++ b/plugins/nf-amazon/src/test/nextflow/S3NextflowTest.groovy @@ -19,6 +19,8 @@ package nextflow import java.nio.file.Paths +import spock.lang.IgnoreIf +import spock.lang.Requires import spock.lang.Specification /** * @@ -46,4 +48,13 @@ class S3NextflowTest extends Specification { SysEnv.pop() } + @IgnoreIf({System.getenv('NXF_SMOKE')}) + @Requires({System.getenv('AWS_S3FS_ACCESS_KEY') && System.getenv('AWS_S3FS_SECRET_KEY')}) + def 'should resolve list of files' () { + when: + def result = Nextflow.files('s3://ngi-igenomes/*') + then: + result.size() == 3 + } + } From d2229bde10e277d0b8d02577c55d8f9633460601 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 3 Aug 2023 10:01:45 -0500 Subject: [PATCH 033/128] Update Wave and Fusion docs (#4149) Signed-off-by: Ben Sherman --- docs/fusion.md | 11 ++++++----- docs/wave.md | 14 +++++++++----- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/docs/fusion.md b/docs/fusion.md index 1aeaeda99d..8b0c1fbca4 100644 --- a/docs/fusion.md +++ b/docs/fusion.md @@ -185,8 +185,10 @@ The following configuration options are available: : Enable/disable the use of Fusion file system. `fusion.exportStorageCredentials` -: When `true` the access credentials required by the underlying object storage are exported the pipeline jobs execution environment -(requires version `23.05.0-edge` or later). +: :::{versionadded} 23.05.0-edge + This option was previously named `fusion.exportAwsAccessKeys`. + ::: +: When `true` the access credentials required by the underlying object storage are exported the pipeline jobs execution environment. 
`fusion.containerConfigUrl` : The URL from where the container layer provisioning the Fusion client is downloaded. @@ -197,9 +199,8 @@ The following configuration options are available: `fusion.logOutput` : Where the logging output is written. -`tagsEnabled` +`fusion.tagsEnabled` : Enable/disable the tagging of files created in the underlying object storage via the Fusion client (default: `true`). -`tagsPattern` +`fusion.tagsPattern` : The pattern that determines how tags are applied to files created via the Fusion client (default: `[.command.*|.exitcode|.fusion.*](nextflow.io/metadata=true),[*](nextflow.io/temporary=true)`) - diff --git a/docs/wave.md b/docs/wave.md index a33bb9aa30..8eec01b3d5 100644 --- a/docs/wave.md +++ b/docs/wave.md @@ -171,27 +171,31 @@ The following configuration options are available: `wave.build.spack.basePackages` : :::{versionadded} 22.06.0-edge -::: + ::: : One or more Spack packages to be always added in the resulting container. `wave.build.spack.commands` : :::{versionadded} 22.06.0-edge -::: + ::: : One or more commands to be added to the Dockerfile used to build a Spack based image. `wave.httpClient.connectTime` : :::{versionadded} 22.06.0-edge -::: + ::: : Sets the connection timeout duration for the HTTP client connecting to the Wave service (default: `30s`). `wave.strategy` : The strategy to be used when resolving ambiguous Wave container requirements (default: `'container,dockerfile,conda,spack'`). `wave.report.enabled` (preview) -: Enable the reporting of the Wave containers used during the pipeline execution (default: `false`, requires version `23.06.0-edge` or later). +: :::{versionadded} 23.06.0-edge + ::: +: Enable the reporting of the Wave containers used during the pipeline execution (default: `false`). `wave.report.file` (preview) -: The name of the containers report file (default: `containers-.config` requires version `23.06.0-edge` or later). 
+: :::{versionadded} 23.06.0-edge + ::: +: The name of the containers report file (default: `'containers-.config'`). `wave.retryPolicy.delay` : :::{versionadded} 22.06.0-edge From 6b46b507cfe6fcfd0fcd4e1ffd37fd131cb90b31 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 3 Aug 2023 10:02:12 -0500 Subject: [PATCH 034/128] Fix strict mode docs (#4150) Signed-off-by: Ben Sherman --- docs/config.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/config.md b/docs/config.md index 76d5fa2b99..b6f662ef90 100644 --- a/docs/config.md +++ b/docs/config.md @@ -1713,11 +1713,11 @@ Some features can be enabled using the `nextflow.enable` and `nextflow.preview` - When merging params from a config file with params from the command line, Nextflow will fail if a param is specified from both sources but with different types - - When using the `join` operator, the `failOnDuplicate` option is `true` by default + - When using the `join` operator, the `failOnDuplicate` option is `true` regardless of any user setting - - When using the `join` operator, the `failOnMismatch` option is `true` by default (unless `remainder` is also `true`) + - When using the `join` operator, the `failOnMismatch` option is `true` (unless `remainder` is also `true`) regardless of any user setting - - When using the `publishDir` process directive, the `failOnError` option is `true` by default + - When using the `publishDir` process directive, the `failOnError` option is `true` regardless of any user setting - In a process definition, Nextflow will fail if an input or output tuple has only one element From 3f4b85574532b3d5d46db0a54710d01a6efdb292 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 3 Aug 2023 10:03:48 -0500 Subject: [PATCH 035/128] Fix bug with K8s resource labels (#4147) [ci fast] Signed-off-by: Ben Sherman --- .../src/main/groovy/nextflow/k8s/K8sTaskHandler.groovy | 2 +- .../test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy | 8 ++++++-- 2 files changed, 7 
insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/k8s/K8sTaskHandler.groovy b/modules/nextflow/src/main/groovy/nextflow/k8s/K8sTaskHandler.groovy index 1e3a070dbb..cdbf63411f 100644 --- a/modules/nextflow/src/main/groovy/nextflow/k8s/K8sTaskHandler.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/k8s/K8sTaskHandler.groovy @@ -278,7 +278,7 @@ class K8sTaskHandler extends TaskHandler implements FusionAwareTask { } final resLabels = task.config.getResourceLabels() if( resLabels ) - resLabels.putAll(resLabels) + result.putAll(resLabels) result.'nextflow.io/app' = 'nextflow' result.'nextflow.io/runName' = getRunName() result.'nextflow.io/taskName' = task.getName() diff --git a/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy b/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy index 892e9c3200..ba0ba6404c 100644 --- a/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy @@ -864,7 +864,9 @@ class K8sTaskHandlerTest extends Specification { handler.getRunName() >> 'pedantic-joe' task.getName() >> 'hello-world-1' task.getProcessor() >> proc - task.getConfig() >> Mock(TaskConfig) + task.getConfig() >> Mock(TaskConfig) { + getResourceLabels() >> [mylabel: 'myvalue'] + } proc.getName() >> 'hello-proc' exec.getSession() >> sess sess.getUniqueId() >> uuid @@ -873,7 +875,9 @@ class K8sTaskHandlerTest extends Specification { [label: 'app', value: 'nextflow'], [label: 'x', value: 'hello_world'] ]] - + and: + labels.mylabel == 'myvalue' + and: labels.app == 'nextflow' labels.foo == 'bar' labels.x == 'hello_world' From 57e3100b7c4203385ab69a4502bad70534d0a547 Mon Sep 17 00:00:00 2001 From: Aaron Fishman Date: Thu, 3 Aug 2023 18:16:47 +0100 Subject: [PATCH 036/128] Add `-value` option to `config` command (#4142) Signed-off-by: Aaron Fishman Signed-off-by: Ben Sherman Co-authored-by: Aaron Fishman 
Co-authored-by: Ben Sherman Co-authored-by: Paolo Di Tommaso --- docs/cli.md | 12 +++++++ .../main/groovy/nextflow/cli/CmdConfig.groovy | 26 +++++++++++++++ .../groovy/nextflow/cli/CmdConfigTest.groovy | 32 +++++++++++++++++-- 3 files changed, 67 insertions(+), 3 deletions(-) diff --git a/docs/cli.md b/docs/cli.md index 9e707d294f..b101b568b9 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -411,6 +411,11 @@ The `config` command is used for printing the project's configuration i.e. the ` `-sort` : Sort config attributes. +`-value` +: :::{versionadded} 23.08.0-edge + ::: +: Print the value of a config option, or fail if the option is not defined. + **Examples** Print out the inferred config using a the default group key-value notation. @@ -445,6 +450,13 @@ docker.enabled = true process.executor = local ``` +Print out the value of a specific configuration property. + +```console +$ nextflow config -value process.executor +local +``` + Print out all profiles from the project's configuration. ```console diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy index 4281b15210..f4e17f07ba 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy @@ -59,6 +59,8 @@ class CmdConfig extends CmdBase { @Parameter(names = '-sort', description = 'Sort config attributes') boolean sort + @Parameter(names = '-value', description = 'Print the value of a config option, or fail if the option is not defined') + String printValue @Override String getName() { NAME } @@ -79,6 +81,12 @@ class CmdConfig extends CmdBase { if( printProperties && printFlatten ) throw new AbortOperationException("Option `-flat` and `-properties` conflicts") + if ( printValue && printFlatten ) + throw new AbortOperationException("Option `-value` and `-flat` conflicts") + + if ( printValue && printProperties ) + throw new 
AbortOperationException("Option `-value` and `-properties` conflicts") + final builder = new ConfigBuilder() .setShowClosures(true) .showMissingVariables(true) @@ -94,6 +102,9 @@ class CmdConfig extends CmdBase { else if( printFlatten ) { printFlatten0(config, stdout) } + else if( printValue ) { + printValue0(config, printValue, stdout) + } else { printCanonical0(config, stdout) } @@ -123,6 +134,21 @@ class CmdConfig extends CmdBase { output << ConfigHelper.toPropertiesString(config, sort) } + /** + * Prints a property of a {@link ConfigObject}. + * + * @param config The {@link ConfigObject} representing the parsed workflow configuration + * @param name The {@link String} representing the property name using dot notation + * @param output The stream where output the formatted configuration notation + */ + @PackageScope void printValue0(ConfigObject config, String name, OutputStream output) { + final map = config.flatten() + if( !map.containsKey(name) ) + throw new AbortOperationException("Configuration option '$name' not found") + + output << map.get(name).toString() << '\n' + } + /** * Prints a {@link ConfigObject} using properties dot notation. * String values are enclosed in single quote characters. 
diff --git a/modules/nextflow/src/test/groovy/nextflow/cli/CmdConfigTest.groovy b/modules/nextflow/src/test/groovy/nextflow/cli/CmdConfigTest.groovy index d3295d41d0..6be3ff7d52 100644 --- a/modules/nextflow/src/test/groovy/nextflow/cli/CmdConfigTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/cli/CmdConfigTest.groovy @@ -16,11 +16,11 @@ package nextflow.cli -import nextflow.plugin.Plugins -import spock.lang.IgnoreIf - import java.nio.file.Files +import nextflow.exception.AbortOperationException +import nextflow.plugin.Plugins +import spock.lang.IgnoreIf import spock.lang.Specification /** * @@ -166,6 +166,32 @@ class CmdConfigTest extends Specification { } + def 'should print the value of a config option' () { + + given: + def cmd = new CmdConfig() + and: + def config = new ConfigObject() + config.process.executor = 'slurm' + config.process.queue = 'long' + config.docker.enabled = true + and: + def buffer + + when: + buffer = new ByteArrayOutputStream() + cmd.printValue0(config, 'process.executor', buffer) + then: + buffer.toString() == 'slurm\n' + + when: + buffer = new ByteArrayOutputStream() + cmd.printValue0(config, 'does.not.exist', buffer) + then: + def e = thrown(AbortOperationException) + e.message == "Configuration option 'does.not.exist' not found" + + } def 'should parse config file' () { given: From 8579e7a4739d736851d57ca54d1eebc11f7fe6a3 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Fri, 4 Aug 2023 16:19:59 +0200 Subject: [PATCH 037/128] Fix Wave disable flag Signed-off-by: Paolo Di Tommaso --- docs/config.md | 4 ++-- .../io/seqera/wave/plugin/WaveFactory.groovy | 19 ++++++++++++------- .../seqera/wave/plugin/WaveFactoryTest.groovy | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/docs/config.md b/docs/config.md index b6f662ef90..4c429499a3 100644 --- a/docs/config.md +++ b/docs/config.md @@ -1557,8 +1557,8 @@ The following environment variables control the configuration of the Nextflow ru ::: : Disables the 
automatic type detection of command line parameters. -`NXF_DISABLE_WAVE_REQUIREMENT` -: :::{versionadded} 23.07.0-edge +`NXF_DISABLE_WAVE_SERVICE` +: :::{versionadded} 23.08.0-edge ::: : Disables the requirement for Wave service when enabling the Fusion file system. diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveFactory.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveFactory.groovy index ec091cdc9e..48ae1b974e 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveFactory.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveFactory.groovy @@ -36,12 +36,17 @@ class WaveFactory implements TraceObserverFactory { @Override Collection create(Session session) { final config = session.config - final wave = (Map)config.wave - final fusion = (Map)config.fusion - if( fusion?.enabled ) { - if( !wave?.enabled ) { - if( SysEnv.get('NXF_DISABLE_WAVE_REQUIREMENT') ) - return + final wave = (Map)config.wave ?: new HashMap<>(1) + final fusion = (Map)config.fusion ?: new HashMap<>(1) + + if( SysEnv.get('NXF_DISABLE_WAVE_SERVICE') ) { + log.debug "Detected NXF_DISABLE_WAVE_SERVICE environment variable - Turning off Wave service" + wave.enabled = false + return List.of() + } + + if( fusion.enabled ) { + if( !wave.enabled ) { throw new AbortOperationException("Fusion feature requires enabling Wave service") } else { @@ -52,7 +57,7 @@ class WaveFactory implements TraceObserverFactory { } final observer = new WaveObserver(session) - return wave?.enabled && observer.reportOpts().enabled() + return wave.enabled && observer.reportOpts().enabled() ? 
List.of(observer) : List.of() } diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveFactoryTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveFactoryTest.groovy index b06392952b..980c9c9c9d 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveFactoryTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveFactoryTest.groovy @@ -64,7 +64,7 @@ class WaveFactoryTest extends Specification { def 'should not fail when wave is disabled' () { given: - SysEnv.push(NXF_DISABLE_WAVE_REQUIREMENT: 'true') + SysEnv.push(NXF_DISABLE_WAVE_SERVICE: 'true') def CONFIG = [wave:[:], fusion:[enabled:true]] def session = Mock(Session) { getConfig() >> CONFIG } def factory = new WaveFactory() From 6d2690707b510038b7724d24957d8fe4ad8d55f8 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:05:13 +0200 Subject: [PATCH 038/128] Restore Tower CacheManager for backward compatibility Signed-off-by: Paolo Di Tommaso --- .../seqera/tower/plugin/CacheCommand.groovy | 34 +++- .../seqera/tower/plugin/CacheManager.groovy | 164 ++++++++++++++++++ .../seqera/tower/plugin/TowerArchiver.groovy | 1 + .../tower/plugin/CacheManagerTest.groovy | 157 +++++++++++++++++ tests/cache-bak.nf | 8 + tests/checks/cache-bak.nf/.checks | 42 +++++ 6 files changed, 403 insertions(+), 3 deletions(-) create mode 100644 plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy create mode 100644 plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy create mode 100644 tests/cache-bak.nf create mode 100644 tests/checks/cache-bak.nf/.checks diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy index 2abf12e94e..846f35f3b9 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheCommand.groovy @@ -19,6 +19,8 @@ package io.seqera.tower.plugin import 
groovy.transform.CompileStatic import groovy.util.logging.Slf4j +import nextflow.Session +import nextflow.SysEnv import nextflow.cli.PluginAbstractExec /** * Implements nextflow cache and restore commands @@ -36,6 +38,7 @@ class CacheCommand implements PluginAbstractExec { if( cmd == 'cache-backup') { cacheBackup() + archiveLogs(session) } if( cmd == 'cache-restore' ) cacheRestore() @@ -44,12 +47,37 @@ class CacheCommand implements PluginAbstractExec { protected void cacheBackup() { log.debug "Running Nextflow cache backup" - new LogsHandler(getSession(), System.getenv()).saveFiles() + if( !SysEnv.get('NXF_CLOUDCACHE_PATH')) { + // legacy cache manager + new CacheManager(System.getenv()).saveCacheFiles() + } + else { + new LogsHandler(getSession(), System.getenv()).saveFiles() + } + } + + protected void archiveLogs(Session sess) { + // archive logs + final archiver = TowerArchiver.create(sess, System.getenv()) + if( archiver ) try { + log.debug "Running Nextflow logs archiver" + archiver.archiveLogs() + } + finally { + archiver.shutdown(sess) + } } protected void cacheRestore() { - log.debug "Running Nextflow cache restore - DO NOTHING" - // this command is only kept for backward compatibility + if( !SysEnv.get('NXF_CLOUDCACHE_PATH')) { + log.debug "Running Nextflow cache restore" + // legacy cache manager + new CacheManager(System.getenv()).restoreCacheFiles() + } + else { + // this command is only kept for backward compatibility + log.debug "Running Nextflow cache restore - DO NOTHING" + } } } diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy new file mode 100644 index 0000000000..5c890f7752 --- /dev/null +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/CacheManager.groovy @@ -0,0 +1,164 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package io.seqera.tower.plugin + +import static java.nio.file.StandardCopyOption.* + +import java.nio.file.Files +import java.nio.file.NoSuchFileException +import java.nio.file.Path +import java.nio.file.Paths + +import groovy.transform.CompileStatic +import groovy.transform.PackageScope +import groovy.util.logging.Slf4j +import nextflow.exception.AbortOperationException +import nextflow.extension.FilesEx +import nextflow.file.FileHelper +/** + * Back and restore Nextflow cache + * + * @author Paolo Di Tommaso + */ +@Slf4j +@CompileStatic +class CacheManager { + + @PackageScope String sessionUuid + @PackageScope Path localCachePath + @PackageScope Path localOutFile + @PackageScope Path localLogFile + @PackageScope Path localTimelineFile + @PackageScope Path localTowerConfig + @PackageScope Path localTowerReports + @PackageScope Path remoteWorkDir + + @PackageScope Path getRemoteCachePath() { remoteWorkDir.resolve(".nextflow/cache/${sessionUuid}") } + @PackageScope Path getRemoteOutFile() { remoteWorkDir.resolve(localOutFile.getName()) } + @PackageScope Path getRemoteLogFile() { remoteWorkDir.resolve(localLogFile.getName()) } + @PackageScope Path getRemoteTimelineFile() { remoteWorkDir.resolve(localTimelineFile.getName()) } + @PackageScope Path getRemoteTowerConfig() { remoteWorkDir.resolve(localTowerConfig.getName()) } + @PackageScope Path getRemoteTowerReports() { remoteWorkDir.resolve(localTowerReports.getName()) } + + CacheManager(Map env) { + final work = env.get('NXF_WORK') ?: env.get('NXF_TEST_WORK') + if( !work ) + 
throw new AbortOperationException("Missing target work dir - cache sync cannot be performed") + this.remoteWorkDir = FileHelper.asPath(work) + + this.sessionUuid = env.get('NXF_UUID') + if( !sessionUuid ) + throw new AbortOperationException("Missing target uuid - cache sync cannot be performed") + + this.localCachePath = Paths.get(".nextflow/cache/${sessionUuid}") + + if( env.NXF_OUT_FILE ) + localOutFile = Paths.get(env.NXF_OUT_FILE) + if( env.NXF_LOG_FILE ) + localLogFile = Paths.get(env.NXF_LOG_FILE) + if( env.NXF_TML_FILE ) + localTimelineFile = Paths.get(env.NXF_TML_FILE) + if( env.TOWER_CONFIG_FILE ) + localTowerConfig = Paths.get(env.TOWER_CONFIG_FILE) + if( env.TOWER_REPORTS_FILE ) + localTowerReports = Paths.get(env.TOWER_REPORTS_FILE) + } + + protected void restoreCacheFiles() { + if( !remoteWorkDir || !sessionUuid ) + return + + if(!Files.exists(remoteCachePath)) { + log.debug "Remote cache path does not exist: $remoteCachePath - skipping cache restore" + return + } + + try { + log.info "Restoring cache: ${remoteCachePath.toUriString()} => ${localCachePath.toUriString()}" + localCachePath.deleteDir() + localCachePath.parent.mkdirs() + FileHelper.copyPath(remoteCachePath, localCachePath, REPLACE_EXISTING) + } + catch (NoSuchFileException e) { + log.info "Remote cache restore ignored — reason: ${e.message ?: e}" + } + } + + protected void saveCacheFiles() { + if( !remoteWorkDir || !sessionUuid ) + return + + if( !Files.exists(localCachePath) ) { + log.debug "Local cache path does not exist: $localCachePath — skipping cache backup" + return + } + + // upload nextflow cache metadata + try { + log.info "Saving cache: ${localCachePath.toUriString()} => ${remoteCachePath.toUriString()}" + remoteCachePath.deleteDir() + remoteCachePath.parent.mkdirs() + FilesEx.copyTo(localCachePath, remoteCachePath) + } + catch (Throwable e) { + log.warn "Failed to backup resume metadata to remote store path: ${remoteCachePath.toUriString()} — cause: ${e}", e + } + + // — upload 
out file + try { + if( localOutFile?.exists() ) + FileHelper.copyPath(localOutFile, remoteOutFile, REPLACE_EXISTING) + } + catch (Throwable e) { + log.warn "Unable to upload nextflow out file: $localOutFile — reason: ${e.message ?: e}", e + } + // — upload log file + try { + if( localLogFile?.exists() ) + FileHelper.copyPath(localLogFile, remoteLogFile, REPLACE_EXISTING) + } + catch (Throwable e) { + log.warn "Unable to upload nextflow log file: $localLogFile — reason: ${e.message ?: e}", e + } + // — upload timeline file + try { + if( localTimelineFile?.exists() ) + FileHelper.copyPath(localTimelineFile, remoteTimelineFile, REPLACE_EXISTING) + } + catch (Throwable e) { + log.warn "Unable to upload nextflow timeline file: $localTimelineFile — reason: ${e.message ?: e}", e + } + // — upload tower config file + try { + if( localTowerConfig?.exists() ) + FileHelper.copyPath(localTowerConfig, remoteTowerConfig, REPLACE_EXISTING) + } + catch (Throwable e) { + log.warn "Unable to upload tower config file: $localTowerConfig — reason: ${e.message ?: e}", e + } + // — upload tower reports file + try { + if( localTowerReports?.exists() ) + FileHelper.copyPath(localTowerReports, remoteTowerReports, REPLACE_EXISTING) + } + catch (Throwable e) { + log.warn "Unable to upload tower reports file: $localTowerReports — reason: ${e.message ?: e}", e + } + } + +} diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerArchiver.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerArchiver.groovy index 4aa6afdf90..6f8d270237 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerArchiver.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerArchiver.groovy @@ -44,6 +44,7 @@ import nextflow.util.ThreadPoolManager */ @Slf4j @CompileStatic +@Deprecated class TowerArchiver { private static final String RETRY_REASON = 'slowdown|slow down|toomany|too many' diff --git a/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy
b/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy new file mode 100644 index 0000000000..70e4579a19 --- /dev/null +++ b/plugins/nf-tower/src/test/io/seqera/tower/plugin/CacheManagerTest.groovy @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2020-2021. Seqera Labs, S.L. + * + * All Rights reserved + * + */ + +package io.seqera.tower.plugin + +import java.nio.file.Files +import java.nio.file.Paths + +import nextflow.exception.AbortOperationException +import spock.lang.Specification +/** + * + * @author Paolo Di Tommaso + */ +class CacheManagerTest extends Specification { + + def 'should init empty files' () { + when: + new CacheManager([:]) + then: + thrown(AbortOperationException) + } + + def 'should upload cache files' () { + given: + def folder = Files.createTempDirectory('test') + def remote = folder.resolve('remote'); remote.mkdir() + def local = folder.resolve('local'); local.mkdir() + def outFile = local.resolve('nf-out.txt'); outFile.text = 'out file' + def logFile = local.resolve('nf-log.txt'); logFile.text = 'log file' + def tmlFile = local.resolve('nf-tml.txt'); tmlFile.text = 'tml file' + def cfgFile = local.resolve('tw-config.txt'); cfgFile.text = 'config file' + def repFile = local.resolve('tw-report.txt'); repFile.text = 'report file' + and: + def uuid = UUID.randomUUID().toString() + and: + def ENV = [ + NXF_UUID:uuid, + NXF_WORK: remote.toString(), + NXF_OUT_FILE: outFile.toString(), + NXF_LOG_FILE: logFile.toString(), + NXF_TML_FILE: tmlFile.toString(), + TOWER_CONFIG_FILE: cfgFile.toString(), + TOWER_REPORTS_FILE: repFile.toString(), + ] + + when: + def tower = new CacheManager(ENV) + then: + tower.sessionUuid == uuid + tower.localCachePath == Paths.get(".nextflow/cache/$uuid") + tower.localOutFile == outFile + tower.localLogFile == logFile + tower.localTimelineFile == tmlFile + tower.localTowerConfig == cfgFile + tower.localTowerReports == repFile + and: + tower.remoteWorkDir == remote + and: + tower.remoteCachePath == 
remote.resolve(".nextflow/cache/$uuid") + tower.remoteOutFile == remote.resolve( outFile.name ) + tower.remoteLogFile == remote.resolve( logFile.name ) + tower.remoteTimelineFile == remote.resolve( tmlFile.name ) + tower.remoteTowerConfig == remote.resolve( cfgFile.name ) + tower.remoteTowerReports == remote.resolve( repFile.name ) + + when: + // create local cache fake data + tower.localCachePath = local.resolve(".nextflow/cache/$uuid"); + tower.localCachePath.mkdirs() + tower.localCachePath.resolve('index-foo').text = 'index foo' + tower.localCachePath.resolve('db').mkdir() + tower.localCachePath.resolve('db/xxx').text = 'data xxx' + tower.localCachePath.resolve('db/yyy').text = 'data yyy' + and: + tower.saveCacheFiles() + then: + tower.remoteCachePath.resolve('index-foo').text == 'index foo' + tower.remoteCachePath.resolve('db/xxx').text == 'data xxx' + tower.remoteCachePath.resolve('db/yyy').text == 'data yyy' + and: + tower.remoteOutFile.text == outFile.text + tower.remoteLogFile.text == logFile.text + tower.remoteTimelineFile.text == tmlFile.text + tower.remoteTowerConfig.text == cfgFile.text + tower.remoteTowerReports.text == repFile.text + + // simulate a 2nd run with different data + when: + tower.localCachePath.deleteDir() + tower.localCachePath.mkdirs() + tower.localCachePath.resolve('index-bar').text = 'index bar' + tower.localCachePath.resolve('db').mkdir() + tower.localCachePath.resolve('db/alpha').text = 'data alpha' + tower.localCachePath.resolve('db/delta').text = 'data delta' + and: + tower.saveCacheFiles() + then: + tower.remoteCachePath.resolve('index-bar').text == 'index bar' + tower.remoteCachePath.resolve('db/alpha').text == 'data alpha' + tower.remoteCachePath.resolve('db/delta').text == 'data delta' + and: + !tower.remoteCachePath.resolve('index-foo').exists() + !tower.remoteCachePath.resolve('db/xxx').exists() + !tower.remoteCachePath.resolve('db/yyy').exists() + and: + tower.remoteOutFile.text == outFile.text + tower.remoteLogFile.text == 
logFile.text + tower.remoteTimelineFile.text == tmlFile.text + tower.remoteTowerConfig.text == cfgFile.text + tower.remoteTowerReports.text == repFile.text + + cleanup: + folder?.deleteDir() + } + + def 'should download cache files' () { + given: + def uuid = UUID.randomUUID().toString() + def folder = Files.createTempDirectory('test') + def local = folder.resolve('local'); local.mkdir() + def outFile = local.resolve('nf-out.txt'); + def logFile = local.resolve('nf-log.txt') + def tmlFile = local.resolve('nf-tml.txt') + def cfgFile = local.resolve('tw-config.txt') + def repFile = local.resolve('tw-report.txt') + and: + def remote = folder.resolve('remote'); remote.mkdir() + remote.resolve('nf-out.txt').text = 'the out file' + remote.resolve('nf-log.txt').text = 'the log file' + remote.resolve('nf-tml.txt').text = 'the timeline file' + remote.resolve('nf-config.txt').text = 'the config file' + remote.resolve('nf-report.txt').text = 'the report file' + and: + remote.resolve(".nextflow/cache/$uuid").mkdirs() + remote.resolve(".nextflow/cache/$uuid").resolve('index-bar').text = 'index bar' + remote.resolve(".nextflow/cache/$uuid").resolve('db').mkdirs() + remote.resolve(".nextflow/cache/$uuid").resolve('db/alpha').text = 'data alpha' + remote.resolve(".nextflow/cache/$uuid").resolve('db/delta').text = 'data delta' + and: + def tower = new CacheManager([NXF_UUID: uuid, NXF_WORK: remote.toString()]) + + when: + tower.restoreCacheFiles() + then: + tower.localCachePath.resolve('index-bar').text == 'index bar' + tower.localCachePath.resolve('db/alpha').text == 'data alpha' + tower.localCachePath.resolve('db/delta').text == 'data delta' + + cleanup: + folder?.deleteDir() + } +} diff --git a/tests/cache-bak.nf b/tests/cache-bak.nf new file mode 100644 index 0000000000..728c9b0779 --- /dev/null +++ b/tests/cache-bak.nf @@ -0,0 +1,8 @@ +workflow { + foo() +} + +process foo { + debug true + /echo Hello world/ +} \ No newline at end of file diff --git 
a/tests/checks/cache-bak.nf/.checks b/tests/checks/cache-bak.nf/.checks new file mode 100644 index 0000000000..b1d280b514 --- /dev/null +++ b/tests/checks/cache-bak.nf/.checks @@ -0,0 +1,42 @@ +set -e + +# Skip test if AWS keys are missing +if [[ ! $AWS_ACCESS_KEY_ID ]]; then + echo "Skip cache-bak test since AWS keys are not available" + exit 0 +fi + +# +# setup env +# +export NXF_IGNORE_RESUME_HISTORY=true +export NXF_UUID=$(uuidgen | tr [:upper:] [:lower:]) + +# +# run normal mode +# +$NXF_RUN -name test_1 | tee .stdout +[[ `grep -c 'Submitted process > foo' .nextflow.log` == 1 ]] || false + +# +# backup cache +# +NXF_WORK=s3://nextflow-ci/cache-test \ + $NXF_CMD -log .nextflow-backup.log plugin nf-tower:cache-backup + +# +# remove it +# +rm -rf .nextflow + +# +# restore cache +# +NXF_WORK=s3://nextflow-ci/cache-test \ + $NXF_CMD -log .nextflow-restore.log plugin nf-tower:cache-restore + +# +# run resume mode +# +$NXF_RUN -name test_2 -resume $NXF_UUID | tee .stdout +[[ `grep -c 'Cached process > foo' .nextflow.log` == 1 ]] || false From fe5dd497c1de8eaf01ce20574399da4779037b5b Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:10:44 +0200 Subject: [PATCH 039/128] Increase Wave client max attempts Signed-off-by: Paolo Di Tommaso --- .../src/main/io/seqera/wave/plugin/config/RetryOpts.groovy | 4 ++-- .../test/io/seqera/wave/plugin/config/RetryOptsTest.groovy | 4 ++-- .../test/io/seqera/wave/plugin/config/WaveConfigTest.groovy | 3 ++- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/RetryOpts.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/RetryOpts.groovy index 614b2056ef..5b6ceda3ff 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/RetryOpts.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/RetryOpts.groovy @@ -27,9 +27,9 @@ import nextflow.util.Duration @ToString(includeNames = true, includePackage = false) @CompileStatic 
class RetryOpts { - Duration delay = Duration.of('150ms') + Duration delay = Duration.of('450ms') Duration maxDelay = Duration.of('90s') - int maxAttempts = 5 + int maxAttempts = 10 double jitter = 0.25 RetryOpts() { diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/RetryOptsTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/RetryOptsTest.groovy index 0c53c8cbbb..d2c1342fc0 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/RetryOptsTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/RetryOptsTest.groovy @@ -29,9 +29,9 @@ class RetryOptsTest extends Specification { def 'should create retry config' () { expect: - new RetryOpts().delay == Duration.of('150ms') + new RetryOpts().delay == Duration.of('450ms') new RetryOpts().maxDelay == Duration.of('90s') - new RetryOpts().maxAttempts == 5 + new RetryOpts().maxAttempts == 10 new RetryOpts().jitter == 0.25d and: diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy index 1e141e8a3b..d7fcc47d2d 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/config/WaveConfigTest.groovy @@ -161,7 +161,8 @@ class WaveConfigTest extends Specification { when: def opts = new WaveConfig([:]) then: - opts.retryOpts().maxAttempts == 5 + opts.retryOpts().delay == Duration.of('450ms') + opts.retryOpts().maxAttempts == 10 opts.retryOpts().maxDelay == Duration.of('90s') when: From 981315add6c7f8441a72214c7a96d72b4b800de3 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:31:01 +0200 Subject: [PATCH 040/128] Bump nf-amazon@2.1.1 Signed-off-by: Paolo Di Tommaso --- plugins/nf-amazon/changelog.txt | 4 ++++ plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/nf-amazon/changelog.txt 
b/plugins/nf-amazon/changelog.txt index 4472d2b1cb..81af4cb0aa 100644 --- a/plugins/nf-amazon/changelog.txt +++ b/plugins/nf-amazon/changelog.txt @@ -1,5 +1,9 @@ nf-amazon changelog =================== +2.1.1 - 5 Aug 2023 +- Fix glob resolution for remove files [19a72c40] +- Fix Option fixOwnership traverse parent directories [f2a2ea35] + 2.1.0 - 22 Jul 2023 - Add support for AWS SSO credentials provider (#4045) [53e33cde] - Wait for all child processes in nxf_parallel (#4050) [60a5f1a7] diff --git a/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF b/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF index 6c9c9e82fd..d7b0cf833b 100644 --- a/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.aws.AmazonPlugin Plugin-Id: nf-amazon -Plugin-Version: 2.1.0 +Plugin-Version: 2.1.1 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 5f33ac17ea4e38c06918827e6fe4e7e3b5eba5d4 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:32:01 +0200 Subject: [PATCH 041/128] Bump nf-azure@1.2.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-azure/changelog.txt | 3 +++ plugins/nf-azure/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/plugins/nf-azure/changelog.txt b/plugins/nf-azure/changelog.txt index bcfce7cfe9..27c932bddc 100644 --- a/plugins/nf-azure/changelog.txt +++ b/plugins/nf-azure/changelog.txt @@ -1,5 +1,8 @@ nf-azure changelog =================== +1.2.0 - 5 Aug 2023 +- Add deleteTasksOnCompletion to Azure Batch configuration (#4114) [b14674dc] + 1.1.4 - 22 Jul 2023 - Fix failing test [9a52f848] - Fix Improve error message for invalid Azure URI [0f4d8867] diff --git a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF index 72c440d1a9..3bf6761375 100644 --- 
a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.azure.AzurePlugin Plugin-Id: nf-azure -Plugin-Version: 1.1.4 +Plugin-Version: 1.2.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.01.0-edge From 41c8c1642e6c2992d142d2d2c1afbd722c3e7d6e Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:32:28 +0200 Subject: [PATCH 042/128] Bump nf-tower@1.6.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-tower/changelog.txt | 5 +++++ plugins/nf-tower/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/plugins/nf-tower/changelog.txt b/plugins/nf-tower/changelog.txt index dd3e42551a..ce9117eb76 100644 --- a/plugins/nf-tower/changelog.txt +++ b/plugins/nf-tower/changelog.txt @@ -1,5 +1,10 @@ nf-tower changelog =================== +1.6.0 - 5 Aug 2023 +- Add Tower logs checkpoint (#4132) [71dfecc2] +- Restore Tower CacheManager for backward compatibility [6d269070] +- Disable cache backup/restore if cloudcache is used (#4125) [46e828e1] + 1.5.15 - 22 Jul 2023 - Bump Groovy 3.0.18 [207eb535] diff --git a/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF b/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF index 6fc78a5d49..d31cd40212 100644 --- a/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.tower.plugin.TowerPlugin Plugin-Id: nf-tower -Plugin-Version: 1.5.15 +Plugin-Version: 1.6.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 78e4b278b49d83bc2f5ae1d80c5ec7c207c909ce Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:32:47 +0200 Subject: [PATCH 043/128] Bump nf-wave@0.11.1 Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/changelog.txt | 6 ++++++ plugins/nf-wave/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files 
changed, 7 insertions(+), 1 deletion(-) diff --git a/plugins/nf-wave/changelog.txt b/plugins/nf-wave/changelog.txt index e0b6743fe7..d9c4f7fbde 100644 --- a/plugins/nf-wave/changelog.txt +++ b/plugins/nf-wave/changelog.txt @@ -1,5 +1,11 @@ nf-wave changelog ================== +0.11.1 - 5 Aug 2023 +- Improve Wave config logging [547fad62] +- Increase Wave client max attempts [fe5dd497] +- Enable use virtual threads in Wave client [dd32f80a] +- Fix Wave disable flag [8579e7a4] + 0.11.0 - 22 Jul 2023 - Add support legacy wave retry [ci fast] [73a1e7d4] - Add support for Wave container freeze [9a5903e6] diff --git a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF index d30a66bedb..877822c1a2 100644 --- a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.wave.plugin.WavePlugin Plugin-Id: nf-wave -Plugin-Version: 0.11.0 +Plugin-Version: 0.11.1 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 24db16503cba1cf215b920c186f9552a62e8edf1 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:37:07 +0200 Subject: [PATCH 044/128] Update changelog Signed-off-by: Paolo Di Tommaso --- changelog.txt | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/changelog.txt b/changelog.txt index ecd6880229..6617c767b2 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,5 +1,34 @@ NEXTFLOW CHANGE-LOG =================== +23.08.0-edge - 5 Aug 2023 +- Add `-value` option to `config` command (#4142) [57e3100b] +- Add `deleteTasksOnCompletion` to Azure Batch configuration (#4114) [b14674dc] +- Add Tower logs checkpoint (#4132) [71dfecc2] +- Allow use virtual threads in Wave client [dd32f80a] +- Allow workflow entry from module import (#4128) [51f5c842] +- Disable cache backup/restore if cloudcache is used (#4125) [46e828e1] +- Document behavior of withName 
selector with included aliases (#4129) [8b7e3d48] +- Fix Option fixOwnership traverse parent directories [f2a2ea35] +- Fix Redirection http redirection across different hosts [fcdeec02] +- Fix Wave disable flag [8579e7a4] +- Fix bug with K8s resource labels (#4147) [3f4b8557] +- Fix glob resolution for remove files [19a72c40] +- Fix incorrect error message on missing comma (#4085) [a59af39f] +- Fix missing changelog for version 23.07.0-edge [9a33e936] +- Fix strict mode docs (#4150) [6b46b507] +- Improve plugin docs (#3957) [22638d46] +- Improve Wave config logging [547fad62] +- Improve TaskPollingMonitor logging [077ed5dd] +- Improve Wave and Fusion docs (#4149) [d2229bde] +- Increase Wave client max attempts [fe5dd497] +- Remove module all components import [a6d08c04] +- Restore Tower CacheManager for backward compatibility [6d269070] +- Bump amazoncorretto:17.0.8 [00eb145c] +- Bump nf-wave@0.11.1 [78e4b278] +- Bump nf-tower@1.6.0 [41c8c164] +- Bump nf-azure@1.2.0 [5f33ac17] +- Bump nf-amazon@2.1.1 [981315ad] + 23.07.0-edge - 23 Jul 2023 - Add CPU model name to trace files and traceRecord (#3946) [e0d91bf7] - Add ability to disable CLI params type detection [9a1c584d] From ace32d007e545dc3da5924c36c39f628c83b1da8 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 5 Aug 2023 16:37:43 +0200 Subject: [PATCH 045/128] [release 23.08.0-edge] Update timestamp and build number [ci fast] Signed-off-by: Paolo Di Tommaso --- VERSION | 2 +- docs/conf.py | 4 ++-- .../nextflow/src/main/resources/META-INF/plugins-info.txt | 8 ++++---- modules/nf-commons/src/main/nextflow/Const.groovy | 6 +++--- nextflow | 2 +- nextflow.md5 | 2 +- nextflow.sha1 | 2 +- nextflow.sha256 | 2 +- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/VERSION b/VERSION index 59200001a7..867f7aa69d 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -23.07.0-edge \ No newline at end of file +23.08.0-edge \ No newline at end of file diff --git a/docs/conf.py
b/docs/conf.py index be318c7325..b68f128789 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -58,9 +58,9 @@ # built documents. # # The short X.Y version. -version = '23.07' +version = '23.08' # The full version, including alpha/beta/rc tags. -release = '23.07.0-edge' +release = '23.08.0-edge' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt index 604d402fab..c7fc7ad006 100644 --- a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt +++ b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt @@ -1,9 +1,9 @@ -nf-amazon@2.1.0 -nf-azure@1.1.4 +nf-amazon@2.1.1 +nf-azure@1.2.0 nf-cloudcache@0.1.0 nf-codecommit@0.1.5 nf-console@1.0.6 nf-ga4gh@1.1.0 nf-google@1.8.1 -nf-tower@1.5.15 -nf-wave@0.11.0 \ No newline at end of file +nf-tower@1.6.0 +nf-wave@0.11.1 \ No newline at end of file diff --git a/modules/nf-commons/src/main/nextflow/Const.groovy b/modules/nf-commons/src/main/nextflow/Const.groovy index f2652b923b..23d624ff1a 100644 --- a/modules/nf-commons/src/main/nextflow/Const.groovy +++ b/modules/nf-commons/src/main/nextflow/Const.groovy @@ -52,17 +52,17 @@ class Const { /** * The application version */ - static public final String APP_VER = "23.07.0-edge" + static public final String APP_VER = "23.08.0-edge" /** * The app build time as linux/unix timestamp */ - static public final long APP_TIMESTAMP = 1690040653541 + static public final long APP_TIMESTAMP = 1691246068029 /** * The app build number */ - static public final int APP_BUILDNUM = 5870 + static public final int APP_BUILDNUM = 5872 /** * The app build time string relative to UTC timezone diff --git a/nextflow b/nextflow index 61160e818f..75b6424928 100755 --- a/nextflow +++ b/nextflow @@ -15,7 +15,7 @@ # limitations under the License. 
[[ "$NXF_DEBUG" == 'x' ]] && set -x -NXF_VER=${NXF_VER:-'23.07.0-edge'} +NXF_VER=${NXF_VER:-'23.08.0-edge'} NXF_ORG=${NXF_ORG:-'nextflow-io'} NXF_HOME=${NXF_HOME:-$HOME/.nextflow} NXF_PROT=${NXF_PROT:-'https'} diff --git a/nextflow.md5 b/nextflow.md5 index 6bfd0cfe97..14f8bff303 100644 --- a/nextflow.md5 +++ b/nextflow.md5 @@ -1 +1 @@ -d7cff3459c0394236a4f1781843f4655 +e84b9b8b1453bb81540b10e057e126ee diff --git a/nextflow.sha1 b/nextflow.sha1 index c056fda67a..aa412cac8b 100644 --- a/nextflow.sha1 +++ b/nextflow.sha1 @@ -1 +1 @@ -a45925a853a3e516e5343b46ae291d66e93a047b +f70facf7083770f11d4d915c6f11502302a3aa4a diff --git a/nextflow.sha256 b/nextflow.sha256 index 7e2e60eb3a..14fbf058ba 100644 --- a/nextflow.sha256 +++ b/nextflow.sha256 @@ -1 +1 @@ -185b41f6b7cdf8061f1fb93621fd5a9e648d6eb152ce66044a9ff6c98506b608 +1448ebceec2213751c8846f0edc06a6791f2ff0bc996a7553fa7f94bb57c2606 From a66b0e631b1974cae5cc4adeb50999ed79a3b5f5 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 8 Aug 2023 10:39:09 -0500 Subject: [PATCH 046/128] Enable cloud cache based on environment variable (#4160) Signed-off-by: Ben Sherman --- .../src/main/nextflow/plugin/PluginsFacade.groovy | 5 +++++ .../src/test/nextflow/plugin/PluginsFacadeTest.groovy | 7 +++++++ .../src/main/nextflow/cache/CloudCacheStore.groovy | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy b/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy index bc5049d305..3a2adb200f 100644 --- a/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy +++ b/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy @@ -371,6 +371,11 @@ class PluginsFacade implements PluginStateListener { specs << defaultPlugins.getPlugin('nf-wave') } + // add cloudcache plugin when NXF_CLOUDCACHE_PATH is set + if( env.NXF_CLOUDCACHE_PATH ) { + specs << defaultPlugins.getPlugin('nf-cloudcache') + } + log.debug "Plugins resolved 
requirement=$specs" return specs } diff --git a/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy b/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy index 9c503cb3c8..7dc6f9b3b0 100644 --- a/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy +++ b/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy @@ -66,6 +66,7 @@ class PluginsFacadeTest extends Specification { given: def defaults = new DefaultPlugins(plugins: [ 'nf-amazon': new PluginSpec('nf-amazon', '0.1.0'), + 'nf-cloudcache': new PluginSpec('nf-cloudcache', '0.1.0'), 'nf-google': new PluginSpec('nf-google', '0.1.0'), 'nf-tower': new PluginSpec('nf-tower', '0.1.0'), 'nf-wave': new PluginSpec('nf-wave', '0.1.0') @@ -138,6 +139,12 @@ class PluginsFacadeTest extends Specification { then: result == [ new PluginSpec('nf-google','2.0.0') ] + when: + handler = new PluginsFacade(defaultPlugins: defaults, env: [NXF_CLOUDCACHE_PATH:'xyz']) + result = handler.pluginsRequirement([:]) + then: + result == [ new PluginSpec('nf-cloudcache', '0.1.0') ] + } def 'should return default plugins given config' () { diff --git a/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy b/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy index 3ebcb40f0b..7d8b794739 100644 --- a/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy +++ b/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy @@ -76,7 +76,7 @@ class CloudCacheStore implements CacheStore { private Path defaultBasePath() { final basePath = SysEnv.get('NXF_CLOUDCACHE_PATH') if( !basePath ) - throw new IllegalArgumentException("NXF_CLOUDCACHE_PATH must be defined when using the path-based cache store") + throw new IllegalArgumentException("NXF_CLOUDCACHE_PATH must be defined when using the cloud cache store") return basePath as Path } From 7e58c94576b28c1bd59e20761a1bfe08fae740e4 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Wed, 9 
Aug 2023 18:07:58 +0200 Subject: [PATCH 047/128] Minor changes [ci fast] Signed-off-by: Paolo Di Tommaso --- .../nf-commons/src/main/nextflow/plugin/PluginUpdater.groovy | 2 +- .../nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/nf-commons/src/main/nextflow/plugin/PluginUpdater.groovy b/modules/nf-commons/src/main/nextflow/plugin/PluginUpdater.groovy index b2feaebdf2..4e773877af 100644 --- a/modules/nf-commons/src/main/nextflow/plugin/PluginUpdater.groovy +++ b/modules/nf-commons/src/main/nextflow/plugin/PluginUpdater.groovy @@ -98,7 +98,7 @@ class PluginUpdater extends UpdateManager { = = = ${repos} = = - = This is only meant to be used for plugin repos purposes. = + = This is only meant to be used for plugin testing purposes. = ============================================================================= """.stripIndent(true) log.warn(msg) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index bf91dcc053..45e2125cea 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -596,7 +596,7 @@ class WaveClient { return Failsafe.with(policy).get(action) } - static private List SERVER_ERRORS = [502,503,504] + static private final List SERVER_ERRORS = [502,503,504] protected HttpResponse httpSend(HttpRequest req) { return safeApply(() -> { From e78bc37eccfa5bb3126c7f69653081138d3ba9e2 Mon Sep 17 00:00:00 2001 From: Marcel Ribeiro-Dantas Date: Thu, 10 Aug 2023 05:06:07 -0300 Subject: [PATCH 048/128] Fix typos in source code comments (#4173) [ci fast] Signed-off-by: Marcel Ribeiro-Dantas Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- changelog.txt | 2 +- .../ast/TaskTemplateVarsXformImpl.groovy | 2 +- .../main/groovy/nextflow/cli/CmdConfig.groovy | 2 +- 
.../groovy/nextflow/conda/CondaConfig.groovy | 2 +- .../nextflow/config/CascadingConfig.groovy | 2 +- .../container/SingularityCache.groovy | 2 +- .../nextflow/executor/ExecutorFactory.groovy | 2 +- .../nextflow/executor/GridTaskHandler.groovy | 2 +- .../nextflow/extension/OperatorImpl.groovy | 2 +- .../nextflow/k8s/model/PodHostMount.groovy | 2 +- .../nextflow/processor/TaskProcessor.groovy | 2 +- .../groovy/nextflow/scm/ProviderPath.groovy | 2 +- .../groovy/nextflow/spack/SpackConfig.groovy | 2 +- .../nextflow/splitter/FastaSplitter.groovy | 2 +- .../nextflow/trace/GraphObserver.groovy | 2 +- .../nextflow/trace/ReportSummary.groovy | 20 +++++++++---------- .../nextflow/trace/TraceObserver.groovy | 8 ++++---- .../groovy/nextflow/util/LoggerHelper.groovy | 2 +- .../groovy/nextflow/util/NameGenerator.groovy | 4 ++-- .../src/main/nextflow/extension/Bolts.groovy | 2 +- .../src/main/nextflow/file/FileHelper.groovy | 2 +- .../main/nextflow/plugin/PluginsFacade.groovy | 2 +- .../src/main/nextflow/util/CacheHelper.java | 4 ++-- .../main/nextflow/util/CharsetHelper.groovy | 2 +- .../nextflow/extension/FilesExTest.groovy | 2 +- .../nextflow/plugin/PluginsFacadeTest.groovy | 2 +- .../plugin/hello/HelloExtension.groovy | 2 +- .../file/http/XFileSystemProvider.groovy | 2 +- plugins/README.md | 2 +- .../cloud/azure/batch/AzBatchService.groovy | 4 ++-- .../io/seqera/tower/plugin/TowerClient.groovy | 2 +- 31 files changed, 46 insertions(+), 46 deletions(-) diff --git a/changelog.txt b/changelog.txt index 6617c767b2..852e84c0bf 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1361,7 +1361,7 @@ NEXTFLOW CHANGE-LOG - see 20.07.0 and 20.07.0-RC1 20.07.0 - (skipped) -- Allow unqualified stdin/stdout defintions with DSL2 [bcdcaab6] +- Allow unqualified stdin/stdout definitions with DSL2 [bcdcaab6] 20.07.0-RC1 - 21 Jul 2020 - Add Dsl2 enable flag [08238109] diff --git a/modules/nextflow/src/main/groovy/nextflow/ast/TaskTemplateVarsXformImpl.groovy 
b/modules/nextflow/src/main/groovy/nextflow/ast/TaskTemplateVarsXformImpl.groovy index 4aa32126f7..4be53993bc 100644 --- a/modules/nextflow/src/main/groovy/nextflow/ast/TaskTemplateVarsXformImpl.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/ast/TaskTemplateVarsXformImpl.groovy @@ -26,7 +26,7 @@ import org.codehaus.groovy.transform.ASTTransformation import org.codehaus.groovy.transform.GroovyASTTransformation /** * Implements a xform that captures variable names declared - * in task tamplate + * in task template * * @author Paolo Di Tommaso */ diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy index f4e17f07ba..29b874198b 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdConfig.groovy @@ -115,7 +115,7 @@ class CmdConfig extends CmdBase { /** * Prints a {@link ConfigObject} using Java {@link Properties} in canonical format - * ie. any nested config object is printed withing curly brackets + * ie. 
any nested config object is printed within curly brackets * * @param config The {@link ConfigObject} representing the parsed workflow configuration * @param output The stream where output the formatted configuration notation diff --git a/modules/nextflow/src/main/groovy/nextflow/conda/CondaConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/conda/CondaConfig.groovy index e8f7dba54e..a58dc7f562 100644 --- a/modules/nextflow/src/main/groovy/nextflow/conda/CondaConfig.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/conda/CondaConfig.groovy @@ -55,6 +55,6 @@ class CondaConfig extends LinkedHashMap { return value.tokenize(',').collect(it -> it.trim()) } - throw new IllegalArgumentException("Unexected conda.channels value: $value") + throw new IllegalArgumentException("Unexpected conda.channels value: $value") } } diff --git a/modules/nextflow/src/main/groovy/nextflow/config/CascadingConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/config/CascadingConfig.groovy index 18461cc7c0..f393e224b4 100644 --- a/modules/nextflow/src/main/groovy/nextflow/config/CascadingConfig.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/config/CascadingConfig.groovy @@ -122,7 +122,7 @@ abstract class CascadingConfig { /** * Convert this object to an equivalent {@link ConfigObject} * - * @return A {@link ConfigObject} holding teh same data + * @return A {@link ConfigObject} holding the same data */ ConfigObject toConfigObject() { toConfigObject0(this.config) diff --git a/modules/nextflow/src/main/groovy/nextflow/container/SingularityCache.groovy b/modules/nextflow/src/main/groovy/nextflow/container/SingularityCache.groovy index 79203047b3..7e2134aa67 100644 --- a/modules/nextflow/src/main/groovy/nextflow/container/SingularityCache.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/container/SingularityCache.groovy @@ -224,7 +224,7 @@ class SingularityCache { return libraryPath } - // check for the image in teh cache dir + // check for the image in the cache dir 
// if the image does not exist in the cache dir, download it final localPath = localCachePath(imageUrl) if( localPath.exists() ) { diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/ExecutorFactory.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/ExecutorFactory.groovy index 6b72deab11..57a1535b6c 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/ExecutorFactory.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/ExecutorFactory.groovy @@ -212,7 +212,7 @@ class ExecutorFactory { } /** - * Find out the 'executor' to be used in the process definition or in teh session configuration object + * Find out the 'executor' to be used in the process definition or in the session configuration object * * @param taskConfig */ diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/GridTaskHandler.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/GridTaskHandler.groovy index e0ed5c4bb7..503b679d6a 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/GridTaskHandler.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/GridTaskHandler.groovy @@ -381,7 +381,7 @@ class GridTaskHandler extends TaskHandler implements FusionAwareTask { else { /* * Since working with NFS it may happen that the file exists BUT it is empty due to network latencies, - * before retuning an invalid exit code, wait some seconds. + * before returning an invalid exit code, wait some seconds. 
* * More in detail: * 1) the very first time that arrive here initialize the 'exitTimestampMillis' to the current timestamp diff --git a/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy b/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy index 12c6abbe28..154dcee0f2 100644 --- a/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy @@ -1048,7 +1048,7 @@ class OperatorImpl { static private final Map PARAMS_VIEW = [newLine: Boolean] /** - * Print out the channel content retuning a new channel emitting the identical content as the original one + * Print out the channel content returning a new channel emitting the identical content as the original one * * @param source * @param closure diff --git a/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodHostMount.groovy b/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodHostMount.groovy index 037e6f7fdd..949a10ca06 100644 --- a/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodHostMount.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodHostMount.groovy @@ -21,7 +21,7 @@ import groovy.transform.EqualsAndHashCode import groovy.transform.ToString /** - * Model a K8s pod host mount defintion + * Model a K8s pod host mount definition * * @author Paolo Di Tommaso */ diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy index 38cac391c9..07fe7a7ef6 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy @@ -379,7 +379,7 @@ class TaskProcessor { /** * Launch the 'script' define by the code closure as a local bash script * - * @param code A {@code Closure} retuning a bash script e.g. + * @param code A {@code Closure} returning a bash script e.g. *
      *              {
      *                 """
diff --git a/modules/nextflow/src/main/groovy/nextflow/scm/ProviderPath.groovy b/modules/nextflow/src/main/groovy/nextflow/scm/ProviderPath.groovy
index 01732a89bd..8e4815cf09 100644
--- a/modules/nextflow/src/main/groovy/nextflow/scm/ProviderPath.groovy
+++ b/modules/nextflow/src/main/groovy/nextflow/scm/ProviderPath.groovy
@@ -89,7 +89,7 @@ class ProviderPath implements Path {
             return true
         }
         catch (Exception e) {
-            log.trace "Failed to check existance -- cause [${e.class.name}] ${e.message}"
+            log.trace "Failed to check existence -- cause [${e.class.name}] ${e.message}"
             return false
         }
     }
diff --git a/modules/nextflow/src/main/groovy/nextflow/spack/SpackConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/spack/SpackConfig.groovy
index f28e18cca5..6465f87156 100644
--- a/modules/nextflow/src/main/groovy/nextflow/spack/SpackConfig.groovy
+++ b/modules/nextflow/src/main/groovy/nextflow/spack/SpackConfig.groovy
@@ -55,6 +55,6 @@ class SpackConfig extends LinkedHashMap {
             return value.tokenize(',').collect(it -> it.trim())
         }
 
-        throw new IllegalArgumentException("Unexected spack.channels value: $value")
+        throw new IllegalArgumentException("Unexpected spack.channels value: $value")
     }
 }
diff --git a/modules/nextflow/src/main/groovy/nextflow/splitter/FastaSplitter.groovy b/modules/nextflow/src/main/groovy/nextflow/splitter/FastaSplitter.groovy
index 2e12cff890..fc492e55a0 100644
--- a/modules/nextflow/src/main/groovy/nextflow/splitter/FastaSplitter.groovy
+++ b/modules/nextflow/src/main/groovy/nextflow/splitter/FastaSplitter.groovy
@@ -79,7 +79,7 @@ class FastaSplitter extends AbstractTextSplitter {
     }
 
     /**
-     * Parse a {@code CharSequence} as a FASTA formatted text, retuning a {@code Map} object
+     * Parse a {@code CharSequence} as a FASTA formatted text, returning a {@code Map} object
      * containing the fields as specified by the @{code record} parameter.
      * 

* For example: diff --git a/modules/nextflow/src/main/groovy/nextflow/trace/GraphObserver.groovy b/modules/nextflow/src/main/groovy/nextflow/trace/GraphObserver.groovy index 0a61d43eda..a07f611f16 100644 --- a/modules/nextflow/src/main/groovy/nextflow/trace/GraphObserver.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/trace/GraphObserver.groovy @@ -67,7 +67,7 @@ class GraphObserver implements TraceObserver { @Override void onFlowCreate(Session session) { this.dag = session.dag - // check file existance + // check file existence final attrs = FileHelper.readAttributes(file) if( attrs ) { if( overwrite && (attrs.isDirectory() || !file.delete()) ) diff --git a/modules/nextflow/src/main/groovy/nextflow/trace/ReportSummary.groovy b/modules/nextflow/src/main/groovy/nextflow/trace/ReportSummary.groovy index 427668bb7a..b205e43cdc 100644 --- a/modules/nextflow/src/main/groovy/nextflow/trace/ReportSummary.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/trace/ReportSummary.groovy @@ -113,11 +113,11 @@ class ReportSummary { * - q3: third quartile * - max: maximum value * - mean: average value - * - minLabel: label fot the task reporting the min value - * - q1Label: label fot the task reporting the q1 value - * - q2Label: label fot the task reporting the q2 value - * - q3Label: label fot the task reporting the q3 value - * - maxLabel: label fot the task reporting the max value + * - minLabel: label for the task reporting the min value + * - q1Label: label for the task reporting the q1 value + * - q2Label: label for the task reporting the q2 value + * - q3Label: label for the task reporting the q3 value + * - maxLabel: label for the task reporting the max value */ Map compute(String name) { if( !names.contains(name) ) @@ -207,11 +207,11 @@ class ReportSummary { * - q3: third quartile * - max: maximum value * - mean: average value - * - minLabel: label fot the task reporting the min value - * - q1Label: label fot the task reporting the q1 value - * - q2Label: label 
fot the task reporting the q2 value - * - q3Label: label fot the task reporting the q3 value - * - maxLabel: label fot the task reporting the max value + * - minLabel: label for the task reporting the min value + * - q1Label: label for the task reporting the q1 value + * - q2Label: label for the task reporting the q2 value + * - q3Label: label for the task reporting the q3 value + * - maxLabel: label for the task reporting the max value */ Map compute() { if( count==0 ) diff --git a/modules/nextflow/src/main/groovy/nextflow/trace/TraceObserver.groovy b/modules/nextflow/src/main/groovy/nextflow/trace/TraceObserver.groovy index 45e414764e..c5fac42d00 100644 --- a/modules/nextflow/src/main/groovy/nextflow/trace/TraceObserver.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/trace/TraceObserver.groovy @@ -71,7 +71,7 @@ trait TraceObserver { * @param handler * The {@link TaskHandler} instance for the current task. * @param trace - * The associated {@link TraceRecord} fot the current task. + * The associated {@link TraceRecord} for the current task. */ void onProcessSubmit(TaskHandler handler, TraceRecord trace){} @@ -81,7 +81,7 @@ trait TraceObserver { * @param handler * The {@link TaskHandler} instance for the current task. * @param trace - * The associated {@link TraceRecord} fot the current task. + * The associated {@link TraceRecord} for the current task. */ void onProcessStart(TaskHandler handler, TraceRecord trace){} @@ -91,7 +91,7 @@ trait TraceObserver { * @param handler * The {@link TaskHandler} instance for the current task. * @param trace - * The associated {@link TraceRecord} fot the current task. + * The associated {@link TraceRecord} for the current task. */ void onProcessComplete(TaskHandler handler, TraceRecord trace){} @@ -118,7 +118,7 @@ trait TraceObserver { * @param handler * The {@link TaskHandler} instance for the current task. * @param trace - * The associated {@link TraceRecord} fot the current task. 
+ * The associated {@link TraceRecord} for the current task. */ void onFlowError(TaskHandler handler, TraceRecord trace){} diff --git a/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy b/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy index 3db81305b7..8d995b29b6 100644 --- a/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy @@ -336,7 +336,7 @@ class LoggerHelper { * instead in the file are saved the DEBUG level messages. * * @param logFileName The file where save the application log - * @param quiet When {@code true} only Warning and Error messages are visualized to teh console + * @param quiet When {@code true} only Warning and Error messages are visualized to the console * @param debugConf The list of packages for which use a Debug logging level * @param traceConf The list of packages for which use a Trace logging level */ diff --git a/modules/nextflow/src/main/groovy/nextflow/util/NameGenerator.groovy b/modules/nextflow/src/main/groovy/nextflow/util/NameGenerator.groovy index 3691e50033..9ad2641cb8 100644 --- a/modules/nextflow/src/main/groovy/nextflow/util/NameGenerator.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/util/NameGenerator.groovy @@ -441,7 +441,7 @@ class NameGenerator { // A. P. J. Abdul Kalam - is an Indian scientist aka Missile Man of India for his work on the development of ballistic missile and launch vehicle technology. https://en.wikipedia.org/wiki/A._P._J._Abdul_Kalam "kalam", - // Rudolf E. Kálmán, American engineer and mathematican of Hungarian origin. One of the inventors of the smoother/predictor commonly known as "Kalman Filter". https://en.wikipedia.org/wiki/Rudolf_E._K%C3%A1lm%C3%A1n + // Rudolf E. Kálmán, American engineer and mathematician of Hungarian origin. One of the inventors of the smoother/predictor commonly known as "Kalman Filter". 
https://en.wikipedia.org/wiki/Rudolf_E._K%C3%A1lm%C3%A1n "kalman", // Susan Kare, created the icons and many of the interface elements for the original Apple Macintosh in the 1980s, and was an original employee of NeXT, working as the Creative Director. https://en.wikipedia.org/wiki/Susan_Kare @@ -654,7 +654,7 @@ class NameGenerator { // Max Karl Ernst Ludwig Planck was a German theoretical physicist, best known for the discovery of energy quenta and Planck's constant. https://en.wikipedia.org/wiki/Max_Planck "planck", - // Joseph Plateau - Belgian physisist known for being one of the first persons to demonstrate the illusion of moving image. https://en.wikipedia.org/wiki/Joseph_Plateau + // Joseph Plateau - Belgian physicist known for being one of the first persons to demonstrate the illusion of moving image. https://en.wikipedia.org/wiki/Joseph_Plateau "plateau", // Henri Poincaré made fundamental contributions in several fields of mathematics. https://en.wikipedia.org/wiki/Henri_Poincar%C3%A9 diff --git a/modules/nf-commons/src/main/nextflow/extension/Bolts.groovy b/modules/nf-commons/src/main/nextflow/extension/Bolts.groovy index 0778d810ad..76622474b8 100644 --- a/modules/nf-commons/src/main/nextflow/extension/Bolts.groovy +++ b/modules/nf-commons/src/main/nextflow/extension/Bolts.groovy @@ -275,7 +275,7 @@ class Bolts { } /** - * Check if a alphabetic characters in a string are lowercase. Non alphabetic characters are ingored + * Check if a alphabetic characters in a string are lowercase. 
Non alphabetic characters are ignored * @param self The string to check * @return {@true} if the string contains no uppercase characters, {@code false} otherwise */ diff --git a/modules/nf-commons/src/main/nextflow/file/FileHelper.groovy b/modules/nf-commons/src/main/nextflow/file/FileHelper.groovy index a846a86565..b0f0acc85f 100644 --- a/modules/nf-commons/src/main/nextflow/file/FileHelper.groovy +++ b/modules/nf-commons/src/main/nextflow/file/FileHelper.groovy @@ -365,7 +365,7 @@ class FileHelper { throw new IllegalArgumentException("Malformed file URI: $uri -- It must start either with a `file:/` or `file:///` prefix") if( !uri.path ) - throw new IllegalArgumentException("Malformed file URI: $uri -- Make sure it starts with an absolue path prefix i.e. `file:/`") + throw new IllegalArgumentException("Malformed file URI: $uri -- Make sure it starts with an absolute path prefix i.e. `file:/`") } else if( !uri.path ) { throw new IllegalArgumentException("URI path cannot be empty") diff --git a/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy b/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy index 3a2adb200f..5800582a40 100644 --- a/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy +++ b/modules/nf-commons/src/main/nextflow/plugin/PluginsFacade.groovy @@ -252,7 +252,7 @@ class PluginsFacade implements PluginStateListener { return manager.getExtensions(type) } else { - // this should oly be used to load system extensions + // this should only be used to load system extensions // i.e. 
included in the app class path not provided by // a plugin extension log.debug "Using Default plugin manager" diff --git a/modules/nf-commons/src/main/nextflow/util/CacheHelper.java b/modules/nf-commons/src/main/nextflow/util/CacheHelper.java index e21a417304..991da6c60a 100644 --- a/modules/nf-commons/src/main/nextflow/util/CacheHelper.java +++ b/modules/nf-commons/src/main/nextflow/util/CacheHelper.java @@ -230,7 +230,7 @@ public static Hasher hasher( Hasher hasher, Object value, HashMode mode ) { * * @param hasher The current {@code Hasher} object * @param file The {@code File} object to hash - * @param mode When {@code mode} is equals to the string {@code deep} is used teh file content + * @param mode When {@code mode} is equals to the string {@code deep} is used the file content * in order to create the hash key for this file, otherwise just the file metadata information * (full name, size and last update timestamp) * @return The updated {@code Hasher} object @@ -244,7 +244,7 @@ static private Hasher hashFile( Hasher hasher, java.io.File file, HashMode mode * * @param hasher The current {@code Hasher} object * @param path The {@code Path} object to hash - * @param mode When {@code mode} is equals to the string {@code deep} is used teh file content + * @param mode When {@code mode} is equals to the string {@code deep} is used the file content * in order to create the hash key for this file, otherwise just the file metadata information * (full name, size and last update timestamp) * @return The updated {@code Hasher} object diff --git a/modules/nf-commons/src/main/nextflow/util/CharsetHelper.groovy b/modules/nf-commons/src/main/nextflow/util/CharsetHelper.groovy index 45d3897834..d0af3923cc 100644 --- a/modules/nf-commons/src/main/nextflow/util/CharsetHelper.groovy +++ b/modules/nf-commons/src/main/nextflow/util/CharsetHelper.groovy @@ -43,7 +43,7 @@ class CharsetHelper { return Charset.forName(object) if( object != null ) - log.warn "Invalid charset object: 
$object -- using defualt: ${Charset.defaultCharset()}" + log.warn "Invalid charset object: $object -- using default: ${Charset.defaultCharset()}" Charset.defaultCharset() } diff --git a/modules/nf-commons/src/test/nextflow/extension/FilesExTest.groovy b/modules/nf-commons/src/test/nextflow/extension/FilesExTest.groovy index 7b1ecbb7b9..a56b8bbe87 100644 --- a/modules/nf-commons/src/test/nextflow/extension/FilesExTest.groovy +++ b/modules/nf-commons/src/test/nextflow/extension/FilesExTest.groovy @@ -1272,7 +1272,7 @@ class FilesExTest extends Specification { FilesEx.toPosixFilePermission(0644) == [OWNER_READ, OWNER_WRITE, GROUP_READ, OTHERS_READ] as Set } - def 'should convert permissions to actal' () { + def 'should convert permissions to octal' () { expect: FilesEx.toOctalFileMode([] as Set) == 0 and: diff --git a/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy b/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy index 7dc6f9b3b0..d8a58b637e 100644 --- a/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy +++ b/modules/nf-commons/src/test/nextflow/plugin/PluginsFacadeTest.groovy @@ -288,7 +288,7 @@ class PluginsFacadeTest extends Specification { plugins.size()==4 plugins.find { it.id == 'nf-amazon' && it.version=='0.1.0' } // <-- version from default plugins.find { it.id == 'nf-tower' && it.version=='1.0.1' } // <-- version from the env var - plugins.find { it.id == 'nf-foo' && it.version=='2.2.0' } // <-- version from tne env var + plugins.find { it.id == 'nf-foo' && it.version=='2.2.0' } // <-- version from the env var plugins.find { it.id == 'nf-bar' && it.version==null } // <-- no version } diff --git a/modules/nf-commons/src/testFixtures/groovy/nextflow/plugin/hello/HelloExtension.groovy b/modules/nf-commons/src/testFixtures/groovy/nextflow/plugin/hello/HelloExtension.groovy index 2c4dc2e7d4..1436a5f790 100644 --- a/modules/nf-commons/src/testFixtures/groovy/nextflow/plugin/hello/HelloExtension.groovy 
+++ b/modules/nf-commons/src/testFixtures/groovy/nextflow/plugin/hello/HelloExtension.groovy @@ -139,7 +139,7 @@ class HelloExtension extends PluginExtensionPoint { */ @Function String sayHello(String lang='en'){ - assert initialized, "PluginExtension was not initilized" + assert initialized, "PluginExtension was not initialized" // sayHello is the entrypoint where we can write all the logic or delegate to other classes, ... return functions.sayHello(lang) } diff --git a/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy b/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy index 2f00717c2c..ac3e8cee11 100644 --- a/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy +++ b/modules/nf-httpfs/src/main/nextflow/file/http/XFileSystemProvider.groovy @@ -389,7 +389,7 @@ abstract class XFileSystemProvider extends FileSystemProvider { @Override DirectoryStream newDirectoryStream(Path dir, DirectoryStream.Filter filter) throws IOException { - throw new UnsupportedOperationException("Direcotry listing unsupported by ${getScheme().toUpperCase()} file system provider") + throw new UnsupportedOperationException("Directory listing unsupported by ${getScheme().toUpperCase()} file system provider") } @Override diff --git a/plugins/README.md b/plugins/README.md index 5c3569fb13..153eecab26 100644 --- a/plugins/README.md +++ b/plugins/README.md @@ -107,7 +107,7 @@ If no plugins are specified in the nextflow.config file, Nextflow default plugin The default plugins list is defined in the Nextflow resources file included in the distribution runtime `./modules/nextflow/src/main/resources/META-INF/plugins-info.txt`. -To disable the use of defualt plugins set the following variable `NXF_PLUGINS_DEFAULT=false`. +To disable the use of default plugins set the following variable `NXF_PLUGINS_DEFAULT=false`. 
## Gradle Tasks diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy index bff0b013d1..ddd858991f 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy +++ b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy @@ -274,7 +274,7 @@ class AzBatchService implements Closeable { if (!config.batch().accountName) throw new IllegalArgumentException("Missing Azure Batch account name -- Specify it in the nextflow.config file using the setting 'azure.batch.accountName'") if (!config.batch().accountKey) - throw new IllegalArgumentException("Missing Azure Batch account key -- Specify it in the nextflow.config file using the setting 'azure.batch.accountKet'") + throw new IllegalArgumentException("Missing Azure Batch account key -- Specify it in the nextflow.config file using the setting 'azure.batch.accountKey'") return new BatchSharedKeyCredentials(config.batch().endpoint, config.batch().accountName, config.batch().accountKey) @@ -872,7 +872,7 @@ class AzBatchService implements Closeable { final listener = new EventListener>() { @Override void accept(ExecutionAttemptedEvent event) throws Throwable { - log.debug("Azure TooManyRequests reponse error - attempt: ${event.attemptCount}; reason: ${event.lastFailure.message}") + log.debug("Azure TooManyRequests response error - attempt: ${event.attemptCount}; reason: ${event.lastFailure.message}") } } return RetryPolicy.builder() diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerClient.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerClient.groovy index 7fa2dac217..b494e8a30b 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerClient.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/TowerClient.groovy @@ -652,7 +652,7 @@ class TowerClient implements TraceObserver { def map = obj as Map return map.collect { k,v -> 
"$k:$v" }.join(',') } - throw new IllegalArgumentException("Illegal container attribut type: ${obj.getClass().getName()} = ${obj}" ) + throw new IllegalArgumentException("Illegal container attribute type: ${obj.getClass().getName()} = ${obj}" ) } protected Map makeTaskMap0(TraceRecord trace) { From 8eb5f30584ef24f02f24c93c4dfc79fe561cdea0 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 10 Aug 2023 21:51:45 +0200 Subject: [PATCH 049/128] Add 429 http status code to Wave retriable errors Signed-off-by: Paolo Di Tommaso --- .../nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index 45e2125cea..d189704e86 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -596,7 +596,7 @@ class WaveClient { return Failsafe.with(policy).get(action) } - static private final List SERVER_ERRORS = [502,503,504] + static private final List SERVER_ERRORS = [429,502,503,504] protected HttpResponse httpSend(HttpRequest req) { return safeApply(() -> { From 98f88a502fcbff13c596588d6c912020e013d383 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Fri, 11 Aug 2023 18:41:55 +0000 Subject: [PATCH 050/128] Update changelog Signed-off-by: Paolo Di Tommaso --- changelog.txt | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/changelog.txt b/changelog.txt index 852e84c0bf..9e2995e8f8 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,5 +1,13 @@ NEXTFLOW CHANGE-LOG =================== +23.04.3 - 11 Aug 2023 +- Increase Wave client max attempts [8c67610a] +- Fix log typo [03e19ea2] +- Add 429 http status code to Wave retriable errors [a8b8c6c5] +- Improve handling Wave server errors [621c9665] +- Bump nf-wave@0.8.4 [d7fa3f26] +- Bump corretto 17.0.8 [7a73a78f] + 
23.08.0-edge - 5 Aug 2023 - Add `-value` option to `config` command (#4142) [57e3100b] - Add `deleteTasksOnCompletion` to Azure Batch configuration (#4114) [b14674dc] @@ -114,6 +122,12 @@ NEXTFLOW CHANGE-LOG - Bump nf-amazon@2.1.0 [57464746] - Bump nf-cloudcache@0.1.0 [cb6242c4] +23.04.2 - 8 Jun 2023 +- Fix non-deterministic null container engine error [f93221ab] +- Add retry policy to Wave client [2f1532f6] +- Fix wave build docs [34a73022] +- Bump nf-wave@0.8.3 [350201b5] + 23.05.0-edge - 15 May 2023 - Add support for custom custom root directory to resolve relative paths (#3942) [f06bb1f7] - Add `arch` directive support for Spack (#3786) [62dfc482] From 7b19fb03396236f0722b565bdbb017c4c2d5d351 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 13 Aug 2023 10:41:30 +0200 Subject: [PATCH 051/128] Improve Conda build error report [ci fast] Signed-off-by: Paolo Di Tommaso --- .../nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy b/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy index f508c74fdc..451aeecac2 100644 --- a/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy @@ -315,9 +315,9 @@ class CondaCache { final max = createTimeout.toMillis() final builder = new ProcessBuilder(['bash','-c',cmd]) - final proc = builder.start() + final proc = builder.redirectErrorStream(true).start() final err = new StringBuilder() - final consumer = proc.consumeProcessErrorStream(err) + final consumer = proc.consumeProcessOutputStream(err) proc.waitForOrKill(max) def status = proc.exitValue() if( status != 0 ) { From d99b34322369c1a2602da26866d5a34302939da2 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 13 Aug 2023 15:55:01 +0200 Subject: [PATCH 052/128] Fix Process hangs when using flatten and finish errorStrategy 
Signed-off-by: Paolo Di Tommaso --- .../nextflow/extension/OperatorImpl.groovy | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy b/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy index 154dcee0f2..16a9b87a45 100644 --- a/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/extension/OperatorImpl.groovy @@ -41,6 +41,7 @@ import nextflow.script.TokenBranchDef import nextflow.script.TokenMultiMapDef import nextflow.splitter.FastaSplitter import nextflow.splitter.FastqSplitter +import nextflow.splitter.JsonSplitter import nextflow.splitter.TextSplitter import org.codehaus.groovy.runtime.callsite.BooleanReturningMethodInvoker import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation @@ -725,23 +726,26 @@ class OperatorImpl { final listeners = [] final target = CH.create() + final stopOnFirst = source instanceof DataflowExpression - if( source instanceof DataflowExpression ) { - listeners << new DataflowEventAdapter() { - @Override - void afterRun(final DataflowProcessor processor, final List messages) { - processor.bindOutput( Channel.STOP ) + listeners << new DataflowEventAdapter() { + @Override + void afterRun(final DataflowProcessor processor, final List messages) { + if( stopOnFirst ) processor.terminate() - } + } - boolean onException(final DataflowProcessor processor, final Throwable e) { - OperatorImpl.log.error("@unknown", e) - session.abort(e) - return true; - } + @Override + void afterStop(final DataflowProcessor processor) { + processor.bindOutput(Channel.STOP) } - } + boolean onException(final DataflowProcessor processor, final Throwable e) { + OperatorImpl.log.error("@unknown", e) + session.abort(e) + return true; + } + } newOperator(inputs: [source], outputs: [target], listeners: listeners) { item -> From 
b238d7e238959edb5300dbebcda67ff4f7929e98 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 13 Aug 2023 16:23:06 +0200 Subject: [PATCH 053/128] Fix Execution should fail if report or timeline file already exists Signed-off-by: Paolo Di Tommaso --- .../src/main/groovy/nextflow/trace/ReportObserver.groovy | 4 ++++ .../src/main/groovy/nextflow/trace/TimelineObserver.groovy | 3 +++ 2 files changed, 7 insertions(+) diff --git a/modules/nextflow/src/main/groovy/nextflow/trace/ReportObserver.groovy b/modules/nextflow/src/main/groovy/nextflow/trace/ReportObserver.groovy index 978e248983..b3886ac205 100644 --- a/modules/nextflow/src/main/groovy/nextflow/trace/ReportObserver.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/trace/ReportObserver.groovy @@ -24,6 +24,7 @@ import groovy.text.GStringTemplateEngine import groovy.transform.CompileStatic import groovy.util.logging.Slf4j import nextflow.Session +import nextflow.exception.AbortOperationException import nextflow.processor.TaskHandler import nextflow.processor.TaskId import nextflow.processor.TaskProcessor @@ -127,6 +128,9 @@ class ReportObserver implements TraceObserver { void onFlowCreate(Session session) { this.session = session this.aggregator = new ResourcesAggregator(session) + // check if the report file already exists + if( Files.exists(reportFile) && !overwrite ) + throw new AbortOperationException("Report file already exists: ${reportFile.toUriString()} -- enable the 'report.overwrite' option in your config file to overwrite existing files") } /** diff --git a/modules/nextflow/src/main/groovy/nextflow/trace/TimelineObserver.groovy b/modules/nextflow/src/main/groovy/nextflow/trace/TimelineObserver.groovy index e96efab748..8ae3369613 100644 --- a/modules/nextflow/src/main/groovy/nextflow/trace/TimelineObserver.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/trace/TimelineObserver.groovy @@ -24,6 +24,7 @@ import groovy.text.GStringTemplateEngine import groovy.transform.CompileStatic import 
groovy.util.logging.Slf4j import nextflow.Session +import nextflow.exception.AbortOperationException import nextflow.processor.TaskHandler import nextflow.processor.TaskId import nextflow.processor.TaskProcessor @@ -71,6 +72,8 @@ class TimelineObserver implements TraceObserver { @Override void onFlowCreate(Session session) { beginMillis = startMillis = System.currentTimeMillis() + if( Files.exists(reportFile) && !overwrite ) + throw new AbortOperationException("Timeline file already exists: ${reportFile.toUriString()} -- enable the 'timeline.overwrite' option in your config file to overwrite existing files") } /** From 8e4d7fedd0b0338ec3752e0d9442c688bc13bbc0 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 13 Aug 2023 17:29:44 +0200 Subject: [PATCH 054/128] Fix env output when changing task workdir Signed-off-by: Paolo Di Tommaso --- .../nextflow/container/ShifterBuilder.groovy | 3 ++ .../executor/BashWrapperBuilder.groovy | 3 ++ .../nextflow/executor/command-run.txt | 1 + .../executor/BashWrapperBuilderTest.groovy | 43 ++++++++++--------- .../executor/test-bash-wrapper-with-trace.txt | 1 + .../nextflow/executor/test-bash-wrapper.txt | 1 + .../google/lifesciences/bash-wrapper-gcp.txt | 1 + 7 files changed, 32 insertions(+), 21 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/container/ShifterBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/container/ShifterBuilder.groovy index d0f757053f..d1cb33bfc1 100644 --- a/modules/nextflow/src/main/groovy/nextflow/container/ShifterBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/container/ShifterBuilder.groovy @@ -95,6 +95,9 @@ class ShifterBuilder extends ContainerBuilder { else if( env instanceof String && env.contains('=') ) { result << env } + else if( env instanceof String ) { + result << "\${$env:+\"$env=\$$env\"}" } + else if( env ) { throw new IllegalArgumentException("Not a valid environment value: $env [${env.class.name}]") } diff --git 
a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index 25b21626ec..835739e5ae 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -183,6 +183,7 @@ class BashWrapperBuilder { result.append('\n') result.append('# capture process environment\n') result.append('set +u\n') + result.append('cd "$NXF_TASK_WORKDIR"\n') for( int i=0; i/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' } @@ -734,7 +734,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' and: @@ -755,7 +755,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true, entrypointOverride: false] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' 
binding.kill_cmd == 'docker stop $NXF_BOXID' } @@ -769,7 +769,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true, entrypointOverride: false] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' and: @@ -790,7 +790,7 @@ class BashWrapperBuilderTest extends Specification { containerEnabled: true ).makeBinding() then: - binding.launch_cmd == 'sudo docker run -i -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'sudo docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'sudo docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'sudo docker stop $NXF_BOXID' } @@ -803,7 +803,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true, remove:false, kill: false] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == "" binding.kill_cmd 
== null binding.containsKey('kill_cmd') @@ -817,7 +817,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', enabled: true, remove:false, kill: 'SIGXXX'] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == "" binding.kill_cmd == 'docker kill -s SIGXXX $NXF_BOXID' binding.containsKey('kill_cmd') @@ -832,7 +832,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v /folder\\ with\\ blanks:/folder\\ with\\ blanks -v /work/dir:/work/dir -w "\$PWD" --name \$NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /folder\\ with\\ blanks:/folder\\ with\\ blanks -v /work/dir:/work/dir -w "\$PWD" --name \$NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' } @@ -847,7 +847,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', sudo: true, enabled: true] ).makeBinding() then: - binding.launch_cmd == 'sudo docker run -i -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'sudo docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.kill_cmd == 'sudo docker stop $NXF_BOXID' binding.cleanup_cmd == '''\ (sudo -n true && sudo rm -rf 
"$NXF_SCRATCH" || rm -rf "$NXF_SCRATCH")&>/dev/null || true @@ -864,7 +864,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i -v /work/dir:/work/dir -w "$PWD" -v /foo:/bar --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" -v /foo:/bar --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.kill_cmd == 'docker stop $NXF_BOXID' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' } @@ -879,7 +879,7 @@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh + sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -896,7 +896,7 @@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh" + sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh" '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -921,7 +921,7 @@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run 
--mount=type=bind,source=/folder\\ with\\ blanks,destination=/folder\\ with\\ blanks --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh + sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/folder\\ with\\ blanks,destination=/folder\\ with\\ blanks --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -938,7 +938,7 @@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" --mount=type=bind,source=/foo,destination=/bar busybox /bin/bash -ue /work/dir/.command.sh + sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" --mount=type=bind,source=/foo,destination=/bar busybox /bin/bash -ue /work/dir/.command.sh '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -961,7 +961,7 @@ class BashWrapperBuilderTest extends Specification { STATUS=$(shifterimg -v pull docker://ubuntu:latest | tail -n2 | head -n1 | awk \'{print $6}\') [[ $STATUS == "FAILURE" || -z $STATUS ]] && echo "Shifter failed to pull image \'docker://ubuntu:latest\'" >&2 && exit 1 done - shifter --image docker://ubuntu:latest /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh" + ${NXF_TASK_WORKDIR:+"NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} shifter --image docker://ubuntu:latest /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh" '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -977,7 +977,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: 
[enabled: true, engine: 'singularity'] as ContainerConfig ).makeBinding() then: - binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity exec --no-home --pid docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' } @@ -991,7 +991,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [enabled: true, engine: 'singularity', entrypointOverride: true] as ContainerConfig ).makeBinding() then: - binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity exec --no-home --pid docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' } @@ -1086,6 +1086,7 @@ class BashWrapperBuilderTest extends Specification { str == ''' # capture process environment set +u + cd "$NXF_TASK_WORKDIR" echo FOO=${FOO[@]} > .command.env echo BAR=${BAR[@]} >> .command.env ''' @@ -1130,7 +1131,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 
'podman', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'podman run -i -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'podman run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'podman rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'podman stop $NXF_BOXID' } @@ -1143,7 +1144,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'podman', enabled: true, entrypointOverride: true] ).makeBinding() then: - binding.launch_cmd == 'podman run -i -v /work/dir:/work/dir -w "$PWD" --entrypoint /bin/bash --name $NXF_BOXID busybox -c "/bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'podman run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --entrypoint /bin/bash --name $NXF_BOXID busybox -c "/bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == 'podman rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'podman stop $NXF_BOXID' } @@ -1157,7 +1158,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'podman', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'podman run -i -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'podman run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'rm -rf $NXF_SCRATCH || true\npodman rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'podman stop $NXF_BOXID' } diff --git a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt 
b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt index 49ea12e675..b0672d36ad 100644 --- a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt +++ b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt @@ -305,6 +305,7 @@ nxf_main() { set +u set -u [[ $NXF_SCRATCH ]] && cd $NXF_SCRATCH + export NXF_TASK_WORKDIR="$PWD" nxf_stage set +e diff --git a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt index 2a6a6c0e71..e855aa1a1e 100644 --- a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt +++ b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt @@ -122,6 +122,7 @@ nxf_main() { spack env activate -d /spack/env/path set -u [[ $NXF_SCRATCH ]] && cd $NXF_SCRATCH + export NXF_TASK_WORKDIR="$PWD" nxf_stage set +e diff --git a/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt b/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt index ff58667ae8..5cb73b9050 100644 --- a/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt +++ b/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt @@ -210,6 +210,7 @@ nxf_main() { set +u set -u [[ $NXF_SCRATCH ]] && cd $NXF_SCRATCH + export NXF_TASK_WORKDIR="$PWD" set +e (set -o pipefail; (nxf_launch | tee .command.out) 3>&1 1>&2 2>&3 | tee .command.err) & From 3051cd13da0298eaca142509ac05c7080a1c4328 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 13 Aug 2023 17:39:11 +0200 Subject: [PATCH 055/128] Improve handling of name-only container env variables Signed-off-by: Paolo Di Tommaso --- .../container/ContainerBuilder.groovy | 2 +- .../container/ContainerBuilderTest.groovy | 2 +- .../container/DockerBuilderTest.groovy | 2 +- 
.../container/PodmanBuilderTest.groovy | 2 +- .../executor/BashWrapperBuilderTest.groovy | 34 +++++++++---------- 5 files changed, 21 insertions(+), 21 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/container/ContainerBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/container/ContainerBuilder.groovy index 0d02192fb2..9f505f0a29 100644 --- a/modules/nextflow/src/main/groovy/nextflow/container/ContainerBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/container/ContainerBuilder.groovy @@ -246,7 +246,7 @@ abstract class ContainerBuilder { result << '-e "' << env << '"' } else if( env instanceof String ) { - result << "\${$env:+-e \"$env=\$$env\"}" + result << "-e \"$env\"" } else if( env ) { throw new IllegalArgumentException("Not a valid environment value: $env [${env.class.name}]") diff --git a/modules/nextflow/src/test/groovy/nextflow/container/ContainerBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/ContainerBuilderTest.groovy index 45a94927e9..a1c0c4cf8c 100644 --- a/modules/nextflow/src/test/groovy/nextflow/container/ContainerBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/ContainerBuilderTest.groovy @@ -56,7 +56,7 @@ class ContainerBuilderTest extends Specification { when: result = builder.makeEnv( 'FOO' ) then: - result.toString() == '${FOO:+-e "FOO=$FOO"}' + result.toString() == '-e "FOO"' when: builder.makeEnv( 1 ) diff --git a/modules/nextflow/src/test/groovy/nextflow/container/DockerBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/DockerBuilderTest.groovy index 1d0fa6d848..fd94d44262 100644 --- a/modules/nextflow/src/test/groovy/nextflow/container/DockerBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/DockerBuilderTest.groovy @@ -58,7 +58,7 @@ class DockerBuilderTest extends Specification { ENV | EXPECT 'X=1' | '-e "X=1"' [VAR_X:1, VAR_Y: 2] | '-e "VAR_X=1" -e "VAR_Y=2"' - 'BAR' | '${BAR:+-e "BAR=$BAR"}' + 
'BAR' | '-e "BAR"' } def 'test docker create command line'() { diff --git a/modules/nextflow/src/test/groovy/nextflow/container/PodmanBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/PodmanBuilderTest.groovy index 47e6404271..2d57837f84 100644 --- a/modules/nextflow/src/test/groovy/nextflow/container/PodmanBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/PodmanBuilderTest.groovy @@ -62,7 +62,7 @@ class PodmanBuilderTest extends Specification { ENV | EXPECT 'X=1' | '-e "X=1"' [VAR_X:1, VAR_Y: 2] | '-e "VAR_X=1" -e "VAR_Y=2"' - 'BAR' | '${BAR:+-e "BAR=$BAR"}' + 'BAR' | '-e "BAR"' } def 'test podman create command line'() { diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy index 3e2f3a2c93..15c9965a18 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy @@ -720,7 +720,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' } @@ -734,7 +734,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} 
-v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' and: @@ -755,7 +755,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true, entrypointOverride: false] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' } @@ -769,7 +769,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true, entrypointOverride: false] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' and: @@ -790,7 
+790,7 @@ class BashWrapperBuilderTest extends Specification { containerEnabled: true ).makeBinding() then: - binding.launch_cmd == 'sudo docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'sudo docker run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'sudo docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'sudo docker stop $NXF_BOXID' } @@ -803,7 +803,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', temp: 'auto', enabled: true, remove:false, kill: false] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v $(nxf_mktemp):/tmp -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == "" binding.kill_cmd == null binding.containsKey('kill_cmd') @@ -817,7 +817,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', enabled: true, remove:false, kill: 'SIGXXX'] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID ubuntu /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == "" binding.kill_cmd == 'docker kill -s SIGXXX $NXF_BOXID' binding.containsKey('kill_cmd') @@ -832,7 +832,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 
'docker', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /folder\\ with\\ blanks:/folder\\ with\\ blanks -v /work/dir:/work/dir -w "\$PWD" --name \$NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v /folder\\ with\\ blanks:/folder\\ with\\ blanks -v /work/dir:/work/dir -w "\$PWD" --name \$NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'docker stop $NXF_BOXID' } @@ -847,7 +847,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', sudo: true, enabled: true] ).makeBinding() then: - binding.launch_cmd == 'sudo docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'sudo docker run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.kill_cmd == 'sudo docker stop $NXF_BOXID' binding.cleanup_cmd == '''\ (sudo -n true && sudo rm -rf "$NXF_SCRATCH" || rm -rf "$NXF_SCRATCH")&>/dev/null || true @@ -864,7 +864,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'docker', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'docker run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" -v /foo:/bar --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'docker run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -w "$PWD" -v /foo:/bar --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.kill_cmd == 'docker stop $NXF_BOXID' binding.cleanup_cmd == 'docker rm $NXF_BOXID &>/dev/null || true\n' } @@ -879,7 +879,7 
@@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh + sarus run -e "NXF_TASK_WORKDIR" --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -896,7 +896,7 @@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh" + sarus run -e "NXF_TASK_WORKDIR" --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -c "eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh" '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -921,7 +921,7 @@ class BashWrapperBuilderTest extends Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/folder\\ with\\ blanks,destination=/folder\\ with\\ blanks --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh + sarus run -e "NXF_TASK_WORKDIR" --mount=type=bind,source=/folder\\ with\\ blanks,destination=/folder\\ with\\ blanks --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" busybox /bin/bash -ue /work/dir/.command.sh '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -938,7 +938,7 @@ class BashWrapperBuilderTest extends 
Specification { then: binding.launch_cmd == '''\ sarus pull busybox 1>&2 - sarus run ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" --mount=type=bind,source=/foo,destination=/bar busybox /bin/bash -ue /work/dir/.command.sh + sarus run -e "NXF_TASK_WORKDIR" --mount=type=bind,source=/work/dir,destination=/work/dir -w "$PWD" --mount=type=bind,source=/foo,destination=/bar busybox /bin/bash -ue /work/dir/.command.sh '''.stripIndent().rightTrim() binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' @@ -1131,7 +1131,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'podman', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'podman run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'podman run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'podman rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'podman stop $NXF_BOXID' } @@ -1144,7 +1144,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [engine: 'podman', enabled: true, entrypointOverride: true] ).makeBinding() then: - binding.launch_cmd == 'podman run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -w "$PWD" --entrypoint /bin/bash --name $NXF_BOXID busybox -c "/bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'podman run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -w "$PWD" --entrypoint /bin/bash --name $NXF_BOXID busybox -c "/bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == 'podman rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'podman stop $NXF_BOXID' } @@ -1158,7 +1158,7 @@ class BashWrapperBuilderTest extends Specification { 
containerConfig: [engine: 'podman', enabled: true] ).makeBinding() then: - binding.launch_cmd == 'podman run -i ${NXF_TASK_WORKDIR:+-e "NXF_TASK_WORKDIR=$NXF_TASK_WORKDIR"} -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' + binding.launch_cmd == 'podman run -i -e "NXF_TASK_WORKDIR" -v /work/dir:/work/dir -v "$PWD":"$PWD" -w "$PWD" --name $NXF_BOXID busybox /bin/bash -ue /work/dir/.command.sh' binding.cleanup_cmd == 'rm -rf $NXF_SCRATCH || true\npodman rm $NXF_BOXID &>/dev/null || true\n' binding.kill_cmd == 'podman stop $NXF_BOXID' } From 245afa5d5be55ebc46d0d14738c1147a76ceff57 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sun, 13 Aug 2023 11:13:25 -0500 Subject: [PATCH 056/128] Document error about trailing backslash with space (#4180) Signed-off-by: Ben Sherman --- docs/script.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/script.md b/docs/script.md index c5b63012b0..e7bda1c07f 100644 --- a/docs/script.md +++ b/docs/script.md @@ -223,6 +223,14 @@ result = myLongCmdline.execute().text In the preceding example, `blastp` and its `-in`, `-out`, `-db` and `-html` switches and their arguments are effectively a single line. +:::{warning} +When using backslashes to continue a multi-line command, make sure to not put any spaces after the backslash, otherwise it will be interpreted by the Groovy lexer as an escaped space instead of a backslash, which will make your script incorrect. 
It will also print this warning: + +``` +unknown recognition error type: groovyjarjarantlr4.v4.runtime.LexerNoViableAltException +``` +::: + (script-regexp)= ### Regular expressions From 563bff13eec29095a9e9d86d95033e98377fd2c8 Mon Sep 17 00:00:00 2001 From: Manuele Simi <2822686+manuelesimi@users.noreply.github.com> Date: Sun, 13 Aug 2023 18:22:55 +0200 Subject: [PATCH 057/128] Update AWS instructions for creating a custom AMI [ci skip] (#4174) Signed-off-by: Manuele Simi --- docs/aws.md | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/docs/aws.md b/docs/aws.md index c2f0b69724..4cd2488f1b 100644 --- a/docs/aws.md +++ b/docs/aws.md @@ -260,10 +260,11 @@ There are several reasons why you might need to create your own [AMI (Amazon Mac ### Create your custom AMI -From the EC2 Dashboard, select **Launch Instance**, then select **AWS Marketplace** in the left-hand pane and search for "ECS". In the result list, select **Amazon ECS-Optimized Amazon Linux 2 AMI**, then continue as usual to configure and launch the instance. +From the EC2 Dashboard, select **Launch Instance**, then select **Browse more AMIs**. In the new page, select +**AWS Marketplace AMIs**, and then search for **Amazon ECS-Optimized Amazon Linux 2 (AL2) x86_64 AMI**. Select the AMI and continue as usual to configure and launch the instance. :::{note} -The selected instance has a bootstrap volume of 8GB and a second EBS volume of 30GB for scratch storage, which is not enough for real genomic workloads. Make sure to specify an additional volume with enough storage for your pipeline execution. +The selected instance has a root volume of 30GB. Make sure to increase its size or add a second EBS volume with enough storage for real genomic workloads. ::: When the instance is running, SSH into it (or connect with the Session Manager service), install the AWS CLI, and install any other tool that may be required (see following sections). 
@@ -303,7 +304,7 @@ Afterwards, verify that the AWS CLI package works correctly: ```console $ ./miniconda/bin/aws --version -aws-cli/1.19.79 Python/3.8.5 Linux/4.14.231-173.361.amzn2.x86_64 botocore/1.20.79 +aws-cli/1.29.20 Python/3.11.4 Linux/4.14.318-241.531.amzn2.x86_64 botocore/1.31.20 ``` :::{note} @@ -328,7 +329,7 @@ The grandparent directory of the `aws` tool will be mounted into the container a ### Docker installation -Docker is required by Nextflow to execute tasks on AWS Batch. The **Amazon ECS-Optimized Amazon Linux 2** AMI has Docker installed, however, if you create your AMI from a different AMI that does not have Docker installed, you will need to install it manually. +Docker is required by Nextflow to execute tasks on AWS Batch. The **Amazon ECS-Optimized Amazon Linux 2 (AL2) x86_64 AMI** has Docker installed, however, if you create your AMI from a different AMI that does not have Docker installed, you will need to install it manually. The following snippet shows how to install Docker on an Amazon EC2 instance: @@ -353,7 +354,7 @@ These steps must be done *before* creating the AMI from the current EC2 instance The [ECS container agent](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_agent.html) is a component of Amazon Elastic Container Service (Amazon ECS) and is responsible for managing containers on behalf of ECS. AWS Batch uses ECS to execute containerized jobs, therefore it requires the agent to be installed on EC2 instances within your Compute Environments. -The ECS agent is included in the **Amazon ECS-Optimized Amazon Linux 2** AMI. If you use a different AMI, you can also install the agent on any EC2 instance that supports the Amazon ECS specification. +The ECS agent is included in the **Amazon ECS-Optimized Amazon Linux 2 (AL2) x86_64 AMI** . If you use a different base AMI, you can also install the agent on any EC2 instance that supports the Amazon ECS specification. 
To install the agent, follow these steps: @@ -373,6 +374,10 @@ curl -s http://localhost:51678/v1/metadata | python -mjson.tool (test) The `AmazonEC2ContainerServiceforEC2Role` policy must be attached to the instance role in order to be able to connect the EC2 instance created by the Compute Environment to the ECS container. ::: +:::{note} +The `AmazonEC2ContainerRegistryReadOnly` policy should be attached to the instance role in order to get read-only access to Amazon EC2 Container Registry repositories. +::: + ## Jobs & Execution ### Custom job definition From 6e6ea579afc203722addb354c7719338cca8e9b2 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sun, 13 Aug 2023 11:23:21 -0500 Subject: [PATCH 058/128] Remove lock file from cloudcache (#4167) Signed-off-by: Ben Sherman --- .../nextflow/cache/CloudCacheStore.groovy | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy b/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy index 7d8b794739..c9f98bdef4 100644 --- a/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy +++ b/plugins/nf-cloudcache/src/main/nextflow/cache/CloudCacheStore.groovy @@ -35,8 +35,6 @@ import nextflow.util.CacheHelper @CompileStatic class CloudCacheStore implements CacheStore { - private final String LOCK_NAME = 'LOCK' - private final int KEY_SIZE /** The session UUID */ @@ -51,9 +49,6 @@ class CloudCacheStore implements CacheStore { /** The base path for this cache instance */ private Path dataPath - /** The lock file for this cache instance */ - private Path lock - /** The path to the index file */ private Path indexPath @@ -69,7 +64,6 @@ class CloudCacheStore implements CacheStore { this.runName = runName this.basePath = basePath ?: defaultBasePath() this.dataPath = this.basePath.resolve("$uniqueId") - this.lock = dataPath.resolve(LOCK_NAME) this.indexPath = dataPath.resolve("index.$runName") } @@ -83,7 +77,6 @@ class 
CloudCacheStore implements CacheStore { @Override CloudCacheStore open() { - acquireLock() indexWriter = new BufferedOutputStream(Files.newOutputStream(indexPath)) return this } @@ -92,28 +85,10 @@ class CloudCacheStore implements CacheStore { CloudCacheStore openForRead() { if( !dataPath.exists() ) throw new AbortOperationException("Missing cache directory: $dataPath") - acquireLock() indexReader = Files.newInputStream(indexPath) return this } - private void acquireLock() { - if( lock.exists() ) { - final msg = """ - Unable to acquire lock for session with ID ${uniqueId} - - Common reasons for this error are: - - You are trying to resume the execution of an already running pipeline - - A previous execution was abruptly interrupted, leaving the session open - - You can see the name of the conflicting run by inspecting the contents of the following path: ${lock} - """ - throw new IOException(msg) - } - - lock.text = runName - } - @Override void drop() { dataPath.deleteDir() @@ -122,7 +97,6 @@ class CloudCacheStore implements CacheStore { @Override void close() { FilesEx.closeQuietly(indexWriter) - lock.delete() } @Override From 2b449daa88ac9da0f134da817346c8666f3ba612 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 13 Aug 2023 18:24:20 +0200 Subject: [PATCH 059/128] Fix checkpoint thread termination (#4166) Signed-off-by: Paolo Di Tommaso --- .../src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy b/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy index 2b7da86ec3..3e0edfc9ba 100644 --- a/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy +++ b/plugins/nf-tower/src/main/io/seqera/tower/plugin/LogsCheckpoint.groovy @@ -39,6 +39,7 @@ class LogsCheckpoint implements TraceObserver { private Thread thread private Duration interval private LogsHandler handler + private volatile 
boolean terminated @Override void onFlowCreate(Session session) { @@ -59,14 +60,14 @@ class LogsCheckpoint implements TraceObserver { @Override void onFlowComplete() { - thread.interrupt() + this.terminated = true thread.join() } protected void run() { log.debug "Starting logs checkpoint thread - interval: ${interval}" try { - while( !thread.isInterrupted() ) { + while( !terminated && !thread.isInterrupted() ) { // just wait the declared delay await(interval) // checkpoint the logs From 30036dbf3cde48a3aff8d17c7d65b14cb9ef07c4 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Mon, 14 Aug 2023 07:52:10 -0500 Subject: [PATCH 060/128] Update tip about modifying maps (#4153) [ci skip] Signed-off-by: Ben Sherman Co-authored-by: Paolo Di Tommaso --- docs/script.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/script.md b/docs/script.md index e7bda1c07f..71df9a3487 100644 --- a/docs/script.md +++ b/docs/script.md @@ -114,7 +114,7 @@ new_scores = scores + ["Pete": 3, "Cedric": 120] When adding two maps, the first map is copied and then appended with the keys from the second map. Any conflicting keys are overwritten by the second map. :::{tip} -Appending an "update" map is a safer way to modify maps in Nextflow, specifically when passing maps through channels. This way, any references to the original map elsewhere in the pipeline won't be modified. +Copying a map with the `+` operator is a safer way to modify maps in Nextflow, specifically when passing maps through channels. This way, a new instance of the map will be created, and any references to the original map won't be affected. 
::: Learn more about maps: From 4d91862712ab40686bc69f84c9a312c47229e897 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Mon, 14 Aug 2023 07:53:30 -0500 Subject: [PATCH 061/128] Apply K8s Pod metadata to Job (#4057) [ci fast] Signed-off-by: Ben Sherman --- .../nextflow/k8s/model/PodSpecBuilder.groovy | 36 ++--- .../nextflow/k8s/K8sDriverLauncherTest.groovy | 16 ++- .../nextflow/k8s/K8sTaskHandlerTest.groovy | 33 +++-- .../k8s/model/PodSpecBuilderTest.groovy | 126 +++++++----------- 4 files changed, 87 insertions(+), 124 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodSpecBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodSpecBuilder.groovy index 1aea042686..6a36b27e63 100644 --- a/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodSpecBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/k8s/model/PodSpecBuilder.groovy @@ -564,30 +564,18 @@ class PodSpecBuilder { Map buildAsJob() { final pod = build() - // job metadata - final metadata = new LinkedHashMap() - metadata.name = this.podName // just use the podName for simplicity, it may be renamed to just `name` or `resourceName` in the future - metadata.namespace = this.namespace ?: 'default' - - // job spec - final spec = new LinkedHashMap() - spec.backoffLimit = 0 - spec.template = [spec: pod.spec] - - if( labels ) - metadata.labels = sanitize(labels, MetaType.LABEL) - - if( annotations ) - metadata.annotations = sanitize(annotations, MetaType.ANNOTATION) - - final result = [ - apiVersion: 'batch/v1', - kind: 'Job', - metadata: metadata, - spec: spec ] - - return result - + return [ + apiVersion: 'batch/v1', + kind: 'Job', + metadata: pod.metadata, + spec: [ + backoffLimit: 0, + template: [ + metadata: pod.metadata, + spec: pod.spec + ] + ] + ] } @PackageScope diff --git a/modules/nextflow/src/test/groovy/nextflow/k8s/K8sDriverLauncherTest.groovy b/modules/nextflow/src/test/groovy/nextflow/k8s/K8sDriverLauncherTest.groovy index 
2c58309ee1..8696f3a41a 100644 --- a/modules/nextflow/src/test/groovy/nextflow/k8s/K8sDriverLauncherTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/k8s/K8sDriverLauncherTest.groovy @@ -213,6 +213,9 @@ class K8sDriverLauncherTest extends Specification { driver.@k8sClient = new K8sClient(new ClientConfig(namespace: 'foo', serviceAccount: 'bar')) driver.@k8sConfig = k8s + and: + def metadata = [name: 'foo-boo', namespace: 'foo', labels: [app: 'nextflow', runName: 'foo-boo']] + when: def spec = driver.makeLauncherSpec() then: @@ -221,11 +224,12 @@ class K8sDriverLauncherTest extends Specification { spec == [ apiVersion: 'batch/v1', kind: 'Job', - metadata: [name: 'foo-boo', namespace: 'foo', labels: [app: 'nextflow', runName: 'foo-boo']], + metadata: metadata, spec: [ - backoffLimit: 0, - template: [ - spec: [ + backoffLimit: 0, + template: [ + metadata: metadata, + spec: [ restartPolicy: 'Never', containers: [ [ @@ -249,8 +253,8 @@ class K8sDriverLauncherTest extends Specification { [name:'vol-1', persistentVolumeClaim:[claimName:'pvc-1']], [name:'vol-2', configMap:[name:'cfg-2']] ] - ] - ] + ] + ] ] ] } diff --git a/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy b/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy index ba0ba6404c..4d6f06af2f 100644 --- a/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/k8s/K8sTaskHandlerTest.groovy @@ -605,23 +605,22 @@ class K8sTaskHandlerTest extends Specification { 1 * task.getConfig() >> config result == [ - apiVersion: 'batch/v1', - kind: 'Job', - metadata:[name: 'nf-123', namespace: 'default'], - spec:[ - backoffLimit: 0, - template: [ - spec: [ - restartPolicy: 'Never', - containers: [ - [ - name: 'nf-123', - image: 'debian:latest', - command: ['/bin/bash', '-ue','/some/work/dir/.command.run'] - ] - ] - ] - ] + apiVersion: 'batch/v1', + kind: 'Job', + metadata: [name: 'nf-123', namespace: 'default'], 
+ spec: [ + backoffLimit: 0, + template: [ + metadata: [name: 'nf-123', namespace: 'default'], + spec: [ + restartPolicy: 'Never', + containers: [[ + name: 'nf-123', + image: 'debian:latest', + command: ['/bin/bash', '-ue','/some/work/dir/.command.run'] + ]] + ] + ] ] ] } diff --git a/modules/nextflow/src/test/groovy/nextflow/k8s/model/PodSpecBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/k8s/model/PodSpecBuilderTest.groovy index 79e3919fd7..d04cbc1849 100644 --- a/modules/nextflow/src/test/groovy/nextflow/k8s/model/PodSpecBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/k8s/model/PodSpecBuilderTest.groovy @@ -1001,23 +1001,24 @@ class PodSpecBuilderTest extends Specification { .buildAsJob() then: - spec == [ apiVersion: 'batch/v1', - kind: 'Job', - metadata: [name:'foo', namespace:'default'], + spec == [ + apiVersion: 'batch/v1', + kind: 'Job', + metadata: [name: 'foo', namespace: 'default'], + spec: [ + backoffLimit: 0, + template: [ + metadata: [name: 'foo', namespace: 'default'], spec: [ - backoffLimit: 0, - template: [ - spec: [ - restartPolicy:'Never', - containers:[ - [name:'foo', - image:'busybox', - command:['echo', 'hello'], - ] - ] - ] - ] + restartPolicy: 'Never', + containers: [[ + name: 'foo', + image: 'busybox', + command: ['echo', 'hello'], + ]] ] + ] + ] ] } @@ -1035,37 +1036,40 @@ class PodSpecBuilderTest extends Specification { .withAnnotations([anno2: "val2", anno3: "val3"]) .buildAsJob() + def metadata = [ + name: 'foo', + namespace: 'default', + labels: [ + app: 'someApp', + runName: 'someName', + version: '3.8.1' + ], + annotations: [ + anno1: "val1", + anno2: "val2", + anno3: "val3" + ] + ] + then: - spec == [ apiVersion: 'batch/v1', - kind: 'Job', - metadata: [ - name:'foo', - namespace:'default', - labels: [ - app: 'someApp', - runName: 'someName', - version: '3.8.1' - ], - annotations: [ - anno1: "val1", - anno2: "val2", - anno3: "val3" - ] - ], - spec: [ - backoffLimit: 0, - template: [ - spec: [ - 
restartPolicy:'Never', - containers:[ - [name:'foo', - image:'busybox', - command:['echo', 'hello'], - ] - ] - ] - ] - ] + spec == [ + apiVersion: 'batch/v1', + kind: 'Job', + metadata: metadata, + spec: [ + backoffLimit: 0, + template: [ + metadata: metadata, + spec: [ + restartPolicy: 'Never', + containers: [[ + name: 'foo', + image: 'busybox', + command: ['echo', 'hello'], + ]] + ] + ] + ] ] } @@ -1099,36 +1103,4 @@ class PodSpecBuilderTest extends Specification { } - def 'should create job spec with activeDeadlineSeconds' () { - - when: - def spec = new PodSpecBuilder() - .withPodName('foo') - .withImageName('busybox') - .withCommand(['echo', 'hello']) - .withActiveDeadline(100) - .buildAsJob() - - then: - spec == [ apiVersion: 'batch/v1', - kind: 'Job', - metadata: [name:'foo', namespace:'default'], - spec: [ - backoffLimit: 0, - template: [ - spec: [ - restartPolicy:'Never', - activeDeadlineSeconds: 100, - containers:[ - [name:'foo', - image:'busybox', - command:['echo', 'hello'], - ] - ] - ] - ] - ] - ] - } - } From 2bae5198478857aa0f4c3111967bc444dff98a5a Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 15 Aug 2023 09:47:41 -0500 Subject: [PATCH 062/128] Remove dockerize launcher classpath file (#4191) [ci fast] Signed-off-by: Ben Sherman --- nextflow | 7 ++----- packing.gradle | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/nextflow b/nextflow index 75b6424928..7dd0470051 100755 --- a/nextflow +++ b/nextflow @@ -385,11 +385,8 @@ EOF } # checked if a cached classpath file exists and it newer that the nextflow boot jar file -if [[ -f /.nextflow/dockerized ]]; then - LAUNCH_FILE=/.nextflow/launch-classpath -else - LAUNCH_FILE="${NXF_LAUNCHER}/classpath-$(env_md5)" -fi +LAUNCH_FILE="${NXF_LAUNCHER}/classpath-$(env_md5)" + if [ -s "$LAUNCH_FILE" ] && [ "$LAUNCH_FILE" -nt "$NXF_BIN" ]; then declare -a launcher="($(cat "$LAUNCH_FILE"))" else diff --git a/packing.gradle b/packing.gradle index de6473607e..09993e7844 100644 --- 
a/packing.gradle +++ b/packing.gradle @@ -293,7 +293,7 @@ task dockerPack(type: Exec, dependsOn: ['packOne']) { COPY entry.sh /usr/local/bin/entry.sh COPY dist/docker /usr/local/bin/docker ENV NXF_HOME=/.nextflow - RUN touch /.nextflow/dockerized && rm -rf /.nextflow/launch-classpath + RUN touch /.nextflow/dockerized RUN chmod +x /usr/local/bin/nextflow /usr/local/bin/entry.sh RUN nextflow info ENTRYPOINT ["/usr/local/bin/entry.sh"] From 96ab8a69a64f4cbf0274514cbb41b839956512a8 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 15 Aug 2023 09:48:35 -0500 Subject: [PATCH 063/128] Fix `workflow.container` map resolution (#4190) Signed-off-by: Ben Sherman --- .../src/main/groovy/nextflow/Session.groovy | 19 ++++++++++++++----- .../nextflow/config/ConfigBuilder.groovy | 4 ++-- .../test/groovy/nextflow/SessionTest.groovy | 12 ++++++------ .../nextflow/config/ConfigBuilderTest.groovy | 7 ++++--- 4 files changed, 26 insertions(+), 16 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/Session.groovy b/modules/nextflow/src/main/groovy/nextflow/Session.groovy index 165bb4e5f9..03fc8698c6 100644 --- a/modules/nextflow/src/main/groovy/nextflow/Session.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/Session.groovy @@ -879,12 +879,13 @@ class Session implements ISession { def keys = (config.process as Map).keySet() for(String key : keys) { String name = null - if( key.startsWith('$') ) { - name = key.substring(1) - } - else if( key.startsWith('withName:') ) { + if( key.startsWith('withName:') ) { name = key.substring('withName:'.length()) } + else if( key.startsWith('$') ) { + name = key.substring(1) + log.warn1 "Process config \$${name} is deprecated, use withName:'${name}' instead" + } if( name ) checkValidProcessName(processNames, name, result) } @@ -1268,7 +1269,15 @@ class Session implements ISession { * look for `container` definition at process level */ config.process.each { String name, value -> - if( name.startsWith('$') && value instanceof 
Map && value.container ) { + if( name.startsWith('withName:') ) { + name = name.substring('withName:'.length()) + } + else if( name.startsWith('$') ) { + name = name.substring(1) + log.warn1 "Process config \$${name} is deprecated, use withName:'${name}' instead" + } + + if( value instanceof Map && value.container ) { result[name] = resolveClosure(value.container) } } diff --git a/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy index bb963d1684..6cb39fe9d2 100644 --- a/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy @@ -787,8 +787,8 @@ class ConfigBuilder { return true def result = process - .findAll { String name, value -> name.startsWith('$') && value instanceof Map } - .find { String name, Map value -> value.container as boolean } // the first non-empty `container` string + .findAll { String name, value -> (name.startsWith('withName:') || name.startsWith('$')) && value instanceof Map } + .find { String name, Map value -> value.container as boolean } // the first non-empty `container` string return result as boolean } diff --git a/modules/nextflow/src/test/groovy/nextflow/SessionTest.groovy b/modules/nextflow/src/test/groovy/nextflow/SessionTest.groovy index 6261ace967..71ef367d07 100644 --- a/modules/nextflow/src/test/groovy/nextflow/SessionTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/SessionTest.groovy @@ -525,25 +525,25 @@ class SessionTest extends Specification { when: text = ''' process { - $proc1 { container = 'alpha' } - $proc2 { container ='beta' } + withName:'proc1' { container = 'alpha' } + withName:'proc2' { container = 'beta' } } ''' then: - new Session(cfg(text)).fetchContainers() == ['$proc1': 'alpha', '$proc2': 'beta'] + new Session(cfg(text)).fetchContainers() == ['proc1': 'alpha', 'proc2': 'beta'] when: text = ''' process { - $proc1 { 
container = 'alpha' } - $proc2 { container ='beta' } + withName:'proc1' { container = 'alpha' } + withName:'proc2' { container = 'beta' } } process.container = 'gamma' ''' then: - new Session(cfg(text)).fetchContainers() == ['$proc1': 'alpha', '$proc2': 'beta', default: 'gamma'] + new Session(cfg(text)).fetchContainers() == ['proc1': 'alpha', 'proc2': 'beta', 'default': 'gamma'] when: diff --git a/modules/nextflow/src/test/groovy/nextflow/config/ConfigBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/config/ConfigBuilderTest.groovy index c5d9b5d6bc..420fe7b54a 100644 --- a/modules/nextflow/src/test/groovy/nextflow/config/ConfigBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/config/ConfigBuilderTest.groovy @@ -677,14 +677,14 @@ class ConfigBuilderTest extends Specification { when: file.text = ''' - process.$test.container = 'busybox' + process.'withName:test'.container = 'busybox' ''' def opt = new CliOptions(config: [file.toFile().canonicalPath]) def run = new CmdRun(withDocker: '-') def config = new ConfigBuilder().setOptions(opt).setCmdRun(run).build() then: config.docker.enabled - config.process.$test.container == 'busybox' + config.process.'withName:test'.container == 'busybox' when: file.text = @@ -709,7 +709,7 @@ class ConfigBuilderTest extends Specification { when: file.text = ''' - process.$test.tag = 'tag' + process.'withName:test'.tag = 'tag' ''' opt = new CliOptions(config: [file.toFile().canonicalPath]) run = new CmdRun(withDocker: '-') @@ -777,6 +777,7 @@ class ConfigBuilderTest extends Specification { !config.hasContainerDirective([foo: 1, bar: 2]) !config.hasContainerDirective([foo: 1, bar: 2, baz: [container: 'user/repo']]) config.hasContainerDirective([foo: 1, bar: 2, $baz: [container: 'user/repo']]) + config.hasContainerDirective([foo: 1, bar: 2, 'withName:baz': [container: 'user/repo']]) } From 552501cefd001ade587394fb3de14633578535ac Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 15 Aug 2023 10:28:28 -0500 
Subject: [PATCH 064/128] Escape semicolons in paths (#4193) Signed-off-by: Ben Sherman --- modules/nf-commons/src/main/nextflow/util/Escape.groovy | 2 +- modules/nf-commons/src/test/nextflow/util/EscapeTest.groovy | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/nf-commons/src/main/nextflow/util/Escape.groovy b/modules/nf-commons/src/main/nextflow/util/Escape.groovy index 6bef4dce69..5db0682bc6 100644 --- a/modules/nf-commons/src/main/nextflow/util/Escape.groovy +++ b/modules/nf-commons/src/main/nextflow/util/Escape.groovy @@ -29,7 +29,7 @@ import nextflow.extension.FilesEx @CompileStatic class Escape { - private static List SPECIAL_CHARS = ["'", '"', ' ', '(', ')', '\\', '!', '&', '|', '<', '>', '`', ':'] + private static List SPECIAL_CHARS = ["'", '"', ' ', '(', ')', '\\', '!', '&', '|', '<', '>', '`', ':', ';'] private static List VAR_CHARS = ['$', "'", '"', '(', ')', '\\', '&', '|', '<', '>', '`'] diff --git a/modules/nf-commons/src/test/nextflow/util/EscapeTest.groovy b/modules/nf-commons/src/test/nextflow/util/EscapeTest.groovy index 59841ba7dc..85f4cf5fb4 100644 --- a/modules/nf-commons/src/test/nextflow/util/EscapeTest.groovy +++ b/modules/nf-commons/src/test/nextflow/util/EscapeTest.groovy @@ -55,6 +55,8 @@ class EscapeTest extends Specification { Escape.path("hello<3.txt") == "hello\\<3.txt" Escape.path("hello>3.txt") == "hello\\>3.txt" Escape.path("hello`3.txt") == "hello\\`3.txt" + Escape.path('hello:3.txt') == "hello\\:3.txt" + Escape.path('hello;3.txt') == "hello\\;3.txt" Escape.path("/some'5/data'3/with/quote's/file's.txt") == "/some\\'5/data\\'3/with/quote\\'s/file\\'s.txt" Escape.path("Hello '$world'") == "Hello\\ \\'world\\'" From a3a75ea26127e4415d1a6a98ad3e33fdfa5d1931 Mon Sep 17 00:00:00 2001 From: Dr Marco Claudio De La Pierre Date: Wed, 16 Aug 2023 00:02:15 +0800 Subject: [PATCH 065/128] Use root user in Wave container based on micromamba (#4038) [ci fast] Signed-off-by: Marco De La Pierre --- 
.../resources/templates/conda/dockerfile-conda-file.txt | 1 + .../templates/conda/dockerfile-conda-packages.txt | 1 + .../src/test/io/seqera/wave/plugin/WaveClientTest.groovy | 2 ++ .../src/test/io/seqera/wave/util/DockerHelperTest.groovy | 7 +++++++ 4 files changed, 11 insertions(+) diff --git a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt b/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt index a7cc7806e8..d9be54e326 100644 --- a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt +++ b/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt @@ -3,3 +3,4 @@ COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml RUN micromamba install -y -n base -f /tmp/conda.yml \ {{base_packages}} && micromamba clean -a -y +USER root diff --git a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt b/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt index 6885c84ea0..67e01b4378 100644 --- a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt +++ b/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt @@ -4,3 +4,4 @@ RUN \ {{target}} \ {{base_packages}} && micromamba clean -a -y +USER root diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index 4005b1c0b8..8e5bcb31df 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -476,6 +476,7 @@ class WaveClientTest extends Specification { micromamba install -y -n base -c conda-forge -c defaults \\ salmon=1.2.3 \\ && micromamba clean -a -y + USER root '''.stripIndent() and: !assets.moduleResources @@ -544,6 +545,7 @@ class WaveClientTest extends Specification { COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml RUN micromamba install -y 
-n base -f /tmp/conda.yml \\ && micromamba clean -a -y + USER root '''.stripIndent() and: assets.condaFile == condaFile diff --git a/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy index 566838978a..4112c96900 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy @@ -41,6 +41,7 @@ class DockerHelperTest extends Specification { RUN micromamba install -y -n base -f /tmp/conda.yml \\ && micromamba install -y -n base conda-forge::procps-ng \\ && micromamba clean -a -y + USER root '''.stripIndent() } @@ -52,6 +53,7 @@ class DockerHelperTest extends Specification { COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml RUN micromamba install -y -n base -f /tmp/conda.yml \\ && micromamba clean -a -y + USER root '''.stripIndent() } @@ -67,6 +69,7 @@ class DockerHelperTest extends Specification { micromamba install -y -n base -c conda-forge -c defaults \\ bwa=0.7.15 salmon=1.1.1 \\ && micromamba clean -a -y + USER root '''.stripIndent() } @@ -84,6 +87,7 @@ class DockerHelperTest extends Specification { bwa=0.7.15 salmon=1.1.1 \\ && micromamba install -y -n base foo::one bar::two \\ && micromamba clean -a -y + USER root '''.stripIndent() } @@ -99,6 +103,7 @@ class DockerHelperTest extends Specification { micromamba install -y -n base -c foo -c bar \\ bwa=0.7.15 salmon=1.1.1 \\ && micromamba clean -a -y + USER root '''.stripIndent() } @@ -115,6 +120,7 @@ class DockerHelperTest extends Specification { micromamba install -y -n base -c conda-forge -c defaults \\ bwa=0.7.15 salmon=1.1.1 \\ && micromamba clean -a -y + USER root USER my-user RUN apt-get update -y && apt-get install -y nano '''.stripIndent() @@ -134,6 +140,7 @@ class DockerHelperTest extends Specification { micromamba install -y -n base -c conda-forge -c defaults \\ -f https://foo.com/some/conda-lock.yml \\ && 
micromamba clean -a -y + USER root USER my-user RUN apt-get update -y && apt-get install -y procps '''.stripIndent() From 67980f19c8840d875e29bba3aa2e5e16534b1173 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 15 Aug 2023 11:03:13 -0500 Subject: [PATCH 066/128] FIx nested InvocationTargetException (#4192) [ci fast] Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- .../src/main/groovy/nextflow/script/BaseScript.groovy | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy b/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy index 5da185edd6..521d6dca6d 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy @@ -191,9 +191,12 @@ abstract class BaseScript extends Script implements ExecutionContext { try { run0() } - catch(InvocationTargetException e) { + catch( InvocationTargetException e ) { // provide the exception cause which is more informative than InvocationTargetException - throw(e.cause ?: e) + Throwable target = e + do target = target.cause + while ( target instanceof InvocationTargetException ) + throw target } finally { ExecutionStack.pop() From 50f6f6d598b77010b0bd5e22b05fba0207e153b9 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Wed, 16 Aug 2023 14:11:03 +0200 Subject: [PATCH 067/128] Fix if-guard on log.trace in trask processor inner class [ci fast] Signed-off-by: Paolo Di Tommaso --- .../nextflow/processor/TaskProcessor.groovy | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy index 07fe7a7ef6..166082b9ea 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy +++ 
b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy @@ -2388,7 +2388,9 @@ class TaskProcessor { @Override List beforeRun(final DataflowProcessor processor, final List messages) { - log.trace "<${name}> Before run -- messages: ${messages}" + // apparently auto if-guard instrumented by @Slf4j is not honoured in inner classes - add it explictly + if( log.isTraceEnabled() ) + log.trace "<${name}> Before run -- messages: ${messages}" // the counter must be incremented here, otherwise it won't be consistent state.update { StateObj it -> it.incSubmitted() } // task index must be created here to guarantee consistent ordering @@ -2403,12 +2405,15 @@ class TaskProcessor { @Override void afterRun(DataflowProcessor processor, List messages) { - log.trace "<${name}> After run" + // apparently auto if-guard instrumented by @Slf4j is not honoured in inner classes - add it explictly + if( log.isTraceEnabled() ) + log.trace "<${name}> After run" currentTask.remove() } @Override Object messageArrived(final DataflowProcessor processor, final DataflowReadChannel channel, final int index, final Object message) { + // apparently auto if-guard instrumented by @Slf4j is not honoured in inner classes - add it explictly if( log.isTraceEnabled() ) { def channelName = config.getInputs()?.names?.get(index) def taskName = currentTask.get()?.name ?: name @@ -2420,6 +2425,7 @@ class TaskProcessor { @Override Object controlMessageArrived(final DataflowProcessor processor, final DataflowReadChannel channel, final int index, final Object message) { + // apparently auto if-guard instrumented by @Slf4j is not honoured in inner classes - add it explictly if( log.isTraceEnabled() ) { def channelName = config.getInputs()?.names?.get(index) def taskName = currentTask.get()?.name ?: name @@ -2429,7 +2435,9 @@ class TaskProcessor { super.controlMessageArrived(processor, channel, index, message) if( message == PoisonPill.instance ) { - log.trace "<${name}> Poison pill arrived; port: 
$index" + // apparently auto if-guard instrumented by @Slf4j is not honoured in inner classes - add it explictly + if( log.isTraceEnabled() ) + log.trace "<${name}> Poison pill arrived; port: $index" openPorts.set(index, 0) // mark the port as closed state.update { StateObj it -> it.poison() } } @@ -2439,7 +2447,9 @@ class TaskProcessor { @Override void afterStop(final DataflowProcessor processor) { - log.trace "<${name}> After stop" + // apparently auto if-guard instrumented by @Slf4j is not honoured in inner classes - add it explictly + if( log.isTraceEnabled() ) + log.trace "<${name}> After stop" } /** From 7b5e50a15cc1e1b7c22a12c1234c9e60c87d4743 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 17 Aug 2023 09:23:27 -0500 Subject: [PATCH 068/128] Add resource labels support for Azure Batch (#4178) Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- docs/executor.md | 6 +++++- docs/process.md | 14 ++++++++++++-- .../cloud/azure/batch/AzBatchService.groovy | 16 ++++++++++++++-- .../cloud/azure/batch/AzVmPoolSpec.groovy | 1 + .../cloud/azure/batch/AzBatchServiceTest.groovy | 4 +++- 5 files changed, 35 insertions(+), 6 deletions(-) diff --git a/docs/executor.md b/docs/executor.md index 1cce5ecd1d..a14d867146 100644 --- a/docs/executor.md +++ b/docs/executor.md @@ -28,6 +28,7 @@ Resource requests and other job characteristics can be controlled via the follow - {ref}`process-cpus` - {ref}`process-memory` - {ref}`process-queue` +- {ref}`process-resourcelabels` - {ref}`process-time` See the {ref}`AWS Batch` page for further configuration details. @@ -55,6 +56,7 @@ Resource requests and other job characteristics can be controlled via the follow - {ref}`process-machineType` - {ref}`process-memory` - {ref}`process-queue` +- {ref}`process-resourcelabels` - {ref}`process-time` See the {ref}`Azure Batch ` page for further configuration details. 
@@ -191,8 +193,8 @@ Resource requests and other job characteristics can be controlled via the follow - {ref}`process-disk` - {ref}`process-machineType` - {ref}`process-memory` -- {ref}`process-time` - {ref}`process-resourcelabels` +- {ref}`process-time` See the {ref}`Google Cloud Batch ` page for further configuration details. @@ -218,6 +220,7 @@ Resource requests and other job characteristics can be controlled via the follow - {ref}`process-disk` - {ref}`process-machineType` - {ref}`process-memory` +- {ref}`process-resourcelabels` - {ref}`process-time` See the {ref}`Google Life Sciences ` page for further configuration details. @@ -312,6 +315,7 @@ Resource requests and other job characteristics can be controlled via the follow - {ref}`process-disk` - {ref}`process-memory` - {ref}`process-pod` +- {ref}`process-resourcelabels` - {ref}`process-time` See the {ref}`Kubernetes ` page to learn how to set up a Kubernetes cluster to run Nextflow pipelines. diff --git a/docs/process.md b/docs/process.md index db7a594aca..7c4eb7fce0 100644 --- a/docs/process.md +++ b/docs/process.md @@ -2121,8 +2121,18 @@ process my_task { The limits and the syntax of the corresponding cloud provider should be taken into consideration when using resource labels. -:::{note} -Resource labels are currently only supported by the {ref}`awsbatch-executor`, {ref}`google-lifesciences-executor`, Google Cloud Batch and {ref}`k8s-executor` executors. +Resource labels are currently supported by the following executors: + +- {ref}`awsbatch-executor` +- {ref}`azurebatch-executor` +- {ref}`google-batch-executor` +- {ref}`google-lifesciences-executor` +- {ref}`k8s-executor` + +:::{versionadded} 23.09.0-edge +Resource labels are supported for Azure Batch when using automatic pool creation. + +Resource labels in Azure are added to pools, rather than jobs, in order to facilitate cost analysis. 
A new pool will be created for each new set of resource labels, therefore it is recommended to also set `azure.batch.deletePoolsOnCompletion = true` when using process-specific resource labels. ::: See also: [label](#label) diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy index ddd858991f..a16506e4ad 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy +++ b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzBatchService.groovy @@ -39,6 +39,7 @@ import com.microsoft.azure.batch.protocol.models.ContainerRegistry import com.microsoft.azure.batch.protocol.models.ElevationLevel import com.microsoft.azure.batch.protocol.models.ImageInformation import com.microsoft.azure.batch.protocol.models.JobUpdateParameter +import com.microsoft.azure.batch.protocol.models.MetadataItem import com.microsoft.azure.batch.protocol.models.MountConfiguration import com.microsoft.azure.batch.protocol.models.NetworkConfiguration import com.microsoft.azure.batch.protocol.models.OnAllTasksComplete @@ -563,9 +564,10 @@ class AzBatchService implements Closeable { throw new IllegalArgumentException(msg) } - final key = CacheHelper.hasher([vmType.name, opts]).hash().toString() + final metadata = task.config.getResourceLabels() + final key = CacheHelper.hasher([vmType.name, opts, metadata]).hash().toString() final poolId = "nf-pool-$key-$vmType.name" - return new AzVmPoolSpec(poolId: poolId, vmType: vmType, opts: opts) + return new AzVmPoolSpec(poolId: poolId, vmType: vmType, opts: opts, metadata: metadata) } protected void checkPool(CloudPool pool, AzVmPoolSpec spec) { @@ -698,6 +700,16 @@ class AzBatchService implements Closeable { .withTaskSlotsPerNode(spec.vmType.numberOfCores) .withStartTask(poolStartTask) + // resource labels + if( spec.metadata ) { + final metadata = spec.metadata.collect { name, value -> + new MetadataItem() + 
.withName(name) + .withValue(value) + } + poolParams.withMetadata(metadata) + } + // virtual network if( spec.opts.virtualNetwork ) poolParams.withNetworkConfiguration( new NetworkConfiguration().withSubnetId(spec.opts.virtualNetwork) ) diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzVmPoolSpec.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzVmPoolSpec.groovy index d9ac092c85..257edb6cfb 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzVmPoolSpec.groovy +++ b/plugins/nf-azure/src/main/nextflow/cloud/azure/batch/AzVmPoolSpec.groovy @@ -36,4 +36,5 @@ class AzVmPoolSpec { String poolId AzVmType vmType AzPoolOpts opts + Map metadata } diff --git a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy index 055a795b27..0b00cc9a38 100644 --- a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy +++ b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzBatchServiceTest.groovy @@ -338,6 +338,7 @@ class AzBatchServiceTest extends Specification { getMemory() >> MEM getCpus() >> CPUS getMachineType() >> TYPE + getResourceLabels() >> [foo: 'bar'] } } @@ -346,7 +347,8 @@ class AzBatchServiceTest extends Specification { then: 1 * svc.guessBestVm(LOC, CPUS, MEM, TYPE) >> VM and: - spec.poolId == 'nf-pool-ddb1223ab79edfe07c0af2be7fceeb13-Standard_X1' + spec.poolId == 'nf-pool-9022a3fbfb5f93028d78fefaea5e21ab-Standard_X1' + spec.metadata == [foo: 'bar'] } From 6670bb06ec588e9c3dd02f85d6c6f5152f53c913 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 16:34:40 +0200 Subject: [PATCH 069/128] Bump nf-azure@1.3.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-azure/changelog.txt | 4 ++++ plugins/nf-azure/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/nf-azure/changelog.txt b/plugins/nf-azure/changelog.txt index 
27c932bddc..808543c7da 100644 --- a/plugins/nf-azure/changelog.txt +++ b/plugins/nf-azure/changelog.txt @@ -1,5 +1,9 @@ nf-azure changelog =================== +1.3.0 - 17 Aug 2023 +- Add resource labels support for Azure Batch (#4178) [7b5e50a1] +- Fix typos in source code comments (#4173) [ci fast] [e78bc37e] + 1.2.0 - 5 Aug 2023 - Add deleteTasksOnCompletion to Azure Batch configuration (#4114) [b14674dc] diff --git a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF index 3bf6761375..48a1d75d94 100644 --- a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.azure.AzurePlugin Plugin-Id: nf-azure -Plugin-Version: 1.2.0 +Plugin-Version: 1.3.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.01.0-edge From fb8f6681ac30f879be586faf97d10c0c32fc3dd3 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 16:34:53 +0200 Subject: [PATCH 070/128] Bump nf-cloudcache@0.2.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-cloudcache/changelog.txt | 4 ++++ plugins/nf-cloudcache/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/nf-cloudcache/changelog.txt b/plugins/nf-cloudcache/changelog.txt index 0906ab5bb8..2e9af969f8 100644 --- a/plugins/nf-cloudcache/changelog.txt +++ b/plugins/nf-cloudcache/changelog.txt @@ -1,4 +1,8 @@ nf-cloudcache changelog ======================= +0.2.0 - 17 Aug 2023 +- Remove lock file from cloudcache (#4167) [6e6ea579] +- Enable cloud cache based on environment variable (#4160) [a66b0e63] + 0.1.0 - 22 Jul 2023 - Initial version diff --git a/plugins/nf-cloudcache/src/resources/META-INF/MANIFEST.MF b/plugins/nf-cloudcache/src/resources/META-INF/MANIFEST.MF index f000ea6e0a..042ea9ff79 100644 --- a/plugins/nf-cloudcache/src/resources/META-INF/MANIFEST.MF +++ 
b/plugins/nf-cloudcache/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.CloudCachePlugin Plugin-Id: nf-cloudcache -Plugin-Version: 0.1.0 +Plugin-Version: 0.2.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.04.0 From d06b83652212feca38f6d0305af09dbcf9136ea0 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 16:35:09 +0200 Subject: [PATCH 071/128] Bump nf-tower@1.6.1 Signed-off-by: Paolo Di Tommaso --- plugins/nf-tower/changelog.txt | 4 ++++ plugins/nf-tower/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/plugins/nf-tower/changelog.txt b/plugins/nf-tower/changelog.txt index ce9117eb76..ee8b075f69 100644 --- a/plugins/nf-tower/changelog.txt +++ b/plugins/nf-tower/changelog.txt @@ -1,5 +1,9 @@ nf-tower changelog =================== +1.6.1 -17 Aug 2023 +- Fix checkpoint thread termination (#4166) [2b449daa] +- Fix typos in source code comments (#4173) [ci fast] [e78bc37e] + 1.6.0 - 5 Aug 2023 - Add Tower logs checkpoint (#4132) [71dfecc2] - Restore Tower CacheManager for backward compatibility [6d269070] diff --git a/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF b/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF index d31cd40212..2708dd3558 100644 --- a/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-tower/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.tower.plugin.TowerPlugin Plugin-Id: nf-tower -Plugin-Version: 1.6.0 +Plugin-Version: 1.6.1 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 7555b17daf4afa4d196a21e245c517b3c9d87539 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 16:35:27 +0200 Subject: [PATCH 072/128] Bump nf-wave@0.11.2 Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/changelog.txt | 4 ++++ plugins/nf-wave/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git 
a/plugins/nf-wave/changelog.txt b/plugins/nf-wave/changelog.txt index d9c4f7fbde..5d0b016b7c 100644 --- a/plugins/nf-wave/changelog.txt +++ b/plugins/nf-wave/changelog.txt @@ -1,5 +1,9 @@ nf-wave changelog ================== +0.11.2 - 17 Aug 2023 +- Use root user in Wave container based on micromamba (#4038) [ci fast] [a3a75ea2] +- Add 429 http status code to Wave retriable errors [8eb5f305] + 0.11.1 - 5 Aug 2023 - Improve Wave config logging [547fad62] - Increase Wave client max attempts [fe5dd497] diff --git a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF index 877822c1a2..143419776a 100644 --- a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.wave.plugin.WavePlugin Plugin-Id: nf-wave -Plugin-Version: 0.11.1 +Plugin-Version: 0.11.2 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 9a595895d035e66dbcd0fca0ee114a03f8f0416c Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 16:37:20 +0200 Subject: [PATCH 073/128] Update changelog [ci fast] Signed-off-by: Paolo Di Tommaso --- changelog.txt | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/changelog.txt b/changelog.txt index 9e2995e8f8..359db4221b 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,5 +1,34 @@ NEXTFLOW CHANGE-LOG =================== +23.08.1-edge - 17 Aug 2023 +- Add 429 http status code to Wave retriable errors [8eb5f305] +- Add resource labels support for Azure Batch (#4178) [7b5e50a1] +- Apply K8s Pod metadata to Job (#4057) [4d918627] +- Document error about trailing backslash with space (#4180) [245afa5d] +- Enable cloud cache based on environment variable (#4160) [a66b0e63] +- Escape semicolons in paths (#4193) [552501ce] +- FIx nested InvocationTargetException (#4192) [67980f19] +- Fix Execution should fail if report or timeline file 
already exists [b238d7e2] +- Fix Process hangs when using flatten and finish errorStrategy [d99b3432] +- Fix `workflow.container` map resolution (#4190) [96ab8a69] +- Fix checkpoint thread termination (#4166) [2b449daa] +- Fix env output when changing task workdir [8e4d7fed] +- Fix if-guard on log.trace in trask processor inner class [50f6f6d5] +- Fix typos in source code comments (#4173) [e78bc37e] +- Improve Conda build error report [7b19fb03] +- Improve handling of name-only container env variables [3051cd13] +- Minor changes [7e58c945] +- Remove dockerize launcher classpath file (#4191) [2bae5198] +- Remove lock file from cloudcache (#4167) [6e6ea579] +- Update AWS instructions for creating a custom AMI (#4174) [563bff13] +- Update changelog [98f88a50] +- Update tip about modifying maps (#4153) [30036dbf] +- Use root user in Wave container based on micromamba (#4038) [a3a75ea2] +- Bump nf-azure@1.3.0 [6670bb06] +- Bump nf-cloudcache@0.2.0 [fb8f6681] +- Bump nf-tower@1.6.1 [d06b8365] +- Bump nf-wave@0.11.2 [7555b17d] + 23.04.3 - 11 Aug 2023 - Increase Wave client max attempts [8c67610a] - Fix log typo [03e19ea2] @@ -51,7 +80,7 @@ NEXTFLOW CHANGE-LOG - Allow SLURM executor option `--mem-per-cpu` (#4023) [96c04e3b] - Allow disabling the Wave requirement when Fusion is enabled [9180d633] - Disable Singularity and Apptainer home mount by default (#4056) [a0ee4657] -- Document `NXF_WRAPPER_STAGE_FILE_THRESHOLD` environment variable (#4113) [ci skip] [bda47567] +- Document `NXF_WRAPPER_STAGE_FILE_THRESHOLD` environment variable (#4113) [bda47567] - Fix AzFileSystem retry policy [ba9b6d18] [c2f3cc96] - Fix Improve error message for invalid Azure URI [0f4d8867] - Fix Treat HTTP headers as case insensitive (#4116) [97fd3358] From 9f1b68900acb29098effd5a57998a15364958207 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 15:02:53 +0000 Subject: [PATCH 074/128] [release 23.08.1-edge] Update timestamp and build number [ci fast] --- VERSION | 2 +- 
docs/conf.py | 2 +- .../nextflow/src/main/resources/META-INF/plugins-info.txt | 8 ++++---- modules/nf-commons/src/main/nextflow/Const.groovy | 6 +++--- nextflow | 2 +- nextflow.md5 | 2 +- nextflow.sha1 | 2 +- nextflow.sha256 | 2 +- 8 files changed, 13 insertions(+), 13 deletions(-) diff --git a/VERSION b/VERSION index 867f7aa69d..ed8e6580ae 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -23.08.0-edge \ No newline at end of file +23.08.1-edge diff --git a/docs/conf.py b/docs/conf.py index b68f128789..dd9c2a42f6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,7 @@ # The short X.Y version. version = '23.08' # The full version, including alpha/beta/rc tags. -release = '23.08.0-edge' +release = '23.08.1-edge' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt index c7fc7ad006..60364143e3 100644 --- a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt +++ b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt @@ -1,9 +1,9 @@ nf-amazon@2.1.1 -nf-azure@1.2.0 -nf-cloudcache@0.1.0 +nf-azure@1.3.0 +nf-cloudcache@0.2.0 nf-codecommit@0.1.5 nf-console@1.0.6 nf-ga4gh@1.1.0 nf-google@1.8.1 -nf-tower@1.6.0 -nf-wave@0.11.1 \ No newline at end of file +nf-tower@1.6.1 +nf-wave@0.11.2 \ No newline at end of file diff --git a/modules/nf-commons/src/main/nextflow/Const.groovy b/modules/nf-commons/src/main/nextflow/Const.groovy index 23d624ff1a..38687fde5b 100644 --- a/modules/nf-commons/src/main/nextflow/Const.groovy +++ b/modules/nf-commons/src/main/nextflow/Const.groovy @@ -52,17 +52,17 @@ class Const { /** * The application version */ - static public final String APP_VER = "23.08.0-edge" + static public final String APP_VER = "23.08.1-edge" /** * The app build time as linux/unix timestamp */ - static public final long APP_TIMESTAMP = 1691246068029 + static 
public final long APP_TIMESTAMP = 1692284319999 /** * The app build number */ - static public final int APP_BUILDNUM = 5872 + static public final int APP_BUILDNUM = 5874 /** * The app build time string relative to UTC timezone diff --git a/nextflow b/nextflow index 7dd0470051..6e3931e648 100755 --- a/nextflow +++ b/nextflow @@ -15,7 +15,7 @@ # limitations under the License. [[ "$NXF_DEBUG" == 'x' ]] && set -x -NXF_VER=${NXF_VER:-'23.08.0-edge'} +NXF_VER=${NXF_VER:-'23.08.1-edge'} NXF_ORG=${NXF_ORG:-'nextflow-io'} NXF_HOME=${NXF_HOME:-$HOME/.nextflow} NXF_PROT=${NXF_PROT:-'https'} diff --git a/nextflow.md5 b/nextflow.md5 index 14f8bff303..5aa63f083a 100644 --- a/nextflow.md5 +++ b/nextflow.md5 @@ -1 +1 @@ -e84b9b8b1453bb81540b10e057e126ee +74e5cedd3fd06021c698fab73cf5f03e diff --git a/nextflow.sha1 b/nextflow.sha1 index aa412cac8b..d24bb70c95 100644 --- a/nextflow.sha1 +++ b/nextflow.sha1 @@ -1 +1 @@ -f70facf7083770f11d4d915c6f11502302a3aa4a +fbaa08de20ccc1743711f9d58fc2ada055d79f01 diff --git a/nextflow.sha256 b/nextflow.sha256 index 14fbf058ba..144ff2727b 100644 --- a/nextflow.sha256 +++ b/nextflow.sha256 @@ -1 +1 @@ -1448ebceec2213751c8846f0edc06a6791f2ff0bc996a7553fa7f94bb57c2606 +2b099e5febcc8236308be215936af5ac0af6d9c007e329247f25d4bd9a7cf444 From c30d52113db4f2dbc78ba845740cd285134b0159 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 17 Aug 2023 18:09:48 +0200 Subject: [PATCH 075/128] Fix security deps in nf-azure plugin Signed-off-by: Paolo Di Tommaso --- plugins/nf-azure/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/nf-azure/build.gradle b/plugins/nf-azure/build.gradle index 0868cdbdbf..ffcfd88869 100644 --- a/plugins/nf-azure/build.gradle +++ b/plugins/nf-azure/build.gradle @@ -52,6 +52,7 @@ dependencies { constraints { api 'net.minidev:json-smart:2.4.9' + api 'com.google.code.gson:gson:2.8.9' } testImplementation(testFixtures(project(":nextflow"))) From 8a4348939949ce5532179cc8067350c9c2c598ef Mon Sep 17 00:00:00 2001 
From: Paolo Di Tommaso Date: Sun, 20 Aug 2023 19:44:47 +0200 Subject: [PATCH 076/128] Add support for Wave native build for singularity Signed-off-by: Paolo Di Tommaso --- build.gradle | 2 + .../script/bundle/ResourcesBundle.groovy | 15 +- .../script/bundle/ResourcesBundleTest.groovy | 27 +- plugins/nf-wave/build.gradle | 1 + .../main/io/seqera/wave/config/CondaOpts.java | 41 -- .../main/io/seqera/wave/config/SpackOpts.java | 48 --- .../plugin/SubmitContainerTokenRequest.groovy | 5 + .../io/seqera/wave/plugin/WaveAssets.groovy | 9 +- .../io/seqera/wave/plugin/WaveClient.groovy | 91 +++-- .../resolver/WaveContainerResolver.groovy | 19 +- .../io/seqera/wave/util/DockerHelper.java | 256 ------------ .../io/seqera/wave/util/TemplateRenderer.java | 150 ------- .../templates/conda/dockerfile-conda-file.txt | 6 - .../conda/dockerfile-conda-packages.txt | 7 - .../templates/spack/dockerfile-spack-file.txt | 18 - .../seqera/wave/config/CondaOptsTest.groovy | 47 --- .../seqera/wave/config/SpackOptsTest.groovy | 48 --- .../seqera/wave/plugin/WaveClientTest.groovy | 174 +++++++- .../resolver/WaveContainerResolverTest.groovy | 25 +- .../seqera/wave/util/DockerHelperTest.groovy | 381 ------------------ .../wave/util/TemplateRendererTest.groovy | 202 ---------- 21 files changed, 296 insertions(+), 1276 deletions(-) delete mode 100644 plugins/nf-wave/src/main/io/seqera/wave/config/CondaOpts.java delete mode 100644 plugins/nf-wave/src/main/io/seqera/wave/config/SpackOpts.java delete mode 100644 plugins/nf-wave/src/main/io/seqera/wave/util/DockerHelper.java delete mode 100644 plugins/nf-wave/src/main/io/seqera/wave/util/TemplateRenderer.java delete mode 100644 plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt delete mode 100644 plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt delete mode 100644 plugins/nf-wave/src/resources/templates/spack/dockerfile-spack-file.txt delete mode 100644 
plugins/nf-wave/src/test/io/seqera/wave/config/CondaOptsTest.groovy delete mode 100644 plugins/nf-wave/src/test/io/seqera/wave/config/SpackOptsTest.groovy delete mode 100644 plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy delete mode 100644 plugins/nf-wave/src/test/io/seqera/wave/util/TemplateRendererTest.groovy diff --git a/build.gradle b/build.gradle index 82c8422e3d..2b8f83e687 100644 --- a/build.gradle +++ b/build.gradle @@ -82,6 +82,8 @@ allprojects { mavenCentral() maven { url 'https://repo.eclipse.org/content/groups/releases' } maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } + maven { url = "https://s3-eu-west-1.amazonaws.com/maven.seqera.io/releases" } + maven { url = "https://s3-eu-west-1.amazonaws.com/maven.seqera.io/snapshots" } } configurations { diff --git a/modules/nextflow/src/main/groovy/nextflow/script/bundle/ResourcesBundle.groovy b/modules/nextflow/src/main/groovy/nextflow/script/bundle/ResourcesBundle.groovy index a0f17094b0..6aa1ef9622 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/bundle/ResourcesBundle.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/bundle/ResourcesBundle.groovy @@ -45,13 +45,15 @@ class ResourcesBundle { private Path root private LinkedHashMap content = new LinkedHashMap<>(100) private Path dockerfile + private Path singularityfile private MemoryUnit maxFileSize = MAX_FILE_SIZE private MemoryUnit maxBundleSize = MAX_BUNDLE_SIZE private String baseDirectory ResourcesBundle(Path root) { this.root = root - this.dockerfile = dockefile0(root.resolveSibling('Dockerfile')) + this.dockerfile = pathIfExists0(root.resolveSibling('Dockerfile')) + this.singularityfile = pathIfExists0(root.resolveSibling('Singularityfile')) } ResourcesBundle withMaxFileSize(MemoryUnit mem) { @@ -68,7 +70,7 @@ class ResourcesBundle { Map content() { content } - static private Path dockefile0(Path path) { + static private Path pathIfExists0(Path path) { return path?.exists() ? 
path : null } @@ -100,6 +102,10 @@ class ResourcesBundle { return dockerfile } + Path getSingularityfile() { + return singularityfile + } + Set getPaths() { return new HashSet(content.values()) } @@ -125,7 +131,7 @@ class ResourcesBundle { } boolean asBoolean() { - return content.size() || dockerfile + return content.size() || dockerfile || singularityfile } /** @@ -189,6 +195,9 @@ class ResourcesBundle { if( dockerfile ) { allMeta.add(fileMeta(dockerfile.name, dockerfile)) } + if( singularityfile ) { + allMeta.add(fileMeta(singularityfile.name, singularityfile)) + } return CacheHelper.hasher(allMeta).hash().toString() } diff --git a/modules/nextflow/src/test/groovy/nextflow/script/bundle/ResourcesBundleTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/bundle/ResourcesBundleTest.groovy index 655dd82cd3..2ec7a41688 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/bundle/ResourcesBundleTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/bundle/ResourcesBundleTest.groovy @@ -106,12 +106,31 @@ class ResourcesBundleTest extends Specification { then: bundle.fingerprint() == '7b2200ff24230f76cea22e5eb15b1701' + } + + def 'should get singularityfile' () { + given: + def singularPath = folder.resolve('Singularityfile'); singularPath.text = "I'm the main file" + def bundlePath = folder.resolve('bundle') + and: + singularPath.setLastModified(LAST_MODIFIED) + singularPath.setPermissions(6,4,4) when: - // changing the last modified time, change the fingerprint - dockerPath.setLastModified(LAST_MODIFIED +100) + def bundle = ResourcesBundle.scan(bundlePath) then: - bundle.fingerprint() == '7b2200ff24230f76cea22e5eb15b1701' - + bundle.getSingularityfile() == singularPath + and: + bundle + !bundle.hasEntries() + and: + bundle.fingerprint() == '6933e9238f3363c8e013a35715fa0540' + + when: + // changing file permissions, change the fingerprint + singularPath.setPermissions(6,0,0) + then: + bundle.fingerprint() == 
'3ffe7f16cd5ae17e6ba7485e01972b20' + } def 'should check max file size'() { diff --git a/plugins/nf-wave/build.gradle b/plugins/nf-wave/build.gradle index 6c37eba4b1..61255db8e3 100644 --- a/plugins/nf-wave/build.gradle +++ b/plugins/nf-wave/build.gradle @@ -36,6 +36,7 @@ dependencies { api 'org.apache.commons:commons-lang3:3.12.0' api 'com.google.code.gson:gson:2.10.1' api 'org.yaml:snakeyaml:2.0' + api 'io.seqera:wave-utils:0.6.2' testImplementation(testFixtures(project(":nextflow"))) testImplementation "org.codehaus.groovy:groovy:3.0.18" diff --git a/plugins/nf-wave/src/main/io/seqera/wave/config/CondaOpts.java b/plugins/nf-wave/src/main/io/seqera/wave/config/CondaOpts.java deleted file mode 100644 index fe36f8aa8c..0000000000 --- a/plugins/nf-wave/src/main/io/seqera/wave/config/CondaOpts.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package io.seqera.wave.config; - -import java.util.List; -import java.util.Map; - -/** - * Conda build options - * - * @author Paolo Di Tommaso - */ -public class CondaOpts { - final public static String DEFAULT_MAMBA_IMAGE = "mambaorg/micromamba:1.4.9"; - - final public String mambaImage; - final public List commands; - final public String basePackages; - - public CondaOpts(Map opts) { - this.mambaImage = opts.containsKey("mambaImage") ? 
opts.get("mambaImage").toString(): DEFAULT_MAMBA_IMAGE; - this.commands = opts.containsKey("commands") ? (List)opts.get("commands") : null; - this.basePackages = (String)opts.get("basePackages"); - } - -} diff --git a/plugins/nf-wave/src/main/io/seqera/wave/config/SpackOpts.java b/plugins/nf-wave/src/main/io/seqera/wave/config/SpackOpts.java deleted file mode 100644 index 1ccabcf22e..0000000000 --- a/plugins/nf-wave/src/main/io/seqera/wave/config/SpackOpts.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package io.seqera.wave.config; - -import java.util.List; -import java.util.Map; - -/** - * Spack build options - * - * @author Marco De La Pierre - */ -public class SpackOpts { - - /** - * Custom Dockerfile `RUN` commands that can be used to customise the target container - */ - public final List commands; - - /** - * Spack packages that should be added to any Spack environment requested via Wave - */ - public final String basePackages; - - public SpackOpts() { - this(Map.of()); - } - public SpackOpts(Map opts) { - this.commands = opts.containsKey("commands") ? (List)opts.get("commands") : null; - this.basePackages = opts.containsKey("basePackages") ? 
opts.get("basePackages").toString() : null; - } - -} diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy index 875cdfe338..1a219ef79c 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy @@ -111,4 +111,9 @@ class SubmitContainerTokenRequest { */ boolean freeze + /** + * Specify the format of the container file + */ + String format + } diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveAssets.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveAssets.groovy index 4541bf72b0..8201cdafb9 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveAssets.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveAssets.groovy @@ -36,10 +36,11 @@ class WaveAssets { final String containerPlatform final ResourcesBundle moduleResources final ContainerConfig containerConfig - final String dockerFileContent + final String containerFile final Path condaFile final Path spackFile final ResourcesBundle projectResources + final boolean singularity static fromImage(String containerImage,String containerPlatform=null) { new WaveAssets(containerImage, containerPlatform) @@ -50,8 +51,8 @@ class WaveAssets { } String dockerFileEncoded() { - return dockerFileContent - ? dockerFileContent.bytes.encodeBase64() + return containerFile + ? 
containerFile.bytes.encodeBase64() : null } @@ -73,7 +74,7 @@ class WaveAssets { allMeta.add( this.containerImage ) allMeta.add( this.moduleResources?.fingerprint() ) allMeta.add( this.containerConfig?.fingerprint() ) - allMeta.add( this.dockerFileContent ) + allMeta.add( this.containerFile ) allMeta.add( this.condaFile?.text ) allMeta.add( this.spackFile?.text ) allMeta.add( this.projectResources?.fingerprint() ) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index d189704e86..bf87c02135 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -165,10 +165,10 @@ class WaveClient { containerConfig.prependLayer(makeLayer(assets.projectResources)) } - if( !assets.containerImage && !assets.dockerFileContent ) + if( !assets.containerImage && !assets.containerFile ) throw new IllegalArgumentException("Wave container request requires at least a image or container file to build") - if( assets.containerImage && assets.dockerFileContent ) + if( assets.containerImage && assets.containerFile ) throw new IllegalArgumentException("Wave container image and container file cannot be specified in the same request") return new SubmitContainerTokenRequest( @@ -182,7 +182,8 @@ class WaveClient { cacheRepository: config.cacheRepository(), timestamp: OffsetDateTime.now().toString(), fingerprint: assets.fingerprint(), - freeze: config.freezeMode() + freeze: config.freezeMode(), + format: assets.singularity ? 
'sif' : null ) } @@ -311,24 +312,29 @@ class WaveClient { } protected void checkConflicts(Map attrs, String name) { - if( attrs.dockerfile && attrs.conda ) { - throw new IllegalArgumentException("Process '${name}' declares both a 'conda' directive and a module bundle dockerfile that conflict each other") - } - if( attrs.container && attrs.dockerfile ) { - throw new IllegalArgumentException("Process '${name}' declares both a 'container' directive and a module bundle dockerfile that conflict each other") - } if( attrs.container && attrs.conda ) { throw new IllegalArgumentException("Process '${name}' declares both 'container' and 'conda' directives that conflict each other") } - if( attrs.dockerfile && attrs.spack ) { - throw new IllegalArgumentException("Process '${name}' declares both a 'spack' directive and a module bundle dockerfile that conflict each other") - } if( attrs.container && attrs.spack ) { throw new IllegalArgumentException("Process '${name}' declares both 'container' and 'spack' directives that conflict each other") } if( attrs.spack && attrs.conda ) { throw new IllegalArgumentException("Process '${name}' declares both 'spack' and 'conda' directives that conflict each other") } + checkConflicts0(attrs, name, 'dockerfile') + checkConflicts0(attrs, name, 'singularityfile') + } + + protected void checkConflicts0(Map attrs, String name, String fileType) { + if( attrs.get(fileType) && attrs.conda ) { + throw new IllegalArgumentException("Process '${name}' declares both a 'conda' directive and a module bundle $fileType that conflict each other") + } + if( attrs.container && attrs.get(fileType) ) { + throw new IllegalArgumentException("Process '${name}' declares both a 'container' directive and a module bundle $fileType that conflict each other") + } + if( attrs.get(fileType) && attrs.spack ) { + throw new IllegalArgumentException("Process '${name}' declares both a 'spack' directive and a module bundle $fileType that conflict each other") + } } Map 
resolveConflicts(Map attrs, List strategy) { @@ -341,6 +347,21 @@ class WaveClient { return result } + protected List patchStrategy(List strategy, boolean singularity) { + if( !singularity ) + return strategy + // when singularity is enabled, replaces `dockerfile` with `singularityfile` + // in the strategy if not specified explicitly + final p = strategy.indexOf('dockerfile') + if( p!=-1 && !strategy.contains('singularityfile') ) { + final result = new ArrayList(strategy) + result.remove(p) + result.add(p, 'singularityfile') + return Collections.unmodifiableList(result) + } + return strategy + } + static Architecture defaultArch() { try { return new Architecture(SysHelper.getArch()) @@ -352,7 +373,7 @@ class WaveClient { } @Memoized - WaveAssets resolveAssets(TaskRun task, String containerImage) { + WaveAssets resolveAssets(TaskRun task, String containerImage, boolean singularity) { // get the bundle final bundle = task.getModuleBundle() // get the Spack architecture @@ -367,49 +388,60 @@ class WaveClient { if( bundle!=null && bundle.dockerfile ) { attrs.dockerfile = bundle.dockerfile.text } + if( bundle!=null && bundle.singularityfile ) { + attrs.singularityfile = bundle.singularityfile.text + } // validate request attributes - if( config().strategy() ) - attrs = resolveConflicts(attrs, config().strategy()) + final strategy = config().strategy() + if( strategy ) + attrs = resolveConflicts(attrs, patchStrategy(strategy, singularity)) else checkConflicts(attrs, task.lazyName()) // resolve the wave assets - return resolveAssets0(attrs, bundle, dockerArch, spackArch) + return resolveAssets0(attrs, bundle, singularity, dockerArch, spackArch) } - protected WaveAssets resolveAssets0(Map attrs, ResourcesBundle bundle, String dockerArch, String spackArch) { + protected WaveAssets resolveAssets0(Map attrs, ResourcesBundle bundle, boolean singularity, String dockerArch, String spackArch) { - String dockerScript = attrs.dockerfile + final scriptType = singularity ? 
'singularityfile' : 'dockerfile' + String containerScript = attrs.get(scriptType) final containerImage = attrs.container /* - * If 'conda' directive is specified use it to create a Dockefile + * If 'conda' directive is specified use it to create a container file * to assemble the target container */ Path condaFile = null if( attrs.conda ) { - if( dockerScript ) - throw new IllegalArgumentException("Unexpected conda and dockerfile conflict while resolving wave container") + if( containerScript ) + throw new IllegalArgumentException("Unexpected conda and $scriptType conflict while resolving wave container") // map the recipe to a dockerfile if( isCondaLocalFile(attrs.conda) ) { condaFile = Path.of(attrs.conda) - dockerScript = condaFileToDockerFile(config.condaOpts()) + containerScript = singularity + ? condaFileToSingularityFile(config.condaOpts()) + : condaFileToDockerFile(config.condaOpts()) } // 'conda' attributes is resolved as the conda packages to be used else { - dockerScript = condaPackagesToDockerFile(attrs.conda, condaChannels, config.condaOpts()) + containerScript = singularity + ? 
condaPackagesToSingularityFile(attrs.conda, condaChannels, config.condaOpts()) + : condaPackagesToDockerFile(attrs.conda, condaChannels, config.condaOpts()) } } /* - * If 'spack' directive is specified use it to create a Dockefile + * If 'spack' directive is specified use it to create a container file * to assemble the target container */ Path spackFile = null if( attrs.spack ) { - if( dockerScript ) + if( singularity ) + throw new IllegalArgumentException("Wave containers do not support (yet) the resolution of Spack package with Singularity") + if( containerScript ) throw new IllegalArgumentException("Unexpected spack and dockerfile conflict while resolving wave container") if( isSpackFile(attrs.spack) ) { @@ -420,14 +452,14 @@ class WaveClient { // create a minimal spack file with package spec from user input spackFile = spackPackagesToSpackFile(attrs.spack, config.spackOpts()) } - dockerScript = spackFileToDockerFile(config.spackOpts()) + containerScript = spackFileToDockerFile(config.spackOpts()) } /* * The process should declare at least a container image name via 'container' directive * or a dockerfile file to build, otherwise there's no job to be done by wave */ - if( !dockerScript && !containerImage ) { + if( !containerScript && !containerImage ) { return null } @@ -451,10 +483,11 @@ class WaveClient { platform, bundle, containerConfig, - dockerScript, + containerScript, condaFile, spackFile, - projectRes) + projectRes, + singularity) } @Memoized diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/resolver/WaveContainerResolver.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/resolver/WaveContainerResolver.groovy index 643e8dc755..46b00c464c 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/resolver/WaveContainerResolver.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/resolver/WaveContainerResolver.groovy @@ -59,17 +59,19 @@ class WaveContainerResolver implements ContainerResolver { if( !client().enabled() ) return 
defaultResolver.resolveImage(task, imageName) + final freeze = client().config().freezeMode() + final engine= task.getContainerConfig().getEngine() + final nativeSingularityBuild = freeze && engine in SINGULARITY_LIKE if( !imageName ) { // when no image name is provided the module bundle should include a // Dockerfile or a Conda recipe or a Spack recipe to build // an image on-fly with an automatically assigned name - return waveContainer(task, null) + return waveContainer(task, null, nativeSingularityBuild) } - final engine= task.getContainerConfig().getEngine() if( engine in DOCKER_LIKE ) { final image = defaultResolver.resolveImage(task, imageName) - return waveContainer(task, image.target) + return waveContainer(task, image.target, false) } else if( engine in SINGULARITY_LIKE ) { // remove any `docker://` prefix if any @@ -80,8 +82,11 @@ class WaveContainerResolver implements ContainerResolver { return defaultResolver.resolveImage(task, imageName) } // fetch the wave container name - final image = waveContainer(task, imageName) - // then adapt it to singularity format + final image = waveContainer(task, imageName, nativeSingularityBuild) + // oras prefixed container are served directly + if( image && image.target.startsWith("oras://") ) + return image + // otherwise adapt it to singularity format return defaultResolver.resolveImage(task, image.target) } else @@ -102,9 +107,9 @@ class WaveContainerResolver implements ContainerResolver { * The container image name returned by the Wave backend or {@code null} * when the task does not request any container or dockerfile to build */ - protected ContainerInfo waveContainer(TaskRun task, String container) { + protected ContainerInfo waveContainer(TaskRun task, String container, boolean singularity) { validateContainerRepo(container) - final assets = client().resolveAssets(task, container) + final assets = client().resolveAssets(task, container, singularity) if( assets ) { return client().fetchContainerImage(assets) } 
diff --git a/plugins/nf-wave/src/main/io/seqera/wave/util/DockerHelper.java b/plugins/nf-wave/src/main/io/seqera/wave/util/DockerHelper.java deleted file mode 100644 index 33b844a3c8..0000000000 --- a/plugins/nf-wave/src/main/io/seqera/wave/util/DockerHelper.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -package io.seqera.wave.util; - -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import java.io.File; - -import io.seqera.wave.config.CondaOpts; -import io.seqera.wave.config.SpackOpts; -import org.apache.commons.lang3.StringUtils; -import org.yaml.snakeyaml.Yaml; - -/** - * Helper class to create Dockerfile for Conda and Spack package managers - * - * @author Paolo Di Tommaso - */ -public class DockerHelper { - - static public List spackPackagesToList(String packages) { - if( packages==null || packages.isEmpty() ) - return null; - final List entries = Arrays.asList(packages.split(" ")); - final List result = new ArrayList<>(); - List current = new ArrayList<>(); - for( String it : entries ) { - if( it==null || 
it.isEmpty() || it.isBlank() ) - continue; - if( !Character.isLetterOrDigit(it.charAt(0)) || it.contains("=") ) { - current.add(it); - } - else { - if( current.size()>0 ) - result.add(String.join(" ",current)); - current = new ArrayList<>(); - current.add(it); - } - } - // remaining entries - if( current.size()>0 ) - result.add(String.join(" ",current)); - return result; - } - - static public String spackPackagesToSpackYaml(String packages, SpackOpts opts) { - final List base = spackPackagesToList(opts.basePackages); - final List custom = spackPackagesToList(packages); - if( base==null && custom==null ) - return null; - - final List specs = new ArrayList<>(); - if( base!=null ) - specs.addAll(base); - if( custom!=null ) - specs.addAll(custom); - - final Map concretizer = new LinkedHashMap<>(); - concretizer.put("unify", true); - concretizer.put("reuse", false); - - final Map spack = new LinkedHashMap<>(); - spack.put("specs", specs); - spack.put("concretizer", concretizer); - - final Map root = new LinkedHashMap<>(); - root.put("spack", spack); - - return new Yaml().dump(root); - } - - static public Path spackPackagesToSpackFile(String packages, SpackOpts opts) { - final String yaml = spackPackagesToSpackYaml(packages, opts); - if( yaml==null || yaml.length()==0 ) - return null; - return toYamlFile(yaml); - } - - static private Path toYamlFile(String yaml) { - try { - final File tempFile = File.createTempFile("nf-spack", ".yaml"); - tempFile.deleteOnExit(); - final Path result = tempFile.toPath(); - Files.write(result, yaml.getBytes()); - return result; - } - catch (IOException e) { - throw new IllegalStateException("Unable to write temporary Spack environment file - Reason: " + e.getMessage(), e); - } - } - - static public String spackFileToDockerFile(SpackOpts opts) { - // create bindings - final Map binding = spackBinding(opts); - // final ignored variables - final List ignore = List.of("spack_runner_image"); - // return the template - return 
renderTemplate0("/templates/spack/dockerfile-spack-file.txt", binding, ignore); - } - - static private Map spackBinding(SpackOpts opts) { - final Map binding = new HashMap<>(); - binding.put("add_commands", joinCommands(opts.commands)); - return binding; - } - - static public String condaPackagesToDockerFile(String packages, List condaChannels, CondaOpts opts) { - final List channels0 = condaChannels!=null ? condaChannels : List.of(); - final String channelsOpts = channels0.stream().map(it -> "-c "+it).collect(Collectors.joining(" ")); - final String image = opts.mambaImage; - final String target = packages.startsWith("http://") || packages.startsWith("https://") - ? "-f " + packages - : packages; - final Map binding = new HashMap<>(); - binding.put("base_image", image); - binding.put("channel_opts", channelsOpts); - binding.put("target", target); - binding.put("base_packages", mambaInstallBasePackage0(opts.basePackages)); - - final String result = renderTemplate0("/templates/conda/dockerfile-conda-packages.txt", binding) ; - return addCommands(result, opts.commands); - } - - static public String condaFileToDockerFile(CondaOpts opts) { - // create the binding map - final Map binding = new HashMap<>(); - binding.put("base_image", opts.mambaImage); - binding.put("base_packages", mambaInstallBasePackage0(opts.basePackages)); - - final String result = renderTemplate0("/templates/conda/dockerfile-conda-file.txt", binding); - return addCommands(result, opts.commands); - } - - static private String renderTemplate0(String templatePath, Map binding) { - return renderTemplate0(templatePath, binding, List.of()); - } - - static private String renderTemplate0(String templatePath, Map binding, List ignore) { - final URL template = DockerHelper.class.getResource(templatePath); - if( template==null ) - throw new IllegalStateException(String.format("Unable to load template '%s' from classpath", templatePath)); - try { - final InputStream reader = template.openStream(); - return new 
TemplateRenderer() - .withIgnore(ignore) - .render(reader, binding); - } - catch (IOException e) { - throw new IllegalStateException(String.format("Unable to read classpath template '%s'", templatePath), e); - } - } - - private static String mambaInstallBasePackage0(String basePackages) { - return !StringUtils.isEmpty(basePackages) - ? String.format("&& micromamba install -y -n base %s \\", basePackages) - : null; - } - - static private String addCommands(String result, List commands) { - if( commands==null || commands.isEmpty() ) - return result; - for( String cmd : commands ) { - result += cmd + "\n"; - } - return result; - } - - static private String joinCommands(List commands) { - if( commands==null || commands.size()==0 ) - return null; - StringBuilder result = new StringBuilder(); - for( String cmd : commands ) { - if( result.length()>0 ) - result.append("\n"); - result.append(cmd); - } - return result.toString(); - } - - public static Path addPackagesToSpackFile(String spackFile, SpackOpts opts) { - // Case A - both empty, nothing to do - if( StringUtils.isEmpty(spackFile) && StringUtils.isEmpty(opts.basePackages) ) - return null; - - // Case B - the spack file is empty, but some base package are given - // create a spack file with those packages - if( StringUtils.isEmpty(spackFile) ) { - return spackPackagesToSpackFile(null, opts); - } - - final Path spackEnvPath = Path.of(spackFile); - - // make sure the file exists - if( !Files.exists(spackEnvPath) ) { - throw new IllegalArgumentException("The specific Spack environment file cannot be found: " + spackFile); - } - - // Case C - if not base packages are given just return the spack file as a path - if( StringUtils.isEmpty(opts.basePackages) ) { - return spackEnvPath; - } - - // Case D - last case, both spack file and base packages are specified - // => parse the spack file yaml, add the base packages to it - final Yaml yaml = new Yaml(); - try { - // 1. 
parse the file - Map data = yaml.load(new FileReader(spackFile)); - // 2. parse the base packages - final List base = spackPackagesToList(opts.basePackages); - // 3. append to the specs - Map spack = (Map) data.get("spack"); - if( spack==null ) { - throw new IllegalArgumentException("The specified Spack environment file does not contain a root entry 'spack:' - offending file path: " + spackFile); - } - List specs = (List)spack.get("specs"); - if( specs==null ) { - specs = new ArrayList<>(); - spack.put("specs", specs); - } - specs.addAll(base); - // 5. return it as a new temp file - return toYamlFile( yaml.dump(data) ); - } - catch (FileNotFoundException e) { - throw new IllegalArgumentException("The specific Spack environment file cannot be found: " + spackFile, e); - } - } -} diff --git a/plugins/nf-wave/src/main/io/seqera/wave/util/TemplateRenderer.java b/plugins/nf-wave/src/main/io/seqera/wave/util/TemplateRenderer.java deleted file mode 100644 index af9d4b8f95..0000000000 --- a/plugins/nf-wave/src/main/io/seqera/wave/util/TemplateRenderer.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package io.seqera.wave.util; - -import java.io.InputStream; -import java.util.List; -import java.util.Map; -import java.util.Scanner; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Template rendering helper - * - * @author Paolo Di Tommaso - */ -public class TemplateRenderer { - - private static final Pattern PATTERN = Pattern.compile("\\{\\{([^}]+)}}"); - - private static final Pattern VAR1 = Pattern.compile("(\\s*)\\{\\{([\\d\\w_-]+)}}(\\s*$)"); - private static final Pattern VAR2 = Pattern.compile("(? ignoreNames = List.of(); - - public TemplateRenderer withIgnore(String... names) { - return withIgnore(List.of(names)); - } - - public TemplateRenderer withIgnore(List names) { - if( names!=null ) { - ignoreNames = List.copyOf(names); - } - return this; - } - - public String render(InputStream template, Map binding) { - String str = new Scanner(template).useDelimiter("\\A").next(); - return render(str, binding); - } - - public String render(String template, Map binding) { - final String[] lines = template.split("(?<=\n)"); - final StringBuilder result = new StringBuilder(); - for( String it : lines ) { - if( it==null || it.startsWith("##")) - continue; - final String resolved = replace0(it, binding); - if( resolved!=null ) - result.append(resolved); - } - return result.toString(); - } - - /** - * Simple template helper class replacing all variable enclosed by {{..}} - * with the corresponding variable specified in a map object - * - * @param template The template string - * @param binding The binding {@link Map} - * @return The templated having the variables replaced with the corresponding value - */ - String replace1(CharSequence template, Map binding) { - Matcher matcher = PATTERN.matcher(template); - - // Loop through each matched variable placeholder - StringBuilder builder = new StringBuilder(); - boolean isNull=false; - while (matcher.find()) { - String variable = matcher.group(1); - - // Check if the variable exists 
in the values map - if (binding.containsKey(variable)) { - Object value = binding.get(variable); - String str = value!=null ? value.toString() : ""; - isNull |= value==null; - matcher.appendReplacement(builder, str); - } - else if( !ignoreNames.contains(variable) ) { - throw new IllegalArgumentException(String.format("Unable to resolve template variable: {{%s}}", variable)); - } - } - matcher.appendTail(builder); - - final String result = builder.toString(); - return !isNull || !result.isBlank() ? result : null; - } - - String replace0(String line, Map binding) { - if( line==null || line.length()==0 ) - return line; - - Matcher matcher = VAR1.matcher(line); - if( matcher.matches() ) { - final String name = matcher.group(2); - if( ignoreNames.contains(name) ) - return line; - if( !binding.containsKey(name) ) - throw new IllegalArgumentException("Missing template key: "+name); - final String prefix = matcher.group(1); - final String value = binding.get(name); - if( value==null ) - return null; // <-- return null to skip this line - - final StringBuilder result = new StringBuilder(); - final String[] multi = value.split("(?<=\n)"); - for (String s : multi) { - result.append(prefix); - result.append(s); - } - result.append( matcher.group(3) ); - return result.toString(); - } - - final StringBuilder result = new StringBuilder(); - while( (matcher=VAR2.matcher(line)).find() ) { - final String name = matcher.group(1); - if( !binding.containsKey(name) && !ignoreNames.contains(name)) { - throw new IllegalArgumentException("Missing template key: "+name); - } - final String value = !ignoreNames.contains(name) - ? (binding.get(name)!=null ? 
binding.get(name) : "") - : "{{"+name+"}}"; - final int p = matcher.start(1); - final int q = matcher.end(1); - - result.append(line.substring(0,p-2)); - result.append(value); - line = line.substring(q+2); - } - result.append(line); - return result.toString(); - } - -} diff --git a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt b/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt deleted file mode 100644 index d9be54e326..0000000000 --- a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-file.txt +++ /dev/null @@ -1,6 +0,0 @@ -FROM {{base_image}} -COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml -RUN micromamba install -y -n base -f /tmp/conda.yml \ - {{base_packages}} - && micromamba clean -a -y -USER root diff --git a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt b/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt deleted file mode 100644 index 67e01b4378..0000000000 --- a/plugins/nf-wave/src/resources/templates/conda/dockerfile-conda-packages.txt +++ /dev/null @@ -1,7 +0,0 @@ -FROM {{base_image}} -RUN \ - micromamba install -y -n base {{channel_opts}} \ - {{target}} \ - {{base_packages}} - && micromamba clean -a -y -USER root diff --git a/plugins/nf-wave/src/resources/templates/spack/dockerfile-spack-file.txt b/plugins/nf-wave/src/resources/templates/spack/dockerfile-spack-file.txt deleted file mode 100644 index bf6e436b15..0000000000 --- a/plugins/nf-wave/src/resources/templates/spack/dockerfile-spack-file.txt +++ /dev/null @@ -1,18 +0,0 @@ -# Runner image -FROM {{spack_runner_image}} - -COPY --from=builder /opt/spack-env /opt/spack-env -COPY --from=builder /opt/software /opt/software -COPY --from=builder /opt/._view /opt/._view - -# Entrypoint for Singularity -RUN mkdir -p /.singularity.d/env && \ - cp -p /opt/spack-env/z10_spack_environment.sh /.singularity.d/env/91-environment.sh -# Entrypoint for Docker -RUN echo "#!/usr/bin/env 
bash\n\nset -ef -o pipefail\nsource /opt/spack-env/z10_spack_environment.sh\nexec \"\$@\"" \ - >/opt/spack-env/spack_docker_entrypoint.sh && chmod a+x /opt/spack-env/spack_docker_entrypoint.sh - -{{add_commands}} - -ENTRYPOINT [ "/opt/spack-env/spack_docker_entrypoint.sh" ] -CMD [ "/bin/bash" ] diff --git a/plugins/nf-wave/src/test/io/seqera/wave/config/CondaOptsTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/config/CondaOptsTest.groovy deleted file mode 100644 index 055259888a..0000000000 --- a/plugins/nf-wave/src/test/io/seqera/wave/config/CondaOptsTest.groovy +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package io.seqera.wave.config - -import spock.lang.Specification - -/** - * - * @author Paolo Di Tommaso - */ -class CondaOptsTest extends Specification { - - def 'check conda options' () { - when: - def opts = new CondaOpts([:]) - then: - opts.mambaImage == CondaOpts.DEFAULT_MAMBA_IMAGE - !opts.basePackages - !opts.commands - - when: - opts = new CondaOpts([ - mambaImage:'foo:latest', - commands: ['this','that'], - basePackages: 'some::more-package' - ]) - then: - opts.mambaImage == 'foo:latest' - opts.basePackages == 'some::more-package' - opts.commands == ['this','that'] - } -} diff --git a/plugins/nf-wave/src/test/io/seqera/wave/config/SpackOptsTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/config/SpackOptsTest.groovy deleted file mode 100644 index f3e5f5a955..0000000000 --- a/plugins/nf-wave/src/test/io/seqera/wave/config/SpackOptsTest.groovy +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package io.seqera.wave.config - -import spock.lang.Specification - -/** - * - * @author Paolo Di Tommaso - */ -class SpackOptsTest extends Specification { - - def 'check spack default options' () { - given: - def opts = new SpackOpts() - expect: - opts.commands == null - opts.basePackages == null - } - - def 'check spack custom opts' () { - given: - def opts = new SpackOpts([ - basePackages: 'foo bar', - commands: ['run','--this','--that'] - ]) - - expect: - opts.commands == ['run','--this','--that'] - and: - opts.basePackages == 'foo bar' - } -} diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index 8e5bcb31df..6b6446cc0a 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -231,6 +231,33 @@ class WaveClientTest extends Specification { !req.containerConfig.layers } + def 'should create request object with singularityfile' () { + given: + def session = Mock(Session) { getConfig() >> [:]} + def SINGULARITY_FILE = 'From foo:latest' + def wave = new WaveClient(session) + and: + def assets = new WaveAssets(null, + 'linux/amd64', + null, + null, + SINGULARITY_FILE, + null, + null, + null, + true) + when: + def req = wave.makeRequest(assets) + then: + !req.containerImage + new String(req.containerFile.decodeBase64()) == SINGULARITY_FILE + !req.condaFile + !req.spackFile + !req.containerConfig.layers + and: + req.format == 'sif' + } + def 'should create request object with build and cache repos' () { given: def session = Mock(Session) { getConfig() >> [wave:[build:[repository:'some/repo',cacheRepository:'some/cache']]]} @@ -347,11 +374,11 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, IMAGE) + def assets = client.resolveAssets(task, IMAGE, false) then: 
assets.containerImage == IMAGE !assets.moduleResources - !assets.dockerFileContent + !assets.containerFile !assets.containerConfig !assets.condaFile !assets.spackFile @@ -369,12 +396,12 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, IMAGE) + def assets = client.resolveAssets(task, IMAGE, false) then: assets.containerImage == IMAGE assets.containerPlatform == 'linux/arm64' !assets.moduleResources - !assets.dockerFileContent + !assets.containerFile !assets.containerConfig !assets.condaFile !assets.spackFile @@ -392,11 +419,11 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, IMAGE) + def assets = client.resolveAssets(task, IMAGE, false) then: assets.containerImage == IMAGE assets.moduleResources == BUNDLE - !assets.dockerFileContent + !assets.containerFile !assets.containerConfig !assets.condaFile !assets.spackFile @@ -416,7 +443,7 @@ class WaveClientTest extends Specification { WaveClient client = Spy(WaveClient, constructorArgs:[session]) when: - def assets = client.resolveAssets(task, IMAGE) + def assets = client.resolveAssets(task, IMAGE, false) then: client.resolveContainerConfig(ARCH) >> CONTAINER_CONFIG and: @@ -424,7 +451,7 @@ class WaveClientTest extends Specification { assets.moduleResources == BUNDLE assets.containerConfig == CONTAINER_CONFIG and: - !assets.dockerFileContent + !assets.containerFile !assets.condaFile !assets.spackFile !assets.projectResources @@ -445,9 +472,9 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, null) + def assets = client.resolveAssets(task, null, false) then: - assets.dockerFileContent == 'FROM foo\nRUN this/that' + assets.containerFile == 'FROM foo\nRUN this/that' assets.moduleResources == BUNDLE !assets.containerImage !assets.containerConfig @@ -468,9 +495,9 @@ class 
WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, null) + def assets = client.resolveAssets(task, null, false) then: - assets.dockerFileContent == '''\ + assets.containerFile == '''\ FROM mambaorg/micromamba:1.4.9 RUN \\ micromamba install -y -n base -c conda-forge -c defaults \\ @@ -496,9 +523,9 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, null) + def assets = client.resolveAssets(task, null, false) then: - assets.dockerFileContent == '''\ + assets.containerFile == '''\ # Runner image FROM {{spack_runner_image}} @@ -538,9 +565,9 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, null) + def assets = client.resolveAssets(task, null, false) then: - assets.dockerFileContent == '''\ + assets.containerFile == '''\ FROM mambaorg/micromamba:1.4.9 COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml RUN micromamba install -y -n base -f /tmp/conda.yml \\ @@ -571,9 +598,9 @@ class WaveClientTest extends Specification { def client = new WaveClient(session) when: - def assets = client.resolveAssets(task, null) + def assets = client.resolveAssets(task, null, false) then: - assets.dockerFileContent == '''\ + assets.containerFile == '''\ # Runner image FROM {{spack_runner_image}} @@ -605,6 +632,78 @@ class WaveClientTest extends Specification { folder?.deleteDir() } + // ==== singularity native build + conda ==== + + def 'should create asset with conda recipe and singularity native build' () { + given: + def session = Mock(Session) { getConfig() >> [:]} + and: + def task = Mock(TaskRun) {getConfig() >> [conda:'salmon=1.2.3'] } + and: + def client = new WaveClient(session) + + when: + def assets = client.resolveAssets(task, null, true) + then: + assets.containerFile == '''\ + BootStrap: docker + From: mambaorg/micromamba:1.4.9 + 
%post + micromamba install -y -n base -c conda-forge -c defaults \\ + salmon=1.2.3 \\ + && micromamba clean -a -y + %environment + export PATH="$MAMBA_ROOT_PREFIX/bin:$PATH" + '''.stripIndent() + and: + assets.singularity + and: + !assets.moduleResources + !assets.containerImage + !assets.containerConfig + !assets.condaFile + !assets.spackFile + !assets.projectResources + } + + def 'should create asset with conda file and singularity native build' () { + given: + def folder = Files.createTempDirectory('test') + def condaFile = folder.resolve('conda.yml'); condaFile.text = 'the-conda-recipe-here' + and: + def session = Mock(Session) { getConfig() >> [:]} + def task = Mock(TaskRun) {getConfig() >> [conda:condaFile.toString()] } + and: + def client = new WaveClient(session) + + when: + def assets = client.resolveAssets(task, null, true) + then: + assets.containerFile == '''\ + BootStrap: docker + From: mambaorg/micromamba:1.4.9 + %files + {{wave_context_dir}}/conda.yml /tmp/conda.yml + %post + micromamba install -y -n base -f /tmp/conda.yml \\ + && micromamba clean -a -y + %environment + export PATH="$MAMBA_ROOT_PREFIX/bin:$PATH" + '''.stripIndent() + and: + assets.condaFile == condaFile + assets.singularity + and: + !assets.moduleResources + !assets.containerImage + !assets.containerConfig + !assets.spackFile + !assets.projectResources + + cleanup: + folder?.deleteDir() + } + def 'should create assets with project resources' () { given: def MODULE_RES = Mock(ResourcesBundle) @@ -623,7 +722,7 @@ class WaveClientTest extends Specification { WaveClient wave = Spy(WaveClient, constructorArgs: [session]) when: - def assets = wave.resolveAssets(task, 'image:latest') + def assets = wave.resolveAssets(task, 'image:latest', false) then: 1 * wave.projectResources(BIN_DIR) >> PROJECT_RES and: @@ -666,6 +765,23 @@ class WaveClientTest extends Specification { result == [spack:'x'] } + def 'should patch strategy for singularity' () { + given: + def session = Mock(Session) { 
getConfig() >> [:]} + and: + def client = new WaveClient(session) + + expect: + client.patchStrategy(Collections.unmodifiableList(STRATEGY), SING) == EXPECTED + + where: + STRATEGY | SING | EXPECTED + ['conda','dockerfile', 'spack'] | false | ['conda','dockerfile', 'spack'] + ['conda','dockerfile', 'spack'] | true | ['conda','singularityfile', 'spack'] + ['conda','dockerfile', 'spack'] | true | ['conda','singularityfile', 'spack'] + ['conda','singularityfile','dockerfile', 'spack'] | true | ['conda','singularityfile','dockerfile', 'spack'] + } + def 'should check conflicts' () { given: def session = Mock(Session) { getConfig() >> [:]} @@ -713,6 +829,25 @@ class WaveClientTest extends Specification { e = thrown(IllegalArgumentException) e.message == "Process 'foo' declares both 'spack' and 'conda' directives that conflict each other" + // singularity file checks + when: + client.checkConflicts([conda:'this', singularityfile:'that'], 'foo') + then: + e = thrown(IllegalArgumentException) + e.message == "Process 'foo' declares both a 'conda' directive and a module bundle singularityfile that conflict each other" + + when: + client.checkConflicts([container:'this', singularityfile:'that'], 'foo') + then: + e = thrown(IllegalArgumentException) + e.message == "Process 'foo' declares both a 'container' directive and a module bundle singularityfile that conflict each other" + + when: + client.checkConflicts([spack:'this', singularityfile:'that'], 'foo') + then: + e = thrown(IllegalArgumentException) + e.message == "Process 'foo' declares both a 'spack' directive and a module bundle singularityfile that conflict each other" + } def 'should get project resource bundle' () { @@ -762,7 +897,6 @@ class WaveClientTest extends Specification { assert (it[0] as SubmitContainerTokenRequest).towerWorkspaceId == 123 assert (it[0] as SubmitContainerTokenRequest).towerEndpoint == 'http://foo.com' } - } def 'should send request with tower access token and refresh token' () { diff --git 
a/plugins/nf-wave/src/test/io/seqera/wave/plugin/resolver/WaveContainerResolverTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/resolver/WaveContainerResolverTest.groovy index 7825dd0540..da190d728e 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/resolver/WaveContainerResolverTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/resolver/WaveContainerResolverTest.groovy @@ -18,6 +18,7 @@ package io.seqera.wave.plugin.resolver import io.seqera.wave.plugin.WaveClient +import io.seqera.wave.plugin.config.WaveConfig import nextflow.container.ContainerConfig import nextflow.container.resolver.ContainerInfo import nextflow.container.resolver.DefaultContainerResolver @@ -35,6 +36,7 @@ class WaveContainerResolverTest extends Specification { given: def CONTAINER_NAME = "ubuntu:latest" def WAVE_CONTAINER = new ContainerInfo(CONTAINER_NAME, "wave.io/ubuntu:latest", "12345") + def ORAS_CONTAINER = new ContainerInfo(CONTAINER_NAME, "oras://wave.io/ubuntu:latest", "12345") def SINGULARITY_CONTAINER = new ContainerInfo('ubuntu:latest', '/some/singularity/ubuntu.img') and: def defaultResolver = Spy(DefaultContainerResolver) @@ -46,27 +48,40 @@ class WaveContainerResolverTest extends Specification { } } + // docker images when: def result = resolver.resolveImage(task, CONTAINER_NAME) then: - resolver.client() >> Mock(WaveClient) { enabled()>>true } + resolver.client() >> Mock(WaveClient) { enabled()>>true; config()>>Mock(WaveConfig) } _ * task.getContainerConfig() >> Mock(ContainerConfig) { getEngine()>>'docker' } and: - 1 * resolver.waveContainer(task, CONTAINER_NAME) >> WAVE_CONTAINER + 1 * resolver.waveContainer(task, CONTAINER_NAME, false) >> WAVE_CONTAINER and: result == WAVE_CONTAINER - + // singularity images when: result = resolver.resolveImage(task, CONTAINER_NAME) then: - resolver.client() >> Mock(WaveClient) { enabled()>>true } + resolver.client() >> Mock(WaveClient) { enabled()>>true; config()>>Mock(WaveConfig) } _ * 
task.getContainerConfig() >> Mock(ContainerConfig) { getEngine()>>'singularity' } and: - 1 * resolver.waveContainer(task, CONTAINER_NAME) >> WAVE_CONTAINER + 1 * resolver.waveContainer(task, CONTAINER_NAME, false) >> WAVE_CONTAINER 1 * defaultResolver.resolveImage(task, WAVE_CONTAINER.target) >> SINGULARITY_CONTAINER and: result == SINGULARITY_CONTAINER + + // singularity images + oras protocol + when: + result = resolver.resolveImage(task, CONTAINER_NAME) + then: + resolver.client() >> Mock(WaveClient) { enabled()>>true; config()>>Mock(WaveConfig) { freezeMode()>>true } } + _ * task.getContainerConfig() >> Mock(ContainerConfig) { getEngine()>>'singularity' } + and: + 1 * resolver.waveContainer(task, CONTAINER_NAME, true) >> ORAS_CONTAINER + 0 * defaultResolver.resolveImage(task, WAVE_CONTAINER.target) >> null + and: + result == ORAS_CONTAINER } def 'should validate container name' () { diff --git a/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy deleted file mode 100644 index 4112c96900..0000000000 --- a/plugins/nf-wave/src/test/io/seqera/wave/util/DockerHelperTest.groovy +++ /dev/null @@ -1,381 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package io.seqera.wave.util - -import java.nio.file.Files - -import spock.lang.Specification - -import io.seqera.wave.config.CondaOpts -import io.seqera.wave.config.SpackOpts - -/** - * - * @author Paolo Di Tommaso - */ -class DockerHelperTest extends Specification { - - def 'should create dockerfile content from conda file' () { - given: - def CONDA_OPTS = new CondaOpts([basePackages: 'conda-forge::procps-ng']) - - expect: - DockerHelper.condaFileToDockerFile(CONDA_OPTS)== '''\ - FROM mambaorg/micromamba:1.4.9 - COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml - RUN micromamba install -y -n base -f /tmp/conda.yml \\ - && micromamba install -y -n base conda-forge::procps-ng \\ - && micromamba clean -a -y - USER root - '''.stripIndent() - } - - def 'should create dockerfile content from conda file and base packages' () { - - expect: - DockerHelper.condaFileToDockerFile(new CondaOpts([:]))== '''\ - FROM mambaorg/micromamba:1.4.9 - COPY --chown=$MAMBA_USER:$MAMBA_USER conda.yml /tmp/conda.yml - RUN micromamba install -y -n base -f /tmp/conda.yml \\ - && micromamba clean -a -y - USER root - '''.stripIndent() - } - - - def 'should create dockerfile content from conda package' () { - given: - def PACKAGES = 'bwa=0.7.15 salmon=1.1.1' - def CHANNELS = ['conda-forge', 'defaults'] - expect: - DockerHelper.condaPackagesToDockerFile(PACKAGES, CHANNELS, new CondaOpts([:])) == '''\ - FROM mambaorg/micromamba:1.4.9 - RUN \\ - micromamba install -y -n base -c conda-forge -c defaults \\ - bwa=0.7.15 salmon=1.1.1 \\ - && micromamba clean -a -y - USER root - '''.stripIndent() - } - - def 'should create dockerfile with base packages' () { - given: - def CHANNELS = ['conda-forge', 'defaults'] - def CONDA_OPTS = new CondaOpts([basePackages: 'foo::one bar::two']) - def PACKAGES = 'bwa=0.7.15 salmon=1.1.1' - - expect: - DockerHelper.condaPackagesToDockerFile(PACKAGES, CHANNELS, CONDA_OPTS) == '''\ - FROM mambaorg/micromamba:1.4.9 - RUN \\ - micromamba install -y 
-n base -c conda-forge -c defaults \\ - bwa=0.7.15 salmon=1.1.1 \\ - && micromamba install -y -n base foo::one bar::two \\ - && micromamba clean -a -y - USER root - '''.stripIndent() - } - - def 'should create dockerfile content with custom channels' () { - given: - def CHANNELS = 'foo,bar'.tokenize(',') - def PACKAGES = 'bwa=0.7.15 salmon=1.1.1' - - expect: - DockerHelper.condaPackagesToDockerFile(PACKAGES, CHANNELS, new CondaOpts([:])) == '''\ - FROM mambaorg/micromamba:1.4.9 - RUN \\ - micromamba install -y -n base -c foo -c bar \\ - bwa=0.7.15 salmon=1.1.1 \\ - && micromamba clean -a -y - USER root - '''.stripIndent() - } - - def 'should create dockerfile content with custom conda config' () { - given: - def CHANNELS = ['conda-forge', 'defaults'] - def CONDA_OPTS = [mambaImage:'my-base:123', commands: ['USER my-user', 'RUN apt-get update -y && apt-get install -y nano']] - def PACKAGES = 'bwa=0.7.15 salmon=1.1.1' - - expect: - DockerHelper.condaPackagesToDockerFile(PACKAGES, CHANNELS, new CondaOpts(CONDA_OPTS)) == '''\ - FROM my-base:123 - RUN \\ - micromamba install -y -n base -c conda-forge -c defaults \\ - bwa=0.7.15 salmon=1.1.1 \\ - && micromamba clean -a -y - USER root - USER my-user - RUN apt-get update -y && apt-get install -y nano - '''.stripIndent() - } - - - def 'should create dockerfile content with remote conda lock' () { - given: - def CHANNELS = ['conda-forge', 'defaults'] - def OPTS = [mambaImage:'my-base:123', commands: ['USER my-user', 'RUN apt-get update -y && apt-get install -y procps']] - def PACKAGES = 'https://foo.com/some/conda-lock.yml' - - expect: - DockerHelper.condaPackagesToDockerFile(PACKAGES, CHANNELS, new CondaOpts(OPTS)) == '''\ - FROM my-base:123 - RUN \\ - micromamba install -y -n base -c conda-forge -c defaults \\ - -f https://foo.com/some/conda-lock.yml \\ - && micromamba clean -a -y - USER root - USER my-user - RUN apt-get update -y && apt-get install -y procps - '''.stripIndent() - } - - - def 'should create dockerfile 
content from spack package' () { - given: - def PACKAGES = 'bwa@0.7.15 salmon@1.1.1' - - expect: - DockerHelper.spackPackagesToSpackFile(PACKAGES, Mock(SpackOpts)).text == '''\ - spack: - specs: [bwa@0.7.15, salmon@1.1.1] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - - DockerHelper.spackFileToDockerFile(new SpackOpts())== '''\ - # Runner image - FROM {{spack_runner_image}} - - COPY --from=builder /opt/spack-env /opt/spack-env - COPY --from=builder /opt/software /opt/software - COPY --from=builder /opt/._view /opt/._view - - # Entrypoint for Singularity - RUN mkdir -p /.singularity.d/env && \\ - cp -p /opt/spack-env/z10_spack_environment.sh /.singularity.d/env/91-environment.sh - # Entrypoint for Docker - RUN echo "#!/usr/bin/env bash\\n\\nset -ef -o pipefail\\nsource /opt/spack-env/z10_spack_environment.sh\\nexec \\"\\\$@\\"" \\ - >/opt/spack-env/spack_docker_entrypoint.sh && chmod a+x /opt/spack-env/spack_docker_entrypoint.sh - - - ENTRYPOINT [ "/opt/spack-env/spack_docker_entrypoint.sh" ] - CMD [ "/bin/bash" ] - '''.stripIndent() - } - - def 'should create dockerfile content with custom spack config' () { - given: - def SPACK_OPTS = [ commands:['USER hola'] ] - def PACKAGES = 'bwa@0.7.15 salmon@1.1.1' - - expect: - DockerHelper.spackPackagesToSpackFile(PACKAGES, Mock(SpackOpts)).text == '''\ - spack: - specs: [bwa@0.7.15, salmon@1.1.1] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - - DockerHelper.spackFileToDockerFile(new SpackOpts(SPACK_OPTS))== '''\ - # Runner image - FROM {{spack_runner_image}} - - COPY --from=builder /opt/spack-env /opt/spack-env - COPY --from=builder /opt/software /opt/software - COPY --from=builder /opt/._view /opt/._view - - # Entrypoint for Singularity - RUN mkdir -p /.singularity.d/env && \\ - cp -p /opt/spack-env/z10_spack_environment.sh /.singularity.d/env/91-environment.sh - # Entrypoint for Docker - RUN echo "#!/usr/bin/env bash\\n\\nset -ef -o pipefail\\nsource 
/opt/spack-env/z10_spack_environment.sh\\nexec \\"\\\$@\\"" \\ - >/opt/spack-env/spack_docker_entrypoint.sh && chmod a+x /opt/spack-env/spack_docker_entrypoint.sh - - USER hola - - ENTRYPOINT [ "/opt/spack-env/spack_docker_entrypoint.sh" ] - CMD [ "/bin/bash" ] - '''.stripIndent() - } - - - def 'should create dockerfile content from spack file' () { - expect: - DockerHelper.spackFileToDockerFile(new SpackOpts())== '''\ - # Runner image - FROM {{spack_runner_image}} - - COPY --from=builder /opt/spack-env /opt/spack-env - COPY --from=builder /opt/software /opt/software - COPY --from=builder /opt/._view /opt/._view - - # Entrypoint for Singularity - RUN mkdir -p /.singularity.d/env && \\ - cp -p /opt/spack-env/z10_spack_environment.sh /.singularity.d/env/91-environment.sh - # Entrypoint for Docker - RUN echo "#!/usr/bin/env bash\\n\\nset -ef -o pipefail\\nsource /opt/spack-env/z10_spack_environment.sh\\nexec \\"\\\$@\\"" \\ - >/opt/spack-env/spack_docker_entrypoint.sh && chmod a+x /opt/spack-env/spack_docker_entrypoint.sh - - - ENTRYPOINT [ "/opt/spack-env/spack_docker_entrypoint.sh" ] - CMD [ "/bin/bash" ] - '''.stripIndent() - } - - def 'should return empty packages' () { - when: - def result = DockerHelper.spackPackagesToSpackYaml(null, new SpackOpts()) - then: - result == null - } - - def 'should convert a list of packages to a spack yaml' () { - when: - def result = DockerHelper.spackPackagesToSpackYaml('foo@1.2.3 x=one bar @2', new SpackOpts()) - then: - result == '''\ - spack: - specs: [foo@1.2.3 x=one, bar @2] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - } - - - def 'should add base packages' () { - when: - def result = DockerHelper.spackPackagesToSpackYaml(null, new SpackOpts(basePackages: 'foo bar')) - then: - result == '''\ - spack: - specs: [foo, bar] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - - when: - result = DockerHelper.spackPackagesToSpackYaml('this that @2', new SpackOpts(basePackages: 'foo bar @1')) 
- then: - result == '''\ - spack: - specs: [foo, bar @1, this, that @2] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - } - - def 'should convert a list of packages to a spack file' () { - when: - def result = DockerHelper.spackPackagesToSpackFile('foo@1.2.3 x=one bar @2', new SpackOpts()) - then: - result.text == '''\ - spack: - specs: [foo@1.2.3 x=one, bar @2] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - } - - def 'should parse a spack packages string' () { - expect: - DockerHelper.spackPackagesToList(PACKAGES) == EXPECTED - - where: - PACKAGES | EXPECTED - null | null - 'alpha' | ['alpha'] - 'alpha delta' | ['alpha', 'delta'] - 'alpha delta gamma' | ['alpha', 'delta', 'gamma'] - 'alpha 1aa' | ['alpha', '1aa'] - and: - 'alpha x=1' | ['alpha x=1'] - 'alpha x=1 delta' | ['alpha x=1', 'delta'] - 'alpha ^foo delta' | ['alpha ^foo', 'delta'] - and: - '^alpha ~beta foo' | ['^alpha ~beta', 'foo'] // <-- this should not be valid - - } - - def 'should merge spack file and base package' () { - given: - def folder = Files.createTempDirectory('test') - and: - def SPACK_FILE1 = folder.resolve('spack1.yaml') - SPACK_FILE1.text = '''\ - spack: - specs: [foo@1.2.3 x=one, bar @2] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - and: - def SPACK_FILE2 = folder.resolve('spack2.yaml') - SPACK_FILE2.text = '''\ - spack: - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - and: - def SPACK_FILE3 = folder.resolve('spack3.yaml') - SPACK_FILE3.text = '''\ - foo: - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - - when: - def result = DockerHelper.addPackagesToSpackFile(null, new SpackOpts()) - then: - result == null - - when: - result = DockerHelper.addPackagesToSpackFile(SPACK_FILE1.toString(), new SpackOpts()) - then: - result.toString() == SPACK_FILE1.toString() - - when: - result = DockerHelper.addPackagesToSpackFile(SPACK_FILE1.toString(), new SpackOpts(basePackages: 'alpha delta')) - 
then: - result.text == '''\ - spack: - specs: [foo@1.2.3 x=one, bar @2, alpha, delta] - concretizer: {unify: true, reuse: false} - '''.stripIndent(true) - - - when: - result = DockerHelper.addPackagesToSpackFile(SPACK_FILE2.toString(), new SpackOpts(basePackages: 'alpha delta')) - then: - result.text == '''\ - spack: - concretizer: {unify: true, reuse: false} - specs: [alpha, delta] - '''.stripIndent(true) - - when: - DockerHelper.addPackagesToSpackFile(SPACK_FILE3.toString(), new SpackOpts(basePackages: 'foo')) - then: - thrown(IllegalArgumentException) - - when: - DockerHelper.addPackagesToSpackFile('missing file', new SpackOpts(basePackages: 'foo')) - then: - thrown(IllegalArgumentException) - - when: - DockerHelper.addPackagesToSpackFile('missing file', new SpackOpts()) - then: - thrown(IllegalArgumentException) - - cleanup: - folder?.deleteDir() - } - -} diff --git a/plugins/nf-wave/src/test/io/seqera/wave/util/TemplateRendererTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/util/TemplateRendererTest.groovy deleted file mode 100644 index 392893e16c..0000000000 --- a/plugins/nf-wave/src/test/io/seqera/wave/util/TemplateRendererTest.groovy +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright 2013-2023, Seqera Labs - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -package io.seqera.wave.util - -import spock.lang.Specification - -/** - * - * @author Paolo Di Tommaso - */ -class TemplateRendererTest extends Specification { - - def 'should replace vars' () { - given: - def binding = [foo: 'Hello', bar: 'world'] - def render = new TemplateRenderer() - expect: - render.replace0('{{foo}}', binding) == 'Hello' - render.replace0('{{foo}} ', binding) == 'Hello ' - render.replace0('{{foo}}\n', binding) == 'Hello\n' - render.replace0(' {{foo}}', binding) == ' Hello' - render.replace0(' {{foo}}\n', binding) == ' Hello\n' - render.replace0(' ${foo}', binding) == ' ${foo}' - render.replace0(' ${{foo}}', binding) == ' ${{foo}}' - render.replace0('{{foo}}', [foo:'']) == '' - render.replace0('{{foo}}', [foo:null]) == null - render.replace0(' {{foo}}\n', [foo:null]) == null - render.replace0('', binding) == '' - render.replace0(null, binding) == null - - render.replace0('{{foo}} {{bar}}!', binding) == 'Hello world!' - render.replace0('abc {{foo}} pq {{bar}} xyz', binding) == 'abc Hello pq world xyz' - render.replace0('{{foo}} 123 {{bar}} xyz {{foo}}', binding) == 'Hello 123 world xyz Hello' - render.replace0('1{{foo}}2{{foo}}3', [foo:'']) == '123' - render.replace0('1{{foo}}2{{foo}}3', [foo:null]) == '123' - } - - def 'should throw an exception when missing variables' () { - when: - new TemplateRenderer().replace0('{{x1}}', [:]) - then: - def e = thrown(IllegalArgumentException) - e.message == 'Missing template key: x1' - - when: - new TemplateRenderer().replace0('{{foo}} {{x2}}', [foo:'ciao']) - then: - e = thrown(IllegalArgumentException) - e.message == 'Missing template key: x2' - } - - def 'should not throw an exception when missing variables' () { - when: - def result = new TemplateRenderer().withIgnore("x1").replace0('{{x1}}', [x1:'one']) - then: - result == '{{x1}}' - - when: - result = new TemplateRenderer().withIgnore('x1','x2').replace0('{{x1}} {{x2}}', [x1:'one']) - then: - result == '{{x1}} {{x2}}' - } - - def 'should 
render template' () { - given: - def template = "Hello, {{name}}!\n" + - "Today is {{day}} and the weather is {{weather}}."; - and: - def binding = new HashMap(); - binding.put("name", "John"); - binding.put("day", "Monday"); - binding.put("weather", "sunny"); - - when: - def renderer = new TemplateRenderer() - and: - def result = renderer.render(template, binding); - - then: - result == 'Hello, John!\nToday is Monday and the weather is sunny.' - } - - def 'should render a template with comment'() { - given: - def template = """\ - ## remove this comment - 1: {{alpha}} - 2: {{delta}} {{delta}} - 3: {{gamma}} {{gamma}} {{gamma}} - 4: end - """.stripIndent() - and: - def binding = new HashMap(); - binding.put("alpha", "one"); - binding.put("delta", "two"); - binding.put("gamma", "three"); - - when: - def renderer = new TemplateRenderer() - and: - def result = renderer.render(new ByteArrayInputStream(template.bytes), binding); - - then: - result == """\ - 1: one - 2: two two - 3: three three three - 4: end - """.stripIndent() - } - - - def 'should render a template using an input stream'() { - given: - def template = """\ - {{one}} - {{two}} - xxx - {{three}} - zzz - """.stripIndent() - and: - def binding = [ - one: '1', // this is rendered - two:null, // a line containing a null variable is not rendered - three:'' // empty value is considered ok - ] - - when: - def renderer = new TemplateRenderer() - and: - def result = renderer.render(new ByteArrayInputStream(template.bytes), binding); - - then: - result == """\ - 1 - xxx - - zzz - """.stripIndent() - } - - def 'should render template with indentations' () { - given: - def binding = [foo: 'Hello', bar: 'world'] - - when: - def renderer = new TemplateRenderer() - and: - def result = renderer.render('{{foo}}\n{{bar}}', binding) - then: - result == 'Hello\nworld' - - when: - def template = '''\ - {{foo}} - {{bar}} - '''.stripIndent() - result = renderer.render(template, [foo:'11\n22\n33', bar:'Hello world']) - then: - 
result == '''\ - 11 - 22 - 33 - Hello world - '''.stripIndent() - - - when: - template = '''\ - {{x1}} - {{x2}} - {{x3}} - '''.stripIndent() - result = renderer.render(template, [x1:'aa\nbb\n', x2:null, x3:'pp\nqq']) - then: - result == '''\ - aa - bb - - pp - qq - '''.stripIndent() - - } - -} From cb4112080994a2e749e17f5e0234c1e6d47b55e3 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Wed, 23 Aug 2023 09:01:52 +0200 Subject: [PATCH 077/128] Bump groovy 3.0.19 Signed-off-by: Paolo Di Tommaso --- build.gradle | 4 ++-- modules/nextflow/build.gradle | 14 +++++++------- modules/nf-commons/build.gradle | 4 ++-- modules/nf-httpfs/build.gradle | 6 +++--- plugins/nf-amazon/build.gradle | 4 ++-- plugins/nf-azure/build.gradle | 4 ++-- plugins/nf-cloudcache/build.gradle | 4 ++-- plugins/nf-codecommit/build.gradle | 4 ++-- plugins/nf-console/build.gradle | 8 ++++---- plugins/nf-ga4gh/build.gradle | 4 ++-- plugins/nf-google/build.gradle | 4 ++-- plugins/nf-tower/build.gradle | 4 ++-- plugins/nf-wave/build.gradle | 4 ++-- 13 files changed, 34 insertions(+), 34 deletions(-) diff --git a/build.gradle b/build.gradle index 2b8f83e687..438e51dacb 100644 --- a/build.gradle +++ b/build.gradle @@ -102,8 +102,8 @@ allprojects { // Documentation required libraries groovyDoc 'org.fusesource.jansi:jansi:1.11' - groovyDoc "org.codehaus.groovy:groovy-groovydoc:3.0.18" - groovyDoc "org.codehaus.groovy:groovy-ant:3.0.18" + groovyDoc "org.codehaus.groovy:groovy-groovydoc:3.0.19" + groovyDoc "org.codehaus.groovy:groovy-ant:3.0.19" } test { diff --git a/modules/nextflow/build.gradle b/modules/nextflow/build.gradle index 66f39fad28..4b468ecc14 100644 --- a/modules/nextflow/build.gradle +++ b/modules/nextflow/build.gradle @@ -17,12 +17,12 @@ compileGroovy { dependencies { api(project(':nf-commons')) api(project(':nf-httpfs')) - api "org.codehaus.groovy:groovy:3.0.18" - api "org.codehaus.groovy:groovy-nio:3.0.18" - api "org.codehaus.groovy:groovy-xml:3.0.18" - api 
"org.codehaus.groovy:groovy-json:3.0.18" - api "org.codehaus.groovy:groovy-templates:3.0.18" - api "org.codehaus.groovy:groovy-yaml:3.0.18" + api "org.codehaus.groovy:groovy:3.0.19" + api "org.codehaus.groovy:groovy-nio:3.0.19" + api "org.codehaus.groovy:groovy-xml:3.0.19" + api "org.codehaus.groovy:groovy-json:3.0.19" + api "org.codehaus.groovy:groovy-templates:3.0.19" + api "org.codehaus.groovy:groovy-yaml:3.0.19" api "org.slf4j:jcl-over-slf4j:2.0.7" api "org.slf4j:jul-to-slf4j:2.0.7" api "org.slf4j:log4j-over-slf4j:2.0.7" @@ -48,7 +48,7 @@ dependencies { testImplementation 'org.subethamail:subethasmtp:3.1.7' // test configuration - testFixturesApi ("org.codehaus.groovy:groovy-test:3.0.18") { exclude group: 'org.codehaus.groovy' } + testFixturesApi ("org.codehaus.groovy:groovy-test:3.0.19") { exclude group: 'org.codehaus.groovy' } testFixturesApi ("cglib:cglib-nodep:3.3.0") testFixturesApi ("org.objenesis:objenesis:3.2") testFixturesApi ("org.spockframework:spock-core:2.3-groovy-3.0") { exclude group: 'org.codehaus.groovy'; exclude group: 'net.bytebuddy' } diff --git a/modules/nf-commons/build.gradle b/modules/nf-commons/build.gradle index d42542be3b..e3a00932d9 100644 --- a/modules/nf-commons/build.gradle +++ b/modules/nf-commons/build.gradle @@ -26,8 +26,8 @@ sourceSets { dependencies { api "ch.qos.logback:logback-classic:1.4.6" - api "org.codehaus.groovy:groovy:3.0.18" - api "org.codehaus.groovy:groovy-nio:3.0.18" + api "org.codehaus.groovy:groovy:3.0.19" + api "org.codehaus.groovy:groovy-nio:3.0.19" api "commons-lang:commons-lang:2.6" api 'com.google.guava:guava:31.1-jre' api 'org.pf4j:pf4j:3.4.1' diff --git a/modules/nf-httpfs/build.gradle b/modules/nf-httpfs/build.gradle index e6e9d81ecd..9db2f0da7b 100644 --- a/modules/nf-httpfs/build.gradle +++ b/modules/nf-httpfs/build.gradle @@ -30,12 +30,12 @@ sourceSets { dependencies { api project(':nf-commons') api "ch.qos.logback:logback-classic:1.4.6" - api "org.codehaus.groovy:groovy:3.0.18" - api 
"org.codehaus.groovy:groovy-nio:3.0.18" + api "org.codehaus.groovy:groovy:3.0.19" + api "org.codehaus.groovy:groovy-nio:3.0.19" api("com.esotericsoftware.kryo:kryo:2.24.0") { exclude group: 'com.esotericsoftware.minlog', module: 'minlog' } /* testImplementation inherited from top gradle build file */ - testImplementation "org.codehaus.groovy:groovy-json:3.0.18" // needed by wiremock + testImplementation "org.codehaus.groovy:groovy-json:3.0.19" // needed by wiremock testImplementation ('com.github.tomakehurst:wiremock:1.57') { exclude module: 'groovy-all' } testImplementation ('com.github.tomjankes:wiremock-groovy:0.2.0') { exclude module: 'groovy-all' } diff --git a/plugins/nf-amazon/build.gradle b/plugins/nf-amazon/build.gradle index 8499e56160..fad4a22db4 100644 --- a/plugins/nf-amazon/build.gradle +++ b/plugins/nf-amazon/build.gradle @@ -56,6 +56,6 @@ dependencies { testImplementation(testFixtures(project(":nextflow"))) testImplementation project(':nextflow') - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-azure/build.gradle b/plugins/nf-azure/build.gradle index ffcfd88869..c74ef70fe8 100644 --- a/plugins/nf-azure/build.gradle +++ b/plugins/nf-azure/build.gradle @@ -57,6 +57,6 @@ dependencies { testImplementation(testFixtures(project(":nextflow"))) testImplementation project(':nextflow') - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-cloudcache/build.gradle b/plugins/nf-cloudcache/build.gradle index 977c686c08..d37b5354c9 100644 --- a/plugins/nf-cloudcache/build.gradle +++ b/plugins/nf-cloudcache/build.gradle @@ -35,7 +35,7 @@ dependencies { 
compileOnly 'org.pf4j:pf4j:3.4.1' testImplementation(testFixtures(project(":nextflow"))) - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-codecommit/build.gradle b/plugins/nf-codecommit/build.gradle index ba425ffebc..0137c25c5a 100644 --- a/plugins/nf-codecommit/build.gradle +++ b/plugins/nf-codecommit/build.gradle @@ -42,6 +42,6 @@ dependencies { testImplementation(testFixtures(project(":nextflow"))) testImplementation project(':nextflow') - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-console/build.gradle b/plugins/nf-console/build.gradle index 94a6973fb0..9b88a1daf2 100644 --- a/plugins/nf-console/build.gradle +++ b/plugins/nf-console/build.gradle @@ -37,11 +37,11 @@ dependencies { compileOnly 'org.slf4j:slf4j-api:2.0.7' compileOnly 'org.pf4j:pf4j:3.4.1' - api("org.codehaus.groovy:groovy-console:3.0.18") { transitive=false } - api("org.codehaus.groovy:groovy-swing:3.0.18") { transitive=false } + api("org.codehaus.groovy:groovy-console:3.0.19") { transitive=false } + api("org.codehaus.groovy:groovy-swing:3.0.19") { transitive=false } testImplementation project(':nextflow') - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-ga4gh/build.gradle b/plugins/nf-ga4gh/build.gradle index 96302cd9ac..1f87616d4d 100644 --- a/plugins/nf-ga4gh/build.gradle +++ b/plugins/nf-ga4gh/build.gradle @@ -45,6 +45,6 @@ dependencies { api 
'joda-time:joda-time:2.9.9' testImplementation(testFixtures(project(":nextflow"))) - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-google/build.gradle b/plugins/nf-google/build.gradle index c232099166..644aa9502e 100644 --- a/plugins/nf-google/build.gradle +++ b/plugins/nf-google/build.gradle @@ -46,8 +46,8 @@ dependencies { api 'com.google.code.gson:gson:2.10.1' testImplementation(testFixtures(project(":nextflow"))) - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } test { diff --git a/plugins/nf-tower/build.gradle b/plugins/nf-tower/build.gradle index b3b487be82..bb5980c1a8 100644 --- a/plugins/nf-tower/build.gradle +++ b/plugins/nf-tower/build.gradle @@ -37,6 +37,6 @@ dependencies { api "com.fasterxml.jackson.core:jackson-databind:2.12.7.1" testImplementation(testFixtures(project(":nextflow"))) - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } diff --git a/plugins/nf-wave/build.gradle b/plugins/nf-wave/build.gradle index 61255db8e3..0d161c146b 100644 --- a/plugins/nf-wave/build.gradle +++ b/plugins/nf-wave/build.gradle @@ -39,6 +39,6 @@ dependencies { api 'io.seqera:wave-utils:0.6.2' testImplementation(testFixtures(project(":nextflow"))) - testImplementation "org.codehaus.groovy:groovy:3.0.18" - testImplementation "org.codehaus.groovy:groovy-nio:3.0.18" + testImplementation "org.codehaus.groovy:groovy:3.0.19" + testImplementation "org.codehaus.groovy:groovy-nio:3.0.19" } From 
090c31cea264f9578596f08a7f4147fbb9002abc Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 24 Aug 2023 09:26:26 -0500 Subject: [PATCH 078/128] Add inspect command (#4069) This commit introduces a new nextflow command named `inspect`. The inspect command allows resolving a pipeline script or project reporting all container images used by the pipeline execution. The main advantage of this command over the existing `config` command is that it's able to resolve container names defined "dynamically" or Wave containers that are only determined at execution time. The command option `-concretise` when used along with the Wave freeze option allows building ahead all the container images required by the pipeline execution. Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso --- docs/cli.md | 67 +++++++++-- .../src/main/groovy/nextflow/Session.groovy | 5 + .../groovy/nextflow/cli/CmdInspect.groovy | 106 ++++++++++++++++ .../main/groovy/nextflow/cli/CmdRun.groovy | 7 +- .../main/groovy/nextflow/cli/Launcher.groovy | 3 +- .../inspect/ContainersInspector.groovy | 113 ++++++++++++++++++ .../nextflow/processor/TaskProcessor.groovy | 23 ++++ .../groovy/nextflow/script/BaseScript.groovy | 15 +++ .../nextflow/script/ScriptRunner.groovy | 12 +- .../groovy/nextflow/util/LoggerHelper.groovy | 24 ++-- .../groovy/nextflow/cli/CmdInspectTest.groovy | 54 +++++++++ .../inspect/ContainersInspectorTest.groovy | 109 +++++++++++++++++ .../processor/TaskProcessorTest.groovy | 17 +++ .../plugin/SubmitContainerTokenRequest.groovy | 5 + .../io/seqera/wave/plugin/WaveClient.groovy | 8 +- .../wave/plugin/config/WaveConfig.groovy | 4 + .../seqera/wave/plugin/WaveClientTest.groovy | 23 ++++ 17 files changed, 571 insertions(+), 24 deletions(-) create mode 100644 modules/nextflow/src/main/groovy/nextflow/cli/CmdInspect.groovy create mode 100644 modules/nextflow/src/main/groovy/nextflow/container/inspect/ContainersInspector.groovy create mode 100644 
modules/nextflow/src/test/groovy/nextflow/cli/CmdInspectTest.groovy create mode 100644 modules/nextflow/src/test/groovy/nextflow/container/inspect/ContainersInspectorTest.groovy diff --git a/docs/cli.md b/docs/cli.md index b101b568b9..cfc1c40ef2 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -643,6 +643,57 @@ $ nextflow info nextflow-io/hello v1.2 [t] ``` +### inspect + +:::{versionadded} 23.09.0-edge +::: + +Inspect process settings in a pipeline project. Currently only supports the `container` directive. + +**Usage** + +```console +$ nextflow inspect [options] [project] +``` + +**Description** + +The `inspect` command allows you to determine the container for each process in a pipeline without running the pipeline. It prints to stdout a listing of containers for each process, formatted either as JSON or Nextflow configuration. + +**Options** + +`-concretize` +: Build the container images resolved by the inspect command. + +`-format` (`json`) +: Inspect output format. Can be `json` or `config`. + +`-i, -ignore-errors` +: Ignore errors while inspecting the pipeline. + +`-params-file` +: Load script parameters from a JSON/YAML file. + +`-profile` +: Use the given configuration profile(s). + +`-r, revision` +: Revision of the project to inspect (either a git branch, tag or commit SHA number). + +**Examples** + +Get the list of containers used by a pipeline. + +```console +$ nextflow inspect nextflow-io/hello +``` + +Specify parameters as with the `run` command: + +```console +$ nextflow inspect main.nf --alpha 1 --beta foo +``` + ### kuberun Launch a Nextflow pipeline on a Kubernetes cluster. @@ -1070,7 +1121,7 @@ The `run` command is used to execute a local pipeline script or remote pipeline `-preview` : :::{versionadded} 22.06.0-edge ::: -: Run the workflow script skipping the execution of all processes +: Run the workflow script skipping the execution of all processes. `-process.=` : Set process config options. 
@@ -1106,7 +1157,7 @@ The `run` command is used to execute a local pipeline script or remote pipeline `-with-conda` : Use the specified Conda environment package or file (must end with `.yml` or `.yaml`) -`-with-dag` (`dag.dot`) +`-with-dag` (`dag-.dot`) : Create pipeline DAG file. `-with-docker` @@ -1118,7 +1169,7 @@ The `run` command is used to execute a local pipeline script or remote pipeline `-with-podman` : Enable process execution in a Podman container. -`-with-report` (`report.html`) +`-with-report` (`report-.html`) : Create workflow execution HTML report. `-with-singularity` @@ -1127,19 +1178,19 @@ The `run` command is used to execute a local pipeline script or remote pipeline `-with-spack` : Use the specified Spack environment package or file (must end with `.yaml`) -`-with-timeline` (`timeline.html`) +`-with-timeline` (`timeline-.html`) : Create workflow execution timeline. -`-with-tower` +`-with-tower` (`https://api.tower.nf`) : Monitor workflow execution with [Tower](https://cloud.tower.nf/). -`-with-trace` (`trace.txt`) +`-with-trace` (`trace-.txt`) : Create workflow execution trace file. -`-with-wave` +`-with-wave` (`https://wave.seqera.io`) : Enable the use of Wave containers. -`-with-weblog` +`-with-weblog` (`http://localhost`) : Send workflow status messages via HTTP to target URL. 
`-without-conda` diff --git a/modules/nextflow/src/main/groovy/nextflow/Session.groovy b/modules/nextflow/src/main/groovy/nextflow/Session.groovy index 03fc8698c6..c7df94b8c6 100644 --- a/modules/nextflow/src/main/groovy/nextflow/Session.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/Session.groovy @@ -178,6 +178,11 @@ class Session implements ISession { */ boolean disableRemoteBinDir + /** + * Suppress all output from pipeline script + */ + boolean quiet + /** * Local path where script generated classes are saved */ diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdInspect.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdInspect.groovy new file mode 100644 index 0000000000..f1f192d031 --- /dev/null +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdInspect.groovy @@ -0,0 +1,106 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package nextflow.cli + +import com.beust.jcommander.DynamicParameter +import com.beust.jcommander.Parameter +import com.beust.jcommander.Parameters +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import nextflow.Session +import nextflow.container.inspect.ContainersInspector +import nextflow.util.LoggerHelper +/** + * Implement `inspect` command + * + * @author Paolo Di Tommaso + */ +@Slf4j +@CompileStatic +@Parameters(commandDescription = "Inspect process settings in a pipeline project") +class CmdInspect extends CmdBase { + + @Override + String getName() { + return 'inspect' + } + + @Parameter(names=['-concretize'], description = "Build the container images resolved by the inspect command") + boolean concretize + + @Parameter(names=['-c','-config'], hidden = true) + List runConfig + + @Parameter(names=['-format'], description = "Inspect output format. Can be 'json' or 'config'") + String format = 'json' + + @Parameter(names=['-i','-ignore-errors'], description = 'Ignore errors while inspecting the pipeline') + boolean ignoreErrors + + @DynamicParameter(names = '--', hidden = true) + Map params = new LinkedHashMap<>() + + @Parameter(names='-params-file', description = 'Load script parameters from a JSON/YAML file') + String paramsFile + + @Parameter(names=['-profile'], description = 'Use the given configuration profile(s)') + String profile + + @Parameter(names=['-r','-revision'], description = 'Revision of the project to inspect (either a git branch, tag or commit SHA number)') + String revision + + @Parameter(description = 'Project name or repository url') + List args + + @Override + void run() { + // configure quiet mode + LoggerHelper.setQuiet(true) + // setup the target run command + final target = new CmdRun() + target.launcher = this.launcher + target.args = args + target.profile = this.profile + target.revision = this.revision + target.runConfig = this.runConfig + target.params = this.params + target.paramsFile = 
this.paramsFile + target.preview = true + target.previewAction = this.&applyInspect + target.ansiLog = false + // run it + target.run() + } + + protected void applyInspect(Session session) { + // disable wave await mode when running + if( session.config.wave instanceof Map ) + checkWaveConfig(session.config.wave as Map) + // run the inspector + new ContainersInspector(session.dag) + .withFormat(format) + .withIgnoreErrors(ignoreErrors) + .printContainers() + } + + protected void checkWaveConfig(Map wave) { + if( wave.enabled && wave.freeze ) + wave.dryRun = !concretize + } + +} diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy index d1bb17fe20..4023bb3adc 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy @@ -273,6 +273,11 @@ class CmdRun extends CmdBase implements HubOptions { : sysEnv.get('NXF_DISABLE_JOBS_CANCELLATION') as boolean } + /** + * Optional closure modelling an action to be invoked when the preview mode is enabled + */ + Closure previewAction + @Override String getName() { NAME } @@ -343,7 +348,7 @@ class CmdRun extends CmdBase implements HubOptions { // -- create a new runner instance final runner = new ScriptRunner(config) runner.setScript(scriptFile) - runner.setPreview(this.preview) + runner.setPreview(this.preview, previewAction) runner.session.profile = profile runner.session.commandLine = launcher.cliString runner.session.ansiLog = launcher.options.ansiLog diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy index 7889d4ffaf..c8986cc5d0 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy @@ -102,7 +102,8 @@ class Launcher { new CmdHelp(), new CmdSelfUpdate(), new CmdPlugins(), - new CmdPlugin() + new 
CmdPlugin(), + new CmdInspect() ] if(SecretsLoader.isEnabled()) diff --git a/modules/nextflow/src/main/groovy/nextflow/container/inspect/ContainersInspector.groovy b/modules/nextflow/src/main/groovy/nextflow/container/inspect/ContainersInspector.groovy new file mode 100644 index 0000000000..b4ee859969 --- /dev/null +++ b/modules/nextflow/src/main/groovy/nextflow/container/inspect/ContainersInspector.groovy @@ -0,0 +1,113 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package nextflow.container.inspect + +import groovy.json.JsonBuilder +import groovy.json.JsonOutput +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import nextflow.dag.DAG +import nextflow.exception.AbortOperationException +import org.codehaus.groovy.util.ListHashMap +/** + * Preview the list of containers used by a pipeline. 
+ * + * @author Ben Sherman + */ +@Slf4j +@CompileStatic +class ContainersInspector { + + private DAG dag + + private String format + + private boolean ignoreErrors + + ContainersInspector(DAG dag) { + this.dag = dag + } + + ContainersInspector withFormat(String format) { + if( format !in ['config', 'json'] ) + throw new AbortOperationException("Invalid format for containers inspect '${format}' -- should be 'config' or 'json'") + this.format = format + return this + } + + ContainersInspector withIgnoreErrors(boolean ignore) { + this.ignoreErrors = ignore + return this + } + + String renderContainers() { + log.debug "Rendering container preview" + final containers = getContainers() + if( format == 'config' ) + return renderConfig(containers) + if( format == 'json' ) + return renderJson(containers) + else + throw new IllegalStateException("Unknown containers preview format: $format") + } + + void printContainers() { + final result = renderContainers() + if( result ) + print result + } + + protected Map getContainers() { + final containers = new ListHashMap() + + for( def vertex : dag.vertices ) { + // skip nodes that are not processes + final process = vertex.process + if( !process ) + continue + + try { + // get container preview + containers[process.name] = process.createTaskPreview().getContainer() + } + catch( Exception e ) { + if( ignoreErrors ) + log.warn "Unable to inspect container for task `$process.name` - cause: ${e.message}" + else + throw e + } + } + + return containers + } + + protected String renderConfig(Map containers) { + final result = new StringBuilder() + for( Map.Entry entry : containers ) { + result.append("process { withName: '${entry.key}' { container = '${entry.value}' } }\n") + } + return result.toString() + } + + protected String renderJson(Map containers) { + final list = containers.collect( (k, v) -> [name: k, container: v] ) + final result = Map.of("processes", list) + return JsonOutput.prettyPrint(new JsonBuilder(result).toString()) + 
'\n' + } + +} diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy index 166082b9ea..bc3ae5e711 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy @@ -367,6 +367,29 @@ class TaskProcessor { boolean hasErrors() { errorCount>0 } + /** + * Create a "preview" for a task run. This method is only meant for the creation of "mock" task run + * to allow the access for the associated {@link TaskConfig} during a pipeline "preview" execution. + * + * Note this returns an "eventually" task configuration object. Also Inputs ane output parameters are NOT + * resolved by this method. + * + * @return A {@link TaskRun} object holding a reference to the associated {@link TaskConfig} + */ + TaskRun createTaskPreview() { + final task = new TaskRun( + processor: this, + type: scriptType, + config: config.createTaskConfig(), + context: new TaskContext(this) + ) + task.config.context = task.context + task.config.process = task.processor.name + task.config.executor = task.processor.executor.name + + return task + } + protected void checkWarn(String msg, Map opts=null) { if( NF.isStrictMode() ) throw new ProcessUnrecoverableException(msg) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy b/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy index 521d6dca6d..81c7891c6b 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/BaseScript.groovy @@ -207,6 +207,9 @@ abstract class BaseScript extends Script implements ExecutionContext { @Override void print(Object object) { + if( session?.quiet ) + return + if( session?.ansiLog ) log.info(object?.toString()) else @@ -215,6 +218,9 @@ abstract class BaseScript extends Script implements ExecutionContext { 
@Override void println() { + if( session?.quiet ) + return + if( session?.ansiLog ) log.info("") else @@ -223,6 +229,9 @@ abstract class BaseScript extends Script implements ExecutionContext { @Override void println(Object object) { + if( session?.quiet ) + return + if( session?.ansiLog ) log.info(object?.toString()) else @@ -231,6 +240,9 @@ abstract class BaseScript extends Script implements ExecutionContext { @Override void printf(String msg, Object arg) { + if( session?.quiet ) + return + if( session?.ansiLog ) log.info(String.printf(msg, arg)) else @@ -239,6 +251,9 @@ abstract class BaseScript extends Script implements ExecutionContext { @Override void printf(String msg, Object[] args) { + if( session?.quiet ) + return + if( session?.ansiLog ) log.info(String.printf(msg, args)) else diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy index 3cab2bddb0..7be978e903 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy @@ -62,6 +62,11 @@ class ScriptRunner { */ private boolean preview + /** + * Optional callback to perform a custom action on a preview event + */ + private Closure previewAction + /** * Instantiate the runner object creating a new session */ @@ -86,8 +91,9 @@ class ScriptRunner { return this } - ScriptRunner setPreview(boolean value ) { + ScriptRunner setPreview(boolean value, Closure action) { this.preview = value + this.previewAction = action return this } @@ -229,8 +235,10 @@ class ScriptRunner { } protected await() { - if( preview ) + if( preview ) { + previewAction?.call(session) return + } log.debug "> Awaiting termination " session.await() } diff --git a/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy b/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy index 8d995b29b6..1e657d6952 100644 --- 
a/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy @@ -89,6 +89,8 @@ class LoggerHelper { static private String logFileName + static private LoggerHelper INSTANCE + private CliOptions opts private boolean rolling = false @@ -153,7 +155,11 @@ class LoggerHelper { return false } - void setup() { + private void setQuiet0(boolean quiet) { + packages[MAIN_PACKAGE] = quiet ? Level.ERROR : Level.INFO + } + + LoggerHelper setup() { logFileName = opts.logFile ?: System.getenv('NXF_LOG_FILE') final boolean quiet = opts.quiet @@ -165,7 +171,7 @@ class LoggerHelper { root.detachAndStopAllAppenders() // -- define the console appender - packages[MAIN_PACKAGE] = quiet ? Level.WARN : Level.INFO + setQuiet0(quiet) // -- add the S3 uploader by default if( !containsClassName(debugConf,traceConf, 'nextflow.cloud.aws.nio') ) @@ -224,6 +230,8 @@ class LoggerHelper { if(!consoleAppender) logger.debug "Console appender: disabled" + + return this } protected Logger createLogger(String clazz, Level level ) { @@ -342,19 +350,17 @@ class LoggerHelper { */ static void configureLogger( Launcher launcher ) { - new LoggerHelper(launcher.options) + INSTANCE = new LoggerHelper(launcher.options) .setDaemon(launcher.isDaemon()) .setRolling(true) .setSyslog(launcher.options.syslog) .setup() } - static void configureLogger( final CliOptions opts, boolean daemon = false ) { - new LoggerHelper(opts) - .setDaemon(daemon) - .setRolling(true) - .setSyslog(opts.syslog) - .setup() + static setQuiet(boolean quiet) { + if( INSTANCE==null ) + throw new IllegalStateException("Method 'LoggerHelper.setQuiet' must be called after the invocation of 'LoggerHelper.configureLogger'") + INSTANCE.setQuiet0(quiet) } /* diff --git a/modules/nextflow/src/test/groovy/nextflow/cli/CmdInspectTest.groovy b/modules/nextflow/src/test/groovy/nextflow/cli/CmdInspectTest.groovy new file mode 100644 index 0000000000..c6ed93ca74 --- /dev/null +++ 
b/modules/nextflow/src/test/groovy/nextflow/cli/CmdInspectTest.groovy @@ -0,0 +1,54 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package nextflow.cli + + +import spock.lang.Specification +import spock.lang.Unroll +/** + * + * @author Paolo Di Tommaso + */ +class CmdInspectTest extends Specification { + + @Unroll + def 'should ask for confirmation' () { + given: + def cmd = Spy(new CmdInspect(concretize: CONCRETIZE)) + Map wave + + when: + wave = WAVE + cmd.checkWaveConfig(wave) + then: + wave == EXPECTED + + where: + WAVE | CONCRETIZE | EXPECTED + [:] | false | [:] + [:] | true | [:] + and: + [enabled:true] | false | [enabled:true] + [enabled:true] | true | [enabled:true] + and: + [enabled:true, freeze: true] | false | [enabled:true, freeze:true, dryRun: true] + [enabled:true, freeze: true] | true | [enabled:true, freeze:true, dryRun: false] + + } + +} diff --git a/modules/nextflow/src/test/groovy/nextflow/container/inspect/ContainersInspectorTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/inspect/ContainersInspectorTest.groovy new file mode 100644 index 0000000000..039f568ba0 --- /dev/null +++ b/modules/nextflow/src/test/groovy/nextflow/container/inspect/ContainersInspectorTest.groovy @@ -0,0 +1,109 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package nextflow.container.inspect + + +import nextflow.dag.DAG +import nextflow.processor.TaskProcessor +import nextflow.processor.TaskRun +import spock.lang.Specification +/** + * + * @author Ben Sherman + */ +class ContainersInspectorTest extends Specification { + + def makeVertex(DAG dag, String name, String container) { + final processor = Mock(TaskProcessor) + processor.name >> name + processor.createTaskPreview() >> Mock(TaskRun) { + getContainer() >> container + } + + final vertex = new DAG.Vertex(dag, DAG.Type.PROCESS) + vertex.process = processor + + return vertex + } + + def 'should get containers' () { + given: + def dag = Mock(DAG) + dag.vertices >> [ + makeVertex(dag, 'proc1', 'container1'), + makeVertex(dag, 'proc2', 'container2') + ] + + when: + def observer = new ContainersInspector(dag) + then: + observer.getContainers() == [ + 'proc1': 'container1', + 'proc2': 'container2', + ] + } + + def 'should render containers as json' () { + given: + def dag = Mock(DAG) + dag.vertices >> [ + makeVertex(dag, 'proc1', 'container1'), + makeVertex(dag, 'proc2', 'container2') + ] + + when: + def result = new ContainersInspector(dag) + .withFormat('json') + .renderContainers() + then: + result == '''\ + { + "processes": [ + { + "name": "proc2", + "container": "container2" + }, + { + "name": "proc1", + "container": "container1" + } + ] + } + '''.stripIndent(true) + } + + def 'should render containers as nextflow config' () { + given: + def dag = Mock(DAG) + dag.vertices >> [ + makeVertex(dag, 'proc1', 'container1'), + makeVertex(dag, 
'proc2', 'container2') + ] + + when: + def result = new ContainersInspector(dag) + .withFormat('config') + .renderContainers() + then: + result == '''\ + process { withName: 'proc2' { container = 'container2' } } + process { withName: 'proc1' { container = 'container1' } } + '''.stripIndent(true) + } + +} diff --git a/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy b/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy index ab2f3bc511..4a075faa8c 100644 --- a/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy @@ -40,6 +40,7 @@ import nextflow.script.bundle.ResourcesBundle import nextflow.script.params.FileOutParam import nextflow.util.ArrayBag import nextflow.util.CacheHelper +import nextflow.util.MemoryUnit import spock.lang.Specification import spock.lang.Unroll import test.TestHelper @@ -940,4 +941,20 @@ class TaskProcessorTest extends Specification { then: result == [ALPHA:'one', DELTA: "x=y", OMEGA: ''] } + + def 'should create a task preview' () { + given: + def config = new ProcessConfig([cpus: 10, memory: '100 GB']) + def EXEC = Mock(Executor) { getName()>>'exec-name'} + def BODY = Mock(BodyDef) { getType()>>ScriptType.SCRIPTLET } + def processor = new TaskProcessor(config: config, name: 'proc-name', executor: EXEC, taskBody: BODY) + + when: + def result = processor.createTaskPreview() + then: + result.config.process == 'proc-name' + result.config.executor == 'exec-name' + result.config.getCpus() == 10 + result.config.getMemory() == MemoryUnit.of('100 GB') + } } diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy index 1a219ef79c..4530911e0f 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy +++ 
b/plugins/nf-wave/src/main/io/seqera/wave/plugin/SubmitContainerTokenRequest.groovy @@ -116,4 +116,9 @@ class SubmitContainerTokenRequest { */ String format + /** + * When {@code true} build requests are carried out in dry-run mode. + */ + Boolean dryRun + } diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index bf87c02135..d01d26acc8 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -183,7 +183,8 @@ class WaveClient { timestamp: OffsetDateTime.now().toString(), fingerprint: assets.fingerprint(), freeze: config.freezeMode(), - format: assets.singularity ? 'sif' : null + format: assets.singularity ? 'sif' : null, + dryRun: config.dryRun() ) } @@ -206,7 +207,8 @@ class WaveClient { towerWorkspaceId: tower.workspaceId, towerEndpoint: tower.endpoint, workflowId: tower.workflowId, - freeze: config.freezeMode() + freeze: config.freezeMode(), + dryRun: config.dryRun(), ) return sendRequest(request) } @@ -507,7 +509,7 @@ class WaveClient { // get from cache or submit a new request final response = cache.get(key, { sendRequest(assets) } as Callable ) if( config.freezeMode() ) { - if( response.buildId ) { + if( response.buildId && !config.dryRun() ) { // await the image to be available when a new image is being built awaitImage(response.targetImage) } diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/WaveConfig.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/WaveConfig.groovy index 03c3926574..abdd4e2953 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/WaveConfig.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/config/WaveConfig.groovy @@ -48,11 +48,13 @@ class WaveConfig { final private RetryOpts retryOpts final private HttpOpts httpClientOpts final private Boolean freezeMode + final private Boolean dryRunMode 
WaveConfig(Map opts, Map env=System.getenv()) { this.enabled = opts.enabled this.endpoint = (opts.endpoint?.toString() ?: env.get('WAVE_API_ENDPOINT') ?: DEF_ENDPOINT)?.stripEnd('/') this.freezeMode = opts.freeze as Boolean + this.dryRunMode = opts.navigate('dryRun', false) this.containerConfigUrl = parseConfig(opts, env) this.tokensCacheMaxDuration = opts.navigate('tokens.cache.maxDuration', '30m') as Duration this.condaOpts = opts.navigate('build.conda', Collections.emptyMap()) as CondaOpts @@ -84,6 +86,8 @@ class WaveConfig { boolean freezeMode() { return this.freezeMode } + boolean dryRun() { return this.dryRunMode } + boolean bundleProjectResources() { bundleProjectResources } String buildRepository() { buildRepository } diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index 6b6446cc0a..7bb3aa9522 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -166,6 +166,7 @@ class WaveClientTest extends Specification { !req.spackFile !req.containerConfig.layers !req.freeze + !req.dryRun and: req.fingerprint == 'bd2cb4b32df41f2d290ce2366609f2ad' req.timestamp instanceof String @@ -193,6 +194,28 @@ class WaveClientTest extends Specification { req.timestamp instanceof String } + def 'should create request object with dry-run mode' () { + given: + def session = Mock(Session) { getConfig() >> [wave:[dryRun:true]]} + def IMAGE = 'foo:latest' + def wave = new WaveClient(session) + + when: + def req = wave.makeRequest(WaveAssets.fromImage(IMAGE)) + then: + req.containerImage == IMAGE + !req.containerPlatform + !req.containerFile + !req.condaFile + !req.spackFile + !req.containerConfig.layers + and: + req.dryRun + and: + req.fingerprint == 'bd2cb4b32df41f2d290ce2366609f2ad' + req.timestamp instanceof String + } + def 'should create request object and platform' () { 
given: def session = Mock(Session) { getConfig() >> [:]} From 465468b02de9bc08dfb4f531277d529e136e0ce8 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 24 Aug 2023 09:27:42 -0500 Subject: [PATCH 079/128] Fix printf command with negative exit code (#4213) Signed-off-by: Ben Sherman --- .../src/main/resources/nextflow/executor/command-run.txt | 2 +- .../nextflow/executor/test-bash-wrapper-with-trace.txt | 2 +- .../src/test/resources/nextflow/executor/test-bash-wrapper.txt | 2 +- .../nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/nextflow/src/main/resources/nextflow/executor/command-run.txt b/modules/nextflow/src/main/resources/nextflow/executor/command-run.txt index 1c261c937c..d1f695a022 100644 --- a/modules/nextflow/src/main/resources/nextflow/executor/command-run.txt +++ b/modules/nextflow/src/main/resources/nextflow/executor/command-run.txt @@ -100,7 +100,7 @@ nxf_fs_fcp() { on_exit() { exit_status=${nxf_main_ret:=$?} - printf $exit_status {{exit_file}} + printf -- $exit_status {{exit_file}} set +u {{cleanup_cmd}} {{sync_cmd}} diff --git a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt index b0672d36ad..0aaf49f410 100644 --- a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt +++ b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt @@ -269,7 +269,7 @@ nxf_fs_fcp() { on_exit() { exit_status=${nxf_main_ret:=$?} - printf $exit_status > {{folder}}/.exitcode + printf -- $exit_status > {{folder}}/.exitcode set +u sync || true exit $exit_status diff --git a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt index e855aa1a1e..3c380c4067 100644 --- 
a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt +++ b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper.txt @@ -80,7 +80,7 @@ nxf_fs_fcp() { on_exit() { exit_status=${nxf_main_ret:=$?} - printf $exit_status > {{folder}}/.exitcode + printf -- $exit_status > {{folder}}/.exitcode set +u sync || true exit $exit_status diff --git a/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt b/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt index 5cb73b9050..f1ac0b4251 100644 --- a/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt +++ b/plugins/nf-google/src/test/nextflow/cloud/google/lifesciences/bash-wrapper-gcp.txt @@ -167,7 +167,7 @@ nxf_fs_fcp() { on_exit() { exit_status=${nxf_main_ret:=$?} - printf $exit_status > {{folder}}/.exitcode + printf -- $exit_status > {{folder}}/.exitcode set +u {{sync_cmd}} exit $exit_status From b6721b7128c55df31613d0314a16ab46b0312cee Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 24 Aug 2023 16:42:23 +0200 Subject: [PATCH 080/128] Remove -dsl1 and -dsl1 rub cli options Signed-off-by: Paolo Di Tommaso --- .../src/main/groovy/nextflow/cli/CmdRun.groovy | 9 --------- .../main/groovy/nextflow/config/ConfigBuilder.groovy | 10 ---------- 2 files changed, 19 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy index 4023bb3adc..4da1a3b35c 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy @@ -246,12 +246,6 @@ class CmdRun extends CmdBase implements HubOptions { @Parameter(names=['-entry'], description = 'Entry workflow name to be executed', arity = 1) String entryName - @Parameter(names=['-dsl1'], description = 'Execute the workflow using DSL1 syntax') - boolean dsl1 - - @Parameter(names=['-dsl2'], description 
= 'Execute the workflow using DSL2 syntax') - boolean dsl2 - @Parameter(names=['-main-script'], description = 'The script file to be executed when launching a project directory or repository' ) String mainScript @@ -311,9 +305,6 @@ class CmdRun extends CmdBase implements HubOptions { if( offline && latest ) throw new AbortOperationException("Command line options `-latest` and `-offline` cannot be specified at the same time") - if( dsl1 && dsl2 ) - throw new AbortOperationException("Command line options `-dsl1` and `-dsl2` cannot be specified at the same time") - checkRunName() log.info "N E X T F L O W ~ version ${Const.APP_VER}" diff --git a/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy index 6cb39fe9d2..03bcf3c3b4 100644 --- a/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/config/ConfigBuilder.groovy @@ -709,16 +709,6 @@ class ConfigBuilder { config.fusion.enabled = cmdRun.withFusion == 'true' } - // -- nextflow setting - if( cmdRun.dsl1 || cmdRun.dsl2 ) { - if( config.nextflow !instanceof Map ) - config.nextflow = [:] - if( cmdRun.dsl1 ) - config.nextflow.enable.dsl = 1 - if( cmdRun.dsl2 ) - config.nextflow.enable.dsl = 2 - } - // -- add the command line parameters to the 'taskConfig' object if( cmdRun.hasParams() ) config.params = mergeMaps( (Map)config.params, cmdRun.parsedParams(configVars()), NF.strictMode ) From 80c5cb276c6d1857897b7883ef7f01deea8f635c Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 24 Aug 2023 23:41:14 +0200 Subject: [PATCH 081/128] Deprecated Wave report feature Signed-off-by: Paolo Di Tommaso --- .../src/main/io/seqera/wave/plugin/WaveObserver.groovy | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveObserver.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveObserver.groovy index 
1032c873d0..029e034546 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveObserver.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveObserver.groovy @@ -20,6 +20,7 @@ package io.seqera.wave.plugin import java.util.concurrent.ConcurrentHashMap import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j import io.seqera.wave.plugin.config.ReportOpts import nextflow.Session import nextflow.file.FileHelper @@ -30,6 +31,7 @@ import nextflow.trace.TraceRecord * * @author Paolo Di Tommaso */ +@Slf4j @CompileStatic class WaveObserver implements TraceObserver { @@ -41,6 +43,11 @@ class WaveObserver implements TraceObserver { this.client = new WaveClient(session) } + @Override + void onFlowCreate(Session session) { + log.warn "Wave report feature has been deprecated in favour of the new 'nextflow inspect' command" + } + protected void apply(TaskHandler handler) { final process = handler.task.getProcessor().getName() containers.computeIfAbsent(process, (String it) -> handler.task.getContainer()) From 736ab9bb8ea7d257491676fa3870c3c486f72ed0 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 26 Aug 2023 11:11:36 +0200 Subject: [PATCH 082/128] Improve Wave handing of Conda envs Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/build.gradle | 2 +- .../io/seqera/wave/plugin/WaveClient.groovy | 26 ++++-- .../seqera/wave/plugin/WaveClientTest.groovy | 92 ++++++++++++++++++- validation/wave-tests/example3/demo.nf | 12 +++ .../wave-tests/example3/nextflow.config | 13 +++ validation/wave-tests/example3/run.sh | 2 + validation/wave-tests/example4/demo.nf | 12 +++ validation/wave-tests/example4/run.sh | 2 + validation/wave.sh | 6 ++ 9 files changed, 157 insertions(+), 10 deletions(-) create mode 100644 validation/wave-tests/example3/demo.nf create mode 100644 validation/wave-tests/example3/nextflow.config create mode 100644 validation/wave-tests/example3/run.sh create mode 100644 validation/wave-tests/example4/demo.nf create mode 100644 
validation/wave-tests/example4/run.sh diff --git a/plugins/nf-wave/build.gradle b/plugins/nf-wave/build.gradle index 0d161c146b..957ac19c2c 100644 --- a/plugins/nf-wave/build.gradle +++ b/plugins/nf-wave/build.gradle @@ -36,7 +36,7 @@ dependencies { api 'org.apache.commons:commons-lang3:3.12.0' api 'com.google.code.gson:gson:2.10.1' api 'org.yaml:snakeyaml:2.0' - api 'io.seqera:wave-utils:0.6.2' + api 'io.seqera:wave-utils:0.7.0' testImplementation(testFixtures(project(":nextflow"))) testImplementation "org.codehaus.groovy:groovy:3.0.19" diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index d01d26acc8..f979da6d9f 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -421,17 +421,25 @@ class WaveClient { throw new IllegalArgumentException("Unexpected conda and $scriptType conflict while resolving wave container") // map the recipe to a dockerfile - if( isCondaLocalFile(attrs.conda) ) { - condaFile = Path.of(attrs.conda) + if( isCondaRemoteFile(attrs.conda) ) { containerScript = singularity - ? condaFileToSingularityFile(config.condaOpts()) - : condaFileToDockerFile(config.condaOpts()) + ? 
condaPackagesToSingularityFile(attrs.conda, condaChannels, config.condaOpts()) + : condaPackagesToDockerFile(attrs.conda, condaChannels, config.condaOpts()) } - // 'conda' attributes is resolved as the conda packages to be used else { + if( isCondaLocalFile(attrs.conda) ) { + // 'conda' attribute is the path to the local conda environment + // note: ignore the 'channels' attribute because they are supposed to be provided by the conda file + condaFile = condaFileFromPath(attrs.conda, null, config.condaOpts()) + } + else { + // 'conda' attributes is resolved as the conda packages to be used + condaFile = condaFileFromPackages(attrs.conda, condaChannels, config.condaOpts()) + } + // create the container file to build the container containerScript = singularity - ? condaPackagesToSingularityFile(attrs.conda, condaChannels, config.condaOpts()) - : condaPackagesToDockerFile(attrs.conda, condaChannels, config.condaOpts()) + ? condaFileToSingularityFile(config.condaOpts()) + : condaFileToDockerFile(config.condaOpts()) } } @@ -556,6 +564,10 @@ class WaveClient { return value.endsWith('.yaml') || value.endsWith('.yml') || value.endsWith('.txt') } + static protected boolean isCondaRemoteFile(String value) { + value.startsWith('http://') || value.startsWith('https://') + } + protected boolean isSpackFile(String value) { if( value.contains('\n') ) return false diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index 7bb3aa9522..6d36e5ffc6 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -517,6 +517,40 @@ class WaveClientTest extends Specification { and: def client = new WaveClient(session) + when: + def assets = client.resolveAssets(task, null, false) + then: + assets.containerFile == '''\ + FROM mambaorg/micromamba:1.4.9 + COPY --chown=$MAMBA_USER:$MAMBA_USER 
conda.yml /tmp/conda.yml + RUN micromamba install -y -n base -f /tmp/conda.yml \\ + && micromamba clean -a -y + USER root + '''.stripIndent() + and: + !assets.moduleResources + !assets.containerImage + !assets.containerConfig + !assets.spackFile + !assets.projectResources + and: + assets.condaFile.text == '''\ + channels: + - conda-forge + - defaults + dependencies: + - salmon=1.2.3 + '''.stripIndent(true) + } + + def 'should create asset with conda lock file' () { + given: + def session = Mock(Session) { getConfig() >> [:]} + and: + def task = Mock(TaskRun) {getConfig() >> [conda:'https://host.com/conda-lock.yml'] } + and: + def client = new WaveClient(session) + when: def assets = client.resolveAssets(task, null, false) then: @@ -524,7 +558,7 @@ class WaveClientTest extends Specification { FROM mambaorg/micromamba:1.4.9 RUN \\ micromamba install -y -n base -c conda-forge -c defaults \\ - salmon=1.2.3 \\ + -f https://host.com/conda-lock.yml \\ && micromamba clean -a -y USER root '''.stripIndent() @@ -665,6 +699,46 @@ class WaveClientTest extends Specification { and: def client = new WaveClient(session) + when: + def assets = client.resolveAssets(task, null, true) + then: + assets.containerFile == '''\ + BootStrap: docker + From: mambaorg/micromamba:1.4.9 + %files + {{wave_context_dir}}/conda.yml /tmp/conda.yml + %post + micromamba install -y -n base -f /tmp/conda.yml \\ + && micromamba clean -a -y + %environment + export PATH="$MAMBA_ROOT_PREFIX/bin:$PATH" + '''.stripIndent() + and: + assets.singularity + and: + !assets.moduleResources + !assets.containerImage + !assets.containerConfig + !assets.spackFile + !assets.projectResources + and: + assets.condaFile.text == '''\ + channels: + - conda-forge + - defaults + dependencies: + - salmon=1.2.3 + '''.stripIndent(true) + } + + def 'should create asset with conda remote lock file and singularity native build' () { + given: + def session = Mock(Session) { getConfig() >> [:]} + and: + def task = Mock(TaskRun) 
{getConfig() >> [conda:'https://host.com/lock-file.yaml'] } + and: + def client = new WaveClient(session) + when: def assets = client.resolveAssets(task, null, true) then: @@ -673,7 +747,7 @@ class WaveClientTest extends Specification { From: mambaorg/micromamba:1.4.9 %post micromamba install -y -n base -c conda-forge -c defaults \\ - salmon=1.2.3 \\ + -f https://host.com/lock-file.yaml \\ && micromamba clean -a -y %environment export PATH="$MAMBA_ROOT_PREFIX/bin:$PATH" @@ -1032,4 +1106,18 @@ class WaveClientTest extends Specification { 'http://foo.com' | false } + def 'should check is remote conda file' () { + expect: + WaveClient.isCondaRemoteFile(CONTENT) == EXPECTED + + where: + CONTENT | EXPECTED + 'foo' | false + 'foo.yml' | false + 'foo.txt' | false + 'foo\nbar.yml' | false + 'http://foo.com' | true + 'https://foo.com' | true + } + } diff --git a/validation/wave-tests/example3/demo.nf b/validation/wave-tests/example3/demo.nf new file mode 100644 index 0000000000..5f1e9be8ab --- /dev/null +++ b/validation/wave-tests/example3/demo.nf @@ -0,0 +1,12 @@ +process cow { + debug true + conda 'cowpy=1.1.5' + + ''' + echo cowpy 'Hello Spack' + ''' +} + +workflow { + cow() +} diff --git a/validation/wave-tests/example3/nextflow.config b/validation/wave-tests/example3/nextflow.config new file mode 100644 index 0000000000..38a0e07f6c --- /dev/null +++ b/validation/wave-tests/example3/nextflow.config @@ -0,0 +1,13 @@ +tower { + accessToken = "$TOWER_ACCESS_TOKEN" +} + +wave { + enabled = true + strategy = ['conda'] + build.repository = 'docker.io/pditommaso/wave-tests' +} + +docker { + enabled = true +} diff --git a/validation/wave-tests/example3/run.sh b/validation/wave-tests/example3/run.sh new file mode 100644 index 0000000000..66148af05b --- /dev/null +++ b/validation/wave-tests/example3/run.sh @@ -0,0 +1,2 @@ +$NXF_CMD run demo.nf -with-wave + diff --git a/validation/wave-tests/example4/demo.nf b/validation/wave-tests/example4/demo.nf new file mode 100644 index 
0000000000..3384b942cc --- /dev/null +++ b/validation/wave-tests/example4/demo.nf @@ -0,0 +1,12 @@ +process cow { + debug true + conda 'https://prefix.dev/envs/pditommaso/wave/conda-lock.yml' + + ''' + echo cowpy 'Hello Spack' + ''' +} + +workflow { + cow() +} diff --git a/validation/wave-tests/example4/run.sh b/validation/wave-tests/example4/run.sh new file mode 100644 index 0000000000..66148af05b --- /dev/null +++ b/validation/wave-tests/example4/run.sh @@ -0,0 +1,2 @@ +$NXF_CMD run demo.nf -with-wave + diff --git a/validation/wave.sh b/validation/wave.sh index 374e92d16b..6b8953a7f3 100644 --- a/validation/wave.sh +++ b/validation/wave.sh @@ -12,6 +12,12 @@ echo "Test Wave accessing private container repository" echo "Test Wave building a container" (cd wave-tests/example2; bash run.sh) +echo "Test Wave building from Conda package" +(cd wave-tests/example3; bash run.sh) + +echo "Test Wave building from Conda lock file" +(cd wave-tests/example4; bash run.sh) + echo "Test Wave running rnaseq-nf with Fusion on local executor" (cd wave-tests/example6; bash run.sh) From a60ef72b4acc30d2f6d51269f2bb08d75fabe119 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 26 Aug 2023 16:48:16 +0200 Subject: [PATCH 083/128] Fix Wave build for Singularity files Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/build.gradle | 2 +- .../src/test/io/seqera/wave/plugin/WaveClientTest.groovy | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/nf-wave/build.gradle b/plugins/nf-wave/build.gradle index 957ac19c2c..d3a604c19b 100644 --- a/plugins/nf-wave/build.gradle +++ b/plugins/nf-wave/build.gradle @@ -36,7 +36,7 @@ dependencies { api 'org.apache.commons:commons-lang3:3.12.0' api 'com.google.code.gson:gson:2.10.1' api 'org.yaml:snakeyaml:2.0' - api 'io.seqera:wave-utils:0.7.0' + api 'io.seqera:wave-utils:0.7.1' testImplementation(testFixtures(project(":nextflow"))) testImplementation "org.codehaus.groovy:groovy:3.0.19" diff --git 
a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index 6d36e5ffc6..cf19d9a8c2 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -706,9 +706,9 @@ class WaveClientTest extends Specification { BootStrap: docker From: mambaorg/micromamba:1.4.9 %files - {{wave_context_dir}}/conda.yml /tmp/conda.yml + {{wave_context_dir}}/conda.yml /scratch/conda.yml %post - micromamba install -y -n base -f /tmp/conda.yml \\ + micromamba install -y -n base -f /scratch/conda.yml \\ && micromamba clean -a -y %environment export PATH="$MAMBA_ROOT_PREFIX/bin:$PATH" @@ -780,9 +780,9 @@ class WaveClientTest extends Specification { BootStrap: docker From: mambaorg/micromamba:1.4.9 %files - {{wave_context_dir}}/conda.yml /tmp/conda.yml + {{wave_context_dir}}/conda.yml /scratch/conda.yml %post - micromamba install -y -n base -f /tmp/conda.yml \\ + micromamba install -y -n base -f /scratch/conda.yml \\ && micromamba clean -a -y %environment export PATH="$MAMBA_ROOT_PREFIX/bin:$PATH" From a1e1d3ca322afd64eb6d67bc7bb8e9d2a8ff4c00 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 31 Aug 2023 10:53:39 +0200 Subject: [PATCH 084/128] Align Singularity experience with Docker (#4230) Signed-off-by: Paolo Di Tommaso Signed-off-by: Ben Sherman Co-authored-by: Ben Sherman --- docs/config.md | 5 +- docs/container.md | 9 +++ .../container/SingularityBuilder.groovy | 19 ++++- .../groovy/nextflow/cli/CmdInfoTest.groovy | 2 + .../container/ApptainerBuilderTest.groovy | 80 +++++++++++++++++-- .../container/SingularityBuilderTest.groovy | 80 +++++++++++++++++-- .../executor/BashWrapperBuilderTest.groovy | 4 +- .../executor/fusion/FusionHelperTest.groovy | 6 +- 8 files changed, 183 insertions(+), 22 deletions(-) diff --git a/docs/config.md b/docs/config.md index 4c429499a3..34ba2e5b97 100644 --- 
a/docs/config.md +++ b/docs/config.md @@ -1294,7 +1294,10 @@ The `singularity` scope controls how [Singularity](https://sylabs.io/singularity The following settings are available: `singularity.autoMounts` -: When `true` Nextflow automatically mounts host paths in the executed container. It requires the `user bind control` feature to be enabled in your Singularity installation (default: `false`). +: When `true` Nextflow automatically mounts host paths in the executed container. It requires the `user bind control` feature to be enabled in your Singularity installation (default: `true`). +: :::{versionchanged} 23.09.0-edge + Default value was changed from `false` to `true`. + ::: `singularity.cacheDir` : The directory where remote Singularity images are stored. When using a computing cluster it must be a shared folder accessible to all compute nodes. diff --git a/docs/container.md b/docs/container.md index 5f43db1a31..39281700f7 100644 --- a/docs/container.md +++ b/docs/container.md @@ -583,6 +583,15 @@ When a process input is a *symbolic link* file, make sure the linked file is sto Nextflow no longer mounts the home directory when launching a Singularity container. To re-enable the old behavior, set the environment variable `NXF_SINGULARITY_HOME_MOUNT` to `true`. ::: +:::{versionchanged} 23.09.0-edge +Nextflow automatically mounts the required host paths in the container. To re-enable the old behavior, set the environment variable `NXF_SINGULARITY_AUTO_MOUNTS` to `false` or set `singularity.autoMounts=false` in the Nextflow configuration file. +::: + +:::{versionchanged} 23.09.0-edge +Nextflow uses the command `run` to carry out the execution of Singularity containers instead of the `exec` command. +To re-enable the old behavior, set the environment variable `NXF_SINGULARITY_RUN_COMMAND` to `exec`. +::: + ### Multiple containers It is possible to specify a different Singularity image for each process definition in your pipeline script. 
For example, let's suppose you have two processes named `foo` and `bar`. You can specify two different Singularity images specifying them in the `nextflow.config` file as shown below: diff --git a/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy index 45426b4360..5e0277ea60 100644 --- a/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy @@ -37,10 +37,14 @@ class SingularityBuilder extends ContainerBuilder { private boolean newPidNamespace + private String runCmd0 + SingularityBuilder(String name) { this.image = name this.homeMount = defaultHomeMount() + this.autoMounts = defaultAutoMounts() this.newPidNamespace = defaultNewPidNamespace() + this.runCmd0 = defaultRunCommand() } private boolean defaultHomeMount() { @@ -51,6 +55,17 @@ class SingularityBuilder extends ContainerBuilder { SysEnv.get("NXF_${getBinaryName().toUpperCase()}_NEW_PID_NAMESPACE", 'true').toString() == 'true' } + private boolean defaultAutoMounts() { + SysEnv.get("NXF_${getBinaryName().toUpperCase()}_AUTO_MOUNTS", 'true').toString() == 'true' + } + + private String defaultRunCommand() { + final result = SysEnv.get("NXF_${getBinaryName().toUpperCase()}_RUN_COMMAND", 'run') + if( result !in ['run','exec'] ) + throw new IllegalArgumentException("Invalid singularity launch command '$result' - it should be either 'run' or 'exec'") + return result + } + protected String getBinaryName() { 'singularity' } @Override @@ -68,7 +83,7 @@ class SingularityBuilder extends ContainerBuilder { if( params.containsKey('runOptions') ) addRunOptions(params.runOptions.toString()) - if( params.autoMounts ) + if( params.autoMounts!=null ) autoMounts = params.autoMounts.toString() == 'true' if( params.newPidNamespace!=null ) @@ -97,7 +112,7 @@ class SingularityBuilder extends ContainerBuilder { if( 
engineOptions ) result << engineOptions.join(' ') << ' ' - result << 'exec ' + result << runCmd0 << ' ' if( !homeMount ) result << '--no-home ' diff --git a/modules/nextflow/src/test/groovy/nextflow/cli/CmdInfoTest.groovy b/modules/nextflow/src/test/groovy/nextflow/cli/CmdInfoTest.groovy index bd88b9261d..3ec57a2427 100644 --- a/modules/nextflow/src/test/groovy/nextflow/cli/CmdInfoTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/cli/CmdInfoTest.groovy @@ -17,6 +17,7 @@ package nextflow.cli import nextflow.plugin.Plugins +import spock.lang.IgnoreIf import spock.lang.Requires import spock.lang.Shared import spock.lang.Specification @@ -31,6 +32,7 @@ import org.yaml.snakeyaml.Yaml * * @author Paolo Di Tommaso */ +@IgnoreIf({System.getenv('NXF_SMOKE')}) @Requires({System.getenv('NXF_GITHUB_ACCESS_TOKEN')}) class CmdInfoTest extends Specification { diff --git a/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy index 436f3e74e6..9aa8199fa2 100644 --- a/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy @@ -27,9 +27,10 @@ import spock.lang.Unroll */ class ApptainerBuilderTest extends Specification { - def 'should get the exec command line' () { - + def 'should get the legacy exec command line' () { given: + SysEnv.push(NXF_APPTAINER_RUN_COMMAND:'exec', NXF_APPTAINER_AUTO_MOUNTS:'false') + and: def path1 = Paths.get('/foo/data/file1') def path2 = Paths.get('/bar/data/file2') def path3 = Paths.get('/bar/data file') @@ -87,6 +88,71 @@ class ApptainerBuilderTest extends Specification { .build() .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home ubuntu' + cleanup: + SysEnv.pop() + } + + def 'should get the run command line' () { + + given: + def path1 = 
Paths.get('/foo/data/file1') + def path2 = Paths.get('/bar/data/file2') + def path3 = Paths.get('/bar/data file') + + expect: + new ApptainerBuilder('busybox') + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" busybox' + + new ApptainerBuilder('busybox') + .params(engineOptions: '-q -v') + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer -q -v run --no-home --pid -B "$PWD" busybox' + + new ApptainerBuilder('busybox') + .params(runOptions: '--contain --writable') + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" --contain --writable busybox' + + new ApptainerBuilder('ubuntu') + .addMount(path1) + .params(autoMounts: false) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid ubuntu' + + new ApptainerBuilder('ubuntu') + .addMount(path1) + .addMount(path2) + .params(autoMounts: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' + + new ApptainerBuilder('ubuntu') + .addMount(path1) + .addMount(path1) + .params(autoMounts: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' + + new ApptainerBuilder('ubuntu') + .addMount(path1) + .addMount(path1) + .params(autoMounts: true) + .params(readOnlyInputs: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} 
apptainer run --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' + + new ApptainerBuilder('ubuntu') + .addMount(path3) + .params(autoMounts: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' + + new ApptainerBuilder('ubuntu') + .params(newPidNamespace: false) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home -B "$PWD" ubuntu' + } def 'should mount home directory if specified' () { @@ -135,12 +201,12 @@ class ApptainerBuilderTest extends Specification { .addEnv('X=1') .addEnv(ALPHA:'aaa', BETA: 'bbb') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} APPTAINERENV_X="1" APPTAINERENV_ALPHA="aaa" APPTAINERENV_BETA="bbb" apptainer exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} APPTAINERENV_X="1" APPTAINERENV_ALPHA="aaa" APPTAINERENV_BETA="bbb" apptainer run --no-home --pid -B "$PWD" busybox' new ApptainerBuilder('busybox') .addEnv('CUDA_VISIBLE_DEVICES') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+APPTAINERENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} apptainer exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+APPTAINERENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} apptainer run --no-home --pid -B "$PWD" busybox' } @@ -150,17 +216,17 @@ class ApptainerBuilderTest extends Specification { when: def cmd = new ApptainerBuilder('ubuntu.img').build().getRunCommand() then: - cmd == 'set +u; env - 
PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid ubuntu.img' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" ubuntu.img' when: cmd = new ApptainerBuilder('ubuntu.img').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid ubuntu.img bwa --this --that file.fastq' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" ubuntu.img bwa --this --that file.fastq' when: cmd = new ApptainerBuilder('ubuntu.img').params(entry:'/bin/sh').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' } @Unroll diff --git a/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy index 1614550c29..6b412ea3b1 100644 --- a/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy @@ -28,9 +28,10 @@ import spock.lang.Unroll */ class SingularityBuilderTest extends Specification { - def 'should get the exec command line' () { - + def 'should get the legacy exec command line' () { given: + SysEnv.push(NXF_SINGULARITY_RUN_COMMAND:'exec', 
NXF_SINGULARITY_AUTO_MOUNTS:'false') + and: def path1 = Paths.get('/foo/data/file1') def path2 = Paths.get('/bar/data/file2') def path3 = Paths.get('/bar/data file') @@ -88,6 +89,71 @@ class SingularityBuilderTest extends Specification { .build() .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home ubuntu' + cleanup: + SysEnv.pop() + } + + def 'should get the run command line' () { + + given: + def path1 = Paths.get('/foo/data/file1') + def path2 = Paths.get('/bar/data/file2') + def path3 = Paths.get('/bar/data file') + + expect: + new SingularityBuilder('busybox') + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" busybox' + + new SingularityBuilder('busybox') + .params(engineOptions: '-q -v') + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity -q -v run --no-home --pid -B "$PWD" busybox' + + new SingularityBuilder('busybox') + .params(runOptions: '--contain --writable') + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" --contain --writable busybox' + + new SingularityBuilder('ubuntu') + .params(autoMounts: false) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid ubuntu' + + new SingularityBuilder('ubuntu') + .addMount(path1) + .addMount(path2) + .params(autoMounts: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' + + new 
SingularityBuilder('ubuntu') + .addMount(path1) + .addMount(path1) + .params(autoMounts: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' + + new SingularityBuilder('ubuntu') + .addMount(path1) + .addMount(path1) + .params(autoMounts: true) + .params(readOnlyInputs: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' + + new SingularityBuilder('ubuntu') + .addMount(path3) + .params(autoMounts: true) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' + + new SingularityBuilder('ubuntu') + .params(newPidNamespace: false) + .params(autoMounts: false) + .build() + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home ubuntu' + } def 'should mount home directory if specified' () { @@ -136,12 +202,12 @@ class SingularityBuilderTest extends Specification { .addEnv('X=1') .addEnv(ALPHA:'aaa', BETA: 'bbb') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} SINGULARITYENV_X="1" SINGULARITYENV_ALPHA="aaa" SINGULARITYENV_BETA="bbb" singularity exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} SINGULARITYENV_X="1" SINGULARITYENV_ALPHA="aaa" SINGULARITYENV_BETA="bbb" singularity run --no-home --pid -B "$PWD" busybox' new SingularityBuilder('busybox') .addEnv('CUDA_VISIBLE_DEVICES') .build() - .runCommand == 'set +u; env - 
PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+SINGULARITYENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} singularity exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+SINGULARITYENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} singularity run --no-home --pid -B "$PWD" busybox' } @@ -151,17 +217,17 @@ class SingularityBuilderTest extends Specification { when: def cmd = new SingularityBuilder('ubuntu.img').build().getRunCommand() then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid ubuntu.img' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" ubuntu.img' when: cmd = new SingularityBuilder('ubuntu.img').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid ubuntu.img bwa --this --that file.fastq' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" ubuntu.img bwa --this --that file.fastq' when: cmd = new SingularityBuilder('ubuntu.img').params(entry:'/bin/sh').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" ubuntu.img /bin/sh -c "cd $PWD; bwa --this 
--that file.fastq"' } @Unroll diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy index 15c9965a18..006c2d9c33 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy @@ -977,7 +977,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [enabled: true, engine: 'singularity'] as ContainerConfig ).makeBinding() then: - binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity exec --no-home --pid docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity run --no-home --pid -B /work/dir docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' } @@ -991,7 +991,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [enabled: true, engine: 'singularity', entrypointOverride: true] as ContainerConfig ).makeBinding() then: - binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity exec --no-home --pid docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} 
${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity run --no-home --pid -B /work/dir docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' } diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy index e1debac016..7dd00bb5b6 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy @@ -68,9 +68,9 @@ class FusionHelperTest extends Specification { [engine:'docker'] | [FOO:'one'] | 'image:2' | null | ['echo', 'hello'] | "docker run -i -e \"FOO=one\" --rm --privileged image:2 echo 'hello'" [engine:'docker'] | [FOO:'one'] | 'image:2' | '--this=that' | ['echo', 'hello'] | "docker run -i -e \"FOO=one\" --this=that --rm --privileged image:2 echo 'hello'" and: - [engine:'singularity'] | [:] | 'image:1' | null | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} singularity exec --no-home --pid image:1 echo 'hello'" - [engine:'singularity'] | [FOO:'one'] | 'image:1' | null | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity exec --no-home --pid image:1 echo 'hello'" - [engine:'singularity'] | [FOO:'one'] | 'image:1' | '--this=that' | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity exec --no-home --pid --this=that image:1 echo 'hello'" + [engine:'singularity'] | [:] | 'image:1' | null | 
['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} singularity run --no-home --pid image:1 echo 'hello'" + [engine:'singularity'] | [FOO:'one'] | 'image:1' | null | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity run --no-home --pid image:1 echo 'hello'" + [engine:'singularity'] | [FOO:'one'] | 'image:1' | '--this=that' | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity run --no-home --pid --this=that image:1 echo 'hello'" } From d19cb0b704ecf3c8d6d3818e2ff4a928c095a9a1 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 2 Sep 2023 15:19:09 +0200 Subject: [PATCH 085/128] Fix Wave build when Conda package name is quoted Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/build.gradle | 2 +- .../io/seqera/wave/plugin/WaveClientTest.groovy | 14 ++++++++++---- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/plugins/nf-wave/build.gradle b/plugins/nf-wave/build.gradle index d3a604c19b..ce15723239 100644 --- a/plugins/nf-wave/build.gradle +++ b/plugins/nf-wave/build.gradle @@ -36,7 +36,7 @@ dependencies { api 'org.apache.commons:commons-lang3:3.12.0' api 'com.google.code.gson:gson:2.10.1' api 'org.yaml:snakeyaml:2.0' - api 'io.seqera:wave-utils:0.7.1' + api 'io.seqera:wave-utils:0.7.2' testImplementation(testFixtures(project(":nextflow"))) testImplementation "org.codehaus.groovy:groovy:3.0.19" diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index cf19d9a8c2..5dcb1dce33 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ 
b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -513,7 +513,7 @@ class WaveClientTest extends Specification { given: def session = Mock(Session) { getConfig() >> [:]} and: - def task = Mock(TaskRun) {getConfig() >> [conda:'salmon=1.2.3'] } + def task = Mock(TaskRun) {getConfig() >> [conda:"bioconda::rseqc=3.0.1 'conda-forge::r-base>=3.5'"] } and: def client = new WaveClient(session) @@ -539,7 +539,8 @@ class WaveClientTest extends Specification { - conda-forge - defaults dependencies: - - salmon=1.2.3 + - bioconda::rseqc=3.0.1 + - conda-forge::r-base>=3.5 '''.stripIndent(true) } @@ -575,7 +576,7 @@ class WaveClientTest extends Specification { given: def session = Mock(Session) { getConfig() >> [:]} and: - def task = Mock(TaskRun) {getConfig() >> [spack:'salmon@1.2.3', arch:'amd64'] } + def task = Mock(TaskRun) {getConfig() >> [spack:"rseqc@3.0.1 'rbase@3.5'", arch:"amd64"] } and: def client = new WaveClient(session) @@ -607,8 +608,13 @@ class WaveClientTest extends Specification { !assets.containerImage !assets.containerConfig !assets.condaFile - assets.spackFile !assets.projectResources + and: + assets.spackFile.text == '''\ + spack: + specs: [rseqc@3.0.1, rbase@3.5] + concretizer: {unify: true, reuse: false} + '''.stripIndent(true) } def 'should create asset with conda file' () { From fa961e7fadb257b32e8fe19d0d5bf14a7700129f Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 2 Sep 2023 15:47:40 +0200 Subject: [PATCH 086/128] Fix Too long Http connection pool timeout Signed-off-by: Paolo Di Tommaso --- .../main/groovy/nextflow/cli/Launcher.groovy | 28 +++++++++++++------ .../groovy/nextflow/cli/LauncherTest.groovy | 25 +++++++++++++++-- 2 files changed, 43 insertions(+), 10 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy index c8986cc5d0..f3f7087cfc 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy +++ 
b/modules/nextflow/src/main/groovy/nextflow/cli/Launcher.groovy @@ -578,15 +578,28 @@ class Launcher { */ private void setupEnvironment() { - setProxy('HTTP',System.getenv()) - setProxy('HTTPS',System.getenv()) - setProxy('FTP',System.getenv()) + final env = System.getenv() + setProxy('HTTP',env) + setProxy('HTTPS',env) + setProxy('FTP',env) - setProxy('http',System.getenv()) - setProxy('https',System.getenv()) - setProxy('ftp',System.getenv()) + setProxy('http',env) + setProxy('https',env) + setProxy('ftp',env) - setNoProxy(System.getenv()) + setNoProxy(env) + + setHttpClientProperties(env) + } + + static void setHttpClientProperties(Map env) { + // Set the httpclient connection pool timeout to 10 seconds. + // This required because the default is 20 minutes, which cause the error + // "HTTP/1.1 header parser received no bytes" when in some circumstances + // https://github.com/nextflow-io/nextflow/issues/3983#issuecomment-1702305137 + System.setProperty("jdk.httpclient.keepalive.timeout", env.getOrDefault("NXF_JDK_HTTPCLIENT_KEEPALIVE_TIMEOUT","10")) + if( env.get("NXF_JDK_HTTPCLIENT_CONNECTIONPOOLSIZE") ) + System.setProperty("jdk.httpclient.connectionPoolSize", env.get("NXF_JDK_HTTPCLIENT_CONNECTIONPOOLSIZE")) } /** @@ -650,7 +663,6 @@ class Launcher { * @param args The program options as specified by the user on the CLI */ static void main(String... 
args) { - final status = new Launcher() .command(args) .run() if( status ) System.exit(status) diff --git a/modules/nextflow/src/test/groovy/nextflow/cli/LauncherTest.groovy b/modules/nextflow/src/test/groovy/nextflow/cli/LauncherTest.groovy index 1bd2daff7b..7f97273e42 100644 --- a/modules/nextflow/src/test/groovy/nextflow/cli/LauncherTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/cli/LauncherTest.groovy @@ -16,12 +16,12 @@ package nextflow.cli -import spock.lang.Specification - import java.nio.file.Files import com.beust.jcommander.DynamicParameter import com.beust.jcommander.Parameter +import spock.lang.Specification +import spock.lang.Unroll import spock.util.environment.RestoreSystemProperties import test.OutputCapture /** @@ -427,6 +427,27 @@ class LauncherTest extends Specification { } + @RestoreSystemProperties + @Unroll + def 'should set http client timeout' () { + when: + Launcher.setHttpClientProperties(ENV) + then: + System.getProperty('jdk.httpclient.keepalive.timeout') == TIMEOUT + and: + System.getProperty('jdk.httpclient.connectionPoolSize') == POOLSIZE + + where: + ENV | TIMEOUT | POOLSIZE + [:] | '10' | null + and: + [NXF_JDK_HTTPCLIENT_KEEPALIVE_TIMEOUT: '1'] | '1' | null + [NXF_JDK_HTTPCLIENT_KEEPALIVE_TIMEOUT: '100'] | '100' | null + and: + [NXF_JDK_HTTPCLIENT_CONNECTIONPOOLSIZE: '0'] | '10' | '0' + [NXF_JDK_HTTPCLIENT_CONNECTIONPOOLSIZE: '99'] | '10' | '99' + } + def 'should make cli' () { given: def launcher = new Launcher() From 366eedec9fdda9c98c88390b6b2947fbaf1493c2 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 3 Sep 2023 11:59:58 +0200 Subject: [PATCH 087/128] Fix fs cp command with remote file Signed-off-by: Paolo Di Tommaso --- .../src/main/groovy/nextflow/cli/CmdFs.groovy | 29 +++++++++++++++++-- tests/checks/cli-fs.nf/.checks | 20 +++++++++++++ 2 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 tests/checks/cli-fs.nf/.checks diff --git 
a/modules/nextflow/src/main/groovy/nextflow/cli/CmdFs.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdFs.groovy index 5ea9e636a4..2382550ba3 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CmdFs.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdFs.groovy @@ -15,22 +15,30 @@ */ package nextflow.cli + import java.nio.charset.Charset import java.nio.file.Files import java.nio.file.Path +import java.nio.file.Paths import com.beust.jcommander.Parameter +import groovy.transform.CompileStatic +import groovy.util.logging.Slf4j +import nextflow.Global +import nextflow.Session +import nextflow.config.ConfigBuilder import nextflow.exception.AbortOperationException import nextflow.extension.FilesEx import nextflow.file.FileHelper import nextflow.file.FilePatternSplitter import nextflow.plugin.Plugins - /** * Implements `fs` command * * @author Paolo Di Tommaso */ +@CompileStatic +@Slf4j class CmdFs extends CmdBase implements UsageAware { static final public NAME = 'fs' @@ -158,6 +166,16 @@ class CmdFs extends CmdBase implements UsageAware { return NAME } + private Session createSession() { + // create the config + final config = new ConfigBuilder() + .setOptions(getLauncher().getOptions()) + .setBaseDir(Paths.get('.')) + .build() + + return new Session(config) + } + @Override void run() { if( !args ) { @@ -166,11 +184,18 @@ class CmdFs extends CmdBase implements UsageAware { } Plugins.setup() + final session = createSession() try { run0() } finally { - Plugins.stop() + try { + session.destroy() + Global.cleanUp() + Plugins.stop() + } catch (Throwable t) { + log.warn "Unexpected error while destroying the session object - cause: ${t.message}" + } } } diff --git a/tests/checks/cli-fs.nf/.checks b/tests/checks/cli-fs.nf/.checks new file mode 100644 index 0000000000..50da6168aa --- /dev/null +++ b/tests/checks/cli-fs.nf/.checks @@ -0,0 +1,20 @@ +# Skip test if AWS keys are missing +if [ -z "$AWS_ACCESS_KEY_ID" ]; then + echo "Missing AWS 
credentials -- Skipping test" + exit 0 +fi + +## random file name +NAME=test-cmd-$(basename `mktemp`).file + +## create random file +head -c 1000000 myfile + +## upload it +$NXF_CMD -log cmd-fs-1.log fs cp myfile s3://nextflow-ci/$NAME + +## download it +$NXF_CMD -log cmd-fs-2.log fs cp s3://nextflow-ci/$NAME $NAME + +## check they are equals +diff myfile $NAME || false From 976c282c2711305ed115710b8a628a13a5f5c35f Mon Sep 17 00:00:00 2001 From: Jan Philipp Bittner <42641080+najtin@users.noreply.github.com> Date: Sun, 3 Sep 2023 12:06:29 +0200 Subject: [PATCH 088/128] Fix Parallel execution of Conda corrupts data and packages (#4253) [ci fast] Signed-off-by: Jan Philipp Bittner <42641080+najtin@users.noreply.github.com> --- .../src/main/groovy/nextflow/conda/CondaCache.groovy | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy b/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy index 451aeecac2..0cc0b86a95 100644 --- a/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/conda/CondaCache.groovy @@ -41,7 +41,8 @@ import org.yaml.snakeyaml.Yaml @Slf4j @CompileStatic class CondaCache { - + static final private Object condaLock = new Object() + /** * Cache the prefix path for each Conda environment */ @@ -273,7 +274,6 @@ class CondaCache { @PackageScope Path createLocalCondaEnv0(String condaEnv, Path prefixPath) { - log.info "Creating env using ${binaryName}: $condaEnv [cache $prefixPath]" String opts = createOptions ? "$createOptions " : '' @@ -296,7 +296,13 @@ class CondaCache { } try { - runCommand( cmd ) + // Parallel execution of conda causes data and package corruption. + // https://github.com/nextflow-io/nextflow/issues/4233 + // https://github.com/conda/conda/issues/13037 + // Should be removed as soon as the upstream bug is fixed and released. 
+ synchronized(condaLock) { + runCommand( cmd ) + } log.debug "'${binaryName}' create complete env=$condaEnv path=$prefixPath" } catch( Exception e ){ From c7701e3a1505eb53632306cf1ebeb21e6b37a11d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sun, 3 Sep 2023 12:07:03 +0200 Subject: [PATCH 089/128] CI: Add back logic to skip Tests for docs-only PRs. (#4250) Signed-off-by: Phil Ewels --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c27a11929e..a5e1951c22 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -92,7 +92,7 @@ jobs: any_changed: ${{ steps.changed-files.outputs.any_changed }} test: - if: ${{ !contains(github.event.head_commit.message, '[ci fast]') }} + if: ${{ !contains(github.event.head_commit.message, '[ci fast]') && needs.build.outputs.any_changed == 'true' }} needs: build runs-on: ubuntu-latest strategy: From a68c1a3efe355c0ea702c362c52e40f276cf2aeb Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sun, 3 Sep 2023 05:08:35 -0500 Subject: [PATCH 090/128] Add warning about using clusterOptions with process directives (#4248) Signed-off-by: Ben Sherman --- docs/process.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/process.md b/docs/process.md index 7c4eb7fce0..9c71b26897 100644 --- a/docs/process.md +++ b/docs/process.md @@ -1314,6 +1314,10 @@ The `clusterOptions` directive allows the usage of any native configuration opti This directive is only used by grid executors. Refer to the {ref}`executor-page` page to see which executors support this directive. ::: +:::{warning} +While you can use the `clusterOptions` directive to specify options that are supported as process directives (`queue`, `memory`, `time`, etc), you should not use both at the same time, as it will cause undefined behavior. Most HPC schedulers will either fail or simply ignore one or the other. 
+::: + (process-conda)= ### conda From 1ae70d5d39f68068fa073497d28ced3fed5731e4 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sun, 3 Sep 2023 05:15:16 -0500 Subject: [PATCH 091/128] Fix Parse negative CLI params as numbers (#4238) [ci fast] Signed-off-by: Ben Sherman --- modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy | 6 +++--- .../nextflow/src/test/groovy/nextflow/cli/CmdRunTest.groovy | 4 ++++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy index 4da1a3b35c..890958f270 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy @@ -625,9 +625,9 @@ class CmdRun extends CmdBase implements HubOptions { if ( str.toLowerCase() == 'true') return Boolean.TRUE if ( str.toLowerCase() == 'false' ) return Boolean.FALSE - if ( str==~/\d+(\.\d+)?/ && str.isInteger() ) return str.toInteger() - if ( str==~/\d+(\.\d+)?/ && str.isLong() ) return str.toLong() - if ( str==~/\d+(\.\d+)?/ && str.isDouble() ) return str.toDouble() + if ( str==~/-?\d+(\.\d+)?/ && str.isInteger() ) return str.toInteger() + if ( str==~/-?\d+(\.\d+)?/ && str.isLong() ) return str.toLong() + if ( str==~/-?\d+(\.\d+)?/ && str.isDouble() ) return str.toDouble() return str } diff --git a/modules/nextflow/src/test/groovy/nextflow/cli/CmdRunTest.groovy b/modules/nextflow/src/test/groovy/nextflow/cli/CmdRunTest.groovy index 86917bba53..e18b401a76 100644 --- a/modules/nextflow/src/test/groovy/nextflow/cli/CmdRunTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/cli/CmdRunTest.groovy @@ -49,9 +49,13 @@ class CmdRunTest extends Specification { 'false' | false 'foo' | 'foo' '10' | 10i + '-10' | -10i '20.00' | 20i + '-20.00' | -20i '3000000000'| 3000000000l '20.33' | 20.33d + '-20.33' | -20.33d + '-foo' | '-foo' '--foo' | '--foo' '20x0' | '20x0' '20.d' | '20.d' From 
69d317b6d297c7fd897ffc4cbbc4124efe15594a Mon Sep 17 00:00:00 2001 From: Manuele Simi <2822686+manuelesimi@users.noreply.github.com> Date: Sun, 3 Sep 2023 12:31:50 +0200 Subject: [PATCH 092/128] Update AZ File share doc. [ci skip] (#4235) Signed-off-by: Manuele Simi Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- docs/azure.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/azure.md b/docs/azure.md index aebf7bb9c1..ac8ff1a24d 100644 --- a/docs/azure.md +++ b/docs/azure.md @@ -63,7 +63,7 @@ azure { } ``` -The files in the File share are available to the task in the directory: `/`. +The files in the File share are available to the task in the directory: ``. For instance, given the following configuration: @@ -73,15 +73,20 @@ azure { // ... fileShares { - dir1 { - mountPath = "/mnt/mydata/" + rnaseqResources { + mountPath = "/mnt/mydata/myresources" } } } } ``` -The task can access the File share in `/mnt/mydata/dir1`. +The task can access the File share in `/mnt/mydata/myresources`. Note: The string `rnaseqResources` in the above config can be any name of your choice, and it does not affect the underlying mount. + +:::{warning} +Azure File shares do not support authentication and management with Active Directory. The storage account key must be +set in the configuration if a share is mounted. 
+::: (azure-batch)= From 3e8cd4881720b5fd48d4cf2881d331b61c4d3c28 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 3 Sep 2023 13:50:27 +0200 Subject: [PATCH 093/128] Add check for latest version (#4194) Signed-off-by: Paolo Di Tommaso --- nextflow | 24 +++++++++++++++++++++++- packing.gradle | 3 +++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/nextflow b/nextflow index 6e3931e648..c7d7b92078 100755 --- a/nextflow +++ b/nextflow @@ -85,6 +85,17 @@ function get() { fi } +function get_ver() { + if command -v curl &>/dev/null; then + curl -fsSL "$1" + elif command -v wget &>/dev/null; then + wget "$1" >/dev/null 2>&1 + else + echo_red "ERROR: Cannot find 'curl' nor 'wget' utility -- please install one of them" + exit 1 + fi +} + function make_temp() { local base=${NXF_TEMP:=$PWD} if [ "$(uname)" = 'Darwin' ]; then mktemp "${base}/nxf-tmp.XXXXXX" || exit $? @@ -111,7 +122,7 @@ function resolve_link() { } function current_ver() { - [[ $NXF_EDGE == 1 ]] && printf 'edge' || printf 'latest' + [[ $NXF_EDGE == 1 || $NXF_VER == *"-edge" ]] && printf 'edge' || printf 'latest' } function install() { @@ -131,6 +142,15 @@ function install() { echo '' } +function check_latest() { + [[ $cmd != run ]] && return 0 + [[ $NXF_OFFLINE == true || $NXF_DISABLE_CHECK_LATEST == true ]] && return 0 + local latest=$(get_ver "$NXF_BASE/$(current_ver)/version?current=$NXF_VER") + if [[ -n "$latest" && "$latest" != $NXF_VER ]]; then + echo_yellow "Nextflow $latest is available - Please consider updating your version to it" + fi +} + function launch_nextflow() { # the launch command line local cmdline=() @@ -445,5 +465,7 @@ else fi +# check for latest version +check_latest # finally run it launch_nextflow diff --git a/packing.gradle b/packing.gradle index 09993e7844..efec6214f4 100644 --- a/packing.gradle +++ b/packing.gradle @@ -327,6 +327,7 @@ task release(type: Exec, dependsOn: [pack, dockerImage]) { def launcherSha1 = file('nextflow.sha1').absoluteFile def 
launcherSha256 = file('nextflow.sha256').absoluteFile def nextflowAllFile = file("$releaseDir/nextflow-${version}-all") + def versionFile = file('VERSION').absoluteFile def snapshot = version ==~ /^.+(-RC\d+|-SNAPSHOT)$/ def edge = version ==~ /^.+(-edge|-EDGE)$/ @@ -350,6 +351,7 @@ task release(type: Exec, dependsOn: [pack, dockerImage]) { aws s3 cp $launcherSha1 s3://www2.nextflow.io/releases/edge/nextflow.sha1 $s3CmdOpts aws s3 cp $launcherSha256 s3://www2.nextflow.io/releases/edge/nextflow.sha256 $s3CmdOpts aws s3 cp $launcherSha256 s3://www2.nextflow.io/releases/edge/nextflow.md5 $s3CmdOpts + aws s3 cp $versionFile s3://www2.nextflow.io/releases/edge/version $s3CmdOpts """.stripIndent() else if( isLatest ) @@ -361,6 +363,7 @@ task release(type: Exec, dependsOn: [pack, dockerImage]) { aws s3 cp $launcherSha1 s3://www2.nextflow.io/releases/latest/nextflow.sha1 $s3CmdOpts aws s3 cp $launcherSha256 s3://www2.nextflow.io/releases/latest/nextflow.sha256 $s3CmdOpts aws s3 cp $launcherSha256 s3://www2.nextflow.io/releases/latest/nextflow.md5 $s3CmdOpts + aws s3 cp $versionFile s3://www2.nextflow.io/releases/latest/version $s3CmdOpts """.stripIndent() def temp = File.createTempFile('upload',null) From 1dfb621c12ceda4adbfef9adfec560bb36b7df53 Mon Sep 17 00:00:00 2001 From: Jesse <90865804+dxu104@users.noreply.github.com> Date: Sun, 3 Sep 2023 08:35:38 -0400 Subject: [PATCH 094/128] Update docs about splitCsv() operator (#4163) Signed-off-by: Jesse <90865804+dxu104@users.noreply.github.com> Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- docs/operator.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/operator.md b/docs/operator.md index 636d82cbe7..978fbaf85b 100644 --- a/docs/operator.md +++ b/docs/operator.md @@ -1384,6 +1384,10 @@ Channel .splitCsv(header: ['col1', 'col2', 'col3'], skip: 1 ) .view { row -> "${row.col1} - ${row.col2} - ${row.col3}" } ``` +:::{note} +- By default, the `splitCsv` operator returns each row as a *list* 
object. Items are accessed by using the 0-based column index. +- When the `header` is specified each row is returned as a *map* object (also known as dictionary). Items are accessed via the corresponding column name. +::: Available options: From 937c8fb7ab72a62152c838c6d7dafe2bb87bfc6d Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 3 Sep 2023 15:20:10 +0200 Subject: [PATCH 095/128] Remove experimental -dockerize option Signed-off-by: Paolo Di Tommaso --- .../groovy/nextflow/cli/CliOptions.groovy | 3 --- .../executor/BashWrapperBuilder.groovy | 4 ---- nextflow | 20 ------------------- 3 files changed, 27 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy index 8c6f1a445a..c6197b1ea9 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy @@ -84,9 +84,6 @@ class CliOptions { @Parameter(names = ['-self-update'], description = 'Update nextflow to the latest version', arity = 0, hidden = true) boolean selfUpdate - @Parameter(names = ['-d','-dockerize'], description = 'Launch nextflow via Docker (experimental)', arity = 0) - boolean dockerize - Boolean ansiLog boolean getAnsiLog() { diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index 835739e5ae..b0439ed85f 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -593,10 +593,6 @@ class BashWrapperBuilder { if( fixOwnership() ) builder.addEnv( 'NXF_OWNER=$(id -u):$(id -g)' ) - if( engine=='docker' && System.getenv('NXF_DOCKER_OPTS') ) { - builder.addRunOptions(System.getenv('NXF_DOCKER_OPTS')) - } - for( String var : containerConfig.getEnvWhitelist() ) { builder.addEnv(var) } diff 
--git a/nextflow b/nextflow index c7d7b92078..727de64bd3 100755 --- a/nextflow +++ b/nextflow @@ -194,7 +194,6 @@ unset JAVA_TOOL_OPTIONS # parse the command line bg='' -dockerize='' declare -a jvmopts=() declare -a args=("$@") declare -a commands=(clone config drop help history info ls pull run view node console kuberun) @@ -210,11 +209,6 @@ while [[ $# != 0 ]]; do jvmopts+=("$1") fi ;; - -d|-dockerize) - if [[ ! "$cmd" && ! -f /.nextflow/dockerized ]]; then - dockerize=1 - fi - ;; -bg) if [[ ! -f /.nextflow/dockerized ]]; then bg=1 @@ -242,20 +236,6 @@ while [[ $# != 0 ]]; do shift done -NXF_DOCKER_OPTS=${NXF_DOCKER_OPTS:=''} -if [[ "$dockerize" ]]; then - if [[ "$bg" ]]; then detach='--detach '; else detach=''; fi - NXF_ASSETS=${NXF_ASSETS:-${NXF_HOME:-$HOME/.nextflow}/assets} - mkdir -p "$NXF_ASSETS" - exec docker run $detach --rm --net host \ - -e NXF_ANSI_LOG=false \ - -e USER -e HOME -e NXF_ASSETS=$NXF_ASSETS -e NXF_USRMAP=$(id -u) -e NXF_DOCKER_OPTS='-u $(id -u)' \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v $HOME:$HOME:ro,Z -v $NXF_ASSETS:$NXF_ASSETS:Z -v $PWD:$PWD:Z -w $PWD $NXF_DOCKER_OPTS \ - nextflow/nextflow:$NXF_VER nextflow "${args[@]}" - exit 1 -fi - CAPSULE_LOG=${CAPSULE_LOG:=''} CAPSULE_RESET=${CAPSULE_RESET:=''} CAPSULE_CACHE_DIR=${CAPSULE_CACHE_DIR:="$NXF_HOME/capsule"} From 7955db8d08dd2a047f58f10e58c609350f849d09 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 3 Sep 2023 15:20:38 +0200 Subject: [PATCH 096/128] Undocument internal NXF_DEBUG variable Signed-off-by: Paolo Di Tommaso --- docs/config.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/config.md b/docs/config.md index 34ba2e5b97..e10b674076 100644 --- a/docs/config.md +++ b/docs/config.md @@ -1542,9 +1542,6 @@ The following environment variables control the configuration of the Nextflow ru ::: : Enable the use of Conda recipes defined by using the {ref}`process-conda` directive. (default: `false`). 
-`NXF_DEBUG` -: Defines scripts debugging level: `1` dump task environment variables in the task log file; `2` enables command script execution tracing; `3` enables command wrapper execution tracing. - `NXF_DEFAULT_DSL` : :::{versionadded} 22.03.0-edge ::: From 7def554228d5cb039ce97808c90066192bd660e1 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 3 Sep 2023 16:07:03 +0200 Subject: [PATCH 097/128] Remove experimental -dockerize option /2 Signed-off-by: Paolo Di Tommaso --- docker/Dockerfile | 1 - nextflow | 2 -- packing.gradle | 1 - 3 files changed, 4 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 5fbe9209ed..e688fa4475 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -11,7 +11,6 @@ COPY nextflow /usr/local/bin/nextflow # download runtime RUN mkdir /.nextflow \ - && touch /.nextflow/dockerized \ && chmod 755 /usr/local/bin/nextflow \ && chmod 755 /usr/local/bin/entry.sh \ && nextflow info diff --git a/nextflow b/nextflow index 727de64bd3..496d0f2b86 100755 --- a/nextflow +++ b/nextflow @@ -210,9 +210,7 @@ while [[ $# != 0 ]]; do fi ;; -bg) - if [[ ! -f /.nextflow/dockerized ]]; then bg=1 - fi ;; -download) if [[ ! 
"$cmd" ]]; then diff --git a/packing.gradle b/packing.gradle index efec6214f4..9131bd7bbc 100644 --- a/packing.gradle +++ b/packing.gradle @@ -293,7 +293,6 @@ task dockerPack(type: Exec, dependsOn: ['packOne']) { COPY entry.sh /usr/local/bin/entry.sh COPY dist/docker /usr/local/bin/docker ENV NXF_HOME=/.nextflow - RUN touch /.nextflow/dockerized RUN chmod +x /usr/local/bin/nextflow /usr/local/bin/entry.sh RUN nextflow info ENTRYPOINT ["/usr/local/bin/entry.sh"] From 817aa05bb96e218e0689d3a1aa96c65ce2ba0503 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 3 Sep 2023 16:08:42 +0200 Subject: [PATCH 098/128] Add which to dockerfile build Signed-off-by: Paolo Di Tommaso --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index e688fa4475..de56944d02 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,5 +1,5 @@ FROM amazoncorretto:17.0.8 -RUN yum install -y procps-ng shadow-utils +RUN yum install -y procps-ng shadow-utils which ENV NXF_HOME=/.nextflow ARG TARGETPLATFORM=linux/amd64 From 7d6ad62476bd43d6e98747cabe8bfe181d5da1e9 Mon Sep 17 00:00:00 2001 From: Robert Syme Date: Sun, 3 Sep 2023 13:42:31 -0400 Subject: [PATCH 099/128] Allow setting shell directive when using the trace file. 
(#4210) Signed-off-by: Rob Syme --- .../main/groovy/nextflow/executor/BashWrapperBuilder.groovy | 2 +- .../groovy/nextflow/executor/BashWrapperBuilderTest.groovy | 4 ++-- .../nextflow/executor/test-bash-wrapper-with-trace.txt | 2 +- .../cloud/aws/batch/AwsBatchScriptLauncherTest.groovy | 2 +- .../nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index b0439ed85f..c37c21ae01 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -468,7 +468,7 @@ class BashWrapperBuilder { final traceWrapper = isTraceRequired() if( traceWrapper ) { // executes the stub which in turn executes the target command - launcher = "/bin/bash ${fileStr(wrapperFile)} nxf_trace" + launcher = "${interpreter} ${fileStr(wrapperFile)} nxf_trace" } else { launcher = "${interpreter} ${fileStr(scriptFile)}" diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy index 006c2d9c33..4ded770ed5 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy @@ -566,14 +566,14 @@ class BashWrapperBuilderTest extends Specification { when: binding = newBashWrapperBuilder(statsEnabled: true).makeBinding() then: - binding.launch_cmd == '/bin/bash /work/dir/.command.run nxf_trace' + binding.launch_cmd == '/bin/bash -ue /work/dir/.command.run nxf_trace' binding.unstage_controls == null binding.containsKey('unstage_controls') when: binding = newBashWrapperBuilder(statsEnabled: true, scratch: true).makeBinding() then: - 
binding.launch_cmd == '/bin/bash /work/dir/.command.run nxf_trace' + binding.launch_cmd == '/bin/bash -ue /work/dir/.command.run nxf_trace' binding.unstage_controls == '''\ cp .command.out /work/dir/.command.out || true cp .command.err /work/dir/.command.err || true diff --git a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt index 0aaf49f410..b032748ccb 100644 --- a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt +++ b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt @@ -281,7 +281,7 @@ on_term() { } nxf_launch() { - /bin/bash {{folder}}/.command.run nxf_trace + /bin/bash -ue {{folder}}/.command.run nxf_trace } nxf_stage() { diff --git a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy index d2620c2767..5bb60cefa2 100644 --- a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy +++ b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy @@ -232,7 +232,7 @@ class AwsBatchScriptLauncherTest extends Specification { nxf_parallel "${uploads[@]}" '''.stripIndent().leftTrim() - binding.launch_cmd == '/bin/bash .command.run nxf_trace' + binding.launch_cmd == '/bin/bash -ue .command.run nxf_trace' binding.task_env == '' diff --git a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy index 60eaf1ffb6..bbca973ffd 100644 --- a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy +++ b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy @@ -385,7 +385,7 @@ class AzFileCopyStrategyTest extends Specification { 
nxf_parallel "${uploads[@]}" '''.stripIndent().leftTrim() - binding.launch_cmd == '/bin/bash .command.run nxf_trace' + binding.launch_cmd == '/bin/bash -ue .command.run nxf_trace' binding.task_env == '''\ export PATH="$PWD/.nextflow-bin:$AZ_BATCH_NODE_SHARED_DIR/bin/:$PATH" From 05ff784a130ee1e589924b46e06806d05b0bdf98 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Mon, 4 Sep 2023 11:41:15 -0500 Subject: [PATCH 100/128] Document use of local variables in closures (#4251) Signed-off-by: Ben Sherman --- docs/script.md | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/docs/script.md b/docs/script.md index 71df9a3487..fc1bc761ef 100644 --- a/docs/script.md +++ b/docs/script.md @@ -392,21 +392,31 @@ Mark = Williams Sudha = Kumari ``` -A closure has two other important features. First, it can access variables in the scope where it is defined, so that it can interact with them. - -Second, a closure can be defined in an anonymous manner, meaning that it is not given a name, and is defined in the place where it needs to be used. - -As an example showing both these features, see the following code fragment: +Closures can also access variables outside of their scope, and they can be used anonymously, that is without assigning them to a variable. Here is an example that demonstrates both of these things: ```groovy -myMap = ["China": 1 , "India" : 2, "USA" : 3] +myMap = ["China": 1, "India": 2, "USA": 3] result = 0 -myMap.keySet().each( { result+= myMap[it] } ) +myMap.keySet().each { result += myMap[it] } println result ``` +A closure can also declare local variables that exist only for the lifetime of the closure: + +```groovy +result = 0 +myMap.keySet().each { + def count = myMap[it] + result += count +} +``` + +:::{warning} +Local variables should be declared using a qualifier such as `def` or a type name, otherwise they will be interpreted as global variables, which could lead to a race condition. 
+::: + Learn more about closures in the [Groovy documentation](http://groovy-lang.org/closures.html) (implicit-variables)= From 6664b578f09e76bbbb2e69cc97d98edfe97cd043 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Mon, 4 Sep 2023 13:01:53 -0500 Subject: [PATCH 101/128] Fix setting `executor.queueSize = 0` is ignored (#4228) Signed-off-by: Ben Sherman --- docs/config.md | 2 +- .../processor/ParallelPollingMonitor.groovy | 4 ++-- .../main/groovy/nextflow/util/ConfigHelper.groovy | 2 +- .../groovy/nextflow/util/ConfigHelperTest.groovy | 15 +++++++++++++++ 4 files changed, 19 insertions(+), 4 deletions(-) diff --git a/docs/config.md b/docs/config.md index e10b674076..5fde00ec5b 100644 --- a/docs/config.md +++ b/docs/config.md @@ -640,7 +640,7 @@ The following settings are available: : Determines how job status is retrieved. When `false` only the queue associated with the job execution is queried. When `true` the job status is queried globally i.e. irrespective of the submission queue (default: `false`). `executor.queueSize` -: The number of tasks the executor will handle in a parallel manner. Default varies for each executor (see below). +: The number of tasks the executor will handle in a parallel manner. A queue size of zero corresponds to no limit. Default varies for each executor (see below). `executor.queueStatInterval` : Determines how often to fetch the queue status from the scheduler (default: `1min`). Used only by grid executors. 
diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/ParallelPollingMonitor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/ParallelPollingMonitor.groovy index 9c2f687ffb..64b8ee52b6 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/ParallelPollingMonitor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/ParallelPollingMonitor.groovy @@ -55,7 +55,7 @@ class ParallelPollingMonitor extends TaskPollingMonitor { @Override protected boolean canSubmit(TaskHandler handler) { - return super.canSubmit(handler) && semaphore.tryAcquire() + return super.canSubmit(handler) && semaphore?.tryAcquire() } protected RateLimiter createSubmitRateLimit() { @@ -95,7 +95,7 @@ class ParallelPollingMonitor extends TaskPollingMonitor { @Override boolean evict(TaskHandler handler) { - semaphore.release() + semaphore?.release() return super.evict(handler) } } diff --git a/modules/nextflow/src/main/groovy/nextflow/util/ConfigHelper.groovy b/modules/nextflow/src/main/groovy/nextflow/util/ConfigHelper.groovy index 09a83f0fb9..e4891e7391 100644 --- a/modules/nextflow/src/main/groovy/nextflow/util/ConfigHelper.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/util/ConfigHelper.groovy @@ -42,7 +42,7 @@ class ConfigHelper { result = config['$'+execName][propName] } - if( result==null && config instanceof Map && config[propName] ) { + if( result==null && config instanceof Map && config[propName] != null ) { result = config[propName] } diff --git a/modules/nextflow/src/test/groovy/nextflow/util/ConfigHelperTest.groovy b/modules/nextflow/src/test/groovy/nextflow/util/ConfigHelperTest.groovy index c8cef55c45..442e8e3488 100644 --- a/modules/nextflow/src/test/groovy/nextflow/util/ConfigHelperTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/util/ConfigHelperTest.groovy @@ -28,6 +28,21 @@ import spock.lang.Unroll */ class ConfigHelperTest extends Specification { + @Unroll + def "get config property" () { + + expect: + 
ConfigHelper.getConfigProperty(config, execName, 'foo') == value + + where: + config | execName | value + [foo: 0] | null | 0 + [foo: 100] | null | 100 + [foo: 'bar'] | null | 'bar' + [$sge: [foo: 'bar']] | 'sge' | 'bar' + + } + @Unroll def "should parse string value: #str" () { From b0bdfd79db7d54993763c4482b58b979ebdf0c76 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 4 Sep 2023 20:18:03 +0200 Subject: [PATCH 102/128] Fix IOException should be thrown when failing to creare Azure directory Signed-off-by: Paolo Di Tommaso --- .../main/nextflow/cloud/azure/nio/AzFileSystemProvider.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/nf-azure/src/main/nextflow/cloud/azure/nio/AzFileSystemProvider.groovy b/plugins/nf-azure/src/main/nextflow/cloud/azure/nio/AzFileSystemProvider.groovy index f8fd7ab5b7..f77051023f 100644 --- a/plugins/nf-azure/src/main/nextflow/cloud/azure/nio/AzFileSystemProvider.groovy +++ b/plugins/nf-azure/src/main/nextflow/cloud/azure/nio/AzFileSystemProvider.groovy @@ -440,7 +440,7 @@ class AzFileSystemProvider extends FileSystemProvider { // 409 (CONFLICT) is returned when the path already // exists, ignore it if( e.statusCode!=409 ) - throw e + throw new IOException("Unable to create Azure blob directory: ${dir.toUriString()} - cause: ${e.message}", e) } } From c37e00bcf1fb3d1a7f6704db3b457cfd51d0a8e2 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Mon, 4 Sep 2023 13:41:19 -0500 Subject: [PATCH 103/128] Document API differences of process path inputs (#4189) Signed-off-by: Ben Sherman --- docs/process.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/process.md b/docs/process.md index 9c71b26897..4cd585ab94 100644 --- a/docs/process.md +++ b/docs/process.md @@ -488,6 +488,16 @@ workflow { } ``` +:::{note} +Process `path` inputs have nearly the same interface as described in {ref}`script-file-io`, with one difference which is relevant when files are staged into a subdirectory. 
Given the following input: + +```groovy +path x, stageAs: 'my-dir/*' +``` + +In this case, `x.name` returns the file name with the parent directory (e.g. `my-dir/file.txt`), whereas normally it would return the file name (e.g. `file.txt`). You can use `x.fileName.name` to get the file name. +::: + ### Multiple input files A `path` input can also accept a collection of files instead of a single value. In this case, the input variable will be a Groovy list, and you can use it as such. From 87e0648a98ec022e6af05b688885d3ce48c85070 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 4 Sep 2023 23:21:02 +0200 Subject: [PATCH 104/128] Add support for remote debug (#4266) This commit adds the a possibility to compile nextflow scripts with debugging information and debug them interactively via a remote debugger commonly available via IntellJ or similar IDE. To enable this feature it should the top-level command line option `-remote-debug`, when specified the execution of nextflow stops and wait for the starting of a debugging session in the IDE. By default, the TCP port 5005 is used. It can be changed by using the environment variable NXF_REMOTE_DEBUG_PORT. Credits: Jorge Aguilera Signed-off-by: Paolo Di Tommaso --- docs/cli.md | 3 +++ launch.sh | 5 ++-- .../src/main/groovy/nextflow/Session.groovy | 5 ++++ .../groovy/nextflow/cli/CliOptions.groovy | 3 +++ .../main/groovy/nextflow/cli/CmdRun.groovy | 1 + .../nextflow/script/ScriptParser.groovy | 25 +++++++++++-------- .../nextflow/script/ScriptRunner.groovy | 2 +- nextflow | 21 ++++++++++------ 8 files changed, 43 insertions(+), 22 deletions(-) diff --git a/docs/cli.md b/docs/cli.md index cfc1c40ef2..14f9a7bd70 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -42,6 +42,9 @@ Available options: `-q, -quiet` : Do not print information messages. +`-remote-debug` +: Enable JVM interactive remote debugging (experimental). + `-syslog` : Send logs to syslog server (e.g. localhost:514). 
diff --git a/launch.sh b/launch.sh index 5f95d01d41..41e5103c40 100755 --- a/launch.sh +++ b/launch.sh @@ -83,6 +83,7 @@ NXF_PLUGINS_DEFAULT=${NXF_PLUGINS_DEFAULT:-true} NXF_HOME=${NXF_HOME:-$HOME/.nextflow} NXF_OPTS=${NXF_OPTS:-} NXF_CLI="$0 $@" +NXF_REMOTE_DEBUG_PORT=${NXF_REMOTE_DEBUG_PORT:-5005} export NXF_CLI export COLUMNS export NXF_PLUGINS_DIR @@ -115,8 +116,8 @@ while [ "$*" != "" ]; do fi elif [ "$1" == '-remote-debug' ]; then - DEBUG='-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8010' - + DEBUG="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=$NXF_REMOTE_DEBUG_PORT" + args+=("$1") else args+=("$1") fi diff --git a/modules/nextflow/src/main/groovy/nextflow/Session.groovy b/modules/nextflow/src/main/groovy/nextflow/Session.groovy index c7df94b8c6..9109d503ea 100644 --- a/modules/nextflow/src/main/groovy/nextflow/Session.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/Session.groovy @@ -183,6 +183,11 @@ class Session implements ISession { */ boolean quiet + /** + * Enable debugging mode + */ + boolean debug + /** * Local path where script generated classes are saved */ diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy index c6197b1ea9..4781af73c2 100644 --- a/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CliOptions.groovy @@ -84,6 +84,9 @@ class CliOptions { @Parameter(names = ['-self-update'], description = 'Update nextflow to the latest version', arity = 0, hidden = true) boolean selfUpdate + @Parameter(names=['-remote-debug'], description = "Enable JVM interactive remote debugging (experimental)") + boolean remoteDebug + Boolean ansiLog boolean getAnsiLog() { diff --git a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy index 890958f270..67b4fba639 100644 --- 
a/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/cli/CmdRun.groovy @@ -343,6 +343,7 @@ class CmdRun extends CmdBase implements HubOptions { runner.session.profile = profile runner.session.commandLine = launcher.cliString runner.session.ansiLog = launcher.options.ansiLog + runner.session.debug = launcher.options.remoteDebug runner.session.disableJobsCancellation = getDisableJobsCancellation() final isTowerEnabled = config.navigate('tower.enabled') as Boolean diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy index 4feb2446c9..53952e3c3b 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy @@ -125,7 +125,10 @@ class ScriptParser { config.addCompilationCustomizers( new ASTTransformationCustomizer(NextflowXform)) config.addCompilationCustomizers( new ASTTransformationCustomizer(OpXform)) - if( session && session.classesDir ) + if( session?.debug ) + config.debug = true + + if( session?.classesDir ) config.setTargetDirectory(session.classesDir.toFile()) return config @@ -154,7 +157,7 @@ class ScriptParser { throw new IllegalArgumentException("Unknown script type: ${script?.getClass()?.getName()}") } - GroovyShell getInterpreter() { + private GroovyShell getInterpreter() { if( !binding && session ) binding = session.binding if( !binding ) @@ -163,10 +166,13 @@ class ScriptParser { return new GroovyShell(classLoader, binding, getConfig()) } - ScriptParser parse(String scriptText, GroovyShell interpreter) { - final String clazzName = computeClassName(scriptText) + private ScriptParser parse0(String scriptText, Path scriptPath, GroovyShell interpreter) { + this.scriptPath = scriptPath + final String className = computeClassName(scriptText) try { - final parsed = interpreter.parse(scriptText, clazzName) + final 
parsed = scriptPath && session.debug + ? interpreter.parse(scriptPath.toFile()) + : interpreter.parse(scriptText, className) if( parsed !instanceof BaseScript ){ throw new CompilationFailedException(0, null) } @@ -183,7 +189,7 @@ class ScriptParser { String msg = e.message ?: header msg = msg != 'startup failed' ? msg : header msg = msg.replaceAll(/startup failed:\n/,'') - msg = msg.replaceAll(~/$clazzName(: \d+:\b*)?/, header+'\n- cause:') + msg = msg.replaceAll(~/$className(: \d+:\b*)?/, header+'\n- cause:') if( msg.contains "Unexpected input: '{'" ) { msg += "\nNOTE: If this is the beginning of a process or workflow, there may be a syntax error in the body, such as a missing or extra comma, for which a more specific error message could not be produced." } @@ -191,16 +197,13 @@ class ScriptParser { } } - ScriptParser parse(String scriptText) { - def interpreter = getInterpreter() - return parse(scriptText, interpreter) + return parse0(scriptText, null, getInterpreter()) } ScriptParser parse(Path scriptPath) { - this.scriptPath = scriptPath try { - parse(scriptPath.text) + parse0(scriptPath.text, scriptPath, getInterpreter()) } catch (IOException e) { throw new ScriptCompilationException("Unable to read script: '$scriptPath' -- cause: $e.message", e) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy index 7be978e903..aee864c680 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy @@ -91,7 +91,7 @@ class ScriptRunner { return this } - ScriptRunner setPreview(boolean value, Closure action) { + ScriptRunner setPreview(boolean value, Closure action) { this.preview = value this.previewAction = action return this diff --git a/nextflow b/nextflow index 496d0f2b86..e296c43fbe 100755 --- a/nextflow +++ b/nextflow @@ -24,6 +24,7 @@ NXF_TEMP=${NXF_TEMP:-$TMPDIR} 
NXF_DIST=${NXF_DIST:-$NXF_HOME/framework} NXF_CLI="$0 $@" NXF_CLI_OPTS=${NXF_CLI_OPTS:-} +NXF_REMOTE_DEBUG_PORT=${NXF_REMOTE_DEBUG_PORT:-5005} export NXF_CLI export NXF_ORG @@ -91,7 +92,7 @@ function get_ver() { elif command -v wget &>/dev/null; then wget "$1" >/dev/null 2>&1 else - echo_red "ERROR: Cannot find 'curl' nor 'wget' utility -- please install one of them" + echo_red "ERROR: Cannot find 'curl' nor 'wget' utility -- please install one of them" exit 1 fi } @@ -154,7 +155,7 @@ function check_latest() { function launch_nextflow() { # the launch command line local cmdline=() - # remove leading and trailing double-quotes + # remove leading and trailing double-quotes for x in "${launcher[@]}"; do x="${x%\"}" x="${x#\"}" @@ -212,6 +213,10 @@ while [[ $# != 0 ]]; do -bg) bg=1 ;; + -remote-debug) + echo_yellow "Enabling script debugging - continue the execution launching the remote VM debugger in your favourite IDE using port $NXF_REMOTE_DEBUG_PORT" + remote_debug=1 + ;; -download) if [[ ! "$cmd" ]]; then rm -rf "$NXF_DIST/$NXF_VER" || exit $? 
@@ -385,7 +390,7 @@ EOF # checked if a cached classpath file exists and it newer that the nextflow boot jar file LAUNCH_FILE="${NXF_LAUNCHER}/classpath-$(env_md5)" -if [ -s "$LAUNCH_FILE" ] && [ "$LAUNCH_FILE" -nt "$NXF_BIN" ]; then +if [ -s "$LAUNCH_FILE" ] && [ "$LAUNCH_FILE" -nt "$NXF_BIN" ] && [[ "$remote_debug" -ne 1 ]]; then declare -a launcher="($(cat "$LAUNCH_FILE"))" else # otherwise run the capsule and get the result classpath in the 'launcher' and save it to a file @@ -400,8 +405,11 @@ else cmd_base=(${BASH_REMATCH[1]}) cmd_tail=(${BASH_REMATCH[2]}) + launcher="${cmd_base[@]}" + [[ "$NXF_JVM_ARGS" ]] && launcher+=($NXF_JVM_ARGS) + [[ "$remote_debug" ]] && launcher+=(-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=$NXF_REMOTE_DEBUG_PORT) + if [[ "$JAVA_VER" =~ ^(9|10|11|12|13|14|15|16|17|18|19|20) ]]; then - launcher="${cmd_base[@]}" launcher+=(--add-opens=java.base/java.lang=ALL-UNNAMED) launcher+=(--add-opens=java.base/java.io=ALL-UNNAMED) launcher+=(--add-opens=java.base/java.nio=ALL-UNNAMED) @@ -419,14 +427,11 @@ else launcher+=(--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED) launcher+=(--add-opens=java.base/jdk.internal.vm=ALL-UNNAMED) launcher+=(--add-opens=java.base/java.util.regex=ALL-UNNAMED) - [[ "$NXF_JVM_ARGS" ]] && launcher+=($NXF_JVM_ARGS) if [[ "$NXF_ENABLE_VIRTUAL_THREADS" == 'true' ]]; then [[ "$JAVA_VER" =~ ^(19|20) ]] && launcher+=(--enable-preview) || die "Virtual threads requires Java 19 or later - current version $JAVA_VER" fi launcher+=("${cmd_tail[@]}") else - launcher="${cmd_base[@]}" - [[ "$NXF_JVM_ARGS" ]] && launcher+=($NXF_JVM_ARGS) launcher+=("${cmd_tail[@]}") fi @@ -434,7 +439,7 @@ else if mkdir -p "${NXF_LAUNCHER}" 2>/dev/null; then STR='' for x in "${launcher[@]}"; do - [[ "$x" != "\"-Duser.dir=$PWD\"" ]] && STR+="$x " + [[ "$x" != "\"-Duser.dir=$PWD\"" ]] && [[ ! 
"$x" == *"-agentlib:jdwp"* ]] && STR+="$x " done printf "$STR">"$LAUNCH_FILE" else From db79e13f8f1e9d0e318d7a2e865ea783754eb1b7 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 4 Sep 2023 22:11:59 +0200 Subject: [PATCH 105/128] Disable version check on CI tests Signed-off-by: Paolo Di Tommaso --- validation/test.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/validation/test.sh b/validation/test.sh index c9f34225ad..a8734027aa 100755 --- a/validation/test.sh +++ b/validation/test.sh @@ -9,6 +9,7 @@ export NXF_IGNORE_WARN_DSL2=true export NXF_CMD=${NXF_CMD:-$(get_abs_filename ../launch.sh)} # disable ansi log to make log more readable export NXF_ANSI_LOG=false +export NXF_DISABLE_CHECK_LATEST=true # # Integration tests From a88a4245dc19f36957d7b38e6071639cc5803db2 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 5 Sep 2023 09:16:02 -0500 Subject: [PATCH 106/128] Update documentation of NXF_DEBUG (#4187) Signed-off-by: Ben Sherman --- .../main/groovy/nextflow/executor/BashWrapperBuilder.groovy | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index c37c21ae01..b70675ac88 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -75,9 +75,8 @@ class BashWrapperBuilder { /* * Env variable `NXF_DEBUG` is used to control debug options in executed BASH scripts * - 0: no debug - * - 1: dump current environment in the `.command.log` file - * - 2: trace the execution of user script adding the `set -x` flag - * - 3: trace the execution of wrapper scripts + * - 1: dump current environment in the `.command.log` file and trace the execution of user script + * - 2: trace the execution of wrapper scripts */ def str = System.getenv('NXF_DEBUG') try { From 
2daa4172bb679efedd6ea02234535b2237a41478 Mon Sep 17 00:00:00 2001 From: Robert Syme Date: Tue, 5 Sep 2023 15:06:54 -0400 Subject: [PATCH 107/128] Docs improvement to clarify the usage of the bin dir (#4121) Signed-off-by: Rob Syme Signed-off-by: Robert Syme Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- docs/sharing.md | 67 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 44 insertions(+), 23 deletions(-) diff --git a/docs/sharing.md b/docs/sharing.md index f1291381cc..a3f8a5b864 100644 --- a/docs/sharing.md +++ b/docs/sharing.md @@ -440,11 +440,52 @@ The use of a code management system is important to keep together all the depend Moreover to guarantee that a pipeline is reproducible it should be self-contained i.e. it should have ideally no dependencies on the hosting environment. By using Nextflow you can achieve this goal following these methods: -### Third party scripts +### Binary applications + +Docker allows you to ship any binary dependencies that you may have in your pipeline to a portable image that is downloaded on-demand and can be executed on any platform where a Docker engine is installed. + +In order to use it with Nextflow, create a Docker image containing the tools needed by your pipeline and make it available in the [Docker Hub](https://hub.docker.com). + +Then declare in the `nextflow.config` file, that you will include in your project, the name of the Docker image you have created. For example: + +```groovy +process.container = 'my-docker-image' +docker.enabled = true +``` + +In this way when you launch the pipeline execution, the Docker image will be automatically downloaded and used to run your tasks. + +Read the {ref}`container-page` page to learn more on how to use containers with Nextflow. 
+ +This mix of technologies makes it possible to write self-contained and truly reproducible pipelines which require zero configuration and can be reproduced in any system having a Java VM and a Docker engine installed. + +[^id2]: BitBucket provides two types of version control system: Git and Mercurial. Nextflow supports only Git repositories. + +### Bundling executables in the workflow + +In most cases, software dependencies should be provided by the execution environment ([container](./container.md), [conda](./conda.md)/[spack](./spack.md) environment, or host-native [modules](./process.md#module)). + +In cases where you do not wish to modify the execution environment(s), executable scripts can be included in the `bin/` directory in the workflow repository root. This can be useful to make changes that affect task execution across all environments with a single change. + +To ensure your scripts can be made available to the task: + +1. Write scripts in the `bin/` directory (relative to the project repository root) +2. Specify a portable shebang (see note below for details). +3. Ensure the scripts are executable. For example: `chmod a+x bin/my_script.py` + +:::{tip} +To maximize portability of your bundled script, it is recommended to avoid hard-coding the interpreter path in the shebang line. -Any third party script that does not need to be compiled (Bash, Python, Perl, etc) can be included in the pipeline project repository, so that they are distributed with it. +For example, shebang definitions `#!/usr/bin/python` and `#!/usr/local/bin/python` both hard-code specific paths to the python interpreter. To improve portability, rely on `env` to dynamically resolve the path to the interpreter. An example of the recommended approach is: -Grant the execute permission to these files and copy them into a folder named `bin/` in the root directory of your project repository. 
Nextflow will automatically add this folder to the `PATH` environment variable, and the scripts will automatically be accessible in your pipeline without the need to specify an absolute path to invoke them. +```bash +#!/usr/bin/env python +``` +::: + +### Using bundled executables in the workflow + +Nextflow will automatically add the `bin/` directory to the `PATH` environment variable, and the scripts will automatically be accessible in your pipeline without the need to specify an absolute path to invoke them. ### System environment @@ -495,23 +536,3 @@ The actual parameter values can be provided when launching the script execution nextflow run --my_input /path/to/input/file --my_output /other/path --my_flag true ``` -### Binary applications - -Docker allows you to ship any binary dependencies that you may have in your pipeline to a portable image that is downloaded on-demand and can be executed on any platform where a Docker engine is installed. - -In order to use it with Nextflow, create a Docker image containing the tools needed by your pipeline and make it available in the [Docker registry](https://registry.hub.docker.com). - -Then declare in the `nextflow.config` file, that you will include in your project, the name of the Docker image you have created. For example: - -```groovy -process.container = 'my-docker-image' -docker.enabled = true -``` - -In this way when you launch the pipeline execution, the Docker image will be automatically downloaded and used to run your tasks. - -Read the {ref}`container-page` page to learn more on how to use containers with Nextflow. - -This mix of technologies makes it possible to write self-contained and truly reproducible pipelines which require zero configuration and can be reproduced in any system having a Java VM and a Docker engine installed. - -[^id2]: BitBucket provides two types of version control system: Git and Mercurial. Nextflow supports only Git repositories. 
From fce9702e39f8dcf3641e4caa4dc1eb4ea6bb1149 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Tue, 5 Sep 2023 14:41:03 -0500 Subject: [PATCH 108/128] Report an error on duplicate workflow name definitions (#4088) Signed-off-by: Ben Sherman --- .../groovy/nextflow/script/ScriptMeta.groovy | 17 ++---- .../nextflow/script/ScriptIncludesTest.groovy | 32 +---------- .../nextflow/script/ScriptMetaTest.groovy | 57 +++++++++++++++++++ 3 files changed, 66 insertions(+), 40 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy index ffecebc534..726253eafe 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptMeta.groovy @@ -150,20 +150,15 @@ class ScriptMeta { } void checkComponentName(ComponentDef component, String name) { - if( component !instanceof ProcessDef && component !instanceof FunctionDef ) { + if( component !instanceof WorkflowDef && component !instanceof ProcessDef && component !instanceof FunctionDef ) { return } - if (functionsCount.get(name)) { - final msg = "A function with name '$name' is defined more than once in module script: $scriptPath -- Make sure to not define the same function as process" - if (NF.isStrictMode()) - throw new DuplicateModuleFunctionException(msg) - log.warn(msg) + if( functionsCount.get(name) ) { + throw new DuplicateModuleFunctionException("A function named '$name' is already defined or included in script: $scriptPath") } - if (imports.get(name)) { - final msg = "A process with name '$name' is defined more than once in module script: $scriptPath -- Make sure to not define the same function as process" - if (NF.isStrictMode()) - throw new DuplicateModuleFunctionException(msg) - log.warn(msg) + final existing = imports.get(name) + if( existing != null ) { + throw new DuplicateModuleFunctionException("A ${existing.type} named '$name' is already 
defined or included in script: $scriptPath") } } diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ScriptIncludesTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ScriptIncludesTest.groovy index d17af77534..d9e15c5099 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptIncludesTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptIncludesTest.groovy @@ -148,7 +148,7 @@ class ScriptIncludesTest extends Dsl2Spec { result.val == 'dlrow olleh' } - def 'should allows duplicate functions' () { + def 'should allow duplicate functions' () { given: NextflowMeta.instance.strictMode(true) and: @@ -184,7 +184,7 @@ class ScriptIncludesTest extends Dsl2Spec { NextflowMeta.instance.strictMode(false) } - def 'should allows multiple signatures of function' () { + def 'should allow multiple signatures of function' () { given: NextflowMeta.instance.strictMode(true) and: @@ -221,7 +221,7 @@ class ScriptIncludesTest extends Dsl2Spec { NextflowMeta.instance.strictMode(false) } - def 'should fails if no signatures of function founded' () { + def 'should fail if no signatures of function founded' () { given: NextflowMeta.instance.strictMode(true) and: @@ -773,32 +773,6 @@ class ScriptIncludesTest extends Dsl2Spec { noExceptionThrown() } - def 'should allows duplicate import' () { - given: - def folder = TestHelper.createInMemTempDir(); - def MOD1 = folder.resolve('mod1.nf') - def MOD2 = folder.resolve('mod2.nf') - def SCRIPT = folder.resolve('main.nf') - - MOD1.text = MOD2.text = ''' - process foo { - /hello/ - } - ''' - - SCRIPT.text = """ - include { foo } from './mod1' - include { foo } from './mod2' - println 'x' - """ - - when: - def runner = new MockScriptRunner() - runner.setScript(SCRIPT).execute() - then: - noExceptionThrown() - } - def 'should include only named component' () { given: def folder = Files.createTempDirectory('test') diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy 
b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy index fac10444f1..caf41312f2 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy @@ -1,9 +1,26 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package nextflow.script import java.nio.file.Files import groovy.transform.InheritConstructors import nextflow.NF +import nextflow.exception.DuplicateModuleFunctionException import test.Dsl2Spec import test.TestHelper @@ -177,4 +194,44 @@ class ScriptMetaTest extends Dsl2Spec { bundle.getEntries() == ['foo.txt', 'bar.txt'] as Set } + + def 'should throw duplicate name exception' () { + + given: + def script1 = new FooScript(new ScriptBinding()) + def script2 = new FooScript(new ScriptBinding()) + def meta1 = new ScriptMeta(script1) + def meta2 = new ScriptMeta(script2) + + // import module into main script + def func2 = new FunctionDef(name: 'func1', alias: 'func1') + def proc2 = new ProcessDef(script2, Mock(Closure), 'proc1') + def work2 = new WorkflowDef(name: 'work1') + meta2.addDefinition(proc2, func2, work2) + + meta1.addModule(meta2, null, null) + + // attempt to define duplicate components in main script + def func1 = new FunctionDef(name: 'func1', alias: 'func1') + def proc1 = new ProcessDef(script1, Mock(Closure), 'proc1') + def work1 = new WorkflowDef(name: 'work1') + + when: + 
meta1.addDefinition(func1) + then: + def e = thrown(DuplicateModuleFunctionException) + e.message.contains "A function named 'func1' is already defined" + + when: + meta1.addDefinition(proc1) + then: + e = thrown(DuplicateModuleFunctionException) + e.message.contains "A process named 'proc1' is already defined" + + when: + meta1.addDefinition(work1) + then: + e = thrown(DuplicateModuleFunctionException) + e.message.contains "A workflow named 'work1' is already defined" + } } From 2785ffe917be9954ec3e5352659765737f4a8022 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Tue, 5 Sep 2023 22:41:12 +0200 Subject: [PATCH 109/128] Fix failing test Signed-off-by: Paolo Di Tommaso --- .../src/test/groovy/nextflow/script/ScriptMetaTest.groovy | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy index caf41312f2..d94be89795 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptMetaTest.groovy @@ -209,7 +209,9 @@ class ScriptMetaTest extends Dsl2Spec { def work2 = new WorkflowDef(name: 'work1') meta2.addDefinition(proc2, func2, work2) - meta1.addModule(meta2, null, null) + meta1.addModule(meta2, 'func1', null) + meta1.addModule(meta2, 'proc1', null) + meta1.addModule(meta2, 'work1', null) // attempt to define duplicate components in main script def func1 = new FunctionDef(name: 'func1', alias: 'func1') From c3aa26e1770fc0aff1a96a6b473569d7e6f6b84f Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 7 Sep 2023 04:15:17 -0500 Subject: [PATCH 110/128] Improve pod options documentation (#4274) Signed-off-by: Ben Sherman --- docs/process.md | 149 ++++++++++++++++++++++++++++++++---------------- 1 file changed, 101 insertions(+), 48 deletions(-) diff --git a/docs/process.md b/docs/process.md index 4cd585ab94..a1496033d2 100644 --- 
a/docs/process.md +++ b/docs/process.md @@ -1859,7 +1859,7 @@ See also: [cpus](#cpus), [memory](#memory), [time](#time) ### pod -The `pod` directive allows the definition of pods specific settings, such as environment variables, secrets and config maps when using the {ref}`k8s-executor` executor. +The `pod` directive allows the definition of pod specific settings, such as environment variables, secrets, and config maps, when using the {ref}`k8s-executor` executor. For example: @@ -1873,9 +1873,9 @@ process your_task { } ``` -The above snippet defines an environment variable named `FOO` which value is `bar`. +The above snippet defines an environment variable named `FOO` whose value is `bar`. -When defined in the Nextflow configuration file, a pod setting can be defined using the canonical associative array syntax. For example: +When defined in the Nextflow configuration file, pod settings should be defined as maps. For example: ```groovy process { @@ -1883,106 +1883,159 @@ process { } ``` -When more than one setting needs to be provides they must be enclosed in a list definition as shown below: +Multiple pod settings can be provided as a list of maps: ```groovy process { - pod = [ [env: 'FOO', value: 'bar'], [secret: 'my-secret/key1', mountPath: '/etc/file.txt'] ] + pod = [ + [env: 'FOO', value: 'bar'], + [secret: 'my-secret/key1', mountPath: '/etc/file.txt'] + ] } ``` -The `pod` directive supports the following options: +The following options are available: -`affinity: ` +`affinity: ` : :::{versionadded} 22.01.0-edge ::: -: Specifies affinity for which nodes the process should run on. See [Kubernetes affinity](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity) for details. +: Specifies the pod [affinity](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity) with the given configuration. 
-`annotation: , value: ` +`annotation: '', value: ''` : *Can be specified multiple times* -: Defines a pod annotation with key `K` and value `V`. +: Defines a pod [annotation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) with the given name and value. -`automountServiceAccountToken: ` +`automountServiceAccountToken: true | false` : :::{versionadded} 22.01.0-edge ::: -: Specifies whether to [automount service account token](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/) into process pods. If `V` is true, service account token is automounted into task pods (default). +: Specifies whether to [automount service account token](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/#opt-out-of-api-credential-automounting) into the pod (default: `true`). -`config: , mountPath: ` +`config: '/', mountPath: ''` : *Can be specified multiple times* -: Mounts a [ConfigMap](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/) with name `C` with key `K` to the path `/absolute/path`. When the key component is omitted the path is interpreted as a directory and all the `ConfigMap` entries are exposed in that path. +: Mounts a [ConfigMap](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/) with name and optional key to the given path. If the key is omitted, the path is interpreted as a directory and all entries in the `ConfigMap` are exposed in that path. -`csi: , mountPath: ` +`csi: '', mountPath: ''` : :::{versionadded} 22.11.0-edge ::: : *Can be specified multiple times* -: Mounts a [CSI ephemeral volume](https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#csi-ephemeral-volumes) with config `V`to the path `/absolute/path`. +: Mounts a [CSI ephemeral volume](https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#csi-ephemeral-volumes) by name to the given path. 
-`emptyDir: , mountPath: ` +`emptyDir: , mountPath: ''` : :::{versionadded} 22.11.0-edge ::: : *Can be specified multiple times* -: Mounts an [emptyDir](https://kubernetes.io/docs/concepts/storage/volumes/#emptydir) with configuration `V` to the path `/absolute/path`. +: Mounts an [emptyDir](https://kubernetes.io/docs/concepts/storage/volumes/#emptydir) with the given configuration to the given path. -`env: , config: ` +`env: '', config: '/'` : *Can be specified multiple times* -: Defines an environment variable with name `E` and whose value is given by the entry associated to the key with name `K` in the [ConfigMap](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/) with name `C`. +: Defines an environment variable whose value is defined by the given [ConfigMap](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/) and key. -`env: , fieldPath: ` +`env: '', fieldPath: ''` : :::{versionadded} 21.09.1-edge ::: : *Can be specified multiple times* -: Defines an environment variable with name `E` and whose value is given by the `V` [field path](https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information/). +: Defines an environment variable whose value is defined by the given [field path](https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information/#use-pod-fields-as-values-for-environment-variables) value. 
-`env: , secret: ` +: For example, the following pod option: + + ```groovy + pod = [env: 'MY_NODE_NAME', fieldPath: 'spec.nodeName'] + ``` + + Maps to the following pod spec: + + ```yaml + env: + - name: MY_NODE_NAME + valueFrom: + fieldRef: + fieldPath: spec.nodeName + ``` + +`env: '', secret: '/'` : *Can be specified multiple times* -: Defines an environment variable with name `E` and whose value is given by the entry associated to the key with name `K` in the [Secret](https://kubernetes.io/docs/concepts/configuration/secret/) with name `S`. +: Defines an environment variable whose value is defined by the given [Secret](https://kubernetes.io/docs/concepts/configuration/secret/) and key. -`env: , value: ` +`env: '', value: ''` : *Can be specified multiple times* -: Defines an environment variable with name `E` and whose value is given by the `V` string. +: Defines an environment variable with the given name and value. -`imagePullPolicy: ` -: Specifies the strategy to be used to pull the container image e.g. `imagePullPolicy: 'Always'`. +`imagePullPolicy: 'IfNotPresent' | 'Always' | 'Never'` +: Specifies the [image pull policy](https://kubernetes.io/docs/concepts/containers/images/#image-pull-policy) used by the pod to pull the container image. -`imagePullSecret: ` -: Specifies the secret name to access a private container image registry. See [Kubernetes documentation](https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod) for details. +`imagePullSecret: ''` +: Specifies the [image pull secret](https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod) used to access a private container image registry. -`label: , value: ` +`label: '', value: ''` : *Can be specified multiple times* -: Defines a pod label with key `K` and value `V`. +: Defines a pod [label](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) with the given name and value. 
-`nodeSelector: ` -: Specifies which node the process will run on. See [Kubernetes nodeSelector](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#nodeselector) for details. +`nodeSelector: ` +: Specifies the [node selector](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#nodeselector) with the given configuration. -`priorityClassName: ` +: The configuration can be a map or a string: + + ```groovy + // map + pod = [nodeSelector: [disktype: 'ssd', cpu: 'intel']] + + // string + pod = [nodeSelector: 'disktype=ssd,cpu=intel'] + ``` + +`priorityClassName: ''` : :::{versionadded} 22.01.0-edge ::: : Specifies the [priority class name](https://kubernetes.io/docs/concepts/scheduling-eviction/pod-priority-preemption/) for pods. -`privileged: ` +`privileged: true | false` : :::{versionadded} 22.05.0-edge ::: -: Whether the process task should run as a *privileged* container (default: `false`) +: Specifies whether the pod should run as a *privileged* container (default: `false`). -`runAsUser: ` -: Specifies the user ID to be used to run the container. Shortcut for the `securityContext` option. +`runAsUser: ''` +: Specifies the user ID with which to run the container. Shortcut for the `securityContext` option. -`secret: , mountPath: ` +`secret: '/', mountPath: ''` : *Can be specified multiple times* -: Mounts a [Secret](https://kubernetes.io/docs/concepts/configuration/secret/) with name `S` with key `K` to the path `/absolute/path`. When the key component is omitted the path is interpreted as a directory and all the `Secret` entries are exposed in that path. +: Mounts a [Secret](https://kubernetes.io/docs/concepts/configuration/secret/) with name and optional key to the given path. If the key is omitted, the path is interpreted as a directory and all entries in the `Secret` are exposed in that path. -`securityContext: ` -: Specifies the pod security context. 
See [Kubernetes security context](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) for details. +`securityContext: ` +: Specifies the pod [security context](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) with the given configuration. -`toleration: ` +`toleration: ` : :::{versionadded} 22.04.0 ::: : *Can be specified multiple times* -: Specifies a toleration for a node taint. See [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/) for details. - -`volumeClaim: , mountPath: ` +: Specifies the pod [toleration](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/) with the given configuration. + +: The configuration should be a map corresponding to a single toleration rule. For example, the following pod options: + + ```groovy + pod = [ + [toleration: [key: 'key1', operator: 'Equal', value: 'value1', effect: 'NoSchedule']], + [toleration: [key: 'key1', operator: 'Exists', effect: 'NoSchedule']], + ] + ``` + + Maps to the following pod spec: + + ```yaml + tolerations: + - key: "key1" + operator: "Equal" + value: "value1" + effect: "NoSchedule" + - key: "key1" + operator: "Exists" + effect: "NoSchedule" + ``` + +`volumeClaim: '', mountPath: '' [, subPath: '', readOnly: true | false]` : *Can be specified multiple times* -: Mounts a [Persistent volume claim](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) with name `V` to the specified path location. Use the optional `subPath` parameter to mount a directory inside the referenced volume instead of its root. The volume may be mounted with `readOnly: true`, but is read/write by default. +: Mounts a [Persistent volume claim](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) with the given name to the given path. +: The `subPath` option can be used to mount a sub-directory of the volume instead of its root. 
+: The `readOnly` option can be used to mount the volume as read-only (default: `false`) (process-publishdir)= From 62686ce86a8d8f24706fbd57b435e91ab4288891 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Thu, 7 Sep 2023 10:27:30 -0500 Subject: [PATCH 111/128] Always emit publish event for cached task outputs (#4227) Signed-off-by: Ben Sherman --- .../src/main/groovy/nextflow/processor/PublishDir.groovy | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/PublishDir.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/PublishDir.groovy index e6f6b64223..8013f97fea 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/PublishDir.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/PublishDir.groovy @@ -378,11 +378,10 @@ class PublishDir { if( checkSourcePathConflicts(destination)) return - if( !overwrite ) - return - - FileHelper.deletePath(destination) - processFileImpl(source, destination) + if( overwrite ) { + FileHelper.deletePath(destination) + processFileImpl(source, destination) + } } notifyFilePublish(destination, source) From 23c4ec1d551cc55d5c35a70015aa137cfa7d5a56 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Thu, 7 Sep 2023 21:11:10 +0200 Subject: [PATCH 112/128] Add support for Spack to Singularity builds Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/build.gradle | 2 +- .../io/seqera/wave/plugin/WaveClient.groovy | 9 +- .../seqera/wave/plugin/WaveClientTest.groovy | 89 +++++++++++++++++++ 3 files changed, 95 insertions(+), 5 deletions(-) diff --git a/plugins/nf-wave/build.gradle b/plugins/nf-wave/build.gradle index ce15723239..28f5b904da 100644 --- a/plugins/nf-wave/build.gradle +++ b/plugins/nf-wave/build.gradle @@ -36,7 +36,7 @@ dependencies { api 'org.apache.commons:commons-lang3:3.12.0' api 'com.google.code.gson:gson:2.10.1' api 'org.yaml:snakeyaml:2.0' - api 'io.seqera:wave-utils:0.7.2' + api 'io.seqera:wave-utils:0.7.4' 
testImplementation(testFixtures(project(":nextflow"))) testImplementation "org.codehaus.groovy:groovy:3.0.19" diff --git a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy index f979da6d9f..1e02a5f9eb 100644 --- a/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy +++ b/plugins/nf-wave/src/main/io/seqera/wave/plugin/WaveClient.groovy @@ -449,8 +449,6 @@ class WaveClient { */ Path spackFile = null if( attrs.spack ) { - if( singularity ) - throw new IllegalArgumentException("Wave containers do not support (yet) the resolution of Spack package with Singularity") if( containerScript ) throw new IllegalArgumentException("Unexpected spack and dockerfile conflict while resolving wave container") @@ -462,7 +460,10 @@ class WaveClient { // create a minimal spack file with package spec from user input spackFile = spackPackagesToSpackFile(attrs.spack, config.spackOpts()) } - containerScript = spackFileToDockerFile(config.spackOpts()) + // create the container file to build the container + containerScript = singularity + ? 
spackFileToSingularityFile(config.spackOpts()) + : spackFileToDockerFile(config.spackOpts()) } /* @@ -571,7 +572,7 @@ class WaveClient { protected boolean isSpackFile(String value) { if( value.contains('\n') ) return false - return value.endsWith('.yaml') + return value.endsWith('.yaml') || value.endsWith('.yml') } protected boolean refreshJwtToken0(String refresh) { diff --git a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy index 5dcb1dce33..62468f8ad7 100644 --- a/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy +++ b/plugins/nf-wave/src/test/io/seqera/wave/plugin/WaveClientTest.groovy @@ -807,6 +807,95 @@ class WaveClientTest extends Specification { folder?.deleteDir() } + def 'should create assets with spack recipe for singularity' () { + given: + def session = Mock(Session) { getConfig() >> [wave:[build:[spack:[commands: ['cmd-foo','cmd-bar']]]]]} + and: + def task = Mock(TaskRun) {getConfig() >> [spack:"rseqc@3.0.1 'rbase@3.5'", arch:"amd64"] } + and: + def client = new WaveClient(session) + + when: + def assets = client.resolveAssets(task, null, true) + then: + assets.containerFile == '''\ + Bootstrap: docker + From: {{spack_runner_image}} + stage: final + + %files from build + /opt/spack-env /opt/spack-env + /opt/software /opt/software + /opt/._view /opt/._view + /opt/spack-env/z10_spack_environment.sh /.singularity.d/env/91-environment.sh + + %post + cmd-foo + cmd-bar + '''.stripIndent() + + and: + !assets.moduleResources + !assets.containerImage + !assets.containerConfig + !assets.condaFile + !assets.projectResources + and: + assets.spackFile.text == '''\ + spack: + specs: [rseqc@3.0.1, rbase@3.5] + concretizer: {unify: true, reuse: false} + '''.stripIndent(true) + } + + def 'should create asset with spack file for singularity' () { + given: + def folder = Files.createTempDirectory('test') + def spackFile = folder.resolve('spack.yml'); + 
spackFile.text = '''\ + spack: + specs: [rseqc@3.0.1, rbase@3.5] + concretizer: {unify: true, reuse: false} + '''.stripIndent(true) + and: + def session = Mock(Session) { getConfig() >> [wave:[build:[spack:[basePackages: 'nano@1.2.3']]]]} + def task = Mock(TaskRun) {getConfig() >> [spack:spackFile.toString()] } + and: + def client = new WaveClient(session) + + when: + def assets = client.resolveAssets(task, null, true) + then: + assets.containerFile == '''\ + Bootstrap: docker + From: {{spack_runner_image}} + stage: final + + %files from build + /opt/spack-env /opt/spack-env + /opt/software /opt/software + /opt/._view /opt/._view + /opt/spack-env/z10_spack_environment.sh /.singularity.d/env/91-environment.sh + + %post + '''.stripIndent() + and: + !assets.moduleResources + !assets.containerImage + !assets.containerConfig + !assets.projectResources + !assets.condaFile + and: + assets.spackFile.text == '''\ + spack: + specs: [rseqc@3.0.1, rbase@3.5, nano@1.2.3] + concretizer: {unify: true, reuse: false} + '''.stripIndent(true) + + cleanup: + folder?.deleteDir() + } + def 'should create assets with project resources' () { given: def MODULE_RES = Mock(ResourcesBundle) From c056a74ec08542f5d232de74f00ec844ae29e4a6 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 9 Sep 2023 08:54:40 +0200 Subject: [PATCH 113/128] Add scripts logging for troubleshooting Signed-off-by: Paolo Di Tommaso --- .../groovy/nextflow/script/ScriptRunner.groovy | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy index aee864c680..ea9bae557f 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptRunner.groovy @@ -131,9 +131,14 @@ class ScriptRunner { session.start() try { // parse the script - parseScript(scriptFile, entryName) - // 
run the code - run() + try { + parseScript(scriptFile, entryName) + // run the code + run() + } + finally { + log.debug "Parsed script files:${scriptFiles0()}" + } // await completion await() // shutdown session @@ -151,6 +156,13 @@ class ScriptRunner { return result } + protected String scriptFiles0() { + def result = '' + for( Map.Entry it : ScriptMeta.allScriptNames() ) + result += "\n ${it.key}: ${it.value.toUriString()}" + return result + } + /** * Test the name specified by the {@code methodName} * From bb96763f454d806c49411745c9fab5f1a482937c Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 9 Sep 2023 08:55:55 +0200 Subject: [PATCH 114/128] Use sipHash24 in place of deprecated murmur32 for script aliasing Signed-off-by: Paolo Di Tommaso --- .../groovy/nextflow/script/ScriptParser.groovy | 2 +- .../groovy/nextflow/util/LoggerHelper.groovy | 2 +- .../nextflow/script/ScriptParserTest.groovy | 2 +- .../nextflow/script/ScriptRunnerTest.groovy | 1 + .../groovy/nextflow/util/LoggerHelperTest.groovy | 16 ++++++++-------- 5 files changed, 12 insertions(+), 11 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy index 53952e3c3b..e491124003 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ScriptParser.groovy @@ -147,7 +147,7 @@ class ScriptParser { if( script instanceof CharSequence ) { final hash = Hashing - .murmur3_32() + .sipHash24() .newHasher() .putUnencodedChars(script.toString()) .hash() diff --git a/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy b/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy index 1e657d6952..33e28bc473 100644 --- a/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/util/LoggerHelper.groovy @@ -597,7 +597,7 @@ class LoggerHelper { 
} } - static private Pattern ERR_LINE_REGEX = ~/\((Script_[0-9a-f]{8}):(\d*)\)$/ + static private Pattern ERR_LINE_REGEX = ~/\((Script_[0-9a-f]{16}):(\d*)\)$/ @PackageScope static List getErrorLine( String str, Map allNames ) { diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ScriptParserTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ScriptParserTest.groovy index ab0ccf6a89..8235beb80a 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptParserTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptParserTest.groovy @@ -106,7 +106,7 @@ class ScriptParserTest extends Specification { when: def result = parser.computeClassName('process foo { etc } ') then: - result == 'Script_01af1441' + result == 'Script_dd540db41b3a8b2a' } def 'should set classpath' () { diff --git a/modules/nextflow/src/test/groovy/nextflow/script/ScriptRunnerTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/ScriptRunnerTest.groovy index 726682fa7c..c2554ff4bf 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/ScriptRunnerTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/ScriptRunnerTest.groovy @@ -248,6 +248,7 @@ class ScriptRunnerTest extends Dsl2Spec { runner.session.fault.error instanceof ProcessUnrecoverableException runner.session.fault.error.cause instanceof MissingPropertyException runner.session.fault.error.cause.message =~ /Unknown variable 'HELLO' -- .*/ + // if this fails, likely there's something wrong in the LoggerHelper#getErrorLine method runner.session.fault.report =~ /No such variable: HELLO -- .*/ } diff --git a/modules/nextflow/src/test/groovy/nextflow/util/LoggerHelperTest.groovy b/modules/nextflow/src/test/groovy/nextflow/util/LoggerHelperTest.groovy index 79d438e6ff..8b4a7169db 100644 --- a/modules/nextflow/src/test/groovy/nextflow/util/LoggerHelperTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/util/LoggerHelperTest.groovy @@ -41,9 +41,9 @@ class LoggerHelperTest 
extends Specification { given: final pwd = System.getProperty("user.dir") Map names = [ - 'Script_f1bbc0ef': Paths.get('/some/path/main.nf'), - 'Script_1b751fe9': Paths.get('/other/path/module.nf'), - 'Script_12345678': Paths.get("$pwd/foo/script.nf") + 'Script_f1bbc0eff1bbc0ef': Paths.get('/some/path/main.nf'), + 'Script_1b751fe91b751fe9': Paths.get('/other/path/module.nf'), + 'Script_1234567812345678': Paths.get("$pwd/foo/script.nf") ] expect: @@ -53,11 +53,11 @@ class LoggerHelperTest extends Specification { EXPECTED | LINE null | 'at nextflow.script.ScriptRunner.run(ScriptRunner.groovy:289)' null | 'at nextflow.script.BaseScript.run(BaseScript.groovy:151)' - ['/some/path/main.nf', '63'] | 'at Script_f1bbc0ef.runScript(Script_f1bbc0ef:63)' - null | 'at Script_1b751fe9$_runScript_closure1.doCall(Script_1b751fe9)' - ['/other/path/module.nf', '10'] | 'at Script_1b751fe9$_runScript_closure1.doCall(Script_1b751fe9:10)' - ['foo/script.nf', '55'] | 'at Script_12345678.runScript(Script_12345678:55)' - null | 'at Script_12345678.runScript(Script_xxxxxxxx:63)' + ['/some/path/main.nf', '63'] | 'at Script_f1bbc0ef.runScript(Script_f1bbc0eff1bbc0ef:63)' + null | 'at Script_1b751fe9$_runScript_closure1.doCall(Script_1b751fe91b751fe9)' + ['/other/path/module.nf', '10'] | 'at Script_1b751fe9$_runScript_closure1.doCall(Script_1b751fe91b751fe9:10)' + ['foo/script.nf', '55'] | 'at Script_12345678.runScript(Script_1234567812345678:55)' + null | 'at Script_12345678.runScript(Script_xxxxxxxxxxxxxxxx:63)' } From 80f7cd4636cdab929ff5bbb7fcb173e0a8bbcd16 Mon Sep 17 00:00:00 2001 From: Ben Sherman Date: Sun, 10 Sep 2023 03:24:51 -0500 Subject: [PATCH 115/128] Disable staging script for remote work dir (#4282) This commit disables the use of separate stage file for executors using a non-default system, e.g. object storage as work directory (including Fusion file system). The generation of stage file is still used for local and HPC batch schedulers. 
See https://github.com/nextflow-io/nextflow/issues/4279 Signed-off-by: Ben Sherman Signed-off-by: Paolo Di Tommaso Co-authored-by: Paolo Di Tommaso --- .../executor/BashWrapperBuilder.groovy | 4 +- .../batch/AwsBatchScriptLauncherTest.groovy | 43 +++++++++++++++++++ .../azure/batch/AzFileCopyStrategyTest.groovy | 5 ++- 3 files changed, 49 insertions(+), 3 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index b70675ac88..61eefeeb20 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -198,7 +198,9 @@ class BashWrapperBuilder { return null final header = "# stage input files\n" - if( stagingScript.size() >= stageFileThreshold.bytes ) { + // enable only when the stage uses the default file system, i.e. it's not a remote object storage file + // see https://github.com/nextflow-io/nextflow/issues/4279 + if( stageFile.fileSystem == FileSystems.default && stagingScript.size() >= stageFileThreshold.bytes ) { stageScript = stagingScript return header + "bash ${stageFile}" } diff --git a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy index 5bb60cefa2..9ce7ee13d8 100644 --- a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy +++ b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy @@ -16,11 +16,14 @@ package nextflow.cloud.aws.batch +import java.nio.file.FileSystems import java.nio.file.Files import java.nio.file.Paths import nextflow.Session +import nextflow.SysEnv import nextflow.cloud.aws.config.AwsConfig +import nextflow.cloud.aws.util.S3PathFactory import nextflow.processor.TaskBean import nextflow.util.Duration import 
spock.lang.Specification @@ -631,4 +634,44 @@ class AwsBatchScriptLauncherTest extends Specification { } + def 'should not create separate stage script' () { + given: + SysEnv.push([NXF_WRAPPER_STAGE_FILE_THRESHOLD: '100']) + and: + def workDir = S3PathFactory.parse('s3://my-bucket/work') + and: + def inputFiles = [ + 'sample_1.fq': Paths.get('/my-bucket/data/sample_1.fq'), + 'sample_2.fq': Paths.get('/my-bucket/data/sample_2.fq'), + ] + def stageScript = '''\ + # stage input files + downloads=(true) + rm -f sample_1.fq + rm -f sample_2.fq + rm -f .command.sh + downloads+=("nxf_s3_download s3://my-bucket/data/sample_1.fq sample_1.fq") + downloads+=("nxf_s3_download s3://my-bucket/data/sample_2.fq sample_2.fq") + downloads+=("nxf_s3_download s3://my-bucket/work/.command.sh .command.sh") + nxf_parallel "${downloads[@]}" + '''.stripIndent() + and: + def bean = [ + workDir: workDir, + targetDir: workDir, + inputFiles: inputFiles, + script: 'echo Hello world!' + ] as TaskBean + def opts = new AwsOptions() + def builder = new AwsBatchScriptLauncher(bean, opts) + + when: + def binding = builder.makeBinding() + then: + binding.stage_inputs == stageScript + + cleanup: + SysEnv.pop() + } + } diff --git a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy index bbca973ffd..0c710aa320 100644 --- a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy +++ b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy @@ -1,6 +1,6 @@ package nextflow.cloud.azure.batch -import java.nio.file.FileSystem + import java.nio.file.Path import java.nio.file.Paths import java.nio.file.attribute.BasicFileAttributes @@ -9,6 +9,7 @@ import java.nio.file.spi.FileSystemProvider import com.azure.storage.blob.BlobClient import nextflow.Session import nextflow.cloud.azure.config.AzConfig +import 
nextflow.cloud.azure.nio.AzFileSystem import nextflow.cloud.azure.nio.AzPath import nextflow.processor.TaskBean import spock.lang.Specification @@ -34,7 +35,7 @@ class AzFileCopyStrategyTest extends Specification { provider.getScheme() >> 'az' provider.readAttributes(_, _, _) >> attr - def fs = Mock(FileSystem) + def fs = Mock(AzFileSystem) fs.provider() >> provider fs.toString() >> ('az://' + bucket) def uri = GroovyMock(URI) From c1cc60dd48866857460912bd7b20098417c8eb28 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sat, 9 Sep 2023 09:10:23 +0200 Subject: [PATCH 116/128] Revert singularity|apptainer run as default Signed-off-by: Paolo Di Tommaso --- .../container/SingularityBuilder.groovy | 2 +- .../container/ApptainerBuilderTest.groovy | 50 +++++++++---------- .../container/SingularityBuilderTest.groovy | 50 +++++++++---------- .../executor/BashWrapperBuilderTest.groovy | 4 +- .../executor/fusion/FusionHelperTest.groovy | 6 +-- 5 files changed, 56 insertions(+), 56 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy index 5e0277ea60..5c2d74abfc 100644 --- a/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/container/SingularityBuilder.groovy @@ -60,7 +60,7 @@ class SingularityBuilder extends ContainerBuilder { } private String defaultRunCommand() { - final result = SysEnv.get("NXF_${getBinaryName().toUpperCase()}_RUN_COMMAND", 'run') + final result = SysEnv.get("NXF_${getBinaryName().toUpperCase()}_RUN_COMMAND", 'exec') if( result !in ['run','exec'] ) throw new IllegalArgumentException("Invalid singularity launch command '$result' - it should be either 'run' or 'exec'") return result diff --git a/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy 
b/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy index 9aa8199fa2..6552918ea0 100644 --- a/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/ApptainerBuilderTest.groovy @@ -27,9 +27,9 @@ import spock.lang.Unroll */ class ApptainerBuilderTest extends Specification { - def 'should get the legacy exec command line' () { + def 'should get the run command line and auto mounts disabled' () { given: - SysEnv.push(NXF_APPTAINER_RUN_COMMAND:'exec', NXF_APPTAINER_AUTO_MOUNTS:'false') + SysEnv.push(NXF_APPTAINER_RUN_COMMAND:'run', NXF_APPTAINER_AUTO_MOUNTS:'false') and: def path1 = Paths.get('/foo/data/file1') def path2 = Paths.get('/bar/data/file2') @@ -38,36 +38,36 @@ class ApptainerBuilderTest extends Specification { expect: new ApptainerBuilder('busybox') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid busybox' new ApptainerBuilder('busybox') .params(engineOptions: '-q -v') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer -q -v exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer -q -v run --no-home --pid busybox' new ApptainerBuilder('busybox') .params(runOptions: '--contain --writable') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid --contain --writable busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run 
--no-home --pid --contain --writable busybox' new ApptainerBuilder('ubuntu') .addMount(path1) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid ubuntu' new ApptainerBuilder('ubuntu') .addMount(path1) .addMount(path2) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .addMount(path1) .addMount(path1) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .addMount(path1) @@ -75,18 +75,18 @@ class ApptainerBuilderTest extends Specification { .params(autoMounts: true) .params(readOnlyInputs: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' new 
ApptainerBuilder('ubuntu') .addMount(path3) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .params(newPidNamespace: false) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home ubuntu' cleanup: SysEnv.pop() @@ -102,37 +102,37 @@ class ApptainerBuilderTest extends Specification { expect: new ApptainerBuilder('busybox') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B "$PWD" busybox' new ApptainerBuilder('busybox') .params(engineOptions: '-q -v') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer -q -v run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer -q -v exec --no-home --pid -B "$PWD" busybox' new ApptainerBuilder('busybox') .params(runOptions: '--contain --writable') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" --contain --writable busybox' + 
.runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B "$PWD" --contain --writable busybox' new ApptainerBuilder('ubuntu') .addMount(path1) .params(autoMounts: false) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid ubuntu' new ApptainerBuilder('ubuntu') .addMount(path1) .addMount(path2) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .addMount(path1) .addMount(path1) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .addMount(path1) @@ -140,18 +140,18 @@ class ApptainerBuilderTest extends Specification { .params(autoMounts: true) .params(readOnlyInputs: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" 
${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .addMount(path3) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' new ApptainerBuilder('ubuntu') .params(newPidNamespace: false) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home -B "$PWD" ubuntu' } @@ -201,12 +201,12 @@ class ApptainerBuilderTest extends Specification { .addEnv('X=1') .addEnv(ALPHA:'aaa', BETA: 'bbb') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} APPTAINERENV_X="1" APPTAINERENV_ALPHA="aaa" APPTAINERENV_BETA="bbb" apptainer run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} APPTAINERENV_X="1" APPTAINERENV_ALPHA="aaa" APPTAINERENV_BETA="bbb" apptainer exec --no-home --pid -B "$PWD" busybox' new ApptainerBuilder('busybox') .addEnv('CUDA_VISIBLE_DEVICES') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+APPTAINERENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} apptainer run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" 
${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+APPTAINERENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} apptainer exec --no-home --pid -B "$PWD" busybox' } @@ -216,17 +216,17 @@ class ApptainerBuilderTest extends Specification { when: def cmd = new ApptainerBuilder('ubuntu.img').build().getRunCommand() then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" ubuntu.img' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B "$PWD" ubuntu.img' when: cmd = new ApptainerBuilder('ubuntu.img').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" ubuntu.img bwa --this --that file.fastq' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B "$PWD" ubuntu.img bwa --this --that file.fastq' when: cmd = new ApptainerBuilder('ubuntu.img').params(entry:'/bin/sh').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer run --no-home --pid -B "$PWD" ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+APPTAINERENV_TMP="$TMP"} ${TMPDIR:+APPTAINERENV_TMPDIR="$TMPDIR"} apptainer exec --no-home --pid -B "$PWD" ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' } @Unroll diff --git a/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy index 6b412ea3b1..661f459a29 100644 --- 
a/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/container/SingularityBuilderTest.groovy @@ -28,9 +28,9 @@ import spock.lang.Unroll */ class SingularityBuilderTest extends Specification { - def 'should get the legacy exec command line' () { + def 'should get the run command line and auto mounts disable' () { given: - SysEnv.push(NXF_SINGULARITY_RUN_COMMAND:'exec', NXF_SINGULARITY_AUTO_MOUNTS:'false') + SysEnv.push(NXF_SINGULARITY_RUN_COMMAND:'run', NXF_SINGULARITY_AUTO_MOUNTS:'false') and: def path1 = Paths.get('/foo/data/file1') def path2 = Paths.get('/bar/data/file2') @@ -39,36 +39,36 @@ class SingularityBuilderTest extends Specification { expect: new SingularityBuilder('busybox') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid busybox' new SingularityBuilder('busybox') .params(engineOptions: '-q -v') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity -q -v exec --no-home --pid busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity -q -v run --no-home --pid busybox' new SingularityBuilder('busybox') .params(runOptions: '--contain --writable') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid --contain --writable busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid --contain --writable busybox' new 
SingularityBuilder('ubuntu') .addMount(path1) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid ubuntu' new SingularityBuilder('ubuntu') .addMount(path1) .addMount(path2) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .addMount(path1) .addMount(path1) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .addMount(path1) @@ -76,18 +76,18 @@ class SingularityBuilderTest extends Specification { .params(autoMounts: true) .params(readOnlyInputs: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' 
new SingularityBuilder('ubuntu') .addMount(path3) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .params(newPidNamespace: false) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home ubuntu' cleanup: SysEnv.pop() @@ -103,36 +103,36 @@ class SingularityBuilderTest extends Specification { expect: new SingularityBuilder('busybox') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B "$PWD" busybox' new SingularityBuilder('busybox') .params(engineOptions: '-q -v') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity -q -v run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity -q -v exec --no-home --pid -B "$PWD" busybox' new SingularityBuilder('busybox') .params(runOptions: '--contain --writable') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} 
singularity run --no-home --pid -B "$PWD" --contain --writable busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B "$PWD" --contain --writable busybox' new SingularityBuilder('ubuntu') .params(autoMounts: false) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid ubuntu' new SingularityBuilder('ubuntu') .addMount(path1) .addMount(path2) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /foo/data/file1 -B /bar/data/file2 -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .addMount(path1) .addMount(path1) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /foo/data/file1 -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .addMount(path1) @@ -140,19 +140,19 @@ class SingularityBuilderTest extends Specification { .params(autoMounts: true) .params(readOnlyInputs: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run 
--no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /foo/data/file1:/foo/data/file1:ro -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .addMount(path3) .params(autoMounts: true) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B /bar/data\\ file -B "$PWD" ubuntu' new SingularityBuilder('ubuntu') .params(newPidNamespace: false) .params(autoMounts: false) .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home ubuntu' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home ubuntu' } @@ -202,12 +202,12 @@ class SingularityBuilderTest extends Specification { .addEnv('X=1') .addEnv(ALPHA:'aaa', BETA: 'bbb') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} SINGULARITYENV_X="1" SINGULARITYENV_ALPHA="aaa" SINGULARITYENV_BETA="bbb" singularity run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} SINGULARITYENV_X="1" SINGULARITYENV_ALPHA="aaa" SINGULARITYENV_BETA="bbb" singularity exec --no-home --pid -B "$PWD" busybox' new SingularityBuilder('busybox') .addEnv('CUDA_VISIBLE_DEVICES') .build() - .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} 
${CUDA_VISIBLE_DEVICES:+SINGULARITYENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} singularity run --no-home --pid -B "$PWD" busybox' + .runCommand == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${CUDA_VISIBLE_DEVICES:+SINGULARITYENV_CUDA_VISIBLE_DEVICES="$CUDA_VISIBLE_DEVICES"} singularity exec --no-home --pid -B "$PWD" busybox' } @@ -217,17 +217,17 @@ class SingularityBuilderTest extends Specification { when: def cmd = new SingularityBuilder('ubuntu.img').build().getRunCommand() then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" ubuntu.img' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B "$PWD" ubuntu.img' when: cmd = new SingularityBuilder('ubuntu.img').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" ubuntu.img bwa --this --that file.fastq' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B "$PWD" ubuntu.img bwa --this --that file.fastq' when: cmd = new SingularityBuilder('ubuntu.img').params(entry:'/bin/sh').build().getRunCommand('bwa --this --that file.fastq') then: - cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity run --no-home --pid -B "$PWD" ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' + cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} singularity exec --no-home --pid -B "$PWD" ubuntu.img /bin/sh -c "cd $PWD; bwa --this --that file.fastq"' } @Unroll diff --git 
a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy index 4ded770ed5..2b4f290e00 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy @@ -977,7 +977,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [enabled: true, engine: 'singularity'] as ContainerConfig ).makeBinding() then: - binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity run --no-home --pid -B /work/dir docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity exec --no-home --pid -B /work/dir docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' } @@ -991,7 +991,7 @@ class BashWrapperBuilderTest extends Specification { containerConfig: [enabled: true, engine: 'singularity', entrypointOverride: true] as ContainerConfig ).makeBinding() then: - binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} ${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity run --no-home --pid -B /work/dir docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' + binding.launch_cmd == 'set +u; env - PATH="$PATH" ${TMP:+SINGULARITYENV_TMP="$TMP"} 
${TMPDIR:+SINGULARITYENV_TMPDIR="$TMPDIR"} ${NXF_TASK_WORKDIR:+SINGULARITYENV_NXF_TASK_WORKDIR="$NXF_TASK_WORKDIR"} singularity exec --no-home --pid -B /work/dir docker://ubuntu:latest /bin/bash -c "cd $PWD; eval $(nxf_container_env); /bin/bash -ue /work/dir/.command.sh"' binding.cleanup_cmd == "" binding.kill_cmd == '[[ "$pid" ]] && nxf_kill $pid' } diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy index 7dd00bb5b6..e1debac016 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/fusion/FusionHelperTest.groovy @@ -68,9 +68,9 @@ class FusionHelperTest extends Specification { [engine:'docker'] | [FOO:'one'] | 'image:2' | null | ['echo', 'hello'] | "docker run -i -e \"FOO=one\" --rm --privileged image:2 echo 'hello'" [engine:'docker'] | [FOO:'one'] | 'image:2' | '--this=that' | ['echo', 'hello'] | "docker run -i -e \"FOO=one\" --this=that --rm --privileged image:2 echo 'hello'" and: - [engine:'singularity'] | [:] | 'image:1' | null | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} singularity run --no-home --pid image:1 echo 'hello'" - [engine:'singularity'] | [FOO:'one'] | 'image:1' | null | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity run --no-home --pid image:1 echo 'hello'" - [engine:'singularity'] | [FOO:'one'] | 'image:1' | '--this=that' | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity run --no-home --pid --this=that image:1 echo 'hello'" + [engine:'singularity'] | [:] | 'image:1' | null | ['echo', 
'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} singularity exec --no-home --pid image:1 echo 'hello'" + [engine:'singularity'] | [FOO:'one'] | 'image:1' | null | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity exec --no-home --pid image:1 echo 'hello'" + [engine:'singularity'] | [FOO:'one'] | 'image:1' | '--this=that' | ['echo', 'hello'] | "set +u; env - PATH=\"\$PATH\" \${TMP:+SINGULARITYENV_TMP=\"\$TMP\"} \${TMPDIR:+SINGULARITYENV_TMPDIR=\"\$TMPDIR\"} SINGULARITYENV_FOO=\"one\" singularity exec --no-home --pid --this=that image:1 echo 'hello'" } From 07582f0b7361745e5ef100598a189c881a033062 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 10 Sep 2023 10:40:14 +0200 Subject: [PATCH 117/128] Make TraceRecord@store field public accessible [ci fast] Signed-off-by: Paolo Di Tommaso --- .../src/main/groovy/nextflow/trace/TraceRecord.groovy | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/trace/TraceRecord.groovy b/modules/nextflow/src/main/groovy/nextflow/trace/TraceRecord.groovy index 0662c8fb20..444e185843 100644 --- a/modules/nextflow/src/main/groovy/nextflow/trace/TraceRecord.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/trace/TraceRecord.groovy @@ -237,10 +237,11 @@ class TraceRecord implements Serializable { } } - @PackageScope Map store + Map getStore() { store } + @Memoized Set keySet() { FIELDS.keySet() @@ -401,7 +402,7 @@ class TraceRecord implements Serializable { } String toString() { - "${this.class.simpleName} ${store}" + "${this.class.simpleName} ${this.store}" } From 5686bf1da63261d03ff42f3fac3088877d9dc34b Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 10 Sep 2023 14:51:21 +0200 Subject: [PATCH 118/128] Add maxSubmitAwait (#3736) This commit adds a new 
directive named `maxSubmitAwait` that allows defining how long a task can be in a pending status waiting to be scheduled for execution by the underlying executor. When the timeout is reached, an execution error is returned. The usual `errorStrategy` directive can be used to control how to handle the error condition and, if required, attempt a new job submission. The `task.submitAttempt` attribute can be used to determine which submitted attempt failed. Signed-off-by: Paolo Di Tommaso --- docs/process.md | 27 +++++++++++ .../ProcessSubmitTimeoutException.groovy | 29 ++++++++++++ .../nextflow/processor/TaskConfig.groovy | 19 ++++++-- .../nextflow/processor/TaskHandler.groovy | 17 +++++++ .../processor/TaskPollingMonitor.groovy | 14 ++++-- .../nextflow/processor/TaskProcessor.groovy | 19 +++++--- .../groovy/nextflow/processor/TaskRun.groovy | 5 ++ .../nextflow/script/ProcessConfig.groovy | 1 + .../nextflow/processor/TaskConfigTest.groovy | 18 +++++++ .../nextflow/processor/TaskHandlerTest.groovy | 21 +++++++++ .../processor/TaskProcessorTest.groovy | 47 +++++++++++-------- 11 files changed, 184 insertions(+), 33 deletions(-) create mode 100644 modules/nextflow/src/main/groovy/nextflow/exception/ProcessSubmitTimeoutException.groovy diff --git a/docs/process.md b/docs/process.md index a1496033d2..58a2e1d9cf 100644 --- a/docs/process.md +++ b/docs/process.md @@ -1705,6 +1705,33 @@ process foo { See also: [cpus](#cpus) and [memory](#memory). +(process-maxsubmitawait)= + +### maxSubmitAwait (experimental) + +The `maxSubmitAwait` directive allows you to specify how long a task can remain in the submission queue without being executed. +Once this time has elapsed, the task execution will fail. + +When used along with the `retry` error strategy, it can be useful to re-schedule the task to a different queue or +resource requirement. 
For example: + +```groovy +process foo { + errorStrategy 'retry' + maxSubmitAwait '10 mins' + maxRetries 3 + queue "${task.submitAttempt==1 ? 'spot-compute' : 'on-demand-compute'}" + script: + ''' + your_job --here + ''' +} +``` + +In the above example the task is submitted to the `spot-compute` queue on the first attempt (`task.submitAttempt==1`). If the +task execution does not start within 10 minutes, a failure is reported and a new submission is attempted using the +queue named `on-demand-compute`. + (process-maxerrors)= ### maxErrors diff --git a/modules/nextflow/src/main/groovy/nextflow/exception/ProcessSubmitTimeoutException.groovy b/modules/nextflow/src/main/groovy/nextflow/exception/ProcessSubmitTimeoutException.groovy new file mode 100644 index 0000000000..e2ccbe923e --- /dev/null +++ b/modules/nextflow/src/main/groovy/nextflow/exception/ProcessSubmitTimeoutException.groovy @@ -0,0 +1,29 @@ +/* + * Copyright 2013-2019, Centre for Genomic Regulation (CRG) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nextflow.exception + +import groovy.transform.InheritConstructors + +/** + * Exception thrown when a task does not complete within + * the `time` requested time requirement + * + * @author Paolo Di Tommaso + */ +@InheritConstructors +class ProcessSubmitTimeoutException extends ProcessException { +} diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskConfig.groovy index cf88eab3af..3a24dc6c04 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskConfig.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskConfig.groovy @@ -269,8 +269,8 @@ class TaskConfig extends LazyMap implements Cloneable { getDiskResource()?.getRequest() } - Duration getTime() { - def value = get('time') + private Duration getDuration0(String key) { + def value = get(key) if( !value ) return null @@ -285,8 +285,17 @@ class TaskConfig extends LazyMap implements Cloneable { new Duration(value.toString().trim()) } catch( Exception e ) { - throw new AbortOperationException("Not a valid `time` value in process definition: $value") + throw new AbortOperationException("Not a valid `$key` value in process definition: $value") } + + } + + Duration getTime() { + return getDuration0('time') + } + + Duration getMaxSubmitAwait() { + return getDuration0('maxSubmitAwait') } boolean hasCpus() { @@ -406,6 +415,10 @@ class TaskConfig extends LazyMap implements Cloneable { } } + Integer getSubmitAttempt() { + get('submitAttempt') as Integer ?: 1 + } + Integer getAttempt() { get('attempt') as Integer ?: 1 } diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskHandler.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskHandler.groovy index 3a15b99b57..2db1e00470 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskHandler.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskHandler.groovy @@ -246,4 +246,21 @@ abstract class 
TaskHandler { task.processor.forksCount?.decrement() } + /** + * Check if the task submission could not be accomplished within the time specified via the + * `maxSubmitAwait` directive + * + * @return + * {@code true} if the task is in `submit` status after the amount of time specified + * via {@code maxSubmitAwait} directive has passed, otherwise {@code false} is returned. + */ + boolean isSubmitTimeout() { + final maxAwait = task.config.getMaxSubmitAwait() + if( !maxAwait ) + return false + final now = System.currentTimeMillis() + if( isSubmitted() && now-submitTimeMillis>maxAwait.millis ) + return true + return false + } } diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy index 17c2694c46..6b5d281ba8 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskPollingMonitor.groovy @@ -16,7 +16,6 @@ package nextflow.processor - import java.util.concurrent.LinkedBlockingQueue import java.util.concurrent.TimeUnit import java.util.concurrent.locks.Condition @@ -27,6 +26,7 @@ import com.google.common.util.concurrent.RateLimiter import groovy.transform.CompileStatic import groovy.util.logging.Slf4j import nextflow.Session +import nextflow.exception.ProcessSubmitTimeoutException import nextflow.executor.BatchCleanup import nextflow.executor.GridTaskHandler import nextflow.util.Duration @@ -611,14 +611,22 @@ class TaskPollingMonitor implements TaskMonitor { } // check if it is terminated - if( handler.checkIfCompleted() ) { - log.debug "Task completed > $handler" + boolean timeout=false + if( handler.checkIfCompleted() || (timeout=handler.isSubmitTimeout()) ) { + final state = timeout ? 
'timed-out' : 'completed' + log.debug "Task $state > $handler" // decrement forks count handler.decProcessForks() // since completed *remove* the task from the processing queue evict(handler) + // check if submit timeout is reached + if( timeout ) { + try { handler.kill() } catch( Throwable t ) { log.warn("Unable to cancel task ${handler.task.lazyName()}", t) } + handler.task.error = new ProcessSubmitTimeoutException("Task '${handler.task.lazyName()}' could not be submitted within specified 'maxAwait' time: ${handler.task.config.getMaxSubmitAwait()}") + } + // finalize the tasks execution final fault = handler.task.processor.finalizeTask(handler.task) diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy index bc3ae5e711..df320fe01f 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy @@ -61,6 +61,7 @@ import nextflow.exception.MissingFileException import nextflow.exception.MissingValueException import nextflow.exception.ProcessException import nextflow.exception.ProcessFailedException +import nextflow.exception.ProcessSubmitTimeoutException import nextflow.exception.ProcessRetryableException import nextflow.exception.ProcessUnrecoverableException import nextflow.exception.ShowOnlyExceptionMessage @@ -1037,8 +1038,11 @@ class TaskProcessor { return RETRY } - final int taskErrCount = task ? ++task.failCount : 0 - final int procErrCount = ++errorCount + final submitTimeout = error.cause instanceof ProcessSubmitTimeoutException + final submitErrMsg = submitTimeout ? error.cause.message : null + final int submitRetries = submitTimeout ? ++task.submitRetries : 0 + final int taskErrCount = !submitTimeout && task ? ++task.failCount : 0 + final int procErrCount = !submitTimeout ? 
++errorCount : errorCount // -- when is a task level error and the user has chosen to ignore error, // just report and error message and DO NOT stop the execution @@ -1048,11 +1052,11 @@ class TaskProcessor { task.config.errorCount = procErrCount task.config.retryCount = taskErrCount - errorStrategy = checkErrorStrategy(task, error, taskErrCount, procErrCount) + errorStrategy = checkErrorStrategy(task, error, taskErrCount, procErrCount, submitRetries) if( errorStrategy.soft ) { - def msg = "[$task.hashLog] NOTE: $error.message" + def msg = "[$task.hashLog] NOTE: ${submitTimeout ? submitErrMsg : error.message}" if( errorStrategy == IGNORE ) msg += " -- Error is ignored" - else if( errorStrategy == RETRY ) msg += " -- Execution is retried ($taskErrCount)" + else if( errorStrategy == RETRY ) msg += " -- Execution is retried (${submitTimeout ? submitRetries : taskErrCount})" log.info msg task.failed = true task.errorAction = errorStrategy @@ -1107,7 +1111,7 @@ class TaskProcessor { : name } - protected ErrorStrategy checkErrorStrategy( TaskRun task, ProcessException error, final int taskErrCount, final int procErrCount ) { + protected ErrorStrategy checkErrorStrategy( TaskRun task, ProcessException error, final int taskErrCount, final int procErrCount, final submitRetries ) { final action = task.config.getErrorStrategy() @@ -1126,11 +1130,12 @@ class TaskProcessor { final int maxErrors = task.config.getMaxErrors() final int maxRetries = task.config.getMaxRetries() - if( (procErrCount < maxErrors || maxErrors == -1) && taskErrCount <= maxRetries ) { + if( (procErrCount < maxErrors || maxErrors == -1) && taskErrCount <= maxRetries && submitRetries <= maxRetries ) { final taskCopy = task.makeCopy() session.getExecService().submit({ try { taskCopy.config.attempt = taskErrCount+1 + taskCopy.config.submitAttempt = submitRetries+1 taskCopy.runType = RunType.RETRY taskCopy.resolve(taskBody) checkCachedOrLaunchTask( taskCopy, taskCopy.hash, false ) diff --git 
a/modules/nextflow/src/main/groovy/nextflow/processor/TaskRun.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskRun.groovy index fd2099f2f6..d4c8f7da32 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskRun.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskRun.groovy @@ -304,6 +304,11 @@ class TaskRun implements Cloneable { */ volatile int failCount + /** + * The number of times the submit of the task has been retried + */ + volatile int submitRetries + /** * Mark the task as failed */ diff --git a/modules/nextflow/src/main/groovy/nextflow/script/ProcessConfig.groovy b/modules/nextflow/src/main/groovy/nextflow/script/ProcessConfig.groovy index 5e3ddaa42c..d88a11cf1d 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/ProcessConfig.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/ProcessConfig.groovy @@ -65,6 +65,7 @@ class ProcessConfig implements Map, Cloneable { 'machineType', 'queue', 'label', + 'maxSubmitAwait', 'maxErrors', 'maxForks', 'maxRetries', diff --git a/modules/nextflow/src/test/groovy/nextflow/processor/TaskConfigTest.groovy b/modules/nextflow/src/test/groovy/nextflow/processor/TaskConfigTest.groovy index d290723992..b08595666f 100644 --- a/modules/nextflow/src/test/groovy/nextflow/processor/TaskConfigTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/processor/TaskConfigTest.groovy @@ -233,6 +233,24 @@ class TaskConfigTest extends Specification { } + def 'test max submit await'() { + + when: + def config = new TaskConfig() + config.maxSubmitAwait = value + + then: + config.maxSubmitAwait == expected + config.getMaxSubmitAwait() == expected + + where: + expected || value + null || null + new Duration('1s') || 1000 + new Duration('2h') || '2h' + + } + def testGetMemory() { when: diff --git a/modules/nextflow/src/test/groovy/nextflow/processor/TaskHandlerTest.groovy b/modules/nextflow/src/test/groovy/nextflow/processor/TaskHandlerTest.groovy index 1846fb0838..0225478a21 
100644 --- a/modules/nextflow/src/test/groovy/nextflow/processor/TaskHandlerTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/processor/TaskHandlerTest.groovy @@ -207,4 +207,25 @@ class TaskHandlerTest extends Specification { handler.task.processor.getForksCount().intValue() == COUNTER -1 } + def 'should validate is submit timeout' () { + given: + def handler = Spy(TaskHandler) + handler.status = TaskStatus.SUBMITTED + handler.task = Mock(TaskRun) { + getConfig() >> Mock(TaskConfig) { getMaxSubmitAwait() >> Duration.of('500ms') } + } + + when: + def timeout = handler.isSubmitTimeout() + then: + !timeout + + when: + sleep 1_000 + and: + timeout = handler.isSubmitTimeout() + then: + timeout + + } } diff --git a/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy b/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy index 4a075faa8c..6d8af3ed75 100644 --- a/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy @@ -483,19 +483,19 @@ class TaskProcessorTest extends Specification { task = new TaskRun() task.config = new TaskConfig() then: - proc.checkErrorStrategy(task, error, 1,1) == ErrorStrategy.TERMINATE + proc.checkErrorStrategy(task, error, 1, 1, 0) == ErrorStrategy.TERMINATE when: task = new TaskRun() task.config = new TaskConfig(errorStrategy: 'ignore') then: - proc.checkErrorStrategy(task, error, 10, 10) == ErrorStrategy.IGNORE + proc.checkErrorStrategy(task, error, 10, 10, 0) == ErrorStrategy.IGNORE when: task = new TaskRun() task.config = new TaskConfig(errorStrategy: 'finish') then: - proc.checkErrorStrategy(task, error, 1, 1) == ErrorStrategy.FINISH + proc.checkErrorStrategy(task, error, 1, 1, 0) == ErrorStrategy.FINISH } @@ -509,19 +509,19 @@ class TaskProcessorTest extends Specification { task = new TaskRun() task.config = new TaskConfig(errorStrategy: 'retry') then: - 
proc.checkErrorStrategy(task, error, 1, 1) == ErrorStrategy.TERMINATE + proc.checkErrorStrategy(task, error, 1, 1, 0) == ErrorStrategy.TERMINATE when: task = new TaskRun() task.config = new TaskConfig(errorStrategy: 'ignore') then: - proc.checkErrorStrategy(task, error, 1, 1) == ErrorStrategy.TERMINATE + proc.checkErrorStrategy(task, error, 1, 1, 0) == ErrorStrategy.TERMINATE when: task = new TaskRun() task.config = new TaskConfig(errorStrategy: 'finish') then: - proc.checkErrorStrategy(task, error, 1, 1) == ErrorStrategy.FINISH + proc.checkErrorStrategy(task, error, 1, 1, 0) == ErrorStrategy.FINISH } @@ -540,23 +540,30 @@ class TaskProcessorTest extends Specification { when: task = new TaskRun(context: new TaskContext(holder: [:])) - task.config = new TaskConfig(errorStrategy:'retry', maxErrors: max_errors, maxRetries: max_retries ) + task.config = new TaskConfig(errorStrategy: 'retry', maxErrors: MAX_ERRORS, maxRetries: MAX_RETRIES ) then: - proc.checkErrorStrategy(task, error, task_err_count , proc_err_count) == strategy + proc.checkErrorStrategy(task, error, TASK_ERR_COUNT , PROC_ERR_COUNT, SUBMIT_RETRIES) == EXPECTED where: - max_retries | max_errors | task_err_count | proc_err_count | strategy - 1 | 3 | 0 | 0 | ErrorStrategy.RETRY - 1 | 3 | 1 | 0 | ErrorStrategy.RETRY - 1 | 3 | 2 | 0 | ErrorStrategy.TERMINATE - 1 | 3 | 0 | 1 | ErrorStrategy.RETRY - 1 | 3 | 0 | 2 | ErrorStrategy.RETRY - 1 | 3 | 0 | 3 | ErrorStrategy.TERMINATE - 3 | -1 | 0 | 0 | ErrorStrategy.RETRY - 3 | -1 | 1 | 1 | ErrorStrategy.RETRY - 3 | -1 | 2 | 2 | ErrorStrategy.RETRY - 3 | -1 | 3 | 9 | ErrorStrategy.RETRY - 3 | -1 | 4 | 9 | ErrorStrategy.TERMINATE + MAX_RETRIES | MAX_ERRORS | TASK_ERR_COUNT | PROC_ERR_COUNT | SUBMIT_RETRIES | EXPECTED + 1 | 3 | 0 | 0 | 0 | ErrorStrategy.RETRY + 1 | 3 | 1 | 0 | 0 | ErrorStrategy.RETRY + 1 | 3 | 2 | 0 | 0 | ErrorStrategy.TERMINATE + 1 | 3 | 0 | 1 | 0 | ErrorStrategy.RETRY + 1 | 3 | 0 | 2 | 0 | ErrorStrategy.RETRY + 1 | 3 | 0 | 3 | 0 | 
ErrorStrategy.TERMINATE + 3 | -1 | 0 | 0 | 0 | ErrorStrategy.RETRY + 3 | -1 | 1 | 1 | 0 | ErrorStrategy.RETRY + 3 | -1 | 2 | 2 | 0 | ErrorStrategy.RETRY + 3 | -1 | 3 | 9 | 0 | ErrorStrategy.RETRY + 3 | -1 | 4 | 9 | 0 | ErrorStrategy.TERMINATE + and: + // terminates when the submit retries is greater than the max retries + 1 | -1 | 0 | 0 | 1 | ErrorStrategy.RETRY + 1 | -1 | 0 | 0 | 2 | ErrorStrategy.TERMINATE + 3 | -1 | 0 | 0 | 2 | ErrorStrategy.RETRY + 3 | -1 | 0 | 0 | 2 | ErrorStrategy.RETRY + 3 | -1 | 0 | 0 | 4 | ErrorStrategy.TERMINATE } From 42504d3c83145e16179e22df83e9d35ecc995eca Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Sun, 10 Sep 2023 21:27:01 +0200 Subject: [PATCH 119/128] Add support for inputs and outputs arity Signed-off-by: Paolo Di Tommaso --- docs/process.md | 59 ++++-- .../exception/IllegalArityException.groovy | 31 +++ .../nextflow/processor/TaskProcessor.groovy | 28 ++- .../nextflow/script/params/ArityParam.groovy | 97 ++++++++++ .../nextflow/script/params/BaseInParam.groovy | 2 +- .../nextflow/script/params/FileInParam.groovy | 2 +- .../script/params/FileOutParam.groovy | 2 +- .../processor/TaskProcessorTest.groovy | 182 +++++++++++++++++- .../script/params/ArityParamTest.groovy | 64 ++++++ .../script/params/ParamsInTest.groovy | 11 +- .../script/params/ParamsOutTest.groovy | 8 +- tests/checks/process-arity.nf/.checks | 16 ++ tests/process-arity.nf | 34 ++++ 13 files changed, 503 insertions(+), 33 deletions(-) create mode 100644 modules/nextflow/src/main/groovy/nextflow/exception/IllegalArityException.groovy create mode 100644 modules/nextflow/src/main/groovy/nextflow/script/params/ArityParam.groovy create mode 100644 modules/nextflow/src/test/groovy/nextflow/script/params/ArityParamTest.groovy create mode 100644 tests/checks/process-arity.nf/.checks create mode 100644 tests/process-arity.nf diff --git a/docs/process.md b/docs/process.md index 58a2e1d9cf..183b0c7f08 100644 --- a/docs/process.md +++ b/docs/process.md @@ -471,22 
+471,41 @@ workflow { } ``` -The `stageAs` option allows you to control how the file should be named in the task work directory. You can provide a specific name or a pattern as described in the [Multiple input files](#multiple-input-files) section: +Available options: -```groovy -process foo { +`arity` +: :::{versionadded} 23.09.0-edge + ::: +: Specify the number of expected files. Can be a number or a range: + + ```groovy input: - path x, stageAs: 'data.txt' + path('one.txt', arity: '1') // exactly one file is expected + path('pair_*.txt', arity: '2') // exactly two files are expected + path('many_*.txt', arity: '1..*') // one or more files are expected + ``` - """ - your_command --in data.txt - """ -} + When a task is created, Nextflow will check whether the received files for each path input match the declared arity, and fail if they do not. -workflow { - foo('/some/data/file.txt') -} -``` +`stageAs` +: Specify how the file should be named in the task work directory: + + ```groovy + process foo { + input: + path x, stageAs: 'data.txt' + + """ + your_command --in data.txt + """ + } + + workflow { + foo('/some/data/file.txt') + } + ``` + + Can be a name or a pattern as described in the [Multiple input files](#multiple-input-files) section. :::{note} Process `path` inputs have nearly the same interface as described in {ref}`script-file-io`, with one difference which is relevant when files are staged into a subdirectory. Given the following input: @@ -922,6 +941,22 @@ In the above example, the `randomNum` process creates a file named `result.txt` Available options: +`arity` +: :::{versionadded} 23.09.0-edge + ::: +: Specify the number of expected files. 
Can be a number or a range: + + ```groovy + output: + path('one.txt', arity: '1') // exactly one file is expected + path('pair_*.txt', arity: '2') // exactly two files are expected + path('many_*.txt', arity: '1..*') // one or more files are expected + ``` + + When a task completes, Nextflow will check whether the produced files for each path output match the declared arity, + and fail if they do not. If the arity is `1`, a sole file object will be emitted. Otherwise, a list will always be emitted, + even if only one file is produced. + `followLinks` : When `true` target files are return in place of any matching symlink (default: `true`) diff --git a/modules/nextflow/src/main/groovy/nextflow/exception/IllegalArityException.groovy b/modules/nextflow/src/main/groovy/nextflow/exception/IllegalArityException.groovy new file mode 100644 index 0000000000..f11f8f3cb2 --- /dev/null +++ b/modules/nextflow/src/main/groovy/nextflow/exception/IllegalArityException.groovy @@ -0,0 +1,31 @@ +/* + * Copyright 2013-2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package nextflow.exception + +import groovy.transform.CompileStatic +import groovy.transform.InheritConstructors + +/** + * Exception thrown when input/output arity check fails + * + * @author Paolo Di Tommaso + */ +@CompileStatic +@InheritConstructors +class IllegalArityException extends ProcessUnrecoverableException { +} diff --git a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy index df320fe01f..ec00c62d56 100644 --- a/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/processor/TaskProcessor.groovy @@ -57,12 +57,13 @@ import nextflow.ast.TaskTemplateVarsXform import nextflow.cloud.CloudSpotTerminationException import nextflow.dag.NodeMarker import nextflow.exception.FailedGuardException +import nextflow.exception.IllegalArityException import nextflow.exception.MissingFileException import nextflow.exception.MissingValueException import nextflow.exception.ProcessException import nextflow.exception.ProcessFailedException -import nextflow.exception.ProcessSubmitTimeoutException import nextflow.exception.ProcessRetryableException +import nextflow.exception.ProcessSubmitTimeoutException import nextflow.exception.ProcessUnrecoverableException import nextflow.exception.ShowOnlyExceptionMessage import nextflow.exception.UnexpectedException @@ -1570,7 +1571,6 @@ class TaskProcessor { task.setOutput(param, stdout) } - protected void collectOutFiles( TaskRun task, FileOutParam param, Path workDir, Map context ) { final List allFiles = [] @@ -1594,7 +1594,7 @@ class TaskProcessor { else { def path = param.glob ? splitter.strip(filePattern) : filePattern def file = workDir.resolve(path) - def exists = param.followLinks ? 
file.exists() : file.exists(LinkOption.NOFOLLOW_LINKS) + def exists = checkFileExists(file, param.followLinks) if( exists ) result = [file] else @@ -1604,7 +1604,7 @@ class TaskProcessor { if( result ) allFiles.addAll(result) - else if( !param.optional ) { + else if( !param.optional && (!param.arity || param.arity.min > 0) ) { def msg = "Missing output file(s) `$filePattern` expected by process `${safeTaskName(task)}`" if( inputsRemovedFlag ) msg += " (note: input files are not included in the default matching set)" @@ -1612,10 +1612,16 @@ class TaskProcessor { } } - task.setOutput( param, allFiles.size()==1 ? allFiles[0] : allFiles ) + if( !param.isValidArity(allFiles.size()) ) + throw new IllegalArityException("Incorrect number of output files for process `${safeTaskName(task)}` -- expected ${param.arity}, found ${allFiles.size()}") + + task.setOutput( param, allFiles.size()==1 && param.isSingle() ? allFiles[0] : allFiles ) } + protected boolean checkFileExists(Path file, boolean followLinks) { + followLinks ? 
file.exists() : file.exists(LinkOption.NOFOLLOW_LINKS) + } protected void collectOutValues( TaskRun task, ValueOutParam param, Map ctx ) { @@ -1814,7 +1820,7 @@ class TaskProcessor { if( obj instanceof Path ) return obj - if( !obj == null ) + if( obj == null ) throw new ProcessUnrecoverableException("Path value cannot be null") if( !(obj instanceof CharSequence) ) @@ -1856,10 +1862,10 @@ class TaskProcessor { return files } - protected singleItemOrList( List items, ScriptType type ) { + protected singleItemOrList( List items, boolean single, ScriptType type ) { assert items != null - if( items.size() == 1 ) { + if( items.size() == 1 && single ) { return makePath(items[0],type) } @@ -2059,7 +2065,11 @@ class TaskProcessor { final fileParam = param as FileInParam final normalized = normalizeInputToFiles(val, count, fileParam.isPathQualifier(), batch) final resolved = expandWildcards( fileParam.getFilePattern(ctx), normalized ) - ctx.put( param.name, singleItemOrList(resolved, task.type) ) + + if( !param.isValidArity(resolved.size()) ) + throw new IllegalArityException("Incorrect number of input files for process `${safeTaskName(task)}` -- expected ${param.arity}, found ${resolved.size()}") + + ctx.put( param.name, singleItemOrList(resolved, param.isSingle(), task.type) ) count += resolved.size() for( FileHolder item : resolved ) { Integer num = allNames.getOrCreate(item.stageName, 0) +1 diff --git a/modules/nextflow/src/main/groovy/nextflow/script/params/ArityParam.groovy b/modules/nextflow/src/main/groovy/nextflow/script/params/ArityParam.groovy new file mode 100644 index 0000000000..3c1a425288 --- /dev/null +++ b/modules/nextflow/src/main/groovy/nextflow/script/params/ArityParam.groovy @@ -0,0 +1,97 @@ +/* + * Copyright 2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nextflow.script.params + +import groovy.transform.CompileStatic +import groovy.transform.EqualsAndHashCode +import nextflow.exception.IllegalArityException + +/** + * Implements an arity option for process inputs and outputs. + * + * @author Ben Sherman + */ +@CompileStatic +trait ArityParam { + + Range arity + + Range getArity() { arity } + + def setArity(String value) { + if( value.isInteger() ) { + def n = value.toInteger() + this.arity = new Range(n, n) + return this + } + + final tokens = value.tokenize('..') + if( tokens.size() == 2 ) { + final min = tokens[0] + final max = tokens[1] + if( min.isInteger() && (max == '*' || max.isInteger()) ) { + this.arity = new Range( + min.toInteger(), + max == '*' ? Integer.MAX_VALUE : max.toInteger() + ) + return this + } + } + + throw new IllegalArityException("Path arity should be a number (e.g. '1') or a range (e.g. '1..*')") + } + + /** + * Determine whether a single output file should be unwrapped. 
+ */ + boolean isSingle() { + return !arity || arity.max == 1 + } + + boolean isValidArity(int size) { + return !arity || arity.contains(size) + } + + @EqualsAndHashCode + static class Range { + int min + int max + + Range(int min, int max) { + if( min<0 ) + throw new IllegalArityException("Path arity min value must be greater or equals to 0") + if( max<1 ) + throw new IllegalArityException("Path arity max value must be greater or equals to 1") + if( min==0 && max==1 ) + throw new IllegalArityException("Path arity 0..1 is not allowed") + this.min = min + this.max = max + } + + boolean contains(int value) { + min <= value && value <= max + } + + @Override + String toString() { + min == max + ? min.toString() + : "${min}..${max == Integer.MAX_VALUE ? '*' : max}".toString() + } + } + +} diff --git a/modules/nextflow/src/main/groovy/nextflow/script/params/BaseInParam.groovy b/modules/nextflow/src/main/groovy/nextflow/script/params/BaseInParam.groovy index f8c40aec2c..12d396414f 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/params/BaseInParam.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/params/BaseInParam.groovy @@ -54,7 +54,7 @@ abstract class BaseInParam extends BaseParam implements InParam { return inChannel } - BaseInParam(ProcessConfig config ) { + BaseInParam( ProcessConfig config ) { this(config.getOwnerScript().getBinding(), config.getInputs()) } diff --git a/modules/nextflow/src/main/groovy/nextflow/script/params/FileInParam.groovy b/modules/nextflow/src/main/groovy/nextflow/script/params/FileInParam.groovy index b52f8dad39..8bebf83d70 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/params/FileInParam.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/params/FileInParam.groovy @@ -28,7 +28,7 @@ import nextflow.script.TokenVar */ @Slf4j @InheritConstructors -class FileInParam extends BaseInParam implements PathQualifier { +class FileInParam extends BaseInParam implements ArityParam, PathQualifier { 
protected filePattern diff --git a/modules/nextflow/src/main/groovy/nextflow/script/params/FileOutParam.groovy b/modules/nextflow/src/main/groovy/nextflow/script/params/FileOutParam.groovy index e305c46c3e..a55cd957c7 100644 --- a/modules/nextflow/src/main/groovy/nextflow/script/params/FileOutParam.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/script/params/FileOutParam.groovy @@ -33,7 +33,7 @@ import nextflow.util.BlankSeparatedList */ @Slf4j @InheritConstructors -class FileOutParam extends BaseOutParam implements OutParam, OptionalParam, PathQualifier { +class FileOutParam extends BaseOutParam implements OutParam, ArityParam, OptionalParam, PathQualifier { /** * ONLY FOR TESTING DO NOT USE diff --git a/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy b/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy index 6d8af3ed75..131cc6d0a9 100644 --- a/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/processor/TaskProcessorTest.groovy @@ -26,6 +26,8 @@ import groovyx.gpars.agent.Agent import nextflow.Global import nextflow.ISession import nextflow.Session +import nextflow.exception.IllegalArityException +import nextflow.exception.MissingFileException import nextflow.exception.ProcessException import nextflow.exception.ProcessUnrecoverableException import nextflow.executor.Executor @@ -37,6 +39,7 @@ import nextflow.script.BodyDef import nextflow.script.ProcessConfig import nextflow.script.ScriptType import nextflow.script.bundle.ResourcesBundle +import nextflow.script.params.FileInParam import nextflow.script.params.FileOutParam import nextflow.util.ArrayBag import nextflow.util.CacheHelper @@ -44,7 +47,6 @@ import nextflow.util.MemoryUnit import spock.lang.Specification import spock.lang.Unroll import test.TestHelper - /** * * @author Paolo Di Tommaso @@ -186,13 +188,19 @@ class TaskProcessorTest extends Specification { when: def list 
= [ FileHolder.get(path1, 'x_file_1') ] - def result = processor.singleItemOrList(list, ScriptType.SCRIPTLET) + def result = processor.singleItemOrList(list, true, ScriptType.SCRIPTLET) then: result.toString() == 'x_file_1' + when: + list = [ FileHolder.get(path1, 'x_file_1') ] + result = processor.singleItemOrList(list, false, ScriptType.SCRIPTLET) + then: + result*.toString() == ['x_file_1'] + when: list = [ FileHolder.get(path1, 'x_file_1'), FileHolder.get(path2, 'x_file_2'), FileHolder.get(path3, 'x_file_3') ] - result = processor.singleItemOrList(list, ScriptType.SCRIPTLET) + result = processor.singleItemOrList(list, false, ScriptType.SCRIPTLET) then: result*.toString() == [ 'x_file_1', 'x_file_2', 'x_file_3'] @@ -964,4 +972,172 @@ class TaskProcessorTest extends Specification { result.config.getCpus() == 10 result.config.getMemory() == MemoryUnit.of('100 GB') } + + @Unroll + def 'should validate inputs arity' () { + given: + def executor = Mock(Executor) + def session = Mock(Session) {getFilePorter()>>Mock(FilePorter) } + def processor = Spy(new TaskProcessor(session:session, executor:executor)) + and: + def context = new TaskContext(holder: new HashMap()) + def task = new TaskRun( + name: 'foo', + type: ScriptType.SCRIPTLET, + context: context, + config: new TaskConfig()) + + when: + def param = new FileInParam(new Binding(), []) + .setPathQualifier(true) + .bind(FILE_NAME) as FileInParam + if( ARITY ) + param.setArity(ARITY) + + processor.makeTaskContextStage2(task, [(param):FILE_VALUE], 0 ) + then: + context.get(FILE_NAME) == EXPECTED + + where: + FILE_NAME | FILE_VALUE | ARITY | EXPECTED + 'file.txt' | '/some/file.txt' | null | Path.of('/some/file.txt') + 'file.*' | '/some/file.txt' | null | Path.of('/some/file.txt') + 'file.*' | ['/some/file1.txt','/some/file2.txt'] | null | [Path.of('/some/file1.txt'), Path.of('/some/file2.txt')] + '*' | ['/some/file1.txt','/some/file2.txt'] | null | [Path.of('/some/file1.txt'), Path.of('/some/file2.txt')] + '*' | [] | 
null | [] + + and: + 'file.txt' | '/some/file.txt' | '1' | Path.of('/some/file.txt') + 'f*' | '/some/file.txt' | '1' | Path.of('/some/file.txt') + 'f*' | '/some/file.txt' | '1..2' | [Path.of('/some/file.txt')] + 'f*' | '/some/file.txt' | '1..*' | [Path.of('/some/file.txt')] + 'f*' | '/some/file.txt' | '1..*' | [Path.of('/some/file.txt')] + 'f*' | ['/some/file.txt'] | '1..*' | [Path.of('/some/file.txt')] + 'f*' | ['/some/file1.txt', '/some/file2.txt'] | '1..*' | [Path.of('/some/file1.txt'), Path.of('/some/file2.txt')] + } + + def 'should throw an arity error' () { + given: + def executor = Mock(Executor) + def session = Mock(Session) {getFilePorter()>>Mock(FilePorter) } + def processor = Spy(new TaskProcessor(session:session, executor:executor)) + and: + def context = new TaskContext(holder: new HashMap()) + def task = new TaskRun( + name: 'foo', + type: ScriptType.SCRIPTLET, + context: context, + config: new TaskConfig()) + + when: + def param = new FileInParam(new Binding(), []) + .setPathQualifier(true) + .bind(FILE_NAME) as FileInParam + if( ARITY ) + param.setArity(ARITY) + + processor.makeTaskContextStage2(task, [(param):FILE_VALUE], 0 ) + then: + def e = thrown(IllegalArityException) + e.message == ERROR + + where: + FILE_NAME | FILE_VALUE | ARITY | ERROR + 'file.txt' | [] | '0' | 'Path arity max value must be greater or equals to 1' + 'file.txt' | [] | '1' | 'Incorrect number of input files for process `foo` -- expected 1, found 0' + 'f*' | [] | '1..*' | 'Incorrect number of input files for process `foo` -- expected 1..*, found 0' + 'f*' | '/some/file.txt' | '2..*' | 'Incorrect number of input files for process `foo` -- expected 2..*, found 1' + 'f*' | ['/some/file.txt'] | '2..*' | 'Incorrect number of input files for process `foo` -- expected 2..*, found 1' + 'f*' | ['/a','/b'] | '3' | 'Incorrect number of input files for process `foo` -- expected 3, found 2' + } + + def 'should validate collect output files' () { + given: + def executor = Mock(Executor) + 
def session = Mock(Session) {getFilePorter()>>Mock(FilePorter) } + def processor = Spy(new TaskProcessor(session:session, executor:executor)) + and: + def context = new TaskContext(holder: new HashMap()) + def task = new TaskRun( + name: 'foo', + type: ScriptType.SCRIPTLET, + context: context, + config: new TaskConfig()) + and: + def workDir = Path.of('/work') + + when: + def param = new FileOutParam(new Binding(), []) + .setPathQualifier(true) + .optional(OPTIONAL) + .bind(FILE_NAME) as FileOutParam + if( ARITY ) + param.setArity(ARITY) + and: + processor.collectOutFiles(task, param, workDir, context) + then: + processor.fetchResultFiles(_,_,_) >> RESULTS + processor.checkFileExists(_,_) >> EXISTS + and: + task.getOutputs().get(param) == EXPECTED + + where: + FILE_NAME | RESULTS | EXISTS | OPTIONAL | ARITY | EXPECTED + 'file.txt' | null | true | false | null | Path.of('/work/file.txt') + '*' | [Path.of('/work/file.txt')] | true | false | null | Path.of('/work/file.txt') + '*' | [Path.of('/work/A'), Path.of('/work/B')] | true | false | null | [Path.of('/work/A'), Path.of('/work/B')] + '*' | [] | true | true | null | [] + and: + 'file.txt' | null | true | false | '1' | Path.of('/work/file.txt') + '*' | [Path.of('/work/file.txt')] | true | false | '1' | Path.of('/work/file.txt') + '*' | [Path.of('/work/file.txt')] | true | false | '1..*' | [Path.of('/work/file.txt')] + '*' | [Path.of('/work/A'), Path.of('/work/B')] | true | false | '2' | [Path.of('/work/A'), Path.of('/work/B')] + '*' | [Path.of('/work/A'), Path.of('/work/B')] | true | false | '1..*' | [Path.of('/work/A'), Path.of('/work/B')] + '*' | [] | true | false | '0..*' | [] + } + + @Unroll + def 'should report output error' () { + given: + def executor = Mock(Executor) + def session = Mock(Session) {getFilePorter()>>Mock(FilePorter) } + def processor = Spy(new TaskProcessor(session:session, executor:executor)) + and: + def context = new TaskContext(holder: new HashMap()) + def task = new TaskRun( + name: 
'foo', + type: ScriptType.SCRIPTLET, + context: context, + config: new TaskConfig()) + and: + def workDir = Path.of('/work') + + when: + def param = new FileOutParam(new Binding(), []) + .setPathQualifier(true) + .optional(OPTIONAL) + .bind(FILE_NAME) as FileOutParam + if( ARITY ) + param.setArity(ARITY) + and: + processor.collectOutFiles(task, param, workDir, context) + then: + processor.fetchResultFiles(_,_,_) >> RESULTS + processor.checkFileExists(_,_) >> EXISTS + and: + def e = thrown(EXCEPTION) + e.message == ERROR + + where: + FILE_NAME | RESULTS | EXISTS | OPTIONAL | ARITY | EXCEPTION | ERROR + 'file.txt' | null | false | false | null | MissingFileException | "Missing output file(s) `file.txt` expected by process `foo`" + '*' | [] | true | false | null | MissingFileException | "Missing output file(s) `*` expected by process `foo`" + and: + 'file.txt' | null | true | false | '2' | IllegalArityException | "Incorrect number of output files for process `foo` -- expected 2, found 1" + '*' | [Path.of('/work/file.txt')] | true | false | '2' | IllegalArityException | "Incorrect number of output files for process `foo` -- expected 2, found 1" + '*' | [Path.of('/work/file.txt')] | true | false | '2..*' | IllegalArityException | "Incorrect number of output files for process `foo` -- expected 2..*, found 1" + '*' | [] | true | true | '1..*' | IllegalArityException | "Incorrect number of output files for process `foo` -- expected 1..*, found 0" + + } + } diff --git a/modules/nextflow/src/test/groovy/nextflow/script/params/ArityParamTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/params/ArityParamTest.groovy new file mode 100644 index 0000000000..bd4f50ee29 --- /dev/null +++ b/modules/nextflow/src/test/groovy/nextflow/script/params/ArityParamTest.groovy @@ -0,0 +1,64 @@ +/* + * Copyright 2023, Seqera Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nextflow.script.params + +import spock.lang.Specification +import spock.lang.Unroll +/** + * + * @author Ben Sherman + */ +class ArityParamTest extends Specification { + + static class DefaultArityParam implements ArityParam { + DefaultArityParam() {} + } + + @Unroll + def testArity () { + + when: + def param = new DefaultArityParam() + param.setArity(VALUE) + then: + param.arity.min == MIN + param.arity.max == MAX + param.isSingle() == SINGLE + + where: + VALUE | SINGLE | MIN | MAX + '1' | true | 1 | 1 + '1..*' | false | 1 | Integer.MAX_VALUE + '0..*' | false | 0 | Integer.MAX_VALUE + } + + @Unroll + def testArityRange () { + + when: + def range = new ArityParam.Range(MIN, MAX) + then: + range.contains(2) == TWO + range.toString() == STRING + + where: + MIN | MAX | TWO | STRING + 1 | 1 | false | '1' + 1 | Integer.MAX_VALUE | true | '1..*' + } + +} diff --git a/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy index 60dba152bc..3312a37a3c 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsInTest.groovy @@ -707,12 +707,12 @@ class ParamsInTest extends Dsl2Spec { process hola { input: - path x - path f1 - path '*.fa' + path x, arity: '1' + path f1, arity: '1..2' + path '*.fa', arity: '1..*' path 'file.txt' path f2, name: '*.fa' - path f3, stageAs: '*.txt' + path f3, stageAs: '*.txt' return '' } @@ -738,18 +738,21 @@ class 
ParamsInTest extends Dsl2Spec { in0.inChannel.val == FILE in0.index == 0 in0.isPathQualifier() + in0.arity == new ArityParam.Range(1, 1) in1.name == 'f1' in1.filePattern == '*' in1.inChannel.val == FILE in1.index == 1 in1.isPathQualifier() + in1.arity == new ArityParam.Range(1, 2) in2.name == '*.fa' in2.filePattern == '*.fa' in2.inChannel.val == FILE in2.index == 2 in2.isPathQualifier() + in2.arity == new ArityParam.Range(1, Integer.MAX_VALUE) in3.name == 'file.txt' in3.filePattern == 'file.txt' diff --git a/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsOutTest.groovy b/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsOutTest.groovy index 1c46c898ba..d19820f4f6 100644 --- a/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsOutTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/script/params/ParamsOutTest.groovy @@ -931,7 +931,8 @@ class ParamsOutTest extends Dsl2Spec { separatorChar: '#', glob: false, optional: false, - includeInputs: false + includeInputs: false, + arity: '1' path y, maxDepth:5, @@ -941,7 +942,8 @@ class ParamsOutTest extends Dsl2Spec { separatorChar: ':', glob: true, optional: true, - includeInputs: true + includeInputs: true, + arity: '0..*' return '' } @@ -963,6 +965,7 @@ class ParamsOutTest extends Dsl2Spec { !out0.getGlob() !out0.getOptional() !out0.getIncludeInputs() + out0.getArity() == new ArityParam.Range(1, 1) and: out1.getMaxDepth() == 5 @@ -973,6 +976,7 @@ class ParamsOutTest extends Dsl2Spec { out1.getGlob() out1.getOptional() out1.getIncludeInputs() + out1.getArity() == new ArityParam.Range(0, Integer.MAX_VALUE) } def 'should set file options' () { diff --git a/tests/checks/process-arity.nf/.checks b/tests/checks/process-arity.nf/.checks new file mode 100644 index 0000000000..66e9a3e265 --- /dev/null +++ b/tests/checks/process-arity.nf/.checks @@ -0,0 +1,16 @@ +set +e + +# +# run normal mode +# +echo '' +$NXF_RUN +[[ $? 
== 0 ]] || false + + +# +# RESUME mode +# +echo '' +$NXF_RUN -resume +[[ $? == 0 ]] || false \ No newline at end of file diff --git a/tests/process-arity.nf b/tests/process-arity.nf new file mode 100644 index 0000000000..8a750745f0 --- /dev/null +++ b/tests/process-arity.nf @@ -0,0 +1,34 @@ +#!/usr/bin/env nextflow + +process foo { + output: + path('one.txt', arity: '1') + path('pair_*.txt', arity: '2') + path('many_*.txt', arity: '1..*') + script: + """ + echo 'one' > one.txt + echo 'pair_1' > pair_1.txt + echo 'pair_2' > pair_2.txt + echo 'many_1' > many_1.txt + echo 'many_2' > many_2.txt + echo 'many_3' > many_3.txt + """ +} + +process bar { + input: + path('one.txt', arity: '1') + path('pair_*.txt', arity: '2') + path('many_*.txt', arity: '1..*') + script: + """ + cat one.txt + cat pair_*.txt + cat many_*.txt + """ +} + +workflow { + foo | bar +} From 7e5d414e81322a5cb1f7e8f0bf2f0a7dc69b1ad0 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 00:04:49 +0200 Subject: [PATCH 120/128] Bump nf-amazon@2.1.2 Signed-off-by: Paolo Di Tommaso --- plugins/nf-amazon/changelog.txt | 5 +++++ plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/plugins/nf-amazon/changelog.txt b/plugins/nf-amazon/changelog.txt index 81af4cb0aa..3fd772a31b 100644 --- a/plugins/nf-amazon/changelog.txt +++ b/plugins/nf-amazon/changelog.txt @@ -1,5 +1,10 @@ nf-amazon changelog =================== +2.1.2 - 10 Sep +- Disable staging script for remote work dir (#4282) [80f7cd46] +- Allow setting shell directive when using the trace file. 
(#4210) [7d6ad624] +- Bump groovy 3.0.19 [cb411208] + 2.1.1 - 5 Aug 2023 - Fix glob resolution for remove files [19a72c40] - Fix Option fixOwnership traverse parent directories [f2a2ea35] diff --git a/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF b/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF index d7b0cf833b..6ae09958e7 100644 --- a/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-amazon/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: nextflow.cloud.aws.AmazonPlugin Plugin-Id: nf-amazon -Plugin-Version: 2.1.1 +Plugin-Version: 2.1.2 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 83410f39a35d79d079f32cd5a2e4d7656123245c Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 00:05:18 +0200 Subject: [PATCH 121/128] Bump nf-azure@1.3.1 Signed-off-by: Paolo Di Tommaso --- plugins/nf-azure/changelog.txt | 6 ++++++ plugins/nf-azure/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/plugins/nf-azure/changelog.txt b/plugins/nf-azure/changelog.txt index 808543c7da..73e9ea7cc8 100644 --- a/plugins/nf-azure/changelog.txt +++ b/plugins/nf-azure/changelog.txt @@ -1,5 +1,11 @@ nf-azure changelog =================== +1.3.1 - 10 Sep 2023 +- Disable staging script for remote work dir (#4282) [80f7cd46] +- Fix IOException should be thrown when failing to create Azure directory [b0bdfd79] +- Fix security deps in nf-azure plugin [c30d5211] +- Bump groovy 3.0.19 [cb411208] + 1.3.0 - 17 Aug 2023 - Add resource labels support for Azure Batch (#4178) [7b5e50a1] - Fix typos in source code comments (#4173) [ci fast] [e78bc37e] diff --git a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF index 48a1d75d94..73da661c8f 100644 --- a/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-azure/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 
Plugin-Class: nextflow.cloud.azure.AzurePlugin Plugin-Id: nf-azure -Plugin-Version: 1.3.0 +Plugin-Version: 1.3.1 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.01.0-edge From a9f6dd6559a436c585cd568d5565ba9200630756 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 00:05:42 +0200 Subject: [PATCH 122/128] Bump nf-wave@0.12.0 Signed-off-by: Paolo Di Tommaso --- plugins/nf-wave/changelog.txt | 10 ++++++++++ plugins/nf-wave/src/resources/META-INF/MANIFEST.MF | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/plugins/nf-wave/changelog.txt b/plugins/nf-wave/changelog.txt index 5d0b016b7c..10992dcad5 100644 --- a/plugins/nf-wave/changelog.txt +++ b/plugins/nf-wave/changelog.txt @@ -1,5 +1,15 @@ nf-wave changelog ================== +0.12.0 - 10 Sep 2023 +- Add support for Spack to Singularity builds [23c4ec1d] +- Add inspect command (#4069) [090c31ce] +- Add support for Wave native build for singularity [8a434893] +- Fix Wave build when Conda package name is quoted [d19cb0b7] +- Fix Wave build for Singularity files [a60ef72b] +- Improve Wave handing of Conda envs [736ab9bb] +- Deprecated Wave report feature [80c5cb27] +- Bump groovy 3.0.19 [cb411208] + 0.11.2 - 17 Aug 2023 - Use root user in Wave container based on micromamba (#4038) [ci fast] [a3a75ea2] - Add 429 http status code to Wave retriable errors [8eb5f305] diff --git a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF index 143419776a..1ac36f6aac 100644 --- a/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF +++ b/plugins/nf-wave/src/resources/META-INF/MANIFEST.MF @@ -1,6 +1,6 @@ Manifest-Version: 1.0 Plugin-Class: io.seqera.wave.plugin.WavePlugin Plugin-Id: nf-wave -Plugin-Version: 0.11.2 +Plugin-Version: 0.12.0 Plugin-Provider: Seqera Labs Plugin-Requires: >=23.05.0-edge From 2a7bd21152e273ca2eaffe9377418a24421c3944 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 00:09:48 +0200 Subject: 
[PATCH 123/128] Update changelog Signed-off-by: Paolo Di Tommaso --- changelog.txt | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/changelog.txt b/changelog.txt index 359db4221b..dfd2d910fc 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,5 +1,53 @@ NEXTFLOW CHANGE-LOG =================== +23.09.0-edge - 10 Sep 2023 +- Add check for latest version (#4194) [3e8cd488] +- Add inspect command (#4069) [090c31ce] +- Add maxSubmitAwait (#3736) [5686bf1d] +- Add scripts logging for troubleshooting [c056a74e] +- Add support for Spack to Singularity builds [23c4ec1d] +- Add support for Wave native build for Singularity [8a434893] +- Add support for inputs and outputs arity [42504d3c] +- Add support for remote debug (#4266) [87e0648a] +- Add warning about using clusterOptions with process directives (#4248) [a68c1a3e] +- Add which to dockerfile build [817aa05b] +- Align Singularity experience with Docker (#4230) [a1e1d3ca] [c1cc60dd] +- Allow setting shell directive when using the trace file (#4210) [7d6ad624] +- Always emit publish event for cached task outputs (#4227) [62686ce8] +- Deprecated Wave report feature [80c5cb27] +- Disable staging script for remote work dir (#4282) [80f7cd46] +- Disable version check on CI tests [db79e13f] +- Docs improvement to clarify the usage of the bin dir (#4121) [2daa4172] +- Document API differences of process path inputs (#4189) [c37e00bc] +- Document use of local variables in closures (#4251) [05ff784a] +- Fix IOException should be thrown when failing to creare Azure directory [b0bdfd79] +- Fix Parallel execution of Conda corrupts data and packages (#4253) [976c282c] +- Fix Parse negative CLI params as numbers (#4238) [1ae70d5d] +- Fix Too long Http connection pool timeout [fa961e7f] +- Fix Wave build for Singularity files [a60ef72b] +- Fix Wave build when Conda package name is quoted [d19cb0b7] +- Fix failing test [2785ffe9] +- Fix fs cp command with remote file [366eedec] +- Fix 
printf command with negative exit code (#4213) [465468b0] +- Fix security deps in nf-azure plugin [c30d5211] +- Fix setting `executor.queueSize = 0` is ignored (#4228) [6664b578] +- Improve Wave handing of Conda envs [736ab9bb] +- Improve pod options documentation (#4274) [c3aa26e1] +- Make TraceRecord@store field public accessible [07582f0b] +- Remove -dsl1 and -dsl1 rub cli options [b6721b71] +- Remove experimental -dockerize option /2 [7def5542] +- Remove experimental -dockerize option [937c8fb7] +- Report an error on duplicate workflow name definitions (#4088) [fce9702e] +- Undocument internal NXF_DEBUG variable [7955db8d] +- Update AZ File share doc (#4235) [69d317b6] +- Update docs about splitCsv() operator (#4163) [1dfb621c] +- Update documentation of NXF_DEBUG (#4187) [a88a4245] +- Use sipHash24 in place of deprecated murmur32 for script aliasing [bb96763f] +- Bump groovy 3.0.19 [cb411208] +- Bump nf-amazon@2.1.2 [7e5d414e] +- Bump nf-azure@1.3.1 [83410f39] +- Bump nf-wave@0.12.0 [a9f6dd65] + 23.08.1-edge - 17 Aug 2023 - Add 429 http status code to Wave retriable errors [8eb5f305] - Add resource labels support for Azure Batch (#4178) [7b5e50a1] From f37d51baa80979f359c9dbd87be9564965dcd035 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 00:10:34 +0200 Subject: [PATCH 124/128] [release 23.09.0-edge] Update timestamp and build number Signed-off-by: Paolo Di Tommaso --- VERSION | 2 +- docs/conf.py | 4 ++-- .../nextflow/src/main/resources/META-INF/plugins-info.txt | 6 +++--- modules/nf-commons/src/main/nextflow/Const.groovy | 6 +++--- nextflow | 2 +- nextflow.md5 | 2 +- nextflow.sha1 | 2 +- nextflow.sha256 | 2 +- 8 files changed, 13 insertions(+), 13 deletions(-) diff --git a/VERSION b/VERSION index ed8e6580ae..8aa01d92a9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -23.08.1-edge +23.09.0-edge diff --git a/docs/conf.py b/docs/conf.py index dd9c2a42f6..659d8609d2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -58,9 +58,9 @@ # built 
documents. # # The short X.Y version. -version = '23.08' +version = '23.09' # The full version, including alpha/beta/rc tags. -release = '23.08.1-edge' +release = '23.09.0-edge' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt index 60364143e3..eaad0fec0b 100644 --- a/modules/nextflow/src/main/resources/META-INF/plugins-info.txt +++ b/modules/nextflow/src/main/resources/META-INF/plugins-info.txt @@ -1,9 +1,9 @@ -nf-amazon@2.1.1 -nf-azure@1.3.0 +nf-amazon@2.1.2 +nf-azure@1.3.1 nf-cloudcache@0.2.0 nf-codecommit@0.1.5 nf-console@1.0.6 nf-ga4gh@1.1.0 nf-google@1.8.1 nf-tower@1.6.1 -nf-wave@0.11.2 \ No newline at end of file +nf-wave@0.12.0 \ No newline at end of file diff --git a/modules/nf-commons/src/main/nextflow/Const.groovy b/modules/nf-commons/src/main/nextflow/Const.groovy index 38687fde5b..d593e5084f 100644 --- a/modules/nf-commons/src/main/nextflow/Const.groovy +++ b/modules/nf-commons/src/main/nextflow/Const.groovy @@ -52,17 +52,17 @@ class Const { /** * The application version */ - static public final String APP_VER = "23.08.1-edge" + static public final String APP_VER = "23.09.0-edge" /** * The app build time as linux/unix timestamp */ - static public final long APP_TIMESTAMP = 1692284319999 + static public final long APP_TIMESTAMP = 1694383263606 /** * The app build number */ - static public final int APP_BUILDNUM = 5874 + static public final int APP_BUILDNUM = 5879 /** * The app build time string relative to UTC timezone diff --git a/nextflow b/nextflow index e296c43fbe..53609cc4c7 100755 --- a/nextflow +++ b/nextflow @@ -15,7 +15,7 @@ # limitations under the License. 
[[ "$NXF_DEBUG" == 'x' ]] && set -x -NXF_VER=${NXF_VER:-'23.08.1-edge'} +NXF_VER=${NXF_VER:-'23.09.0-edge'} NXF_ORG=${NXF_ORG:-'nextflow-io'} NXF_HOME=${NXF_HOME:-$HOME/.nextflow} NXF_PROT=${NXF_PROT:-'https'} diff --git a/nextflow.md5 b/nextflow.md5 index 5aa63f083a..cd4dff1322 100644 --- a/nextflow.md5 +++ b/nextflow.md5 @@ -1 +1 @@ -74e5cedd3fd06021c698fab73cf5f03e +8871d71a561d214566dbc8e33300c8c6 diff --git a/nextflow.sha1 b/nextflow.sha1 index d24bb70c95..bb6635b8f2 100644 --- a/nextflow.sha1 +++ b/nextflow.sha1 @@ -1 +1 @@ -fbaa08de20ccc1743711f9d58fc2ada055d79f01 +8fae253e69459149b039e57ca69f0b5ceb2ec5cf diff --git a/nextflow.sha256 b/nextflow.sha256 index 144ff2727b..f297c7d37e 100644 --- a/nextflow.sha256 +++ b/nextflow.sha256 @@ -1 +1 @@ -2b099e5febcc8236308be215936af5ac0af6d9c007e329247f25d4bd9a7cf444 +41d0d8a76a49c75f91d4d244ab64bc2e2f96f768828cbdcddd29d527e95bbb99 From 9f9edcdc4b80e1316ed3fb164ef9f350fd641c9a Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 11:36:25 +0200 Subject: [PATCH 125/128] Revert "Allow setting shell directive when using the trace file. (#4210)" This reverts commit 7d6ad62476bd43d6e98747cabe8bfe181d5da1e9 since it breaks the execution of command scripts that declares a non-Bash shell interpreter. 
--- .../main/groovy/nextflow/executor/BashWrapperBuilder.groovy | 2 +- .../groovy/nextflow/executor/BashWrapperBuilderTest.groovy | 4 ++-- .../nextflow/executor/test-bash-wrapper-with-trace.txt | 2 +- .../cloud/aws/batch/AwsBatchScriptLauncherTest.groovy | 2 +- .../nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy index 61eefeeb20..24b6cbe256 100644 --- a/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy +++ b/modules/nextflow/src/main/groovy/nextflow/executor/BashWrapperBuilder.groovy @@ -469,7 +469,7 @@ class BashWrapperBuilder { final traceWrapper = isTraceRequired() if( traceWrapper ) { // executes the stub which in turn executes the target command - launcher = "${interpreter} ${fileStr(wrapperFile)} nxf_trace" + launcher = "/bin/bash ${fileStr(wrapperFile)} nxf_trace" } else { launcher = "${interpreter} ${fileStr(scriptFile)}" diff --git a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy index 2b4f290e00..89a40ca4ea 100644 --- a/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy +++ b/modules/nextflow/src/test/groovy/nextflow/executor/BashWrapperBuilderTest.groovy @@ -566,14 +566,14 @@ class BashWrapperBuilderTest extends Specification { when: binding = newBashWrapperBuilder(statsEnabled: true).makeBinding() then: - binding.launch_cmd == '/bin/bash -ue /work/dir/.command.run nxf_trace' + binding.launch_cmd == '/bin/bash /work/dir/.command.run nxf_trace' binding.unstage_controls == null binding.containsKey('unstage_controls') when: binding = newBashWrapperBuilder(statsEnabled: true, scratch: true).makeBinding() then: - binding.launch_cmd == '/bin/bash -ue 
/work/dir/.command.run nxf_trace' + binding.launch_cmd == '/bin/bash /work/dir/.command.run nxf_trace' binding.unstage_controls == '''\ cp .command.out /work/dir/.command.out || true cp .command.err /work/dir/.command.err || true diff --git a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt index b032748ccb..0aaf49f410 100644 --- a/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt +++ b/modules/nextflow/src/test/resources/nextflow/executor/test-bash-wrapper-with-trace.txt @@ -281,7 +281,7 @@ on_term() { } nxf_launch() { - /bin/bash -ue {{folder}}/.command.run nxf_trace + /bin/bash {{folder}}/.command.run nxf_trace } nxf_stage() { diff --git a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy index 9ce7ee13d8..4b09a85d46 100644 --- a/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy +++ b/plugins/nf-amazon/src/test/nextflow/cloud/aws/batch/AwsBatchScriptLauncherTest.groovy @@ -235,7 +235,7 @@ class AwsBatchScriptLauncherTest extends Specification { nxf_parallel "${uploads[@]}" '''.stripIndent().leftTrim() - binding.launch_cmd == '/bin/bash -ue .command.run nxf_trace' + binding.launch_cmd == '/bin/bash .command.run nxf_trace' binding.task_env == '' diff --git a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy index 0c710aa320..436fcb2ce9 100644 --- a/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy +++ b/plugins/nf-azure/src/test/nextflow/cloud/azure/batch/AzFileCopyStrategyTest.groovy @@ -386,7 +386,7 @@ class AzFileCopyStrategyTest extends Specification { nxf_parallel "${uploads[@]}" 
'''.stripIndent().leftTrim() - binding.launch_cmd == '/bin/bash -ue .command.run nxf_trace' + binding.launch_cmd == '/bin/bash .command.run nxf_trace' binding.task_env == '''\ export PATH="$PWD/.nextflow-bin:$AZ_BATCH_NODE_SHARED_DIR/bin/:$PATH" From 052b51bd384abe05ae95cf95cdff5c64168a577a Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 12:11:52 +0200 Subject: [PATCH 126/128] Update changelog Signed-off-by: Paolo Di Tommaso --- changelog.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog.txt b/changelog.txt index dfd2d910fc..ef5d881bb4 100644 --- a/changelog.txt +++ b/changelog.txt @@ -1,5 +1,8 @@ NEXTFLOW CHANGE-LOG =================== +23.09.1-edge - 11 Sep 2023 +- Revert "Allow setting shell directive when using the trace file (#4210)" [9f9edcdc] + 23.09.0-edge - 10 Sep 2023 - Add check for latest version (#4194) [3e8cd488] - Add inspect command (#4069) [090c31ce] From 79c677a12d82400444fb6221219ce92b7247dcf2 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 12:12:22 +0200 Subject: [PATCH 127/128] [release 23.09.1-edge] [ci fast] Update timestamp and build number Signed-off-by: Paolo Di Tommaso --- VERSION | 2 +- docs/conf.py | 2 +- launch.sh | 6 ++++++ modules/nf-commons/src/main/nextflow/Const.groovy | 6 +++--- nextflow | 2 +- nextflow.md5 | 2 +- nextflow.sha1 | 2 +- nextflow.sha256 | 2 +- 8 files changed, 15 insertions(+), 9 deletions(-) diff --git a/VERSION b/VERSION index 8aa01d92a9..4a719b97aa 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -23.09.0-edge +23.09.1-edge diff --git a/docs/conf.py b/docs/conf.py index 659d8609d2..b9e2ae98df 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,7 @@ # The short X.Y version. version = '23.09' # The full version, including alpha/beta/rc tags. -release = '23.09.0-edge' +release = '23.09.1-edge' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/launch.sh b/launch.sh index 41e5103c40..5bc03f3229 100755 --- a/launch.sh +++ b/launch.sh @@ -118,6 +118,12 @@ while [ "$*" != "" ]; do elif [ "$1" == '-remote-debug' ]; then DEBUG="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=$NXF_REMOTE_DEBUG_PORT" args+=("$1") + elif [ "$1" == '-enable-checkpoint' ]; then + mkdir -p crac-files + JVM_ARGS+=" -XX:CRaCCheckpointTo=$PWD/crac-files" + elif [ "$1" == '-checkpoint' ]; then + jcmd $CLASSPATH JDK.checkpoint + exit 0 else args+=("$1") fi diff --git a/modules/nf-commons/src/main/nextflow/Const.groovy b/modules/nf-commons/src/main/nextflow/Const.groovy index d593e5084f..2fc8eec099 100644 --- a/modules/nf-commons/src/main/nextflow/Const.groovy +++ b/modules/nf-commons/src/main/nextflow/Const.groovy @@ -52,17 +52,17 @@ class Const { /** * The application version */ - static public final String APP_VER = "23.09.0-edge" + static public final String APP_VER = "23.09.1-edge" /** * The app build time as linux/unix timestamp */ - static public final long APP_TIMESTAMP = 1694383263606 + static public final long APP_TIMESTAMP = 1694426936712 /** * The app build number */ - static public final int APP_BUILDNUM = 5879 + static public final int APP_BUILDNUM = 5882 /** * The app build time string relative to UTC timezone diff --git a/nextflow b/nextflow index 53609cc4c7..c3145509a6 100755 --- a/nextflow +++ b/nextflow @@ -15,7 +15,7 @@ # limitations under the License. 
[[ "$NXF_DEBUG" == 'x' ]] && set -x -NXF_VER=${NXF_VER:-'23.09.0-edge'} +NXF_VER=${NXF_VER:-'23.09.1-edge'} NXF_ORG=${NXF_ORG:-'nextflow-io'} NXF_HOME=${NXF_HOME:-$HOME/.nextflow} NXF_PROT=${NXF_PROT:-'https'} diff --git a/nextflow.md5 b/nextflow.md5 index cd4dff1322..4046935306 100644 --- a/nextflow.md5 +++ b/nextflow.md5 @@ -1 +1 @@ -8871d71a561d214566dbc8e33300c8c6 +c6690f184ec1e95646b6a4d60bb5442a diff --git a/nextflow.sha1 b/nextflow.sha1 index bb6635b8f2..bc00d398a1 100644 --- a/nextflow.sha1 +++ b/nextflow.sha1 @@ -1 +1 @@ -8fae253e69459149b039e57ca69f0b5ceb2ec5cf +31e0a8e79b45d605459781fe3d31e70590e3f2e1 diff --git a/nextflow.sha256 b/nextflow.sha256 index f297c7d37e..75fc2abc39 100644 --- a/nextflow.sha256 +++ b/nextflow.sha256 @@ -1 +1 @@ -41d0d8a76a49c75f91d4d244ab64bc2e2f96f768828cbdcddd29d527e95bbb99 +dd82066091517be8562f0fa9af26c272db00b14ce07145d202f1d6619d3e3a7f From 89ede15c71e79858560f93df71064b74d2da2cf4 Mon Sep 17 00:00:00 2001 From: Paolo Di Tommaso Date: Mon, 11 Sep 2023 21:59:39 +0200 Subject: [PATCH 128/128] Update docs Signed-off-by: Paolo Di Tommaso --- docs/wave.md | 42 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/docs/wave.md b/docs/wave.md index 8eec01b3d5..e95a88cb29 100644 --- a/docs/wave.md +++ b/docs/wave.md @@ -43,9 +43,11 @@ The Tower access token is not mandatory, but it is recommended in order to acces ## Use cases +(wave-authenticate-private-repos)= + ### Authenticate private repositories -Wave allows the use of private repositories in your Nextflow pipelines. The repository access keys must be provided in the form of [Nextflow Tower credentials](https://help.tower.nf/22.2/credentials/overview/). +Wave allows the use of private repositories in your Nextflow pipelines. The repository access keys must be provided in the form of [Tower credentials](https://help.tower.nf/latest/credentials/overview/). 
Once the credentials have been created, simply specify your [Tower account access token](https://help.tower.nf/22.2/api/overview/#authentication) in your pipeline configuration file. If the credentials were created in a Tower organization workspace, specify the workspace ID as well in the config file as shown below: @@ -120,6 +122,36 @@ In order to request the build of containers that are optimised for a specific CP If using a Spack YAML file to provide the required packages, you should avoid editing the following sections, which are already configured by the Wave plugin: `packages`, `config`, `view` and `concretizer` (your edits may be ignored), and `compilers` (your edits will be considered, and may interfere with the setup by the Wave plugin). ::: +### Build Singularity native images + +As of version `23.09.0-edge`, Nextflow can build Singularity native images on-demand either using `Singularityfile`, +Conda packages or Spack packages. The Singularity images are automatically uploaded in an OCI-compliant container registry +of your choice and stored as an [ORAS artefact](https://oras.land/). + +:::{note} +This feature requires a Singularity (or Apptainer) version supporting the pull of images using the `oras:` pseudo-protocol. +::: + +For example, to enable the provisioning of Singularity images in your pipeline use the following configuration snippet: + +``` +singularity.enabled = true +wave.enabled = true +wave.freeze = true +wave.strategy = ['conda'] +wave.build.repository = 'docker.io/user/repo' +``` + +In the above configuration replace `docker.io/user/repo` with a repository of your choice where Singularity image files +should be uploaded. + +:::{note} +When using a private repository, the repository access keys must be provided via the Tower credentials manager (see {ref}`above `). +
+Moreover the access to the repository must be granted in the compute nodes by using the command `singularity remote login `.
+Please see Singularity documentation for further details. +::: + ### Push to a private repository Containers built by Wave are uploaded to the Wave default repository hosted on AWS ECR at `195996028523.dkr.ecr.eu-west-1.amazonaws.com/wave/build`. The images in this repository are automatically deleted 1 week after the date of their push. @@ -154,6 +186,14 @@ The following configuration options are available: `wave.endpoint` : The Wave service endpoint (default: `https://wave.seqera.io`). +`wave.freeze` +: :::{versionadded} 22.09.0-edge + ::: +: When enabling the container freeze mode, Wave will provision a non-ephemeral container image +that will be pushed to a container repository of your choice. It requires the use of the `wave.build.repository` setting. +It is also suggested to specify a custom cache repository via the setting `wave.build.cacheRepository`. Note: when using +container freeze mode, the container repository authentication needs to be managed by the underlying infrastructure. + `wave.build.repository` : The container repository where images built by Wave are uploaded (note: the corresponding credentials must be provided in your Nextflow Tower account).