Merge pull request #614 from nf-core/nf-core-template-merge-2.14.1
Important! Template update for nf-core/tools v2.14.1
jfy133 authored May 10, 2024
Commit 600e458 (2 parents: 8392af9 + 13bcb40)
Showing 34 changed files with 232 additions and 178 deletions.
6 changes: 1 addition & 5 deletions .editorconfig
@@ -28,10 +28,6 @@ indent_style = unset
[/assets/email*]
indent_size = unset

# ignore Readme
[README.md]
indent_style = unset

# ignore python
# ignore python and markdown
[*.{py,md}]
indent_style = unset
2 changes: 1 addition & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
@@ -18,7 +18,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/mag/
- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/mag/tree/master/.github/CONTRIBUTING.md)
- [ ] If necessary, also make a PR on the nf-core/mag _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
- [ ] Make sure your code lints (`nf-core lint`).
- [ ] Ensure the test suite passes (`nf-test test main.nf.test -profile test,docker`).
- [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir <OUTDIR>`).
- [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir <OUTDIR>`).
- [ ] Usage Documentation in `docs/usage.md` is updated.
- [ ] Output Documentation in `docs/output.md` is updated.
10 changes: 5 additions & 5 deletions .github/workflows/awsfulltest.yml
@@ -8,12 +8,12 @@ on:
types: [published]
workflow_dispatch:
jobs:
run-tower:
run-platform:
name: Run AWS full tests
if: github.repository == 'nf-core/mag'
runs-on: ubuntu-latest
steps:
- name: Launch workflow via tower
- name: Launch workflow via Seqera Platform
uses: seqeralabs/action-tower-launch@v2
with:
workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
@@ -30,7 +30,7 @@ jobs:

- uses: actions/upload-artifact@v4
with:
name: Tower debug log file
name: Seqera Platform debug log file
path: |
tower_action_*.log
tower_action_*.json
seqera_platform_action_*.log
seqera_platform_action_*.json
12 changes: 6 additions & 6 deletions .github/workflows/awstest.yml
@@ -5,13 +5,13 @@ name: nf-core AWS test
on:
workflow_dispatch:
jobs:
run-tower:
run-platform:
name: Run AWS tests
if: github.repository == 'nf-core/mag'
runs-on: ubuntu-latest
steps:
# Launch workflow using Tower CLI tool action
- name: Launch workflow via tower
# Launch workflow using Seqera Platform CLI tool action
- name: Launch workflow via Seqera Platform
uses: seqeralabs/action-tower-launch@v2
with:
workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
@@ -27,7 +27,7 @@ jobs:

- uses: actions/upload-artifact@v4
with:
name: Tower debug log file
name: Seqera Platform debug log file
path: |
tower_action_*.log
tower_action_*.json
seqera_platform_action_*.log
seqera_platform_action_*.json
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -33,10 +33,10 @@ jobs:
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Check out pipeline code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4

- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
uses: nf-core/setup-nextflow@v2
with:
version: "${{ matrix.NXF_VER }}"

22 changes: 18 additions & 4 deletions .github/workflows/download_pipeline.yml
@@ -14,6 +14,8 @@ on:
pull_request:
types:
- opened
- edited
- synchronize
branches:
- master
pull_request_target:
@@ -28,11 +30,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
uses: nf-core/setup-nextflow@v2

- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- name: Disk space cleanup
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1

- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
with:
python-version: "3.11"
python-version: "3.12"
architecture: "x64"
- uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7
with:
@@ -65,8 +70,17 @@ jobs:
- name: Inspect download
run: tree ./${{ env.REPOTITLE_LOWERCASE }}

- name: Run the downloaded pipeline
- name: Run the downloaded pipeline (stub)
id: stub_run_pipeline
continue-on-error: true
env:
NXF_SINGULARITY_CACHEDIR: ./
NXF_SINGULARITY_HOME_MOUNT: true
run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
- name: Run the downloaded pipeline (stub run not supported)
id: run_pipeline
if: ${{ job.steps.stub_run_pipeline.status == failure() }}
env:
NXF_SINGULARITY_CACHEDIR: ./
NXF_SINGULARITY_HOME_MOUNT: true
run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test,singularity --outdir ./results
6 changes: 3 additions & 3 deletions .github/workflows/fix-linting.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# Use the @nf-core-bot token to check out so we can push later
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
with:
token: ${{ secrets.nf_core_bot_auth_token }}

@@ -32,9 +32,9 @@ jobs:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}

# Install and run pre-commit
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
with:
python-version: 3.11
python-version: "3.12"

- name: Install pre-commit
run: pip install pre-commit
19 changes: 9 additions & 10 deletions .github/workflows/linting.yml
@@ -14,13 +14,12 @@ jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4

- name: Set up Python 3.11
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- name: Set up Python 3.12
uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
with:
python-version: 3.11
cache: "pip"
python-version: "3.12"

- name: Install pre-commit
run: pip install pre-commit
@@ -32,14 +31,14 @@
runs-on: ubuntu-latest
steps:
- name: Check out pipeline code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4

- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
uses: nf-core/setup-nextflow@v2

- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
with:
python-version: "3.11"
python-version: "3.12"
architecture: "x64"

- name: Install dependencies
@@ -60,7 +59,7 @@

- name: Upload linting log file artifact
if: ${{ always() }}
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
with:
name: linting-logs
path: |
2 changes: 1 addition & 1 deletion .github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
uses: dawidd6/action-download-artifact@f6b0bace624032e30a85a8fd9c1a7f8f611f5737 # v3
uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3
with:
workflow: linting.yml
workflow_conclusion: completed
6 changes: 3 additions & 3 deletions .github/workflows/release-announcements.yml
@@ -12,7 +12,7 @@ jobs:
- name: get topics and convert to hashtags
id: get_topics
run: |
curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ' >> $GITHUB_OUTPUT
echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT
- uses: rzr/fediverse-action@master
with:
@@ -25,13 +25,13 @@ jobs:
Please see the changelog: ${{ github.event.release.html_url }}
${{ steps.get_topics.outputs.GITHUB_OUTPUT }} #nfcore #openscience #nextflow #bioinformatics
${{ steps.get_topics.outputs.topics }} #nfcore #openscience #nextflow #bioinformatics
send-tweet:
runs-on: ubuntu-latest

steps:
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
with:
python-version: "3.10"
- name: Install dependencies
1 change: 1 addition & 0 deletions .nf-core.yml
@@ -1,4 +1,5 @@
repository_type: pipeline
nf_core_version: "2.14.1"

lint:
files_unchanged:
3 changes: 3 additions & 0 deletions .pre-commit-config.yaml
@@ -3,6 +3,9 @@ repos:
rev: "v3.1.0"
hooks:
- id: prettier
additional_dependencies:
- [email protected]

- repo: https://github.com/editorconfig-checker/editorconfig-checker.python
rev: "2.7.3"
hooks:
2 changes: 1 addition & 1 deletion README.md
@@ -13,7 +13,7 @@
[![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/)
[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/)
[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/)
[![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://tower.nf/launch?pipeline=https://github.com/nf-core/mag)
[![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://cloud.seqera.io/launch?pipeline=https://github.com/nf-core/mag)

[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23mag-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/mag)[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core)[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core)[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core)

6 changes: 3 additions & 3 deletions conf/test.config
@@ -20,9 +20,9 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.multirun.csv'
centrifuge_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_cf.tar.gz"
kraken2_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_kraken.tgz"
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.multirun.csv'
centrifuge_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_cf.tar.gz'
kraken2_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_kraken.tgz'
skip_krona = false
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
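
Note: this commit switches the test profiles from hard-coded raw.githubusercontent.com URLs to paths concatenated onto params.pipelines_testdata_base_path. The parameter's default is not shown in this diff; the sketch below is a hedged illustration of how the nf-core template typically declares it in nextflow.config (the exact value is an assumption, not taken from this commit):

params {
    // Assumed template default; not part of this diff
    pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'
}

// With such a default, the input above would resolve to:
// https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.multirun.csv
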
8 changes: 4 additions & 4 deletions conf/test_adapterremoval.config
@@ -20,10 +20,10 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.euk.csv'
centrifuge_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_cf.tar.gz"
kraken2_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_kraken.tgz"
metaeuk_db = "https://github.com/nf-core/test-datasets/raw/modules/data/proteomics/database/yeast_UPS.fasta"
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.euk.csv'
centrifuge_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_cf.tar.gz'
kraken2_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_kraken.tgz'
metaeuk_db = params.pipelines_testdata_base_path + '/modules/data/proteomics/database/yeast_UPS.fasta'
skip_krona = true
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
6 changes: 3 additions & 3 deletions conf/test_ancient_dna.config
@@ -20,9 +20,9 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.csv'
centrifuge_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_cf.tar.gz"
kraken2_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_kraken.tgz"
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.csv'
centrifuge_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_cf.tar.gz'
kraken2_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_kraken.tgz'
skip_krona = true
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
6 changes: 3 additions & 3 deletions conf/test_bbnorm.config
@@ -20,15 +20,15 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.csv'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.csv'
keep_phix = true
skip_clipping = true
skip_prokka = true
skip_prodigal = true
skip_quast = true
skip_binning = true
centrifuge_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_cf.tar.gz"
kraken2_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_kraken.tgz"
centrifuge_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_cf.tar.gz'
kraken2_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_kraken.tgz'
skip_krona = true
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
8 changes: 4 additions & 4 deletions conf/test_binrefinement.config
@@ -20,10 +20,10 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.csv'
assembly_input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/assembly_samplesheet.csv'
centrifuge_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_cf.tar.gz"
kraken2_db = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/test_data/minigut_kraken.tgz"
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.csv'
assembly_input = params.pipelines_testdata_base_path + 'mag/samplesheets/assembly_samplesheet.csv'
centrifuge_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_cf.tar.gz'
kraken2_db = params.pipelines_testdata_base_path + 'mag/test_data/minigut_kraken.tgz'
skip_krona = true
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
2 changes: 1 addition & 1 deletion conf/test_busco_auto.config
@@ -20,7 +20,7 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.csv'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.csv'
skip_spades = true
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
2 changes: 1 addition & 1 deletion conf/test_concoct.config
@@ -21,7 +21,7 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.csv'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.csv'
centrifuge_db = null
kraken2_db = null
skip_krona = true
4 changes: 2 additions & 2 deletions conf/test_host_rm.config
@@ -20,8 +20,8 @@ params {
max_time = '6.h'

// Input data
host_fasta = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/host_reference/genome.hg38.chr21_10000bp_region.fa"
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.host_rm.csv'
host_fasta = params.pipelines_testdata_base_path + 'mag/host_reference/genome.hg38.chr21_10000bp_region.fa'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.host_rm.csv'
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
busco_db = "https://busco-data.ezlab.org/v5/data/lineages/bacteria_odb10.2024-01-08.tar.gz"
2 changes: 1 addition & 1 deletion conf/test_hybrid.config
@@ -20,7 +20,7 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.hybrid.csv'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.hybrid.csv'
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
busco_db = "https://busco-data.ezlab.org/v5/data/lineages/bacteria_odb10.2024-01-08.tar.gz"
4 changes: 2 additions & 2 deletions conf/test_hybrid_host_rm.config
@@ -20,8 +20,8 @@ params {
max_time = '6.h'

// Input data
host_fasta = "https://raw.githubusercontent.com/nf-core/test-datasets/mag/host_reference/genome.hg38.chr21_10000bp_region.fa"
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.hybrid_host_rm.csv'
host_fasta = params.pipelines_testdata_base_path + 'mag/host_reference/genome.hg38.chr21_10000bp_region.fa'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.hybrid_host_rm.csv'
min_length_unbinned_contigs = 1
max_unbinned_contigs = 2
skip_binqc = true
2 changes: 1 addition & 1 deletion conf/test_nothing.config
@@ -21,7 +21,7 @@ params {
max_time = '6.h'

// Input data
input = 'https://raw.githubusercontent.com/nf-core/test-datasets/mag/samplesheets/samplesheet.csv'
input = params.pipelines_testdata_base_path + 'mag/samplesheets/samplesheet.csv'
centrifuge_db = null
kraken2_db = null
skip_krona = true
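
The conf/test_*.config profiles above all follow the same base-path pattern. As a hedged usage sketch (not part of this commit), centralising the base path means a single override can redirect every test profile to a local mirror of nf-core/test-datasets, for example via a custom config supplied with -c:

// local_testdata.config (hypothetical file name), used as:
//   nextflow run nf-core/mag -profile test,docker -c local_testdata.config --outdir results
params {
    // Redirect every pipelines_testdata_base_path concatenation to a local checkout
    pipelines_testdata_base_path = '/data/mirrors/nf-core-test-datasets/'
}
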
(Remaining changed files not shown.)
