diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 4a9bc5c..4ecfbfe 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -18,11 +18,11 @@
"python.linting.flake8Path": "/opt/conda/bin/flake8",
"python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle",
"python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle",
- "python.linting.pylintPath": "/opt/conda/bin/pylint",
+ "python.linting.pylintPath": "/opt/conda/bin/pylint"
},
// Add the IDs of extensions you want installed when the container is created.
- "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"],
- },
- },
+ "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"]
+ }
+ }
}
diff --git a/.editorconfig b/.editorconfig
index b6b3190..9b99008 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -22,3 +22,11 @@ indent_size = unset
[/assets/email*]
indent_size = unset
+
+# ignore Readme
+[README.md]
+indent_style = unset
+
+# ignore python
+[*.{py}]
+indent_style = unset
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml
index bfb336f..5906667 100644
--- a/.github/workflows/awsfulltest.yml
+++ b/.github/workflows/awsfulltest.yml
@@ -31,7 +31,7 @@ jobs:
}
profiles: test_full
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: Tower debug log file
path: |
diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml
index 1e04975..a3d63eb 100644
--- a/.github/workflows/awstest.yml
+++ b/.github/workflows/awstest.yml
@@ -25,7 +25,7 @@ jobs:
}
profiles: test
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: Tower debug log file
path: |
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
index ddc4d5d..5f10613 100644
--- a/.github/workflows/branch.yml
+++ b/.github/workflows/branch.yml
@@ -19,7 +19,7 @@ jobs:
# NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
- name: Post PR comment
if: failure()
- uses: mshick/add-pr-comment@v1
+ uses: mshick/add-pr-comment@v2
with:
message: |
## This PR is against the `master` branch :x:
diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml
index 694e90e..e37cfda 100644
--- a/.github/workflows/clean-up.yml
+++ b/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/stale@v7
+ - uses: actions/stale@v9
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml
new file mode 100644
index 0000000..8a33004
--- /dev/null
+++ b/.github/workflows/download_pipeline.yml
@@ -0,0 +1,67 @@
+name: Test successful pipeline download with 'nf-core download'
+
+# Run the workflow when:
+# - dispatched manually
+#  - a PR is opened or reopened against the master branch
+#  - the head branch of the pull request is updated, i.e. when last-minute release fixes are pushed to dev.
+on:
+ workflow_dispatch:
+ pull_request:
+ types:
+ - opened
+ branches:
+ - master
+ pull_request_target:
+ branches:
+ - master
+
+env:
+ NXF_ANSI_LOG: false
+
+jobs:
+ download:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Install Nextflow
+ uses: nf-core/setup-nextflow@v1
+
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ architecture: "x64"
+ - uses: eWaterCycle/setup-singularity@v7
+ with:
+ singularity-version: 3.8.3
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install git+https://github.com/nf-core/tools.git@dev
+
+ - name: Get the repository name and current branch set as environment variable
+ run: |
+ echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
+ echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
+ echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV}
+
+ - name: Download the pipeline
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./
+ run: |
+ nf-core download ${{ env.REPO_LOWERCASE }} \
+ --revision ${{ env.REPO_BRANCH }} \
+ --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
+ --compress "none" \
+ --container-system 'singularity' \
+ --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
+ --container-cache-utilisation 'amend' \
+ --download-configuration
+
+ - name: Inspect download
+ run: tree ./${{ env.REPOTITLE_LOWERCASE }}
+
+ - name: Run the downloaded pipeline
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./
+ NXF_SINGULARITY_HOME_MOUNT: true
+ run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml
index 10e0494..633ac28 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix-linting.yml
@@ -4,7 +4,7 @@ on:
types: [created]
jobs:
- deploy:
+ fix-linting:
# Only run if comment is on a PR with the main repo, and if it contains the magic keywords
if: >
contains(github.event.comment.html_url, '/pull/') &&
@@ -13,10 +13,17 @@ jobs:
runs-on: ubuntu-latest
steps:
# Use the @nf-core-bot token to check out so we can push later
- - uses: actions/checkout@v4
+ - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
with:
token: ${{ secrets.nf_core_bot_auth_token }}
+ # indication that the linting is being fixed
+ - name: React on comment
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: eyes
+
# Action runs on the issue comment, so we don't get the PR by default
# Use the gh cli to check out the PR
- name: Checkout Pull Request
@@ -24,32 +31,59 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
- - uses: actions/setup-node@v4
+ # Install and run pre-commit
+ - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
+ with:
+ python-version: 3.11
- - name: Install Prettier
- run: npm install -g prettier @prettier/plugin-php
+ - name: Install pre-commit
+ run: pip install pre-commit
- # Check that we actually need to fix something
- - name: Run 'prettier --check'
- id: prettier_status
- run: |
- if prettier --check ${GITHUB_WORKSPACE}; then
- echo "result=pass" >> $GITHUB_OUTPUT
- else
- echo "result=fail" >> $GITHUB_OUTPUT
- fi
+ - name: Run pre-commit
+ id: pre-commit
+ run: pre-commit run --all-files
+ continue-on-error: true
- - name: Run 'prettier --write'
- if: steps.prettier_status.outputs.result == 'fail'
- run: prettier --write ${GITHUB_WORKSPACE}
+ # indication that the linting has finished
+      - name: react if linting finished successfully
+ if: steps.pre-commit.outcome == 'success'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: "+1"
- name: Commit & push changes
- if: steps.prettier_status.outputs.result == 'fail'
+ id: commit-and-push
+ if: steps.pre-commit.outcome == 'failure'
run: |
git config user.email "core@nf-co.re"
git config user.name "nf-core-bot"
git config push.default upstream
git add .
git status
- git commit -m "[automated] Fix linting with Prettier"
+ git commit -m "[automated] Fix code linting"
git push
+
+ - name: react if linting errors were fixed
+ id: react-if-fixed
+ if: steps.commit-and-push.outcome == 'success'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: hooray
+
+ - name: react if linting errors were not fixed
+ if: steps.commit-and-push.outcome == 'failure'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ comment-id: ${{ github.event.comment.id }}
+ reactions: confused
+
+ - name: react if linting errors were not fixed
+ if: steps.commit-and-push.outcome == 'failure'
+ uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ with:
+ issue-number: ${{ github.event.issue.number }}
+ body: |
+ @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually.
+ See [CI log](https://github.com/nf-core/spatialtranscriptomics/actions/runs/${{ github.run_id }}) for more details.
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
index 905c58e..81cd098 100644
--- a/.github/workflows/linting.yml
+++ b/.github/workflows/linting.yml
@@ -11,61 +11,22 @@ on:
types: [published]
jobs:
- EditorConfig:
+ pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-node@v4
-
- - name: Install editorconfig-checker
- run: npm install -g editorconfig-checker
-
- - name: Run ECLint check
- run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
-
- Prettier:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - uses: actions/setup-node@v4
-
- - name: Install Prettier
- run: npm install -g prettier
-
- - name: Run Prettier --check
- run: prettier --check ${GITHUB_WORKSPACE}
-
- PythonBlack:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - name: Check code lints with Black
- uses: psf/black@stable
-
- # If the above check failed, post a comment on the PR explaining the failure
- - name: Post PR comment
- if: failure()
- uses: mshick/add-pr-comment@v1
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
with:
- message: |
- ## Python linting (`black`) is failing
-
- To keep the code consistent with lots of contributors, we run automated code consistency checks.
- To fix this CI test, please run:
-
- * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black`
- * Fix formatting errors in your pipeline: `black .`
-
- Once you push these changes the test should pass, and you can hide this comment :+1:
+ python-version: 3.11
+ cache: "pip"
- We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help!
+ - name: Install pre-commit
+ run: pip install pre-commit
- Thanks again for your contribution!
- repo-token: ${{ secrets.GITHUB_TOKEN }}
- allow-repeats: false
+ - name: Run pre-commit
+ run: pre-commit run --all-files
nf-core:
runs-on: ubuntu-latest
@@ -76,7 +37,7 @@ jobs:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: "3.11"
architecture: "x64"
@@ -99,7 +60,7 @@ jobs:
- name: Upload linting log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: linting-logs
path: |
diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml
index 0bbcd30..147bcd1 100644
--- a/.github/workflows/linting_comment.yml
+++ b/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
- uses: dawidd6/action-download-artifact@v2
+ uses: dawidd6/action-download-artifact@v3
with:
workflow: linting.yml
workflow_conclusion: completed
diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml
index 6ad3392..21ac3f0 100644
--- a/.github/workflows/release-announcements.yml
+++ b/.github/workflows/release-announcements.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/setup-python@v4
+ - uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
@@ -56,7 +56,7 @@ jobs:
bsky-post:
runs-on: ubuntu-latest
steps:
- - uses: zentered/bluesky-post-action@v0.0.2
+ - uses: zentered/bluesky-post-action@v0.1.0
with:
post: |
Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
diff --git a/.gitpod.yml b/.gitpod.yml
index acf7269..363d5b1 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -7,6 +7,7 @@ tasks:
- name: unset JAVA_TOOL_OPTIONS
command: |
unset JAVA_TOOL_OPTIONS
+
vscode:
extensions: # based on nf-core.nf-core-extensionpack
- codezombiech.gitignore # Language support for .gitignore files
diff --git a/.nf-core.yml b/.nf-core.yml
index bc2e3d4..da5010a 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -5,3 +5,6 @@ lint:
- conf/igenomes.config
files_unchanged:
- .gitattributes
+ - assets/nf-core-spatialtranscriptomics_logo_light.png
+ - docs/images/nf-core-spatialtranscriptomics_logo_light.png
+ - docs/images/nf-core-spatialtranscriptomics_logo_dark.png
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0c31cdb..af57081 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,10 @@
repos:
- repo: https://github.com/pre-commit/mirrors-prettier
- rev: "v2.7.1"
+ rev: "v3.1.0"
hooks:
- id: prettier
+ - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
+ rev: "2.7.3"
+ hooks:
+ - id: editorconfig-checker
+ alias: ec
diff --git a/README.md b/README.md
index f8736dd..b12fcd7 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,9 @@
-# ![nf-core/spatialtranscriptomics](docs/images/nf-core-spatialtranscriptomics_logo_light.png#gh-light-mode-only) ![nf-core/spatialtranscriptomics](docs/images/nf-core-spatialtranscriptomics_logo_dark.png#gh-dark-mode-only)
-
+<h1>
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="docs/images/nf-core-spatialtranscriptomics_logo_dark.png">
+    <img alt="nf-core/spatialtranscriptomics" src="docs/images/nf-core-spatialtranscriptomics_logo_light.png">
+  </picture>
+</h1>
[![GitHub Actions CI Status](https://github.com/nf-core/spatialtranscriptomics/workflows/nf-core%20CI/badge.svg)](https://github.com/nf-core/spatialtranscriptomics/actions?query=workflow%3A%22nf-core+CI%22)
[![GitHub Actions Linting Status](https://github.com/nf-core/spatialtranscriptomics/workflows/nf-core%20linting/badge.svg)](https://github.com/nf-core/spatialtranscriptomics/actions?query=workflow%3A%22nf-core+linting%22)[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/spatialtranscriptomics/results)[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX)
diff --git a/assets/email_template.html b/assets/email_template.html
index f3db80e..0a69d64 100644
--- a/assets/email_template.html
+++ b/assets/email_template.html
@@ -12,7 +12,7 @@
-nf-core/spatialtranscriptomics v${version}
+nf-core/spatialtranscriptomics ${version}
Run Name: $runName
<% if (!success){
diff --git a/assets/email_template.txt b/assets/email_template.txt
index e3032db..2a7dd66 100644
--- a/assets/email_template.txt
+++ b/assets/email_template.txt
@@ -4,7 +4,7 @@
|\\ | |__ __ / ` / \\ |__) |__ } {
| \\| | \\__, \\__/ | \\ |___ \\`-._,-`-,
`._,._,'
- nf-core/spatialtranscriptomics v${version}
+ nf-core/spatialtranscriptomics ${version}
----------------------------------------------------
Run Name: $runName
diff --git a/assets/nf-core-spatialtranscriptomics_logo_light.png b/assets/nf-core-spatialtranscriptomics_logo_light.png
index f9cbb84..34da382 100644
Binary files a/assets/nf-core-spatialtranscriptomics_logo_light.png and b/assets/nf-core-spatialtranscriptomics_logo_light.png differ
diff --git a/bin/read_st_data.py b/bin/read_st_data.py
index f83301b..7c542af 100755
--- a/bin/read_st_data.py
+++ b/bin/read_st_data.py
@@ -114,19 +114,26 @@ def read_visium(
for f in files.values():
if not f.exists():
if any(x in str(f) for x in ["hires_image", "lowres_image"]):
- logg.warning(f"You seem to be missing an image file.\n" f"Could not find '{f}'.")
+ logg.warning(
+ f"You seem to be missing an image file.\n"
+ f"Could not find '{f}'."
+ )
else:
raise OSError(f"Could not find '{f}'")
adata.uns["spatial"][library_id]["images"] = dict()
for res in ["hires", "lowres"]:
try:
- adata.uns["spatial"][library_id]["images"][res] = imread(str(files[f"{res}_image"]))
+ adata.uns["spatial"][library_id]["images"][res] = imread(
+ str(files[f"{res}_image"])
+ )
except Exception:
raise OSError(f"Could not find '{res}_image'")
# read json scalefactors
- adata.uns["spatial"][library_id]["scalefactors"] = json.loads(files["scalefactors_json_file"].read_bytes())
+ adata.uns["spatial"][library_id]["scalefactors"] = json.loads(
+ files["scalefactors_json_file"].read_bytes()
+ )
adata.uns["spatial"][library_id]["metadata"] = {
k: (str(attrs[k], "utf-8") if isinstance(attrs[k], bytes) else attrs[k])
@@ -150,7 +157,9 @@ def read_visium(
adata.obs = adata.obs.join(positions, how="left")
- adata.obsm["spatial"] = adata.obs[["pxl_row_in_fullres", "pxl_col_in_fullres"]].to_numpy()
+ adata.obsm["spatial"] = adata.obs[
+ ["pxl_row_in_fullres", "pxl_col_in_fullres"]
+ ].to_numpy()
adata.obs.drop(
columns=["pxl_row_in_fullres", "pxl_col_in_fullres"],
inplace=True,
@@ -160,7 +169,9 @@ def read_visium(
if source_image_path is not None:
# get an absolute path
source_image_path = str(Path(source_image_path).resolve())
- adata.uns["spatial"][library_id]["metadata"]["source_image_path"] = str(source_image_path)
+ adata.uns["spatial"][library_id]["metadata"]["source_image_path"] = str(
+ source_image_path
+ )
return adata
@@ -171,13 +182,28 @@ def read_visium(
description="Load spatial transcriptomics data from MTX matrices and aligned images."
)
parser.add_argument(
- "--SRCountDir", metavar="SRCountDir", type=str, default=None, help="Input directory with Spaceranger data."
+ "--SRCountDir",
+ metavar="SRCountDir",
+ type=str,
+ default=None,
+ help="Input directory with Spaceranger data.",
+ )
+ parser.add_argument(
+ "--outAnnData",
+ metavar="outAnnData",
+ type=str,
+ default=None,
+ help="Output h5ad file path.",
)
- parser.add_argument("--outAnnData", metavar="outAnnData", type=str, default=None, help="Output h5ad file path.")
args = parser.parse_args()
# Read Visium data
- st_adata = read_visium(args.SRCountDir, count_file="raw_feature_bc_matrix.h5", library_id=None, load_images=True)
+ st_adata = read_visium(
+ args.SRCountDir,
+ count_file="raw_feature_bc_matrix.h5",
+ library_id=None,
+ load_images=True,
+ )
# Write raw anndata to file
st_adata.write(args.outAnnData)
diff --git a/bin/st_clustering.qmd b/bin/st_clustering.qmd
index c37fbbe..be8e496 100644
--- a/bin/st_clustering.qmd
+++ b/bin/st_clustering.qmd
@@ -2,9 +2,9 @@
title: "nf-core/spatialtranscriptomics"
subtitle: "Dimensionality reduction and clustering"
format:
- nf-core-html: default
+ nf-core-html: default
execute:
- keep-ipynb: true
+ keep-ipynb: true
jupyter: python3
---
diff --git a/bin/st_quality_controls.qmd b/bin/st_quality_controls.qmd
index e209089..a841d8f 100644
--- a/bin/st_quality_controls.qmd
+++ b/bin/st_quality_controls.qmd
@@ -2,7 +2,7 @@
title: "nf-core/spatialtranscriptomics"
subtitle: "Pre-processing and quality controls"
format:
- nf-core-html: default
+ nf-core-html: default
jupyter: python3
---
@@ -75,7 +75,7 @@ st_adata.var['mt'] = st_adata.var_names.str.startswith('MT-')
st_adata.var['ribo'] = st_adata.var_names.str.contains(("^RP[LS]"))
st_adata.var['hb'] = st_adata.var_names.str.contains(("^HB[AB]"))
sc.pp.calculate_qc_metrics(st_adata, qc_vars=["mt", "ribo", "hb"],
- inplace=True, log1p=False)
+ inplace=True, log1p=False)
# Save a copy of data as a restore-point if filtering results in 0 spots left
st_adata_before_filtering = st_adata.copy()
@@ -90,9 +90,9 @@ mitochondrial, ribosomal and haemoglobin genes:
```{python}
#| layout-nrow: 2
sc.pl.violin(st_adata, ['n_genes_by_counts', 'total_counts'],
- multi_panel=True, jitter=0.4, rotation= 45)
+ multi_panel=True, jitter=0.4, rotation= 45)
sc.pl.violin(st_adata, ['pct_counts_mt', 'pct_counts_ribo', 'pct_counts_hb'],
- multi_panel=True, jitter=0.4, rotation= 45)
+ multi_panel=True, jitter=0.4, rotation= 45)
```
## Spatial distributions
@@ -246,9 +246,9 @@ The final results of all the filtering is as follows:
```{python}
#| layout-nrow: 2
sc.pl.violin(st_adata, ['n_genes_by_counts', 'total_counts'],
- multi_panel=True, jitter=0.4, rotation= 45)
+ multi_panel=True, jitter=0.4, rotation= 45)
sc.pl.violin(st_adata, ['pct_counts_mt', 'pct_counts_ribo', 'pct_counts_hb'],
- multi_panel=True, jitter=0.4, rotation= 45)
+ multi_panel=True, jitter=0.4, rotation= 45)
```
```{python}
diff --git a/bin/st_spatial_de.qmd b/bin/st_spatial_de.qmd
index 82d8e81..0e1e211 100644
--- a/bin/st_spatial_de.qmd
+++ b/bin/st_spatial_de.qmd
@@ -2,7 +2,7 @@
title: "nf-core/spatialtranscriptomics"
subtitle: "Differential gene expression"
format:
- nf-core-html: default
+ nf-core-html: default
jupyter: python3
---
@@ -95,5 +95,5 @@ itself to visualize the patterns:
symbols = results_tab.iloc[: n_top_spatial_degs]["gene_symbol"]
plt.rcParams["figure.figsize"] = (3.5, 4)
sc.pl.spatial(st_adata, img_key="hires", color=symbols.index, alpha=0.7,
- ncols=2, title=symbols, size=1.25)
+ ncols=2, title=symbols, size=1.25)
```
diff --git a/docs/images/nf-core-spatialtranscriptomics_logo_dark.png b/docs/images/nf-core-spatialtranscriptomics_logo_dark.png
index 784abc2..18665a3 100644
Binary files a/docs/images/nf-core-spatialtranscriptomics_logo_dark.png and b/docs/images/nf-core-spatialtranscriptomics_logo_dark.png differ
diff --git a/docs/images/nf-core-spatialtranscriptomics_logo_light.png b/docs/images/nf-core-spatialtranscriptomics_logo_light.png
index f9cbb84..afa6b71 100644
Binary files a/docs/images/nf-core-spatialtranscriptomics_logo_light.png and b/docs/images/nf-core-spatialtranscriptomics_logo_light.png differ
diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy
index 185f861..85469a7 100755
--- a/lib/WorkflowMain.groovy
+++ b/lib/WorkflowMain.groovy
@@ -23,7 +23,7 @@ class WorkflowMain {
//
// Validate parameters and print summary to screen
//
- public static void initialise(workflow, params, log) {
+ public static void initialise(workflow, params, log, args) {
// Print workflow version and exit on --version
if (params.version) {
@@ -34,6 +34,8 @@ class WorkflowMain {
// Check that a -profile or Nextflow config has been provided to run the pipeline
NfcoreTemplate.checkConfigProvided(workflow, log)
+ // Check that the profile doesn't contain spaces and doesn't end with a trailing comma
+ checkProfile(workflow.profile, args, log)
// Check that conda channels are set-up correctly
if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
@@ -48,4 +50,16 @@ class WorkflowMain {
Nextflow.error("Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'")
}
}
+
+ //
+ // Exit pipeline if --profile contains spaces
+ //
+ private static void checkProfile(profile, args, log) {
+ if (profile.endsWith(',')) {
+            Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead, e.g., `-profile docker,test`."
+ }
+ if (args[0]) {
+            log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead, e.g., `-profile docker,test`."
+ }
+ }
}
diff --git a/lib/nfcore_external_java_deps.jar b/lib/nfcore_external_java_deps.jar
deleted file mode 100644
index 805c8bb..0000000
Binary files a/lib/nfcore_external_java_deps.jar and /dev/null differ
diff --git a/main.nf b/main.nf
index bfd223c..ae2df75 100644
--- a/main.nf
+++ b/main.nf
@@ -34,7 +34,7 @@ if (params.validate_params) {
validateParameters()
}
-WorkflowMain.initialise(workflow, params, log)
+WorkflowMain.initialise(workflow, params, log, args)
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/modules.json b/modules.json
index f29c0dc..c114074 100644
--- a/modules.json
+++ b/modules.json
@@ -12,12 +12,12 @@
},
"fastqc": {
"branch": "master",
- "git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9",
+ "git_sha": "f4ae1d942bd50c5c0b9bd2de1393ce38315ba57c",
"installed_by": ["modules"]
},
"multiqc": {
"branch": "master",
- "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
+ "git_sha": "9e71d8519dfbfc328c078bba14d4bd4c99e39a94",
"installed_by": ["modules"]
},
"spaceranger/count": {
diff --git a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
index da03340..4a99360 100755
--- a/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
+++ b/modules/nf-core/custom/dumpsoftwareversions/templates/dumpsoftwareversions.py
@@ -58,7 +58,9 @@ def main():
}
with open("$versions") as f:
- versions_by_process = yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module
+ versions_by_process = (
+ yaml.load(f, Loader=yaml.BaseLoader) | versions_this_module
+ )
# aggregate versions by the module name (derived from fully-qualified process name)
versions_by_module = {}
diff --git a/modules/nf-core/fastqc/tests/main.nf.test b/modules/nf-core/fastqc/tests/main.nf.test
index 1f21c66..70edae4 100644
--- a/modules/nf-core/fastqc/tests/main.nf.test
+++ b/modules/nf-core/fastqc/tests/main.nf.test
@@ -33,7 +33,7 @@ nextflow_process {
{ assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
{ assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("fastqc_versions_single") }
)
}
}
@@ -63,7 +63,7 @@ nextflow_process {
{ assert path(process.out.html[0][1][0]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
{ assert path(process.out.html[0][1][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("fastqc_versions_paired") }
)
}
}
@@ -89,7 +89,7 @@ nextflow_process {
{ assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
{ assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") }
)
}
}
@@ -115,7 +115,7 @@ nextflow_process {
{ assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
{ assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("fastqc_versions_bam") }
)
}
}
@@ -153,7 +153,7 @@ nextflow_process {
{ assert path(process.out.html[0][1][2]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
{ assert path(process.out.html[0][1][3]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("fastqc_versions_multiple") }
)
}
}
@@ -179,7 +179,7 @@ nextflow_process {
{ assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" },
{ assert path(process.out.html[0][1]).text.contains("<tr><td>File type</td><td>Conventional base calls</td></tr>") },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") }
)
}
}
@@ -204,7 +204,7 @@ nextflow_process {
{ assert process.success },
{ assert snapshot(process.out.html.collect { file(it[1]).getName() } +
process.out.zip.collect { file(it[1]).getName() } +
- process.out.versions ).match() }
+ process.out.versions ).match("fastqc_stub") }
)
}
}
diff --git a/modules/nf-core/fastqc/tests/main.nf.test.snap b/modules/nf-core/fastqc/tests/main.nf.test.snap
index 5d624bb..86f7c31 100644
--- a/modules/nf-core/fastqc/tests/main.nf.test.snap
+++ b/modules/nf-core/fastqc/tests/main.nf.test.snap
@@ -1,5 +1,17 @@
{
- "sarscov2 single-end [fastq] - stub": {
+ "fastqc_versions_interleaved": {
+ "content": [
+ [
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:40:07.293713"
+ },
+ "fastqc_stub": {
"content": [
[
"test.html",
@@ -7,14 +19,70 @@
"versions.yml:md5,e1cc25ca8af856014824abd842e93978"
]
],
- "timestamp": "2024-01-17T18:40:57.254299"
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:31:01.425198"
+ },
+ "fastqc_versions_multiple": {
+ "content": [
+ [
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:40:55.797907"
+ },
+ "fastqc_versions_bam": {
+ "content": [
+ [
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:40:26.795862"
+ },
+ "fastqc_versions_single": {
+ "content": [
+ [
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:39:27.043675"
+ },
+ "fastqc_versions_paired": {
+ "content": [
+ [
+ "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
+ ]
+ ],
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:39:47.584191"
},
- "versions": {
+ "fastqc_versions_custom_prefix": {
"content": [
[
"versions.yml:md5,e1cc25ca8af856014824abd842e93978"
]
],
- "timestamp": "2024-01-17T18:36:50.033627"
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:41:14.576531"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/multiqc/tests/main.nf.test b/modules/nf-core/multiqc/tests/main.nf.test
index d0438ed..f1c4242 100644
--- a/modules/nf-core/multiqc/tests/main.nf.test
+++ b/modules/nf-core/multiqc/tests/main.nf.test
@@ -3,6 +3,7 @@ nextflow_process {
name "Test Process MULTIQC"
script "../main.nf"
process "MULTIQC"
+
tag "modules"
tag "modules_nfcore"
tag "multiqc"
@@ -12,7 +13,7 @@ nextflow_process {
when {
process {
"""
- input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
+ input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true))
input[1] = []
input[2] = []
input[3] = []
@@ -25,7 +26,7 @@ nextflow_process {
{ assert process.success },
{ assert process.out.report[0] ==~ ".*/multiqc_report.html" },
{ assert process.out.data[0] ==~ ".*/multiqc_data" },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("multiqc_versions_single") }
)
}
@@ -36,7 +37,7 @@ nextflow_process {
when {
process {
"""
- input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
+ input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true))
input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true))
input[2] = []
input[3] = []
@@ -49,7 +50,7 @@ nextflow_process {
{ assert process.success },
{ assert process.out.report[0] ==~ ".*/multiqc_report.html" },
{ assert process.out.data[0] ==~ ".*/multiqc_data" },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert snapshot(process.out.versions).match("multiqc_versions_config") }
)
}
}
@@ -61,7 +62,7 @@ nextflow_process {
when {
process {
"""
- input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
+ input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true))
input[1] = []
input[2] = []
input[3] = []
@@ -75,7 +76,7 @@ nextflow_process {
{ assert snapshot(process.out.report.collect { file(it).getName() } +
process.out.data.collect { file(it).getName() } +
process.out.plots.collect { file(it).getName() } +
- process.out.versions ).match() }
+ process.out.versions ).match("multiqc_stub") }
)
}
diff --git a/modules/nf-core/multiqc/tests/main.nf.test.snap b/modules/nf-core/multiqc/tests/main.nf.test.snap
index d37e730..549ba79 100644
--- a/modules/nf-core/multiqc/tests/main.nf.test.snap
+++ b/modules/nf-core/multiqc/tests/main.nf.test.snap
@@ -1,13 +1,17 @@
{
- "versions": {
+ "multiqc_versions_single": {
"content": [
[
"versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
]
],
- "timestamp": "2024-01-09T23:02:49.911994"
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:43:40.529579"
},
- "sarscov2 single-end [fastqc] - stub": {
+ "multiqc_stub": {
"content": [
[
"multiqc_report.html",
@@ -16,6 +20,22 @@
"versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
]
],
- "timestamp": "2024-01-09T23:03:14.524346"
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:45:09.605359"
+ },
+ "multiqc_versions_config": {
+ "content": [
+ [
+ "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
+ ]
+ ],
+ "meta": {
+ "nf-test": "0.8.4",
+ "nextflow": "23.10.1"
+ },
+ "timestamp": "2024-01-31T17:44:53.535994"
}
}
\ No newline at end of file
diff --git a/nextflow.config b/nextflow.config
index a8212c9..6d0f929 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -21,13 +21,13 @@ params {
st_qc_min_counts = 500
st_qc_min_genes = 250
st_qc_min_spots = 1
- st_qc_mito_threshold = 20
- st_qc_ribo_threshold = 0
- st_qc_hb_threshold = 100
+ st_qc_mito_threshold = 20.0
+ st_qc_ribo_threshold = 0.0
+ st_qc_hb_threshold = 100.0
// Clustering
st_cluster_n_hvgs = 2000
- st_cluster_resolution = 1
+ st_cluster_resolution = 1.0
// Spatial differential expression
st_n_top_spatial_degs = 14
@@ -86,6 +86,13 @@ try {
System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")
}
+// Load nf-core/spatialtranscriptomics custom profiles from different institutions.
+// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs!
+// try {
+// includeConfig "${params.custom_config_base}/pipeline/spatialtranscriptomics.config"
+// } catch (Exception e) {
+// System.err.println("WARNING: Could not load nf-core/config/spatialtranscriptomics profiles: ${params.custom_config_base}/pipeline/spatialtranscriptomics.config")
+// }
profiles {
debug {
dumpHashes = true
@@ -100,6 +107,7 @@ profiles {
podman.enabled = false
shifter.enabled = false
charliecloud.enabled = false
+ channels = ['conda-forge', 'bioconda', 'defaults']
apptainer.enabled = false
}
mamba {
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 5db2fe4..2f9a6f4 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -140,7 +140,7 @@
},
"st_cluster_resolution": {
"type": "number",
- "default": 0.4,
+ "default": 1,
"description": "The resolution for the clustering of the spots.",
"help_text": "The resolution controls the coarseness of the clustering, where a higher resolution leads to more clusters.",
"fa_icon": "fas fa-circle-nodes"
@@ -213,7 +213,7 @@
"max_cpus": {
"type": "integer",
"description": "Maximum number of CPUs that can be requested for any single job.",
- "default": 16,
+ "default": 64,
"fa_icon": "fas fa-microchip",
"hidden": true,
"help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`"
diff --git a/pyproject.toml b/pyproject.toml
index 0d62beb..7d08e1c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,10 +1,13 @@
-# Config file for Python. Mostly used to configure linting of bin/check_samplesheet.py with Black.
+# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff.
# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation.
-[tool.black]
+[tool.ruff]
line-length = 120
-target_version = ["py37", "py38", "py39", "py310"]
+target-version = "py38"
+select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"]
+cache-dir = "~/.cache/ruff"
-[tool.isort]
-profile = "black"
-known_first_party = ["nf_core"]
-multi_line_output = 3
+[tool.ruff.isort]
+known-first-party = ["nf_core"]
+
+[tool.ruff.per-file-ignores]
+"__init__.py" = ["E402", "F401"]
diff --git a/subworkflows/local/input_check.nf b/subworkflows/local/input_check.nf
index baff0e8..abd2391 100644
--- a/subworkflows/local/input_check.nf
+++ b/subworkflows/local/input_check.nf
@@ -92,8 +92,8 @@ def create_channel_spaceranger(LinkedHashMap meta) {
// Convert a path in `meta` to a file object and return it. If `key` is not contained in `meta`
// return an empty list which is recognized as 'no file' by nextflow.
def get_file_from_meta = {key ->
- v = meta.remove(key);
- return v ? file(v) : []
+ v = meta.remove(key);
+ return v ? file(v) : []
}
fastq_dir = meta.remove("fastq_dir")
diff --git a/workflows/spatialtranscriptomics.nf b/workflows/spatialtranscriptomics.nf
index c3c34f9..37047b9 100644
--- a/workflows/spatialtranscriptomics.nf
+++ b/workflows/spatialtranscriptomics.nf
@@ -17,13 +17,15 @@ WorkflowSpatialtranscriptomics.initialise(params, log)
// Check input path parameters to see if they exist
log.info """\
- Project directory: ${projectDir}
- """
- .stripIndent()
-
-def checkPathParamList = [ params.input,
- params.spaceranger_reference,
- params.spaceranger_probeset ]
+ Project directory: ${projectDir}
+ """
+ .stripIndent()
+
+def checkPathParamList = [
+ params.input,
+ params.spaceranger_reference,
+ params.spaceranger_probeset
+]
for (param in checkPathParamList) { if (param) { file(param, checkIfExists: true) } }
// Check mandatory parameters
@@ -176,6 +178,13 @@ workflow.onComplete {
}
}
+workflow.onError {
+ if (workflow.errorReport.contains("Process requirement exceeds available memory")) {
+ println("🛑 Default resources exceed availability 🛑 ")
+ println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡")
+ }
+}
+
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
THE END