From b57961abf76589ee586e82eb3287ac1b0318124a Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Mon, 1 Jul 2024 18:38:57 +0100 Subject: [PATCH 01/15] diann 1.8.1 -> 1.9.1dev --- modules/local/assemble_empirical_library/main.nf | 4 ++-- modules/local/diann_preliminary_analysis/main.nf | 4 ++-- modules/local/diannsummary/main.nf | 4 ++-- modules/local/individual_final_analysis/main.nf | 4 ++-- modules/local/silicolibrarygeneration/main.nf | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/local/assemble_empirical_library/main.nf b/modules/local/assemble_empirical_library/main.nf index 14eb19e2..b5b36ce5 100644 --- a/modules/local/assemble_empirical_library/main.nf +++ b/modules/local/assemble_empirical_library/main.nf @@ -3,8 +3,8 @@ process ASSEMBLE_EMPIRICAL_LIBRARY { label 'process_low' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : - 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : + 'ghcr.io/bigbio/diann:1.9.1dev' }" input: // In this step the real files are passed, and not the names diff --git a/modules/local/diann_preliminary_analysis/main.nf b/modules/local/diann_preliminary_analysis/main.nf index fe995ece..c57937de 100644 --- a/modules/local/diann_preliminary_analysis/main.nf +++ b/modules/local/diann_preliminary_analysis/main.nf @@ -3,8 +3,8 @@ process DIANN_PRELIMINARY_ANALYSIS { label 'process_high' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : - 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : + 'ghcr.io/bigbio/diann:1.9.1dev' }" input: tuple val(meta), path(ms_file), path(predict_library) diff --git a/modules/local/diannsummary/main.nf b/modules/local/diannsummary/main.nf index 4677b3e3..32dc2e44 100644 --- a/modules/local/diannsummary/main.nf +++ b/modules/local/diannsummary/main.nf @@ -3,8 +3,8 @@ process DIANNSUMMARY { label 'process_high' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : - 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : + 'ghcr.io/bigbio/diann:1.9.1dev' }" input: // Note that the files are passed as names and not paths, this prevents them from being staged diff --git a/modules/local/individual_final_analysis/main.nf b/modules/local/individual_final_analysis/main.nf index f80b047a..ddc9221e 100644 --- a/modules/local/individual_final_analysis/main.nf +++ b/modules/local/individual_final_analysis/main.nf @@ -3,8 +3,8 @@ process INDIVIDUAL_FINAL_ANALYSIS { label 'process_high' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : - 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : + 'ghcr.io/bigbio/diann:1.9.1dev' }" input: tuple val(meta), path(ms_file), path(fasta), path(diann_log), path(library) diff --git a/modules/local/silicolibrarygeneration/main.nf b/modules/local/silicolibrarygeneration/main.nf index e5f6e2f3..99482403 100644 --- a/modules/local/silicolibrarygeneration/main.nf +++ b/modules/local/silicolibrarygeneration/main.nf @@ -3,8 +3,8 @@ process SILICOLIBRARYGENERATION { label 'process_medium' container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : - 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : + 'ghcr.io/bigbio/diann:1.9.1dev' }" input: file(fasta) From 766ffc33601dc5e2094f59627cc5b659b70ec7be Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 2 Jul 2024 11:06:28 +0100 Subject: [PATCH 02/15] diann 1.8.1 -> 1.9.1dev --- modules/local/diann_preliminary_analysis/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/diann_preliminary_analysis/main.nf b/modules/local/diann_preliminary_analysis/main.nf index c57937de..e5401c2c 100644 --- a/modules/local/diann_preliminary_analysis/main.nf +++ b/modules/local/diann_preliminary_analysis/main.nf @@ -61,7 +61,7 @@ process DIANN_PRELIMINARY_ANALYSIS { cat <<-END_VERSIONS > versions.yml "${task.process}": - DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "(\\d*\\.\\d+\\.\\d+)|(\\d*\\.\\d+)") + DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "\\d+\\.\\d+(\\.\\w+)*(\\.[\\d]+)?") END_VERSIONS """ } From 6317c6b814bc9ef54f7f8d60fab21c067ec5517d Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 2 Jul 2024 11:15:20 +0100 Subject: [PATCH 03/15] diann 1.8.1 -> 1.9.1dev --- modules/local/assemble_empirical_library/main.nf | 2 +- modules/local/diannsummary/main.nf | 2 +- modules/local/individual_final_analysis/main.nf | 2 +- modules/local/silicolibrarygeneration/main.nf | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/local/assemble_empirical_library/main.nf b/modules/local/assemble_empirical_library/main.nf index b5b36ce5..853925c2 100644 --- a/modules/local/assemble_empirical_library/main.nf +++ b/modules/local/assemble_empirical_library/main.nf @@ -58,7 +58,7 @@ process ASSEMBLE_EMPIRICAL_LIBRARY { cat <<-END_VERSIONS > versions.yml "${task.process}": - DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "(\\d*\\.\\d+\\.\\d+)|(\\d*\\.\\d+)") + DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "\\d+\\.\\d+(\\.\\w+)*(\\.[\\d]+)?") END_VERSIONS """ } diff --git a/modules/local/diannsummary/main.nf b/modules/local/diannsummary/main.nf index 32dc2e44..86c9d132 100644 --- a/modules/local/diannsummary/main.nf +++ b/modules/local/diannsummary/main.nf @@ -69,7 +69,7 @@ process DIANNSUMMARY { cat <<-END_VERSIONS > versions.yml "${task.process}": - DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "(\\d*\\.\\d+\\.\\d+)|(\\d*\\.\\d+)") + DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "\\d+\\.\\d+(\\.\\w+)*(\\.[\\d]+)?") END_VERSIONS """ } diff --git a/modules/local/individual_final_analysis/main.nf b/modules/local/individual_final_analysis/main.nf index 
ddc9221e..21c0d375 100644 --- a/modules/local/individual_final_analysis/main.nf +++ b/modules/local/individual_final_analysis/main.nf @@ -53,7 +53,7 @@ process INDIVIDUAL_FINAL_ANALYSIS { cat <<-END_VERSIONS > versions.yml "${task.process}": - DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "(\\d*\\.\\d+\\.\\d+)|(\\d*\\.\\d+)") + DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "\\d+\\.\\d+(\\.\\w+)*(\\.[\\d]+)?") END_VERSIONS """ } diff --git a/modules/local/silicolibrarygeneration/main.nf b/modules/local/silicolibrarygeneration/main.nf index 99482403..55169a7b 100644 --- a/modules/local/silicolibrarygeneration/main.nf +++ b/modules/local/silicolibrarygeneration/main.nf @@ -49,7 +49,7 @@ process SILICOLIBRARYGENERATION { cat <<-END_VERSIONS > versions.yml "${task.process}": - DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "(\\d*\\.\\d+\\.\\d+)|(\\d*\\.\\d+)") + DIA-NN: \$(diann 2>&1 | grep "DIA-NN" | grep -oP "\\d+\\.\\d+(\\.\\w+)*(\\.[\\d]+)?") END_VERSIONS """ } From 9cb39228d51f6758aa28f98594608e316de1f5cc Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 2 Jul 2024 11:46:40 +0100 Subject: [PATCH 04/15] diann 1.8.1 -> 1.9.1dev --- modules/local/diannsummary/main.nf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/local/diannsummary/main.nf b/modules/local/diannsummary/main.nf index 86c9d132..70b707c1 100644 --- a/modules/local/diannsummary/main.nf +++ b/modules/local/diannsummary/main.nf @@ -24,7 +24,8 @@ process DIANNSUMMARY { path "diann_report.gg_matrix.tsv", emit: gg_matrix path "diann_report.unique_genes_matrix.tsv", emit: unique_gene_matrix path "diannsummary.log", emit: log - path "empirical_library.tsv.speclib", emit: final_speclib + path "empirical_library.tsv", emit: final_speclib + path "empirical_library.tsv.skyline.speclib", emit: skyline_speclib path "versions.yml", emit: version when: From 96d1d1c446d9aa71ee02dde3e10a45fb217ffd12 Mon Sep 17 00:00:00 2001 From: Yasset Perez-Riverol Date: Tue, 2 Jul 2024 12:11:50 +0100 Subject: [PATCH 05/15] diann 1.8.1 -> 1.9.1dev --- bin/diann_convert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/diann_convert.py b/bin/diann_convert.py index fb2bdd22..f385b607 100755 --- a/bin/diann_convert.py +++ b/bin/diann_convert.py @@ -292,7 +292,7 @@ def diann_version(self) -> str: return diann_version_id def validate_diann_version(self) -> None: - supported_diann_versions = ["1.8.1"] + supported_diann_versions = ["1.8.1", "1.9.beta.1"] if self.diann_version not in supported_diann_versions: raise ValueError(f"Unsupported DIANN version {self.diann_version}") From a2f4d70db61bc6e79cbf43c437ba3672caf5dd8b Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 20:32:53 +0800 Subject: [PATCH 06/15] quantms DIANN 1.9.1dev --- .github/workflows/ci.yml | 3 ++ bin/diann_convert.py | 52 +++++++++---------- bin/psm_conversion.py | 6 +-- conf/test_latest_dia.config | 48 +++++++++++++++++ .../local/assemble_empirical_library/main.nf | 10 ++-- .../local/diann_preliminary_analysis/main.nf | 11 ++-- modules/local/diannsummary/main.nf | 10 ++-- .../local/individual_final_analysis/main.nf | 10 ++-- modules/local/silicolibrarygeneration/main.nf | 10 ++-- nextflow.config | 26 +++++----- nextflow_schema.json | 6 +++ subworkflows/local/create_input_channel.nf | 3 ++ 12 files changed, 138 insertions(+), 57 deletions(-) create mode 100644 conf/test_latest_dia.config diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 
a372b35d..201e7e39 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,6 +43,9 @@ jobs: exec_profile: conda - NXF_VER: "latest-everything" exec_profile: "conda" + include: + - test_profile: test_latest_dia + exec_profile: singularity steps: - name: Check out pipeline code uses: actions/checkout@v4 diff --git a/bin/diann_convert.py b/bin/diann_convert.py index f385b607..0a5e2bb6 100755 --- a/bin/diann_convert.py +++ b/bin/diann_convert.py @@ -591,13 +591,13 @@ def mztab_PRH(report, pg, index_ref, database, fasta_df): logger.debug("Classifying results type ...") pg["opt_global_result_type"] = "single_protein" - pg.loc[pg["Protein.Ids"].str.contains(";"), "opt_global_result_type"] = "indistinguishable_protein_group" + pg.loc[pg["Protein.Group"].str.contains(";"), "opt_global_result_type"] = "indistinguishable_protein_group" out_mztab_PRH = pg del pg out_mztab_PRH = out_mztab_PRH.drop(["Protein.Names"], axis=1) out_mztab_PRH.rename( - columns={"Protein.Group": "accession", "First.Protein.Description": "description"}, inplace=True + columns={"First.Protein.Description": "description"}, inplace=True ) out_mztab_PRH.loc[:, "database"] = database @@ -614,10 +614,10 @@ def mztab_PRH(report, pg, index_ref, database, fasta_df): out_mztab_PRH.loc[:, i] = "null" logger.debug("Extracting accession values (keeping first)...") - out_mztab_PRH.loc[:, "accession"] = out_mztab_PRH.apply(lambda x: x["accession"].split(";")[0], axis=1) + out_mztab_PRH.loc[:, "accession"] = out_mztab_PRH.apply(lambda x: x["Protein.Group"].split(";")[0], axis=1) protein_details_df = out_mztab_PRH[out_mztab_PRH["opt_global_result_type"] == "indistinguishable_protein_group"] - prh_series = protein_details_df["Protein.Ids"].str.split(";", expand=True).stack().reset_index(level=1, drop=True) + prh_series = protein_details_df["Protein.Group"].str.split(";", expand=True).stack().reset_index(level=1, drop=True) prh_series.name = "accession" protein_details_df = ( protein_details_df.drop("accession", axis=1).join(prh_series).reset_index().drop(columns="index") @@ -644,14 +644,14 @@ def mztab_PRH(report, pg, index_ref, database, fasta_df): # out_mztab_PRH.loc[out_mztab_PRH["opt_global_result_type"] == "single_protein", "ambiguity_members"] = "null" # or out_mztab_PRH.loc[out_mztab_PRH["Protein.Ids"] == out_mztab_PRH["accession"], "ambiguity_members"] = "null" out_mztab_PRH.loc[:, "ambiguity_members"] = out_mztab_PRH.apply( - lambda x: x["Protein.Ids"] if x["opt_global_result_type"] == "indistinguishable_protein_group" else "null", + lambda x: x["Protein.Group"] if x["opt_global_result_type"] == "indistinguishable_protein_group" else "null", axis=1, ) logger.debug("Matching PRH to best search engine score...") score_looker = ModScoreLooker(report) out_mztab_PRH[["modifiedSequence", "best_search_engine_score[1]"]] = out_mztab_PRH.apply( - lambda x: score_looker.get_score(x["Protein.Ids"]), axis=1, result_type="expand" + lambda x: score_looker.get_score(x["Protein.Group"]), axis=1, result_type="expand" ) logger.debug("Matching PRH to modifications...") @@ -664,16 +664,16 @@ def mztab_PRH(report, pg, index_ref, database, fasta_df): # This used to be a bottleneck in performance # This implementation drops the run time from 57s to 25ms protein_agg_report = ( - report[["PG.MaxLFQ", "Protein.Ids", "study_variable"]] - .groupby(["study_variable", "Protein.Ids"]) + report[["PG.MaxLFQ", "Protein.Group", "study_variable"]] + .groupby(["study_variable", "Protein.Group"]) .agg({"PG.MaxLFQ": ["mean", "std", "sem"]}) 
.reset_index() - .pivot(columns=["study_variable"], index="Protein.Ids") + .pivot(columns=["study_variable"], index="Protein.Group") .reset_index() ) protein_agg_report.columns = ["::".join([str(s) for s in col]).strip() for col in protein_agg_report.columns.values] subname_mapper = { - "Protein.Ids::::": "Protein.Ids", + "Protein.Group::::": "Protein.Group", "PG.MaxLFQ::mean": "protein_abundance_study_variable", "PG.MaxLFQ::std": "protein_abundance_stdev_study_variable", "PG.MaxLFQ::sem": "protein_abundance_std_error_study_variable", @@ -685,7 +685,7 @@ def mztab_PRH(report, pg, index_ref, database, fasta_df): # Oddly enough the last implementation mapped the the accession (Q9NZJ9) in the mztab # to the Protein.Ids (A0A024RBG1;Q9NZJ9;Q9NZJ9-2), leading to A LOT of missing values. out_mztab_PRH = out_mztab_PRH.merge( - protein_agg_report, on="Protein.Ids", how="left", validate="many_to_one", copy=True + protein_agg_report, on="Protein.Group", how="left", validate="many_to_one", copy=True ) del name_mapper del subname_mapper @@ -694,7 +694,7 @@ def mztab_PRH(report, pg, index_ref, database, fasta_df): out_mztab_PRH.loc[:, "PRH"] = "PRT" index = out_mztab_PRH.loc[:, "PRH"] - out_mztab_PRH.drop(["PRH", "Genes", "modifiedSequence", "Protein.Ids"], axis=1, inplace=True) + out_mztab_PRH.drop(["PRH", "Genes", "modifiedSequence", "Protein.Group"], axis=1, inplace=True) out_mztab_PRH.insert(0, "PRH", index) out_mztab_PRH.fillna("null", inplace=True) out_mztab_PRH.loc[:, "database"] = database @@ -734,12 +734,12 @@ def mztab_PEH( out_mztab_PEH = pd.DataFrame() out_mztab_PEH = pr.iloc[:, 0:10] out_mztab_PEH.drop( - ["Protein.Group", "Protein.Names", "First.Protein.Description", "Proteotypic"], axis=1, inplace=True + ["Protein.Ids", "Protein.Names", "First.Protein.Description", "Proteotypic"], axis=1, inplace=True ) out_mztab_PEH.rename( columns={ "Stripped.Sequence": "sequence", - "Protein.Ids": "accession", + "Protein.Group": "accession", "Modified.Sequence": "opt_global_cv_MS:1000889_peptidoform_sequence", "Precursor.Charge": "charge", }, @@ -909,7 +909,7 @@ def __find_info(directory, n): out_mztab_PSH = out_mztab_PSH[ [ "Stripped.Sequence", - "Protein.Ids", + "Protein.Group", "Q.Value", "RT.Start", "Precursor.Charge", @@ -1014,7 +1014,7 @@ def classify_result_type(target): :return: A string implys protein type :rtype: str """ - if ";" in target["Protein.Ids"]: + if ";" in target["Protein.Group"]: return "indistinguishable_protein_group" return "single_protein" @@ -1056,7 +1056,7 @@ def match_in_report(report, target, max_, flag, level): return tuple(q_value) if flag == 1 and level == "protein": - result = report[report["Protein.Ids"] == target] + result = report[report["Protein.Group"] == target] PRH_params = [] for i in range(1, max_ + 1): match = result[result["study_variable"] == i] @@ -1083,19 +1083,19 @@ def __init__(self, report: pd.DataFrame) -> None: def make_lookup_dict(self, report) -> Dict[str, Tuple[str, float]]: grouped_df = ( - report[["Modified.Sequence", "Protein.Ids", "Global.PG.Q.Value"]] + report[["Modified.Sequence", "Protein.Group", "Global.PG.Q.Value"]] .sort_values("Global.PG.Q.Value", ascending=True) - .groupby(["Protein.Ids"]) + .groupby(["Protein.Group"]) .head(1) ) - # Modified.Sequence Protein.Ids Global.PG.Q.Value + # Modified.Sequence Protein.Group Global.PG.Q.Value # 78265 LFNEQNFFQR Q8IV63;Q8IV63-2;Q8IV63-3 0.000252 # 103585 NPTIVNFPITNVDLR Q53GS9;Q53GS9-2 0.000252 # 103586 NPTWKPLIR Q7Z4Q2;Q7Z4Q2-2 0.000252 # 103588 NPVGYPLAWQFLR Q9NZ08;Q9NZ08-2 0.000252 out = { - 
row["Protein.Ids"]: (row["Modified.Sequence"], row["Global.PG.Q.Value"]) for _, row in grouped_df.iterrows() + row["Protein.Group"]: (row["Modified.Sequence"], row["Global.PG.Q.Value"]) for _, row in grouped_df.iterrows() } return out @@ -1325,17 +1325,17 @@ def calculate_protein_coverages(report: pd.DataFrame, out_mztab_PRH: pd.DataFram protein in the PRH table (defined by accession, not protein.ids). """ nested_df = ( - report[["Protein.Ids", "Stripped.Sequence"]] - .groupby("Protein.Ids") + report[["Protein.Group", "Stripped.Sequence"]] + .groupby("Protein.Group") .agg({"Stripped.Sequence": set}) .reset_index() ) - # Protein.Ids Stripped.Sequence + # Protein.Group Stripped.Sequence # 0 A0A024RBG1;Q9NZJ9;Q9NZJ9-2 {SEQEDEVLLVSSSR} # 1 A0A096LP49;A0A096LP49-2 {SPWAMTERKHSSLER} # 2 A0AVT1;A0AVT1-2 {EDFTLLDFINAVK, KPDHVPISSEDER, QDVIITALDNVEAR,... - ids_to_seqs = dict(zip(nested_df["Protein.Ids"], nested_df["Stripped.Sequence"])) - acc_to_ids = dict(zip(out_mztab_PRH["accession"], out_mztab_PRH["Protein.Ids"])) + ids_to_seqs = dict(zip(nested_df["Protein.Group"], nested_df["Stripped.Sequence"])) + acc_to_ids = dict(zip(out_mztab_PRH["accession"], out_mztab_PRH["Protein.Group"])) fasta_id_to_seqs = dict(zip(fasta_df["id"], fasta_df["seq"])) acc_to_fasta_ids: dict = {} diff --git a/bin/psm_conversion.py b/bin/psm_conversion.py index 5abd8e21..9d43fb24 100755 --- a/bin/psm_conversion.py +++ b/bin/psm_conversion.py @@ -10,7 +10,7 @@ _parquet_field = [ "sequence", "protein_accessions", "protein_start_positions", "protein_end_positions", - "modifications", "retention_time", "charge", "calc_mass_to_charge", "reference_file_name", + "modifications", "retention_time", "charge", "exp_mass_to_charge", "reference_file_name", "scan_number", "peptidoform", "posterior_error_probability", "global_qvalue", "is_decoy", "consensus_support", "mz_array", "intensity_array", "num_peaks", "search_engines", "id_scores", "hit_rank" ] @@ -61,7 +61,7 @@ def convert_psm(idxml, spectra_file, export_decoy_psm): for peptide_id in pep_ids: retention_time = peptide_id.getRT() - calc_mass_to_charge = peptide_id.getMZ() + exp_mass_to_charge = peptide_id.getMZ() scan_number = int(re.findall(r"(spectrum|scan)=(\d+)", peptide_id.getMetaValue("spectrum_reference"))[0][1]) if isinstance(spectra_df, pd.DataFrame): @@ -101,7 +101,7 @@ def convert_psm(idxml, spectra_file, export_decoy_psm): hit_rank = hit.getRank() parquet_data.append([sequence, protein_accessions, protein_start_positions, protein_end_positions, - modifications, retention_time, charge, calc_mass_to_charge, reference_file_name, + modifications, retention_time, charge, exp_mass_to_charge, reference_file_name, scan_number, peptidoform, posterior_error_probability, global_qvalue, is_decoy, consensus_support, mz_array, intensity_array, num_peaks, search_engines, id_scores, hit_rank]) diff --git a/conf/test_latest_dia.config b/conf/test_latest_dia.config new file mode 100644 index 00000000..14eea22b --- /dev/null +++ b/conf/test_latest_dia.config @@ -0,0 +1,48 @@ +/* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Nextflow config file for running minimal tests (DIA) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + Defines input files and everything required to run a fast and simple test. 
+
+    Use as follows:
+        nextflow run nf-core/quantms -profile test_latest_dia,<docker/singularity> [--outdir <OUTDIR>]
+
+------------------------------------------------------------------------------------------------
+*/
+
+params {
+    config_profile_name        = 'Test profile for latest DIA'
+    config_profile_description = 'Minimal test dataset to check pipeline function for the data-independent acquisition pipeline branch for latest DIA-NN.'
+
+    // Limit resources so that this can run on GitHub Actions
+    max_cpus   = 2
+    max_memory = 6.GB
+    max_time   = 48.h
+
+    outdir = './results_latest_dia'
+
+    // Input data
+    input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/dia_ci/PXD026600.sdrf.tsv'
+    database = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/dia_ci/REF_EColi_K12_UPS1_combined.fasta'
+    diann_version = '1.9.beta.1'
+    min_pr_mz = 350
+    max_pr_mz = 950
+    min_fr_mz = 500
+    max_fr_mz = 1500
+    min_peptide_length = 15
+    max_peptide_length = 30
+    max_precursor_charge = 3
+    allowed_missed_cleavages = 1
+    diann_normalize = false
+    skip_post_msstats = false
+    publish_dir_mode = 'symlink'
+    max_mods = 2
+}
+
+process {
+    // thermorawfileparser
+    withName: 'NFCORE_QUANTMS:QUANTMS:FILE_PREPARATION:THERMORAWFILEPARSER' {
+        publishDir = [path: { "${params.outdir}/${task.process.tokenize(':')[-1].toLowerCase()}" }, pattern: "*.log" ]
+    }
+}
+
diff --git a/modules/local/assemble_empirical_library/main.nf b/modules/local/assemble_empirical_library/main.nf
index 853925c2..63e74aae 100644
--- a/modules/local/assemble_empirical_library/main.nf
+++ b/modules/local/assemble_empirical_library/main.nf
@@ -2,9 +2,13 @@ process ASSEMBLE_EMPIRICAL_LIBRARY {
     tag "$meta.experiment_id"
     label 'process_low'

-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' :
-        'ghcr.io/bigbio/diann:1.9.1dev' }"
+    if (params.diann_version == "1.9.beta.1") {
+        container 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif'
+    } else {
+        container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+            'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' :
+            'docker.io/biocontainers/diann:v1.8.1_cv1' }"
+    }

     input:
     // In this step the real files are passed, and not the names
diff --git a/modules/local/diann_preliminary_analysis/main.nf b/modules/local/diann_preliminary_analysis/main.nf
index e5401c2c..59939860 100644
--- a/modules/local/diann_preliminary_analysis/main.nf
+++ b/modules/local/diann_preliminary_analysis/main.nf
@@ -2,9 +2,13 @@ process DIANN_PRELIMINARY_ANALYSIS {
     tag "$ms_file.baseName"
     label 'process_high'

-    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
-        'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' :
-        'ghcr.io/bigbio/diann:1.9.1dev' }"
+    if (params.diann_version == "1.9.beta.1") {
+        container 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif'
+    } else {
+        container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+ 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : + 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + } input: tuple val(meta), path(ms_file), path(predict_library) @@ -13,7 +17,6 @@ process DIANN_PRELIMINARY_ANALYSIS { path "*.quant", emit: diann_quant tuple val(meta), path("*_diann.log"), emit: log path "versions.yml", emit: version - path(ms_file), emit: preliminary_ms_file when: task.ext.when == null || task.ext.when diff --git a/modules/local/diannsummary/main.nf b/modules/local/diannsummary/main.nf index 70b707c1..aeb41a0c 100644 --- a/modules/local/diannsummary/main.nf +++ b/modules/local/diannsummary/main.nf @@ -2,9 +2,13 @@ process DIANNSUMMARY { tag "$meta.experiment_id" label 'process_high' - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : - 'ghcr.io/bigbio/diann:1.9.1dev' }" + if (params.diann_version == "1.9.beta.1") { + container 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' + } else { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : + 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + } input: // Note that the files are passed as names and not paths, this prevents them from being staged diff --git a/modules/local/individual_final_analysis/main.nf b/modules/local/individual_final_analysis/main.nf index 21c0d375..e58cd50f 100644 --- a/modules/local/individual_final_analysis/main.nf +++ b/modules/local/individual_final_analysis/main.nf @@ -2,9 +2,13 @@ process INDIVIDUAL_FINAL_ANALYSIS { tag "$ms_file.baseName" label 'process_high' - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : - 'ghcr.io/bigbio/diann:1.9.1dev' }" + if (params.diann_version == "1.9.beta.1") { + container 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' + } else { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' : + 'docker.io/biocontainers/diann:v1.8.1_cv1' }" + } input: tuple val(meta), path(ms_file), path(fasta), path(diann_log), path(library) diff --git a/modules/local/silicolibrarygeneration/main.nf b/modules/local/silicolibrarygeneration/main.nf index 55169a7b..96ff060c 100644 --- a/modules/local/silicolibrarygeneration/main.nf +++ b/modules/local/silicolibrarygeneration/main.nf @@ -2,9 +2,13 @@ process SILICOLIBRARYGENERATION { tag "$fasta.Name" label 'process_medium' - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' : - 'ghcr.io/bigbio/diann:1.9.1dev' }" + if (params.diann_version == "1.9.beta.1") { + container 'https://ftp.pride.ebi.ac.uk/pub/databases/pride/resources/tools/ghcr.io-bigbio-diann-1.9.1dev.sif' + } else { + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+            'https://containers.biocontainers.pro/s3/SingImgsRepo/diann/v1.8.1_cv1/diann_v1.8.1_cv1.img' :
+            'docker.io/biocontainers/diann:v1.8.1_cv1' }"
+    }

    input:
    file(fasta)
diff --git a/nextflow.config b/nextflow.config
index a48ce8a7..fb96aa3d 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -180,6 +180,7 @@ params {
    convert_dotd = false

    // DIA-NN
+    diann_version = '1.8.1'
    diann_debug = 3
    scan_window = 8
    scan_window_automatic = true
@@ -404,18 +405,19 @@ profiles {
        conda.useMamba = true
        process.executor = 'lsf'
    }
-    test          { includeConfig 'conf/test_lfq.config' }
-    test_localize { includeConfig 'conf/test_localize.config' }
-    test_tmt      { includeConfig 'conf/test_tmt.config' }
-    test_lfq      { includeConfig 'conf/test_lfq.config' }
-    test_lfq_sage { includeConfig 'conf/test_lfq_sage.config' }
-    test_dia      { includeConfig 'conf/test_dia.config' }
-    test_full_lfq { includeConfig 'conf/test_full_lfq.config' }
-    test_full_tmt { includeConfig 'conf/test_full_tmt.config' }
-    test_full_dia { includeConfig 'conf/test_full_dia.config' }
-    test_full     { includeConfig 'conf/test_full_lfq.config' }
-    test_dda_id   { includeConfig 'conf/test_dda_id.config' }
-    mambaci       { includeConfig 'conf/mambaci.config' }
+    test            { includeConfig 'conf/test_lfq.config' }
+    test_localize   { includeConfig 'conf/test_localize.config' }
+    test_tmt        { includeConfig 'conf/test_tmt.config' }
+    test_lfq        { includeConfig 'conf/test_lfq.config' }
+    test_lfq_sage   { includeConfig 'conf/test_lfq_sage.config' }
+    test_dia        { includeConfig 'conf/test_dia.config' }
+    test_latest_dia { includeConfig 'conf/test_latest_dia.config' }
+    test_full_lfq   { includeConfig 'conf/test_full_lfq.config' }
+    test_full_tmt   { includeConfig 'conf/test_full_tmt.config' }
+    test_full_dia   { includeConfig 'conf/test_full_dia.config' }
+    test_full       { includeConfig 'conf/test_full_lfq.config' }
+    test_dda_id     { includeConfig 'conf/test_dda_id.config' }
+    mambaci         { includeConfig 'conf/mambaci.config' }
}
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 982402f3..1b876c59 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -962,6 +962,12 @@
            "description": "Settings for DIA-NN - a universal software for data-independent acquisition (DIA) proteomics data processing.",
            "default": "",
            "properties": {
+                "diann_version": {
+                    "type": "string",
+                    "description": "The version of DIA-NN used",
+                    "fa_icon": "fas fa-font",
+                    "help_text": "There are differences between DIA-NN versions. DIA-NN does not freely support cloud use in 1.9.1."
+                },
                "mass_acc_automatic": {
                    "type": "boolean",
                    "default": true,
diff --git a/subworkflows/local/create_input_channel.nf b/subworkflows/local/create_input_channel.nf
index be8ca978..32f58c1d 100644
--- a/subworkflows/local/create_input_channel.nf
+++ b/subworkflows/local/create_input_channel.nf
@@ -168,6 +168,9 @@ def create_meta_channel(LinkedHashMap row, is_sdrf, enzymes, files, wrapper) {
        } else if (session.config.conda && session.config.conda.enabled) {
            log.error "File in DIA mode found in input design and conda profile was chosen. DIA-NN currently doesn't support conda! Exiting. Please use the docker/singularity profile with a container."
            exit 1
+        } else if (!session.config.singularity.enabled && params.diann_version == "1.9.beta.1") {
+            log.error "DIA-NN 1.9.beta.1 currently only supports Singularity! Exiting. Please use the singularity profile with a container."
+ exit 1 } if (wrapper.labelling_type.contains("label free") || meta.acquisition_method == "dia") { From 114a14f1f2d6419e4d1f56d1fbd294c8d5fc5f4b Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:07:02 +0800 Subject: [PATCH 07/15] Update ci.yml --- .github/workflows/ci.yml | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 201e7e39..4432afad 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,9 +68,13 @@ jobs: echo "$(pwd)/micromamba/bin" >> $GITHUB_PATH ./bin/micromamba shell init -s bash echo $'channels:\n - conda-forge\n - bioconda\n - defaults\nuse_lockfiles: false' >> ~/.mambarc - - - name: Run pipeline with test data - if: matrix.exec_profile != 'conda' + - name: Install Singularity + if: matrix.exec_profile == 'singularity' + run: | + sudo apt-get update && \ + sudo apt-get install -y singularity-container + - name: Run pipeline with test data in docker profile + if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix @@ -85,6 +89,14 @@ jobs: # Remember that you can parallelise this by using strategy.matrix run: | nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,micromamba --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results + - name: Run pipeline with test data in singularity profile + if: matrix.exec_profile == 'singularity' + # TODO nf-core: You can customise CI pipeline run tests as required + # For example: adding multiple test runs with different parameters + # Remember that you can parallelise this by using strategy.matrix + run: | + nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results + - name: Gather failed logs if: failure() || cancelled() run: | From 410e4e49f65d33cfb7b1c7326231caac84c49ad2 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:16:34 +0800 Subject: [PATCH 08/15] Update ci.yml --- .github/workflows/ci.yml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4432afad..812bdf6b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,7 +72,22 @@ jobs: if: matrix.exec_profile == 'singularity' run: | sudo apt-get update && \ - sudo apt-get install -y singularity-container + sudo apt-get install -y \ + autoconf \ + automake \ + cryptsetup \ + fuse \ + fuse2fs \ + libfuse-dev \ + libglib2.0-dev \ + libseccomp-dev \ + libtool \ + pkg-config \ + runc \ + squashfs-tools \ + squashfs-tools-ng \ + uidmap \ + zlib1g-dev - name: Run pipeline with test data in docker profile if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required From 6c8ee8b8c12521a7225187b057b6e09c857960d3 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:18:49 +0800 Subject: [PATCH 09/15] Update ci.yml --- .github/workflows/ci.yml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 812bdf6b..2698faa9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,21 +73,21 @@ 
jobs: run: | sudo apt-get update && \ sudo apt-get install -y \ - autoconf \ - automake \ - cryptsetup \ - fuse \ - fuse2fs \ - libfuse-dev \ - libglib2.0-dev \ - libseccomp-dev \ - libtool \ - pkg-config \ - runc \ - squashfs-tools \ - squashfs-tools-ng \ - uidmap \ - zlib1g-dev + autoconf \ + automake \ + cryptsetup \ + fuse \ + fuse2fs \ + libfuse-dev \ + libglib2.0-dev \ + libseccomp-dev \ + libtool \ + pkg-config \ + runc \ + squashfs-tools \ + squashfs-tools-ng \ + uidmap \ + zlib1g-dev - name: Run pipeline with test data in docker profile if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required From fbd863d9494c7440d59c48e9c75be400ff419902 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 21:54:56 +0800 Subject: [PATCH 10/15] Update ci.yml --- .github/workflows/ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2698faa9..067cbe78 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -88,6 +88,12 @@ jobs: squashfs-tools-ng \ uidmap \ zlib1g-dev + sudo rm -rf /usr/local/go + sudo tar -C /usr/local -xzf go1.22.5.linux-amd64.tar.gz + echo 'export PATH=/usr/local/go/bin:$PATH' >> ~/.bashrc && \ + source ~/.bashrc + wget -qO- https://github.com/sylabs/singularity/releases/download/v4.1.4/singularity-ce_4.1.4-jammy_amd64.deb + sudo apt install ./singularity-ce_4.1.4-jammy_amd64.deb - name: Run pipeline with test data in docker profile if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required From c5dc72e88ed9dd4f3f5dd281b60f935fd3efc981 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 22:01:34 +0800 Subject: [PATCH 11/15] fixed ci --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 067cbe78..2692908e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -89,6 +89,7 @@ jobs: uidmap \ zlib1g-dev sudo rm -rf /usr/local/go + wget -qO- https://go.dev/dl/go1.22.5.linux-amd64.tar.gz sudo tar -C /usr/local -xzf go1.22.5.linux-amd64.tar.gz echo 'export PATH=/usr/local/go/bin:$PATH' >> ~/.bashrc && \ source ~/.bashrc From 7d000697f87e785ab457a043ef2b3ae028f11a13 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 22:23:14 +0800 Subject: [PATCH 12/15] Update ci.yml --- .github/workflows/ci.yml | 30 +++++------------------------- 1 file changed, 5 insertions(+), 25 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2692908e..37d97d03 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,31 +70,11 @@ jobs: echo $'channels:\n - conda-forge\n - bioconda\n - defaults\nuse_lockfiles: false' >> ~/.mambarc - name: Install Singularity if: matrix.exec_profile == 'singularity' - run: | - sudo apt-get update && \ - sudo apt-get install -y \ - autoconf \ - automake \ - cryptsetup \ - fuse \ - fuse2fs \ - libfuse-dev \ - libglib2.0-dev \ - libseccomp-dev \ - libtool \ - pkg-config \ - runc \ - squashfs-tools \ - squashfs-tools-ng \ - uidmap \ - zlib1g-dev - sudo rm -rf /usr/local/go - wget -qO- https://go.dev/dl/go1.22.5.linux-amd64.tar.gz - sudo tar -C /usr/local -xzf go1.22.5.linux-amd64.tar.gz - echo 'export PATH=/usr/local/go/bin:$PATH' >> ~/.bashrc && \ - source ~/.bashrc - wget -qO- 
https://github.com/sylabs/singularity/releases/download/v4.1.4/singularity-ce_4.1.4-jammy_amd64.deb - sudo apt install ./singularity-ce_4.1.4-jammy_amd64.deb + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + - name: Singularity install with defaults + uses: singularityhub/install-singularity@main - name: Run pipeline with test data in docker profile if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required From 159d6e5412ab0e67273c02461f8a13ced2f34275 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 22:31:04 +0800 Subject: [PATCH 13/15] Update .github/workflows/ci.yml Co-authored-by: Julianus Pfeuffer --- .github/workflows/ci.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37d97d03..5b93019d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,11 +70,8 @@ jobs: echo $'channels:\n - conda-forge\n - bioconda\n - defaults\nuse_lockfiles: false' >> ~/.mambarc - name: Install Singularity if: matrix.exec_profile == 'singularity' - steps: - - name: Checkout Repository - uses: actions/checkout@v3 - - name: Singularity install with defaults - uses: singularityhub/install-singularity@main + - name: Singularity install with defaults + uses: singularityhub/install-singularity@main - name: Run pipeline with test data in docker profile if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required From f2221b251a4e9bdd93c9ebaf1373d9fada3c33d7 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Mon, 22 Jul 2024 22:43:57 +0800 Subject: [PATCH 14/15] Update ci.yml --- .github/workflows/ci.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5b93019d..3e49dd6c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,10 +68,9 @@ jobs: echo "$(pwd)/micromamba/bin" >> $GITHUB_PATH ./bin/micromamba shell init -s bash echo $'channels:\n - conda-forge\n - bioconda\n - defaults\nuse_lockfiles: false' >> ~/.mambarc - - name: Install Singularity + - name: Install Singularity with defaults if: matrix.exec_profile == 'singularity' - - name: Singularity install with defaults - uses: singularityhub/install-singularity@main + uses: singularityhub/install-singularity@main - name: Run pipeline with test data in docker profile if: matrix.exec_profile == 'docker' # TODO nf-core: You can customise CI pipeline run tests as required From 2e2f0aabba8ac21a0fa8fe0f89b73e18b9b98cf9 Mon Sep 17 00:00:00 2001 From: Chengxin Dai <37200167+daichengxin@users.noreply.github.com> Date: Tue, 23 Jul 2024 09:12:13 +0800 Subject: [PATCH 15/15] Update main.nf --- modules/local/diannsummary/main.nf | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/local/diannsummary/main.nf b/modules/local/diannsummary/main.nf index aeb41a0c..77ba67eb 100644 --- a/modules/local/diannsummary/main.nf +++ b/modules/local/diannsummary/main.nf @@ -28,8 +28,9 @@ process DIANNSUMMARY { path "diann_report.gg_matrix.tsv", emit: gg_matrix path "diann_report.unique_genes_matrix.tsv", emit: unique_gene_matrix path "diannsummary.log", emit: log - path "empirical_library.tsv", emit: final_speclib - path "empirical_library.tsv.skyline.speclib", emit: skyline_speclib + path "empirical_library.tsv", emit: final_speclib optional 
true + path "empirical_library.tsv.speclib", emit: final_tsv_speclib optional true + path "empirical_library.tsv.skyline.speclib", emit: skyline_speclib optional true path "versions.yml", emit: version when:
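
A note on the version-capture change repeated in patches 02 and 03: the old grep -oP pattern only admits numeric version components, so a version string containing a word component (as in the 1.9.1dev builds) is truncated at that component, while the replacement pattern also accepts word components between the dots. Below is a minimal Python sketch of the two patterns exactly as they appear in the diffs; the banner string is an assumption for illustration, not verbatim DIA-NN output, though "1.9.beta.1" matches the version string added to supported_diann_versions in patch 05.

    import re

    OLD = r"(\d*\.\d+\.\d+)|(\d*\.\d+)"  # pattern removed by patches 02/03
    NEW = r"\d+\.\d+(\.\w+)*(\.[\d]+)?"  # pattern added by patches 02/03

    banner = "DIA-NN 1.9.beta.1"  # assumed stderr banner text

    print(re.search(OLD, banner).group(0))          # -> "1.9": ".beta" breaks the all-numeric pattern
    print(re.search(NEW, banner).group(0))          # -> "1.9.beta.1"
    print(re.search(NEW, "DIA-NN 1.8.1").group(0))  # -> "1.8.1": older versions still match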