From 23657452753176574a40c4ec5080a822c59a8eed Mon Sep 17 00:00:00 2001
From: Yiheng Wang
Date: Wed, 11 Sep 2024 15:59:30 +0800
Subject: [PATCH] update bundles

Signed-off-by: Yiheng Wang
---
 ci/run_premerge_gpu.sh                       | 13 -------------
 ci/verify_bundle.py                          | 11 +++++++----
 .../configs/metadata.json                    |  9 +++++----
 .../classification_template/large_files.yml  |  8 ++++----
 models/mednist_reg/configs/metadata.json     |  3 ++-
 models/mednist_reg/large_files.yml           |  8 ++++----
 .../configs/metadata.json                    | 19 ++++++++++---------
 .../large_files.yml                          |  8 ++++----
 .../configs/metadata.json                    | 19 ++++++++++---------
 .../large_files.yml                          |  8 ++++----
 10 files changed, 50 insertions(+), 56 deletions(-)

diff --git a/ci/run_premerge_gpu.sh b/ci/run_premerge_gpu.sh
index eaac2c7b..5d907f36 100755
--- a/ci/run_premerge_gpu.sh
+++ b/ci/run_premerge_gpu.sh
@@ -102,19 +102,6 @@ verify_bundle() {
     fi
     # verify bundle
     python $(pwd)/ci/verify_bundle.py --b "$bundle"
-    # unzip data and do unit tests
-    DATA_DIR="$(pwd)/models/maisi_ct_generative/datasets"
-    ZIP_FILE="$DATA_DIR/all_masks_flexible_size_and_spacing_3000.zip"
-    UNZIP_DIR="$DATA_DIR/all_masks_flexible_size_and_spacing_3000"
-    if [ -f "$ZIP_FILE" ]; then
-        if [ ! -d "$UNZIP_DIR" ]; then
-            echo "Unzipping files for MAISI Bundle..."
-            unzip $ZIP_FILE -d $DATA_DIR
-            echo "Unzipping complete."
-        else
-            echo "Unzipped content already exists, continuing..."
-        fi
-    fi
     test_cmd="python $(pwd)/ci/unit_tests/runner.py --b \"$bundle\""
     if [ "$dist_flag" = "True" ]; then
         test_cmd="torchrun $(pwd)/ci/unit_tests/runner.py --b \"$bundle\" --dist True"
diff --git a/ci/verify_bundle.py b/ci/verify_bundle.py
index aa8f218e..845611fa 100644
--- a/ci/verify_bundle.py
+++ b/ci/verify_bundle.py
@@ -21,10 +21,13 @@
     exclude_verify_shape_list,
     exclude_verify_torchscript_list,
 )
-from monai.bundle import ckpt_export, create_workflow, verify_metadata, verify_net_in_out
+from monai.bundle import ckpt_export, verify_metadata, verify_net_in_out
 from monai.bundle.config_parser import ConfigParser
+from monai.utils.module import optional_import
 from utils import download_large_files, get_json_dict
 
+create_workflow, has_create_workflow = optional_import("monai.bundle", name="create_workflow")
+
 # files that must be included in a bundle
 necessary_files_list = ["configs/metadata.json", "LICENSE"]
 # files that are preferred to be included in a bundle
@@ -246,11 +249,11 @@ def check_properties(**kwargs):
     kwargs.pop("properties_path", None)
 
     print(kwargs)
-    workflow = create_workflow(**kwargs)
     if app_properties_path is not None and os.path.isfile(app_properties_path):
         shutil.copy(app_properties_path, "ci/bundle_properties.py")
         from bundle_properties import InferProperties, MetaProperties
 
+        workflow = create_workflow(**kwargs)
         workflow.properties = {**MetaProperties, **InferProperties}
         check_result = workflow.check_properties()
         if check_result is not None and len(check_result) > 0:
@@ -287,7 +290,6 @@ def verify_bundle_properties(model_path: str, bundle: str):
     if "supported_apps" in metadata:
         supported_apps = metadata["supported_apps"]
         all_properties = []
-        print("vista3d sopperted apps: ", supported_apps)
         for app, version in supported_apps.items():
             properties_path = get_app_properties(app, version)
             if properties_path is not None:
@@ -299,7 +301,8 @@ def verify_bundle_properties(model_path: str, bundle: str):
             check_properties(**check_property_args)
             print("successfully checked properties.")
     else:
-        check_properties(**check_property_args)
+        # skip property check if supported_apps is not provided
+        pass
 
 
 def verify(bundle, models_path="models", mode="full"):
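
Note on the ci/verify_bundle.py change above: monai.utils.module.optional_import returns an (object, succeeded) pair, so the script still imports on MONAI versions that do not expose create_workflow, and the property check is simply skipped. A minimal sketch of that pattern, assuming only that MONAI is installed; the config path below is illustrative, not taken from the patch:

    from monai.utils.module import optional_import

    # optional_import defers the failure: create_workflow is a lazy placeholder until called,
    # and has_create_workflow records whether the attribute could actually be imported.
    create_workflow, has_create_workflow = optional_import("monai.bundle", name="create_workflow")

    if has_create_workflow:
        # build an inference workflow from a bundle config (placeholder path)
        workflow = create_workflow(config_file="configs/inference.json", workflow_type="infer")
    else:
        print("create_workflow is unavailable in this MONAI version; skipping workflow checks.")
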
diff --git a/models/classification_template/configs/metadata.json b/models/classification_template/configs/metadata.json
index fa2428ce..776901db 100644
--- a/models/classification_template/configs/metadata.json
+++ b/models/classification_template/configs/metadata.json
@@ -1,14 +1,15 @@
 {
     "schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
-    "version": "0.0.1",
+    "version": "0.0.2",
     "changelog": {
+        "0.0.2": "update large file yml",
         "0.0.1": "Initial version"
     },
-    "monai_version": "1.3.0",
-    "pytorch_version": "2.0.1",
+    "monai_version": "1.4.0",
+    "pytorch_version": "2.4.0",
     "numpy_version": "1.24.4",
     "optional_packages_version": {
-        "pytorch-ignite": "0.4.12"
+        "pytorch-ignite": "0.4.11"
     },
     "name": "Classification Template",
     "task": "Classification Template in 2D images",
diff --git a/models/classification_template/large_files.yml b/models/classification_template/large_files.yml
index 5b82bc39..4c850751 100644
--- a/models/classification_template/large_files.yml
+++ b/models/classification_template/large_files.yml
@@ -1,5 +1,5 @@
 large_files:
-  - path: "models/model.pt"
-    url: "https://drive.google.com/uc?id=1kClwSCzVzahn4OTVePLhbvW4vIOLKDlu"
-    hash_val: "915f54538655e9e6091c5d09dfdee621"
-    hash_type: "md5"
+- path: "models/model.pt"
+  url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_key_metric=0.9500.pt"
+  hash_val: "915f54538655e9e6091c5d09dfdee621"
+  hash_type: "md5"
diff --git a/models/mednist_reg/configs/metadata.json b/models/mednist_reg/configs/metadata.json
index d1edb2af..53c052b5 100644
--- a/models/mednist_reg/configs/metadata.json
+++ b/models/mednist_reg/configs/metadata.json
@@ -1,7 +1,8 @@
 {
     "schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json",
-    "version": "0.0.4",
+    "version": "0.0.5",
     "changelog": {
+        "0.0.5": "update large files",
         "0.0.4": "add name tag",
         "0.0.3": "update to use monai 1.1.0",
         "0.0.2": "update to use rc1",
diff --git a/models/mednist_reg/large_files.yml b/models/mednist_reg/large_files.yml
index e12ad109..ec766f05 100644
--- a/models/mednist_reg/large_files.yml
+++ b/models/mednist_reg/large_files.yml
@@ -1,5 +1,5 @@
 large_files:
-  - path: "models/model.pt"
-    url: "https://drive.google.com/file/d/1qhoPDrl_ZTDWRX8bvlO72sxS3Oxcod9H/view?usp=sharing"
-    hash_val: "7970f6df5daaa2dff272afd448ea944e"
-    hash_type: "md5"
+- path: "models/model.pt"
+  url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_key_metric=-0.0065.pt"
+  hash_val: "7970f6df5daaa2dff272afd448ea944e"
+  hash_type: "md5"
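
The large_files.yml entries above move the weights from Google Drive to developer.download.nvidia.com while keeping the same md5 values, so downloads can still be verified against the recorded hashes. A short sketch using MONAI's download helper, assuming it is run from the repository root; the target path simply mirrors the "path" field of the mednist_reg entry:

    from monai.apps.utils import download_url

    # fetch the relocated mednist_reg weights and check the md5 recorded in large_files.yml;
    # download_url raises an error if the checksum does not match.
    download_url(
        url="https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_key_metric=-0.0065.pt",
        filepath="models/mednist_reg/models/model.pt",
        hash_val="7970f6df5daaa2dff272afd448ea944e",
        hash_type="md5",
    )
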
"numpy_version": "1.22.2", + "monai_version": "1.4.0", + "pytorch_version": "2.4.0", + "numpy_version": "1.24.4", "optional_packages_version": { - "nibabel": "3.2.1", - "pytorch-ignite": "0.4.8", - "einops": "0.4.1", - "fire": "0.4.0", + "nibabel": "5.2.1", + "pytorch-ignite": "0.4.11", + "einops": "0.7.0", + "fire": "0.6.0", "timm": "0.6.7", - "torchvision": "0.14.1" + "torchvision": "0.19.0" }, "name": "Renal structures UNEST segmentation", "task": "Renal segmentation", diff --git a/models/renalStructures_UNEST_segmentation/large_files.yml b/models/renalStructures_UNEST_segmentation/large_files.yml index e7dc4da1..029dedfd 100644 --- a/models/renalStructures_UNEST_segmentation/large_files.yml +++ b/models/renalStructures_UNEST_segmentation/large_files.yml @@ -1,5 +1,5 @@ large_files: - - path: "models/model.pt" - url: "https://drive.google.com/file/d/19um4b5X8ZrzQY6YTU92LOknLKRx4S5rw/view?usp=sharing" - hash_val: "" - hash_type: "" +- path: "models/model.pt" + url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_renalstructure_unest_segmentation.pt" + hash_val: "5a89c036d87b5eaba23275f209744e2a" + hash_type: "md5" diff --git a/models/wholeBrainSeg_Large_UNEST_segmentation/configs/metadata.json b/models/wholeBrainSeg_Large_UNEST_segmentation/configs/metadata.json index 9489b50b..aa5ed32b 100644 --- a/models/wholeBrainSeg_Large_UNEST_segmentation/configs/metadata.json +++ b/models/wholeBrainSeg_Large_UNEST_segmentation/configs/metadata.json @@ -1,7 +1,8 @@ { "schema": "https://github.com/Project-MONAI/MONAI-extra-test-data/releases/download/0.8.1/meta_schema_20220324.json", - "version": "0.2.4", + "version": "0.2.5", "changelog": { + "0.2.5": "update large files", "0.2.4": "fix black 24.1 format error", "0.2.3": "fix PYTHONPATH in readme.md", "0.2.2": "add name tag", @@ -11,16 +12,16 @@ "0.1.1": "Fix dimension according to MONAI 1.0 and fix readme file", "0.1.0": "complete the model package" }, - "monai_version": "1.0.0", - "pytorch_version": "1.10.0", - "numpy_version": "1.21.2", + "monai_version": "1.4.0", + "pytorch_version": "2.4.0", + "numpy_version": "1.24.4", "optional_packages_version": { - "nibabel": "3.2.1", - "pytorch-ignite": "0.4.8", - "einops": "0.4.1", - "fire": "0.4.0", + "nibabel": "5.2.1", + "pytorch-ignite": "0.4.11", + "einops": "0.7.0", + "fire": "0.6.0", "timm": "0.6.7", - "torchvision": "0.11.1" + "torchvision": "0.19.0" }, "name": "Whole brain large UNEST segmentation", "task": "Whole Brain Segmentation", diff --git a/models/wholeBrainSeg_Large_UNEST_segmentation/large_files.yml b/models/wholeBrainSeg_Large_UNEST_segmentation/large_files.yml index c44ee609..e24dd6aa 100644 --- a/models/wholeBrainSeg_Large_UNEST_segmentation/large_files.yml +++ b/models/wholeBrainSeg_Large_UNEST_segmentation/large_files.yml @@ -1,5 +1,5 @@ large_files: - - path: "models/model.pt" - url: "https://drive.google.com/file/d/1aBbzCodYZ4nk3VtuQ71QZoRGN0ZFxaiR/view?usp=sharing" - hash_val: "" - hash_type: "" +- path: "models/model.pt" + url: "https://developer.download.nvidia.com/assets/Clara/monai/tutorials/model_zoo/model_wholebrainseg_large_unest_segmentation.pt" + hash_val: "1bbb4b4d01ba785422a94f3ad2a28942" + hash_type: "md5"