From 03c77c29c570c0e1a81a7d75c75e6e54b6862963 Mon Sep 17 00:00:00 2001 From: jakob Date: Wed, 15 Jan 2025 14:13:00 +0100 Subject: [PATCH] update release flow; bugfix --- resources/anonymise_nnunet_pkl_v2.py | 43 ++++++++++++++++++++++++++++ resources/package_management.md | 4 +++ totalsegmentator/libs.py | 2 +- 3 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 resources/anonymise_nnunet_pkl_v2.py diff --git a/resources/anonymise_nnunet_pkl_v2.py b/resources/anonymise_nnunet_pkl_v2.py new file mode 100644 index 000000000..9eeb7a922 --- /dev/null +++ b/resources/anonymise_nnunet_pkl_v2.py @@ -0,0 +1,43 @@ +import os +import sys +import pickle +import shutil +from pathlib import Path + + +if __name__ == "__main__": + """ + Remove elements with Subject IDs from nnunet results. + + Also delete the training logs, because they contain the subject IDs as well. + + usage: + + cd $RESULTS_FOLDER + anonymise_nnunet_pkl_v2.py nnUNet/3d_fullres/Dataset291_TotalSegmentator_part1_organs_1559subj/nnUNetTrainerNoMirroring__nnUNetPlans__3d_fullres + nnUNet_export_model_to_zip -t 291 -o dataset_291_upload.zip -c 3d_fullres -tr nnUNetTrainerV2 --not_strict + """ + dir_in = Path(sys.argv[1]) + + folds = sorted(list(dir_in.glob("fold_*"))) + print(f"Nr of folds found: {len(folds)}") + + # Remove files and directories containing subject IDs from each fold + for fold_dir in folds: + + # Remove unneeded files and directories + for dir in [fold_dir / "validation"]: + if dir.exists(): + print(f"Deleting: {dir}") + shutil.rmtree(dir) + + for file in [fold_dir / "checkpoint_best.pth"]: + if file.exists(): + print(f"Deleting: {file}") + os.remove(file) + + training_logs = fold_dir.glob("training_log_*") + for log in training_logs: + if log.exists(): + print(f"Deleting: {log}") + os.remove(log)
INFO: Now all done by `release.sh` ## Release new weights +* `cd /mnt/nvme/data/multiseg/weights_upload/totalsegmentator_v2` +* `cp -r $nnUNet_results/Dataset527_breasts_1559subj .` +* `python ~/dev/TotalSegmentator/resources/anonymise_nnunet_pkl_v2.py Dataset527_breasts_1559subj/nnUNetTrainer_DASegOrd0_NoMirroring__nnUNetPlans__3d_fullres` +* `zip -r Dataset527_breasts_1559subj.zip Dataset527_breasts_1559subj` * Click on "Draft a new release" on github * Create new tag ending with -weights and upload weights diff --git a/totalsegmentator/libs.py b/totalsegmentator/libs.py index a8a5f4809..400223cd1 100644 --- a/totalsegmentator/libs.py +++ b/totalsegmentator/libs.py @@ -301,7 +301,7 @@ def download_pretrained_weights(task_id): WEIGHTS_URL = url + "/v2.4.0-weights/Dataset008_HepaticVessel.zip" elif task_id == 913: weights_path = config_dir / "Dataset913_lung_nodules" - WEIGHTS_URL = url + "/v2.4.0-weights/Dataset913_lung_nodules.zip" # TODO: correct url + WEIGHTS_URL = url + "/v2.5.0-weights/Dataset913_lung_nodules.zip" # Commercial models elif task_id == 304: