Skip to content

Commit

Permalink
update all MR models to new improved version
Browse files Browse the repository at this point in the history
  • Loading branch information
wasserth committed Jan 15, 2025
1 parent ab4d933 commit c66e845
Show file tree
Hide file tree
Showing 10 changed files with 85 additions and 29 deletions.
5 changes: 2 additions & 3 deletions resources/anonymise_nnunet_pkl_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,8 @@
usage:
cd $RESULTS_FOLDER
anonymise_nnunet_pkl_v2.py nnUNet/3d_fullres/Dataset291_TotalSegmentator_part1_organs_1559subj/nnUNetTrainerNoMirroring__nnUNetPlans__3d_fullres
nnUNet_export_model_to_zip -t 291 -o dataset_291_upload.zip -c 3d_fullres -tr nnUNetTrainerV2 --not_strict
cd $nnUNet_results
anonymise_nnunet_pkl_v2.py Dataset789_kidney_cyst_501subj/nnUNetTrainer_DASegOrd0_NoMirroring__nnUNetPlans__3d_fullres
"""
dir_in = Path(sys.argv[1])

Expand Down
10 changes: 6 additions & 4 deletions resources/package_management.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,11 @@ INFO: Now all done by `release.sh`


## Release new weights
* `cd /mnt/nvme/data/multiseg/weights_upload/totalsegmentator_v2`
* `cp -r $nnUNet_results/Dataset527_breasts_1559subj .`
* `python ~/dev/TotalSegmentator/resources/anonymise_nnunet_pkl_v2.py Dataset527_breasts_1559subj/nnUNetTrainer_DASegOrd0_NoMirroring__nnUNetPlans__3d_fullres`
* `zip -r Dataset527_breasts_1559subj.zip Dataset527_breasts_1559subj`
* Run `./resources/prepare_weights_for_release.sh DATASET_ID [DATASET_ID2 ...]`
* Or do it manually:
* `cd /mnt/nvme/data/multiseg/weights_upload/totalsegmentator_v2`
* `cp -r $nnUNet_results/Dataset527_breasts_1559subj .`
* `python ~/dev/TotalSegmentator/resources/anonymise_nnunet_pkl_v2.py Dataset527_breasts_1559subj/nnUNetTrainer_DASegOrd0_NoMirroring__nnUNetPlans__3d_fullres`
* `zip -r Dataset527_breasts_1559subj.zip Dataset527_breasts_1559subj`
* Click on "Draft a new release" on github
* Create new tag ending with -weights and upload weights
40 changes: 40 additions & 0 deletions resources/prepare_weights_for_release.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
#!/bin/bash
set -e  # Exit on error

# Prepare weights for release by removing subject ids and unneeded files and zipping
#
# Usage: ./prepare_weights_for_release.sh DATASET_ID [DATASET_ID2 ...]
#
# Example: ./prepare_weights_for_release.sh 527 528 529 ...

# Require at least one dataset id so we fail fast with a usage hint
if [ "$#" -eq 0 ]; then
    echo "Usage: $0 DATASET_ID [DATASET_ID2 ...]" >&2
    exit 1
fi

# Fail with a clear message if the nnUNet results dir is not configured
: "${nnUNet_results:?nnUNet_results environment variable must be set}"

# todo: select as needed
# cd /mnt/nvme/data/multiseg/weights_upload/totalsegmentator_v2
cd /mnt/nvme/data/multiseg/weights_upload/totalsegmentator_mri

# Process each dataset ID
for dataset_id in "$@"; do
    # Resolve the numeric id to the full dataset folder name (e.g. Dataset527_...)
    task_name=$(python3 -c "
from totalsegmentator.nnunet import get_full_task_name_v2
print(get_full_task_name_v2($dataset_id))
")

    echo "Processing $task_name..."

    # Copy dataset folder from the nnUNet results dir into the upload dir
    cp -r "$nnUNet_results/$task_name" .

    # Get the only folder inside task_name (glob instead of parsing `ls`
    # output, which breaks on unusual filenames)
    trainer_folder=$(basename "$(find "$task_name" -mindepth 1 -maxdepth 1 -type d | head -n 1)")

    # Anonymize the pkl files (removes subject ids)
    python ~/dev/TotalSegmentator/resources/anonymise_nnunet_pkl_v2.py "$task_name/$trainer_folder"

    # Create zip archive for upload as a github release asset
    zip -r "${task_name}.zip" "$task_name"

    echo "Completed processing $task_name"
done

echo "All datasets processed successfully"

Binary file modified tests/reference_files/example_seg_mr.nii.gz
Binary file not shown.
14 changes: 7 additions & 7 deletions tests/update_test_files.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,19 @@ set -e
# Usage:
# ./tests/update_test_files.sh <license_key>

TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg.nii.gz -bs --ml -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg.nii.gz -bs --ml -d cpu
TotalSegmentator -i tests/reference_files/example_mr_sm.nii.gz -o tests/reference_files/example_seg_mr.nii.gz -ta total_mr --ml -d cpu
TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_roi_subset.nii.gz --ml -rs liver brain -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_roi_subset.nii.gz --ml -rs liver brain -d cpu
TotalSegmentator -i tests/reference_files/example_mr_sm.nii.gz -o tests/reference_files/example_seg_roi_subset_mr.nii.gz -ta total_mr --ml -rs liver brain -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_fast --fast --statistics -sii -p -d cpu
TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_fast.nii.gz --fast --ml -d cpu
TotalSegmentator -i tests/reference_files/example_ct.nii.gz -o tests/reference_files/example_seg_fast_force_split.nii.gz --fast --ml -fs -d cpu
TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_fast_body_seg.nii.gz --fast --ml -bs -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_fast.nii.gz --fast --ml -d cpu
# TotalSegmentator -i tests/reference_files/example_ct.nii.gz -o tests/reference_files/example_seg_fast_force_split.nii.gz --fast --ml -fs -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_fast_body_seg.nii.gz --fast --ml -bs -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_lung_vessels -ta lung_vessels -d cpu
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_tissue_types -ta tissue_types -d cpu -l $1
# TotalSegmentator -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/example_seg_appendicular_bones -ta appendicular_bones -d cpu
TotalSegmentator -i tests/reference_files/example_ct_dicom -o tests/reference_files/example_seg_dicom.nii.gz --fast --ml -d cpu
totalseg_get_phase -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/phase_prediction.json
# TotalSegmentator -i tests/reference_files/example_ct_dicom -o tests/reference_files/example_seg_dicom.nii.gz --fast --ml -d cpu
# totalseg_get_phase -i tests/reference_files/example_ct_sm.nii.gz -o tests/reference_files/phase_prediction.json

# Manually check if segmentations in tests/reference_files/example_seg_fast_force_split.nii.gz look correct
# (all others have too small FOV for manual inspection)
15 changes: 9 additions & 6 deletions totalsegmentator/bin/totalseg_download_weights.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,16 +26,17 @@ def main():
"tissue_types", "tissue_types_mr", "face", "face_mr",
"head_glands_cavities", "head_muscles", "headneck_bones_vessels",
"headneck_muscles", "liver_vessels", "brain_structures",
"lung_nodules", "kidney_cysts", "breasts"],
"lung_nodules", "kidney_cysts", "breasts",
"thigh_shoulder_muscles", "thigh_shoulder_muscles_mr"],
help="Task for which to download the weights", default="total")

args = parser.parse_args()

task_to_id = {
"total": [291, 292, 293, 294, 295, 298],
"total_fast": [297, 298],
"total_mr": [730, 731],
"total_fast_mr": [732, 733],
"total_mr": [850, 851],
"total_fast_mr": [852, 853],
"lung_vessels": [258],
"cerebral_bleed": [150],
"hip_implant": [260],
Expand All @@ -58,11 +59,13 @@ def main():
"heartchambers_highres": [301],
"appendicular_bones": [304],
"tissue_types": [481],
"tissue_types_mr": [734],
"tissue_types_mr": [854],
"vertebrae_discs": [305],
"face": [303],
"face_mr": [737],
"brain_structures": [409]
"face_mr": [856],
"brain_structures": [409],
"thigh_shoulder_muscles": [999], # TODO
"thigh_shoulder_muscles_mr": [857]
}

setup_totalseg()
Expand Down
2 changes: 1 addition & 1 deletion totalsegmentator/download_pretrained_weights.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
Download all pretrained weights
"""
for task_id in [291, 292, 293, 294, 295, 297, 298, 258, 150, 260, 503,
315, 299, 300, 730, 731, 732, 733, 775, 776, 777, 778,
315, 299, 300, 850, 851, 852, 853, 775, 776, 777, 778,
779]:
download_pretrained_weights(task_id)
sleep(5)
2 changes: 0 additions & 2 deletions totalsegmentator/libs.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,8 +326,6 @@ def download_pretrained_weights(task_id):
weights_path = config_dir / "TODO"
elif task_id == 857:
weights_path = config_dir / "Dataset857_TotalSegMRI_thigh_shoulder_1088subj"
elif task_id == 913:
weights_path = config_dir / "Dataset913_lung_nodules_1352subj"

else:
raise ValueError(f"For task_id {task_id} no download path was found.")
Expand Down
18 changes: 16 additions & 2 deletions totalsegmentator/nnunet.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def recursive_find_python_class_custom(folder: str, class_name: str, current_mod
warnings.filterwarnings("ignore", category=FutureWarning, module="nnunetv2") # ignore torch.load warning


def _get_full_task_name(task_id: int, src: str="raw"):
def get_full_task_name(task_id: int, src: str="raw"):
if src == "raw":
base = Path(os.environ['nnUNet_raw_data_base']) / "nnUNet_raw_data"
elif src == "preprocessed":
Expand Down Expand Up @@ -95,6 +95,20 @@ def _get_full_task_name(task_id: int, src: str="raw"):
raise ValueError(f"task_id {task_id} not found")


def get_full_task_name_v2(task_id: int, src: str = "raw") -> str:
    """Return the full nnUNet v2 dataset folder name for a numeric dataset id.

    Searches the configured nnUNet directory for a folder whose name contains
    the zero-padded id (e.g. ``291`` -> ``Dataset291_TotalSegmentator_...``).

    Args:
        task_id: numeric nnUNet dataset id (e.g. 291).
        src: which nnUNet directory to search: "raw", "preprocessed" or "results".

    Returns:
        Name of the matching dataset directory.

    Raises:
        ValueError: if ``src`` is not a known source or no matching dataset
            directory exists.
        KeyError: if the corresponding nnUNet environment variable is unset.
    """
    env_vars = {
        "raw": "nnUNet_raw",
        "preprocessed": "nnUNet_preprocessed",
        "results": "nnUNet_results",
    }
    if src not in env_vars:
        # Previously an unknown src fell through and crashed with NameError;
        # raise a descriptive error instead.
        raise ValueError(f"src must be one of {sorted(env_vars)}, got '{src}'")
    base = Path(os.environ[env_vars[src]])
    # Dataset folders are named like "Dataset291_<name>"; match on the
    # zero-padded id substring (same matching rule as before).
    needle = f"Dataset{task_id:03d}"
    for entry in base.glob("*"):
        if needle in entry.name:
            return entry.name
    raise ValueError(f"dataset_id {task_id} not found")


def contains_empty_img(imgs):
"""
imgs: List of image paths
Expand Down Expand Up @@ -152,7 +166,7 @@ def nnUNet_predict(dir_in, dir_out, task_id, model="3d_fullres", folds=None,
disable_mixed_precision = False

task_id = int(task_id)
task_name = _get_full_task_name(task_id, src="results")
task_name = get_full_task_name(task_id, src="results")

# trainer_class_name = default_trainer
# trainer = trainer_class_name
Expand Down
8 changes: 4 additions & 4 deletions totalsegmentator/python_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,7 +446,7 @@ def totalsegmentator(input: Union[str, Path, Nifti1Image], output: Union[str, Pa
if fast: raise ValueError("task brain_structures does not work with option --fast")
show_license_info()
elif task == "thigh_shoulder_muscles":
task_id = 999
task_id = 999 # TODO
resample = 1.5
trainer = "nnUNetTrainer_2000epochs_NoMirroring"
crop = None
Expand Down Expand Up @@ -521,13 +521,13 @@ def totalsegmentator(input: Union[str, Path, Nifti1Image], output: Union[str, Pa
st = time.time()
if not quiet: print("Generating rough segmentation for cropping...")
if robust_rs:
crop_model_task = 732 if task == "total_mr" else 297
crop_model_task = 852 if task == "total_mr" else 297
crop_spacing = 3.0
else:
crop_model_task = 733 if task == "total_mr" else 298
crop_model_task = 853 if task == "total_mr" else 298
crop_spacing = 6.0
crop_task = "total_mr" if task == "total_mr" else "total"
crop_trainer = "nnUNetTrainer_DASegOrd0_NoMirroring" if task == "total_mr" else "nnUNetTrainer_4000epochs_NoMirroring"
crop_trainer = "nnUNetTrainer_2000epochs_NoMirroring" if task == "total_mr" else "nnUNetTrainer_4000epochs_NoMirroring"
download_pretrained_weights(crop_model_task)

organ_seg, _, _ = nnUNet_predict_image(input, None, crop_model_task, model="3d_fullres", folds=[0],
Expand Down

0 comments on commit c66e845

Please sign in to comment.