Skip to content

Commit

Permalink
Merge pull request #1 from MIC-DKFZ/master
Browse files Browse the repository at this point in the history
get newest commits from nnunet master
  • Loading branch information
wasserth authored Feb 8, 2024
2 parents 164d94c + d5306f4 commit d2bd77d
Show file tree
Hide file tree
Showing 6 changed files with 64 additions and 6 deletions.
2 changes: 1 addition & 1 deletion nnunetv2/dataset_conversion/Dataset220_KiTS2023.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def convert_kits2023(kits_base_dir: str, nnunet_dataset_id: int = 220):
regions_class_order=(1, 3, 2),
num_training_cases=len(cases), file_ending='.nii.gz',
dataset_name=task_name, reference='none',
release='prerelease',
release='0.1.3',
overwrite_image_reader_writer='NibabelIOWithReorient',
description="KiTS2023")

Expand Down
59 changes: 59 additions & 0 deletions nnunetv2/dataset_conversion/Dataset223_AMOS2022postChallenge.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import shutil

from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.paths import nnUNet_raw
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json

if __name__ == '__main__':
    # Convert the post-challenge AMOS2022 dataset into nnU-Net v2 raw format.
    # Training and validation cases are merged into imagesTr/labelsTr; the
    # original test split goes to imagesTs (no labels available for it here).
    downloaded_amos_dir = '/home/isensee/amos22/amos22'  # downloaded and extracted from https://zenodo.org/record/7155725#.Y0OOCOxBztM

    target_dataset_id = 223
    # BUGFIX: use zero-padded integer formatting. The original ':3.0f' spec
    # formats the int through a float and SPACE-pads ids < 100
    # (f'{42:3.0f}' -> ' 42'), which would produce an invalid folder name.
    # ':03d' is the nnU-Net convention and yields the identical string '223'
    # for this dataset id.
    target_dataset_name = f'Dataset{target_dataset_id:03d}_AMOS2022postChallenge'

    maybe_mkdir_p(join(nnUNet_raw, target_dataset_name))
    imagesTr = join(nnUNet_raw, target_dataset_name, 'imagesTr')
    imagesTs = join(nnUNet_raw, target_dataset_name, 'imagesTs')
    labelsTr = join(nnUNet_raw, target_dataset_name, 'labelsTr')
    maybe_mkdir_p(imagesTr)
    maybe_mkdir_p(imagesTs)
    maybe_mkdir_p(labelsTr)

    def _copy_images(source_dir, target_dir):
        """Copy every nifti file from source_dir to target_dir, appending the
        nnU-Net channel suffix '_0000' before '.nii.gz'. Returns the list of
        original filenames (used to count training cases)."""
        files = nifti_files(source_dir, join=False)
        for fname in files:
            # fname ends in '.nii.gz' (7 chars) -> strip and re-add with suffix
            shutil.copy(join(source_dir, fname), join(target_dir, fname[:-7] + '_0000.nii.gz'))
        return files

    # copy images: Tr and Va both become nnU-Net training cases, Ts stays test
    train_identifiers = []
    train_identifiers += _copy_images(join(downloaded_amos_dir, 'imagesTr'), imagesTr)
    train_identifiers += _copy_images(join(downloaded_amos_dir, 'imagesVa'), imagesTr)
    _copy_images(join(downloaded_amos_dir, 'imagesTs'), imagesTs)

    # copy labels (segmentations keep their original filenames - no channel suffix)
    for split in ('labelsTr', 'labelsVa'):
        source = join(downloaded_amos_dir, split)
        for fname in nifti_files(source, join=False):
            shutil.copy(join(source, fname), join(labelsTr, fname))

    old_dataset_json = load_json(join(downloaded_amos_dir, 'dataset.json'))
    # invert the label mapping {k: v} -> {v: k}; nnU-Net v2's
    # generate_dataset_json expects the reverse orientation of the
    # downloaded dataset.json
    new_labels = {v: k for k, v in old_dataset_json['labels'].items()}

    generate_dataset_json(join(nnUNet_raw, target_dataset_name), {0: 'nonCT'}, new_labels,
                          num_training_cases=len(train_identifiers), file_ending='.nii.gz',
                          regions_class_order=None,
                          dataset_name=target_dataset_name,
                          reference='https://zenodo.org/record/7155725#.Y0OOCOxBztM',
                          license=old_dataset_json['licence'],  # 'licence' spelling is a typo in the OG dataset.json
                          description=old_dataset_json['description'],
                          release=old_dataset_json['release'])
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import numpy as np
from batchgenerators.utilities.file_and_folder_operations import load_json, join, save_json, isfile, maybe_mkdir_p
from dynamic_network_architectures.architectures.residual_unet import ResidualEncoderUNet
from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet
from dynamic_network_architectures.architectures.unet import PlainConvUNet
from dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_instancenorm

Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from typing import Union, List, Tuple

from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet
from torch import nn

from nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner
from dynamic_network_architectures.architectures.residual_unet import ResidualEncoderUNet


class ResEncUNetPlanner(ExperimentPlanner):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from dynamic_network_architectures.architectures.residual_unet import ResidualEncoderUNet
from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet
from dynamic_network_architectures.architectures.unet import PlainConvUNet
from dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_batchnorm
from dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0, InitWeights_He
Expand Down
3 changes: 1 addition & 2 deletions nnunetv2/utilities/get_network_from_plans.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from dynamic_network_architectures.architectures.residual_unet import ResidualEncoderUNet
from dynamic_network_architectures.architectures.unet import PlainConvUNet
from dynamic_network_architectures.architectures.unet import PlainConvUNet, ResidualEncoderUNet
from dynamic_network_architectures.building_blocks.helper import get_matching_instancenorm, convert_dim_to_conv_op
from dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0
from nnunetv2.utilities.network_initialization import InitWeights_He
Expand Down

0 comments on commit d2bd77d

Please sign in to comment.