
Commit

reformat code
jithenece committed Jul 2, 2024
1 parent 652c092 commit af0eab0
Showing 8 changed files with 162 additions and 64 deletions.
@@ -5,7 +5,7 @@ general:

execute:
- FileStructureImporter
- NiftiConverter
- SitkNiftiConverter
- Registration
- NNUnetPETCTRunner
- TotalSegmentatorMLRunner
@@ -17,11 +17,11 @@ modules:
FileStructureImporter:
input_dir: 'input_data'
structures:
- $patientID@instance/CT@dicom:mod=ct
- $patientID/PT@dicom:mod=pt
- $patientID@instance/ct@dicom:mod=ct
- $patientID/pt@dicom:mod=pt
import_id: patientID

NiftiConverter:
SitkNiftiConverter:
in_datas: dicom:mod=pt|ct
allow_multi_input: true

@@ -33,17 +33,17 @@ modules:

TotalSegmentatorMLRunner:
in_data: nifti:mod=ct:registered=true
use_fast_mode: true
use_fast_mode: false

BreastPostProcessor:
in_ct_data: nifti:mod=ct:registered=true

DsegConverter:
source_segs: nifti:mod=seg:processor=bamf
model_name: BAMF Breast FDG PET CT
target_dicom: dicom:mod=ct
model_name: BAMF Breast FDG Tumor Segmentation
target_dicom: dicom:mod=pt
skip_empty_slices: True

DataOrganizer:
targets:
- dicomseg-->[i:patientID]/bamf_nnunet_pet_ct_breast.seg.dcm
- dicomseg-->[i:patientID]/bamf_pet_ct_breast_tumor.seg.dcm
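
For reference, the two FileStructureImporter patterns above ($patientID@instance/ct@dicom:mod=ct and $patientID/pt@dicom:mod=pt) expect the input directory to be laid out roughly as follows; the patient folder name is a placeholder and the layout is an assumption inferred from the patterns, not taken from the repository:

input_data/
  PatientA/        <- matched by $patientID, imported as one instance
    ct/            <- DICOM CT series (dicom:mod=ct)
    pt/            <- DICOM PET series (dicom:mod=pt)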
@@ -7,16 +7,13 @@ FROM mhubai/base:latest
# https://github.com/MIC-DKFZ/nnUNet/pull/1209
ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True

# Install nnunet and platipy
RUN pip3 install --no-cache-dir \
nnunet

# Install nnunet
# Install TotalSegmentator
RUN pip3 install --no-cache-dir totalsegmentator==1.5.6
RUN pip3 install TotalSegmentator==1.5.7 nnunet==1.6.6

# Clone the main branch of MHubAI/models
ARG MHUB_MODELS_REPO
RUN buildutils/import_mhub_model.sh bamf_nnunet_pet_ct_breast ${MHUB_MODELS_REPO}
RUN buildutils/import_mhub_model.sh bamf_pet_ct_breast_tumor ${MHUB_MODELS_REPO}

# Pull nnUNet model weights into the container for Task777_CT_Nodules
ENV WEIGHTS_DIR=/root/.nnunet/nnUNet_models/nnUNet/
@@ -33,4 +30,4 @@ ENV WEIGHTS_FOLDER=$WEIGHTS_DIR

# Default run script
ENTRYPOINT ["mhub.run"]
CMD ["--config", "/app/models/bamf_nnunet_pet_ct_breast/config/default.yml"]
CMD ["--config", "/app/models/bamf_pet_ct_breast_tumor/config/default.yml"]
@@ -1,6 +1,6 @@
{
"id": "",
"name": "bamf_nnunet_pet_ct_breast",
"name": "bamf_pet_ct_breast_tumor",
"title": "Bamf NNunet PET CT Breast",
"summary": {
"description": "This model used to detect FDG-avid lesions in breast from FDG PET/CT scans",
@@ -19,9 +19,9 @@
"label": "Input Image",
"description": "The PET scan of a patient.",
"format": "DICOM",
"modality": "CT",
"modality": "PT",
"bodypartexamined": "Lung",
"slicethickness": "3.38mm",
"slicethickness": "4mm",
"non-contrast": false,
"contrast": false
}
@@ -116,12 +116,16 @@
},
"evaluation": {
"title": "Evaluation Data",
"text": "77 validation cases were rated by a radiologist and non-expert",
"text": "The model was used to segment cases from the IDC [1] collection of QIN-Breast [2]. 11 validation cases were rated by a radiologist and non-expert. The model predictions, and radiologist corrections are published on zenodo [9]",
"references": [
{
"label": "Imaging Data Collections (IDC)",
"uri": "https://datacommons.cancer.gov/repository/imaging-data-commons"
},
{
"label": "QIN-Breast",
"uri": "https://www.cancerimagingarchive.net/collection/qin-breast/"
},
{
"label": "Image segmentations produced by the AIMI Annotations initiative",
"uri": "https://zenodo.org/records/10009368"
@@ -1,3 +1,14 @@
"""
---------------------------------------------------------
Post processing Module on segmentation output
---------------------------------------------------------
-------------------------------------------------
Author: Jithendra Kumar
Email: [email protected]
-------------------------------------------------
"""
import os
import SimpleITK as sitk
import numpy as np
@@ -10,22 +21,6 @@

class BreastPostProcessor(Module):

def mask_labels(self, labels, ts):
"""
Create a mask based on given labels.
Args:
labels (list): List of labels to be masked.
ts (np.ndarray): Image data.
Returns:
np.ndarray: Masked image data.
"""
lung = np.zeros(ts.shape)
for lbl in labels:
lung[ts == lbl] = 1
return lung

def bbox2_3D(self, img):
r = np.any(img, axis=(1, 2))
c = np.any(img, axis=(0, 2))
@@ -65,21 +60,6 @@ def n_connected(self, img_data):
img_data[img_filtered != 1] = 0
return img_data

def arr_2_sitk_img(self, arr, ref):
"""
Convert numpy array to SimpleITK image.
Args:
arr (np.ndarray): Input image data as a numpy array.
ref: Reference image for copying information.
Returns:
sitk.Image: Converted SimpleITK image.
"""
op_img = sitk.GetImageFromArray(arr)
op_img.CopyInformation(ref)
return op_img

@IO.Instance()
@IO.Input('in_ct_data', 'nifti:mod=ct:registered=true', the='input ct data')
@IO.Input('in_tumor_data', 'nifti:mod=seg:model=nnunet', the='input tumor segmentation')
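
The body of n_connected is largely elided in this diff; below is a minimal sketch of a largest-connected-component filter consistent with the visible fragment (img_filtered, img_data). This is an assumed reconstruction, not the module's actual code:

import numpy as np
from scipy import ndimage

def n_connected(img_data: np.ndarray) -> np.ndarray:
    # label connected components in the binary mask
    labeled, num_components = ndimage.label(img_data > 0)
    if num_components == 0:
        return img_data
    # keep only the largest component, zero out everything else
    sizes = np.bincount(labeled.ravel())
    sizes[0] = 0  # ignore the background label
    img_filtered = (labeled == sizes.argmax()).astype(img_data.dtype)
    img_data[img_filtered != 1] = 0
    return img_data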
@@ -1,8 +1,8 @@
"""
-------------------------------------------------
MHub - NNU-Net Runner
MHub - NNU-Net MultiModality Runner
This is a base runner for pre-trained
nnunet models using PET-CT Modalities
nnunet models
-------------------------------------------------
-------------------------------------------------
@@ -21,7 +21,7 @@
nnunet_task_name_regex = r"Task[0-9]{3}_[a-zA-Z0-9_]+"

@IO.ConfigInput('in_ct_data', 'nifti:mod=ct', the="input ct data to run nnunet on")
@IO.ConfigInput('in_pt_data', 'nifti:mod=pt', the="input pt resampled data to run nnunet on")
@IO.ConfigInput('in_pt_data', 'nifti:mod=pt', the="input pt data to run nnunet on")
@IO.Config('nnunet_task', str, None, the='nnunet task name')
@IO.Config('nnunet_model', str, None, the='nnunet model name (2d, 3d_lowres, 3d_fullres, 3d_cascade_fullres)')
#@IO.Config('input_data_type', DataType, 'nifti:mod=ct', factory=DataType.fromString, the='input data type')
@@ -129,7 +129,7 @@ def export_prob_mask(self, nnunet_out_dir: str, ref_file: InstanceData, output_d

@IO.Instance()
@IO.Input('in_ct_data', the="input ct data to run nnunet on")
@IO.Input('in_pt_data', the="input pt resampled data to run nnunet on")
@IO.Input('in_pt_data', the="input pt data to run nnunet on")
@IO.Output("out_data", 'VOLUME_001.nii.gz', 'nifti:mod=seg:model=nnunet', the="output data from nnunet")
def task(self, instance: Instance, in_ct_data: InstanceData,in_pt_data: InstanceData, out_data: InstanceData) -> None:

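
The task body is elided here. For pre-trained nnUNet v1 models (nnunet==1.6.6 in the Dockerfile), multi-modality inference requires one file per modality with a numbered suffix before calling nnUNet_predict. The sketch below illustrates that convention; the case name, modality order, task name, and paths are illustrative assumptions, not the runner's actual code:

import os
import shutil
import subprocess

def run_nnunet_multimodal(ct_path: str, pt_path: str, in_dir: str, out_dir: str,
                          task: str, model: str = "3d_fullres") -> None:
    os.makedirs(in_dir, exist_ok=True)
    os.makedirs(out_dir, exist_ok=True)
    # nnUNet v1 expects <case>_0000.nii.gz, <case>_0001.nii.gz, ...
    # the modality order must match the order used during training (CT first is assumed here)
    shutil.copy(ct_path, os.path.join(in_dir, "case_0000.nii.gz"))
    shutil.copy(pt_path, os.path.join(in_dir, "case_0001.nii.gz"))
    subprocess.run(["nnUNet_predict", "-i", in_dir, "-o", out_dir, "-t", task, "-m", model], check=True)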
@@ -1,20 +1,31 @@
"""
---------------------------------------------------------
Registration Module using SimpleITK
---------------------------------------------------------
-------------------------------------------------
Author: Jithendra Kumar
Email: [email protected]
-------------------------------------------------
"""
import os
import shutil
import SimpleITK as sitk
import numpy as np
from mhubio.core import IO
from mhubio.core import Module, Instance, InstanceData


class Registration(Module):

@IO.Instance()
@IO.Input('in_fixed_data', 'nifti:mod=pt', the='input pt data')
@IO.Input('in_moving_data', 'nifti:mod=ct', the='input ct data')
@IO.Input('in_fixed_data', 'nifti:mod=pt', the='input fixed data')
@IO.Input('in_moving_data', 'nifti:mod=ct', the='input moving data')
@IO.Output('out_data', 'VOL000_registered.nii.gz', 'nifti:mod=ct:registered=true', the="registered ct data")
def task(self, instance: Instance, in_moving_data: InstanceData, in_fixed_data: InstanceData, out_data: InstanceData):
"""
Perform registration
Perform registration and resampling
"""
fixed = sitk.ReadImage(in_fixed_data.abspath, sitk.sitkFloat32)
moving = sitk.ReadImage(in_moving_data.abspath, sitk.sitkFloat32)
@@ -27,7 +38,7 @@ def task(self, instance: Instance, in_moving_data: InstanceData, in_fixed_data:
R.SetOptimizerAsRegularStepGradientDescent(1.0, 0.001, 200)
R.SetInitialTransform(sitk.TranslationTransform(fixed.GetDimension()))
R.SetInterpolator(sitk.sitkLinear)

def command_iteration(method):
print(f"{method.GetOptimizerIteration():3} = {method.GetMetricValue():10.5f}")

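
The remainder of the registration task is not shown in this diff. A minimal sketch of how the configured method would typically be executed and the moving CT resampled onto the fixed PET grid with SimpleITK; this is an assumed continuation, not necessarily the committed code:

# continuation of task() after the setup shown above (sketch)
R.AddCommand(sitk.sitkIterationEvent, lambda: command_iteration(R))
final_transform = R.Execute(fixed, moving)

# resample the moving CT onto the fixed PET grid using the estimated transform
registered = sitk.Resample(moving, fixed, final_transform, sitk.sitkLinear, 0.0, moving.GetPixelID())
sitk.WriteImage(registered, out_data.abspath)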
106 changes: 106 additions & 0 deletions models/bamf_pet_ct_breast_tumor/utils/SitkNiftiConverter.py
@@ -0,0 +1,106 @@
"""
---------------------------------------------------------
Custom - Dicom to Nifti Conversion Module using SimpleITK
---------------------------------------------------------
-------------------------------------------------
Author: Jithendra Kumar
Email: [email protected]
-------------------------------------------------
"""

from enum import Enum
from typing import List, Dict, Any

import pydicom
import shutil
import os, subprocess
import pyplastimatch as pypla # type: ignore
import SimpleITK as sitk
from pathlib import Path

from mhubio.core import Module, Instance, InstanceDataCollection, InstanceData, DataType, FileType
from mhubio.core.IO import IO


@IO.ConfigInput('in_datas', 'dicom:mod=ct|pt', the="target data that will be converted to nifti")
@IO.Config('allow_multi_input', bool, False, the='allow multiple input files')
#@IO.Config('targets', List[DataType], ['dicom:mod=ct', 'nrrd:mod=ct'], factory=IO.F.list(DataType.fromString), the='target data types to convert to nifti')
@IO.Config('bundle_name', str, 'nifti', the="bundle name converted data will be added to")
@IO.Config('converted_file_name', str, '[filename].nii.gz', the='name of the converted file')
@IO.Config('overwrite_existing_file', bool, False, the='overwrite existing file if it exists')
#@IO.Config('wrap_output', bool, False, the='Wrap output in bundles. Required, if multiple input data is allowed that is not yet separated into distinct bundles.')
class SitkNiftiConverter(Module):
"""
Conversion module.
Convert instance data from dicom
"""

allow_multi_input: bool
bundle_name: str # TODO optional type declaration
converted_file_name: str
overwrite_existing_file: bool
#wrap_output: bool

@IO.Instance()
#@IO.Inputs('in_datas', IO.C('targets'), the="data to be converted")
@IO.Inputs('in_datas', the="data to be converted")
@IO.Outputs('out_datas', path=IO.C('converted_file_name'), dtype='nifti:converter=sitk', data='in_datas', bundle=IO.C('bundle_name'), auto_increment=True, the="converted data")
@IO.Outputs('log_datas', path='[basename].pmconv.log', dtype='log:log-task=conversion', data='in_datas', bundle=IO.C('bundle_name'), auto_increment=True, the="log generated by conversion engine")
def task(self, instance: Instance, in_datas: InstanceDataCollection, out_datas: InstanceDataCollection, log_datas: InstanceDataCollection, **kwargs) -> None:

# some sanity checks
assert isinstance(in_datas, InstanceDataCollection)
assert isinstance(out_datas, InstanceDataCollection)
assert len(in_datas) == len(out_datas)
assert len(in_datas) == len(log_datas)

print("debug NiftiConverter 1 len(in_datas)",len(in_datas))
print("debug NiftiConverter 2 len(out_datas)",len(out_datas))
# filtered collection must not be empty
if len(in_datas) == 0:
print(f"CONVERT ERROR: no data found in instance {str(instance)}.")
return None

# check if multi-file conversion is enabled
if not self.allow_multi_input and len(in_datas) > 1:
print("WARNING: found more than one matching file but multi file conversion is disabled. Only the first file will be converted.")
in_datas = InstanceDataCollection([in_datas.first()])

# conversion step
for i, in_data in enumerate(in_datas):
out_data = out_datas.get(i)
log_data = log_datas.get(i)

# check if output data already exists
if os.path.isfile(out_data.abspath) and not self.overwrite_existing_file:
print("CONVERT ERROR: File already exists: ", out_data.abspath)
continue

# check datatype
if in_data.type.ftype == FileType.DICOM:
files = []
dcm_dir = Path(in_data.abspath)
for f in dcm_dir.glob("*.dcm"):
ds = pydicom.dcmread(f, stop_before_pixels=True)
slice_loc = ds.SliceLocation if hasattr(ds, "SliceLocation") else 0
files.append((slice_loc, f))
slices = sorted(files, key=lambda s: s[0])
ordered_files = [x[1] for x in slices]

o_tmp_dir = self.config.data.requestTempDir(label="convert-processor")
ptmp_dir = Path(o_tmp_dir)
for idx, f in enumerate(ordered_files):
shutil.copy(f, ptmp_dir / f"{idx}.dcm")
# load in with SimpleITK
reader = sitk.ImageSeriesReader()
dicom_names = reader.GetGDCMSeriesFileNames(str(ptmp_dir))
reader.SetFileNames(dicom_names)
image = reader.Execute()

# save as nifti
sitk.WriteImage(image, out_data.abspath, useCompression=True, compressionLevel=9)

else:
raise ValueError(f"CONVERT ERROR: unsupported file type {in_data.type.ftype}.")
@@ -6,7 +6,6 @@
-------------------------------------------------
Author: Leonard Nürnberg
Email: [email protected]
Comments: Reused the file from model/totalsegmentator
-------------------------------------------------
"""

@@ -124,20 +123,21 @@
# ROI = ','.join(mapping[class_map['total'][ci]] for ci in range(1, 105))
ROI = 'SPLEEN,RIGHT_KIDNEY,LEFT_KIDNEY,GALLBLADDER,LIVER,STOMACH,AORTA,INFERIOR_VENA_CAVA,PORTAL_AND_SPLENIC_VEIN,PANCREAS,RIGHT_ADRENAL_GLAND,LEFT_ADRENAL_GLAND,LEFT_UPPER_LUNG_LOBE,LEFT_LOWER_LUNG_LOBE,RIGHT_UPPER_LUNG_LOBE,RIGHT_MIDDLE_LUNG_LOBE,RIGHT_LOWER_LUNG_LOBE,VERTEBRAE_L5,VERTEBRAE_L4,VERTEBRAE_L3,VERTEBRAE_L2,VERTEBRAE_L1,VERTEBRAE_T12,VERTEBRAE_T11,VERTEBRAE_T10,VERTEBRAE_T9,VERTEBRAE_T8,VERTEBRAE_T7,VERTEBRAE_T6,VERTEBRAE_T5,VERTEBRAE_T4,VERTEBRAE_T3,VERTEBRAE_T2,VERTEBRAE_T1,VERTEBRAE_C7,VERTEBRAE_C6,VERTEBRAE_C5,VERTEBRAE_C4,VERTEBRAE_C3,VERTEBRAE_C2,VERTEBRAE_C1,ESOPHAGUS,TRACHEA,MYOCARDIUM,LEFT_ATRIUM,LEFT_VENTRICLE,RIGHT_ATRIUM,RIGHT_VENTRICLE,PULMONARY_ARTERY,BRAIN,LEFT_ILIAC_ARTERY,RIGHT_ILIAC_ARTERY,LEFT_ILIAC_VEIN,RIGHT_ILIAC_VEIN,SMALL_INTESTINE,DUODENUM,COLON,LEFT_RIB_1,LEFT_RIB_2,LEFT_RIB_3,LEFT_RIB_4,LEFT_RIB_5,LEFT_RIB_6,LEFT_RIB_7,LEFT_RIB_8,LEFT_RIB_9,LEFT_RIB_10,LEFT_RIB_11,LEFT_RIB_12,RIGHT_RIB_1,RIGHT_RIB_2,RIGHT_RIB_3,RIGHT_RIB_4,RIGHT_RIB_5,RIGHT_RIB_6,RIGHT_RIB_7,RIGHT_RIB_8,RIGHT_RIB_9,RIGHT_RIB_10,RIGHT_RIB_11,RIGHT_RIB_12,LEFT_HUMERUS,RIGHT_HUMERUS,LEFT_SCAPULA,RIGHT_SCAPULA,LEFT_CLAVICLE,RIGHT_CLAVICLE,LEFT_FEMUR,RIGHT_FEMUR,LEFT_HIP,RIGHT_HIP,SACRUM,FACE,LEFT_GLUTEUS_MAXIMUS,RIGHT_GLUTEUS_MAXIMUS,LEFT_GLUTEUS_MEDIUS,RIGHT_GLUTEUS_MEDIUS,LEFT_GLUTEUS_MINIMUS,RIGHT_GLUTEUS_MINIMUS,LEFT_AUTOCHTHONOUS_BACK_MUSCLE,RIGHT_AUTOCHTHONOUS_BACK_MUSCLE,LEFT_ILIOPSOAS,RIGHT_ILIOPSOAS,URINARY_BLADDER'

@IO.ConfigInput('in_data', 'nifti:mod=ct', the="input data to run TotalSegmentator on")
@IO.Config('use_fast_mode', bool, True, the="flag to set to run TotalSegmentator in fast mode")
class TotalSegmentatorMLRunner(Module):

use_fast_mode: bool

@IO.Instance()
@IO.Input('in_data', 'nifti:mod=ct:registered=true', the="input whole body ct scan")
@IO.Input('in_data', the="input whole body ct scan")
@IO.Output('out_data', 'segmentations.nii.gz', 'nifti:mod=seg:model=TotalSegmentator:roi=SPLEEN,RIGHT_KIDNEY,LEFT_KIDNEY,GALLBLADDER,LIVER,STOMACH,AORTA,INFERIOR_VENA_CAVA,PORTAL_AND_SPLENIC_VEIN,PANCREAS,RIGHT_ADRENAL_GLAND,LEFT_ADRENAL_GLAND,LEFT_UPPER_LUNG_LOBE,LEFT_LOWER_LUNG_LOBE,RIGHT_UPPER_LUNG_LOBE,RIGHT_MIDDLE_LUNG_LOBE,RIGHT_LOWER_LUNG_LOBE,VERTEBRAE_L5,VERTEBRAE_L4,VERTEBRAE_L3,VERTEBRAE_L2,VERTEBRAE_L1,VERTEBRAE_T12,VERTEBRAE_T11,VERTEBRAE_T10,VERTEBRAE_T9,VERTEBRAE_T8,VERTEBRAE_T7,VERTEBRAE_T6,VERTEBRAE_T5,VERTEBRAE_T4,VERTEBRAE_T3,VERTEBRAE_T2,VERTEBRAE_T1,VERTEBRAE_C7,VERTEBRAE_C6,VERTEBRAE_C5,VERTEBRAE_C4,VERTEBRAE_C3,VERTEBRAE_C2,VERTEBRAE_C1,ESOPHAGUS,TRACHEA,MYOCARDIUM,LEFT_ATRIUM,LEFT_VENTRICLE,RIGHT_ATRIUM,RIGHT_VENTRICLE,PULMONARY_ARTERY,BRAIN,LEFT_ILIAC_ARTERY,RIGHT_ILIAC_ARTERY,LEFT_ILIAC_VEIN,RIGHT_ILIAC_VEIN,SMALL_INTESTINE,DUODENUM,COLON,LEFT_RIB_1,LEFT_RIB_2,LEFT_RIB_3,LEFT_RIB_4,LEFT_RIB_5,LEFT_RIB_6,LEFT_RIB_7,LEFT_RIB_8,LEFT_RIB_9,LEFT_RIB_10,LEFT_RIB_11,LEFT_RIB_12,RIGHT_RIB_1,RIGHT_RIB_2,RIGHT_RIB_3,RIGHT_RIB_4,RIGHT_RIB_5,RIGHT_RIB_6,RIGHT_RIB_7,RIGHT_RIB_8,RIGHT_RIB_9,RIGHT_RIB_10,RIGHT_RIB_11,RIGHT_RIB_12,LEFT_HUMERUS,RIGHT_HUMERUS,LEFT_SCAPULA,RIGHT_SCAPULA,LEFT_CLAVICLE,RIGHT_CLAVICLE,LEFT_FEMUR,RIGHT_FEMUR,LEFT_HIP,RIGHT_HIP,SACRUM,FACE,LEFT_GLUTEUS_MAXIMUS,RIGHT_GLUTEUS_MAXIMUS,LEFT_GLUTEUS_MEDIUS,RIGHT_GLUTEUS_MEDIUS,LEFT_GLUTEUS_MINIMUS,RIGHT_GLUTEUS_MINIMUS,LEFT_AUTOCHTHONOUS_BACK_MUSCLE,RIGHT_AUTOCHTHONOUS_BACK_MUSCLE,LEFT_ILIOPSOAS,RIGHT_ILIOPSOAS,URINARY_BLADDER', data='in_data', the="output segmentation mask containing all labels")
def task(self, instance: Instance, in_data: InstanceData, out_data: InstanceData) -> None:

# build command
bash_command = ["TotalSegmentator"]
bash_command += ["-i", in_data.abspath]

# multi-label output (one nifti file containing all labels instead of one nifti file per label)
self.v("Generating multi-label output ('--ml')")
bash_command += ["-o", out_data.abspath]
@@ -150,7 +150,7 @@ def task(self, instance: Instance, in_data: InstanceData, out_data: InstanceData
else:
self.v("Running TotalSegmentator in default mode (1.5mm)")

# TODO: remove
# TODO: remove
self.v(">> run: ", " ".join(bash_command))

# run the model
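
With use_fast_mode set to false in the updated config, the assembled call is roughly equivalent to the sketch below; the paths are placeholders and the exact flag handling lives in the elided lines:

import subprocess

bash_command = ["TotalSegmentator",
                "-i", "/path/to/ct_registered.nii.gz",
                "-o", "/path/to/segmentations.nii.gz",
                "--ml"]  # single multi-label output file
# "--fast" would only be appended when use_fast_mode is true
subprocess.run(bash_command, check=True)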
