Add functions to PyDicer class and add some docs #157

Merged: 1 commit, Nov 17, 2023
2 changes: 2 additions & 0 deletions docs/index.rst
@@ -20,10 +20,12 @@
:maxdepth: 5
:hidden:

tool
input
config
preprocess
convert
visualise
dataset
analyse
nnunet
6 changes: 6 additions & 0 deletions docs/nnunet.rst
@@ -0,0 +1,6 @@
#####################
nnUNet
#####################

.. autoclass:: pydicer.dataset.nnunet.NNUNetDataset
:members:
8 changes: 8 additions & 0 deletions docs/tool.rst
@@ -0,0 +1,8 @@
#####################
PyDicer
#####################


.. autoclass:: pydicer.tool.PyDicer
:members:

10 changes: 7 additions & 3 deletions pydicer/dataset/nnunet.py
@@ -47,7 +47,13 @@ def __init__(
image_modality: str = "CT",
mapping_id=DEFAULT_MAPPING_ID,
):
"""_summary_
"""Prepare a dataset to train models using nnUNet.

Ensure that nnUNet is installed in your Python environment.
For details on nnUNet see: https://github.com/MIC-DKFZ/nnUNet

> Note: This class currently supports nnUNet v1. Contributions are welcome to add support for
nnUNet v2.

Args:
working_directory (Union[str, Path]): The PyDicer working directory
@@ -502,8 +508,6 @@ def prepare_dataset(self) -> Path:
target_label_path = label_ts_path.joinpath(f"{pat_id}.nii.gz")
sitk.WriteImage(pat_label_map, str(target_label_path))



# write JSON file
dataset_dict = {
"name": self.nnunet_name,
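For reviewers: a minimal usage sketch of the class documented above. Only `working_directory`, `image_modality` and `mapping_id` appear in this diff's constructor signature; `nnunet_id` and `nnunet_name` are assumed arguments (the hunk above references `self.nnunet_name`), and the paths are placeholders.

```python
# Hedged sketch: prepare an nnUNet v1 training dataset with PyDicer.
from pydicer.dataset.nnunet import NNUNetDataset

nnunet = NNUNetDataset(
    working_directory="./my_project",  # the PyDicer working directory
    nnunet_id=500,                     # assumed: numeric nnUNet task ID
    nnunet_name="MyTask",              # assumed: written to the dataset JSON as "name"
    image_modality="CT",
)

# prepare_dataset() writes images, labels and the dataset JSON file in
# nnUNet's expected layout and returns the dataset path (per the hunk above).
dataset_path = nnunet.prepare_dataset()
print(dataset_path)
```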
8 changes: 8 additions & 0 deletions pydicer/dataset/preparation.py
@@ -14,6 +14,14 @@


class PrepareDataset:
"""
Class that provides functionality for preparation of subsets of data.

Args:
- working_directory (str|pathlib.Path, optional): Main working directory for pydicer.
Defaults to ".".
"""

def __init__(self, working_directory="."):
self.working_directory = Path(working_directory)

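A short sketch of how this class might be used, assuming PrepareDataset exposes a `prepare(dataset_name, preparation_function)` entry point; that signature is not shown in this diff and is an assumption.

```python
# Hedged sketch: carve out a named subset of the converted data.
from pydicer.dataset.preparation import PrepareDataset

def ct_only(df):
    # Hypothetical filter over the converted DataFrame: keep CT rows only.
    return df[df["modality"] == "CT"]

dataset = PrepareDataset(working_directory="./my_project")
dataset.prepare("ct_only", ct_only)  # assumed API, see note above
```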
104 changes: 77 additions & 27 deletions pydicer/tool.py
@@ -3,6 +3,8 @@
from logging.handlers import RotatingFileHandler
from pathlib import Path

import pandas as pd

from pydicer.config import PyDicerConfig
from pydicer.constants import CONVERTED_DIR_NAME, PYDICER_DIR_NAME

@@ -14,10 +16,31 @@
from pydicer.dataset.preparation import PrepareDataset
from pydicer.analyse.data import AnalyseData

from pydicer.utils import read_converted_data, add_structure_name_mapping, copy_doc

from pydicer.generate.object import add_object, add_structure_object, add_dose_object
from pydicer.generate.segmentation import (
read_all_segmentation_logs,
segment_image,
segment_dataset,
)

logger = logging.getLogger()


class PyDicer:
"""The PyDicer class provides easy access to all the key PyDicer functionality.

Args:
working_directory (str|pathlib.Path, optional): Directory in which data is stored. Defaults
to ".".

:ivar convert: Instance of :class:`~pydicer.convert.data.ConvertData`
:ivar visualise: Instance of :class:`~pydicer.visualise.data.VisualiseData`
:ivar dataset: Instance of :class:`~pydicer.dataset.preparation.PrepareDataset`
:ivar analyse: Instance of :class:`~pydicer.analyse.data.AnalyseData`
"""

def __init__(self, working_directory="."):

self.working_directory = Path(working_directory)
@@ -135,8 +158,8 @@ def preprocess(self, force=True):
if len(self.dicom_directories) == 0:
raise ValueError("No DICOM input locations set. Add one using the add_input function.")

pd = PreprocessData(self.working_directory)
pd.preprocess(self.dicom_directories, force=force)
preprocess_data = PreprocessData(self.working_directory)
preprocess_data.preprocess(self.dicom_directories, force=force)

self.preprocessed_data = read_preprocessed_data(self.working_directory)

@@ -161,33 +184,60 @@
)
self.analyse.compute_dvh(dataset_name=CONVERTED_DIR_NAME, patient=patient, force=force)

# Object generation (insert in dataset(s) or all data)
def add_object_to_dataset(
self,
uid,
patient_id,
obj_type,
modality,
for_uid=None,
referenced_sop_instance_uid=None,
datasets=None,
):
"""_summary_
@copy_doc(add_structure_name_mapping, remove_args=["working_directory"])
def add_structure_name_mapping( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

Args:
uid (_type_): _description_
patient_id (_type_): _description_
obj_type (_type_): _description_
modality (_type_): _description_
for_uid (_type_, optional): _description_. Defaults to None.
referenced_sop_instance_uid (_type_, optional): _description_. Defaults to None.
datasets (_type_, optional): _description_. Defaults to None.
"""
return add_structure_name_mapping(
*args, working_directory=self.working_directory, **kwargs
)

@copy_doc(read_converted_data, remove_args=["working_directory"])
def read_converted_data( # pylint: disable=missing-function-docstring
self, *_, **kwargs
) -> pd.DataFrame:

return read_converted_data(working_directory=self.working_directory, **kwargs)

@copy_doc(add_object, remove_args=["working_directory"])
def add_object( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

return add_object(self.working_directory, *args, **kwargs)

@copy_doc(add_structure_object, remove_args=["working_directory"])
def add_structure_object( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

return add_structure_object(self.working_directory, *args, **kwargs)

@copy_doc(add_dose_object, remove_args=["working_directory"])
def add_dose_object( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

return add_dose_object(self.working_directory, *args, **kwargs)

@copy_doc(read_all_segmentation_logs, remove_args=["working_directory"])
def read_all_segmentation_logs( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

return read_all_segmentation_logs(self.working_directory, *args, **kwargs)

# Check that object folder exists, if not provide instructions for adding
@copy_doc(segment_image, remove_args=["working_directory"])
def segment_image( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

# Check that no object with uid already exists
return segment_image(self.working_directory, *args, **kwargs)

# Check that references sop uid exists, only warning if not
@copy_doc(segment_dataset, remove_args=["working_directory"])
def segment_dataset( # pylint: disable=missing-function-docstring
self, *args, **kwargs
) -> pd.DataFrame:

# Once ready, add to converted.csv for each dataset specified
return segment_dataset(self.working_directory, *args, **kwargs)
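Taken together, the new delegating methods make the facade usable end to end. A hedged sketch follows; the import path and the DICOM directory are assumptions, while `add_input`, `preprocess`, `run_pipeline` and `read_converted_data` all appear in this diff or its context.

```python
# Hedged sketch of the PyDicer facade after this change.
from pydicer import PyDicer  # assumed export; the class is defined in pydicer.tool

pyd = PyDicer(working_directory="./my_project")
pyd.add_input("./dicom")  # register a DICOM input location (placeholder path)
pyd.preprocess()          # crawl and index the DICOM data
pyd.run_pipeline()        # convert, visualise and analyse everything

# The methods added in this PR forward to the utils/generate functions with
# working_directory filled in automatically:
df = pyd.read_converted_data()
print(df.head())
```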
51 changes: 45 additions & 6 deletions pydicer/utils.py
@@ -70,13 +70,15 @@ def determine_dcm_datetime(ds, require_time=False):
return None


def load_object_metadata(row, keep_tags=None, remove_tags=None):
def load_object_metadata(row: pd.Series, keep_tags=None, remove_tags=None):
"""Loads the object's metadata

Args:
row (pd.Series): The row of the converted DataFrame for which to load the metadata
keep_tags TODO
remove_tag TODO
keep_tags (str|list, optional): DICOM tag keywords to keep when loading data. If set, all
other tags will be removed. Defaults to None.
remove_tags (str|list, optional): DICOM tag keywords to remove when loading data. If set,
all other tags will be kept. Defaults to None.

Returns:
pydicom.Dataset: The dataset object containing the original DICOM metadata
@@ -462,9 +464,9 @@ def add_structure_name_mapping(
this mapping belongs. Defaults to None.

Raises:
SystemError: _description_
ValueError: _description_
ValueError: _description_
SystemError: Raised when neither a working_directory nor a structure_set is provided.
ValueError: Raised when a key in the mapping dictionary is not of type `str`.
ValueError: Raised when a value in the mapping dictionary is not a list of `str` entries.
"""

mapping_path_base = None
@@ -585,3 +587,40 @@ def fetch_converted_test_data(working_directory=None, dataset="HNSCC"):
shutil.copytree(output_directory.joinpath(working_name), working_directory)

return working_directory


def copy_doc(copy_func, remove_args=None):
"""Copies the doc string of the given function to another.
This function is intended to be used as a decorator.

Remove args listed in `remove_args` from the docstring.

This function was adapted from:
https://stackoverflow.com/questions/68901049/copying-the-docstring-of-function-onto-another-function-by-name

.. code-block:: python3

def foo():
'''This is a foo doc string'''
...

@copy_doc(foo)
def bar():
...

"""

if remove_args is None:
remove_args = []

def wrapped(func):
func.__doc__ = copy_func.__doc__

for arg in remove_args:
func.__doc__ = "\n".join(
[line for line in func.__doc__.split("\n") if not line.strip().startswith(arg)]
)

return func

return wrapped
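A runnable demonstration of the `remove_args` filtering, grounded in the implementation above: any docstring line whose stripped text starts with a listed argument name is dropped from the copy.

```python
from pydicer.utils import copy_doc

def source(a, b):
    """Do something with a and b.

    Args:
        a (int): The first value.
        b (int): The second value.
    """

@copy_doc(source, remove_args=["b"])
def target(a):
    ...

# The "b (int): ..." line has been filtered out; the "a (int): ..." line remains.
print(target.__doc__)
```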