Skip to content

Commit

Permalink
Merge branch 'main' of github.com:orthanc-team/python-orthanc-api-client
Browse files Browse the repository at this point in the history
  • Loading branch information
bcrickboom committed Oct 31, 2024
2 parents 75d95ce + e7f63d1 commit 06af77b
Show file tree
Hide file tree
Showing 9 changed files with 187 additions and 44 deletions.
19 changes: 7 additions & 12 deletions orthanc_api_client/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@
import random
from typing import Union, Optional
from .helpers_internal import write_dataset_to_bytes
from pydicom.dataset import Dataset, FileDataset
from pydicom.uid import ExplicitVRLittleEndian
import pydicom.uid
from urllib3.filepost import encode_multipart_formdata, choose_boundary

Expand Down Expand Up @@ -108,16 +106,13 @@ def generate_test_dicom_file(
) -> bytes:
buffer = bytearray(height * width * 2)

meta = pydicom.Dataset()
meta.MediaStorageSOPClassUID = pydicom.uid.MRImageStorage
meta.MediaStorageSOPInstanceUID = pydicom.uid.generate_uid()
meta.TransferSyntaxUID = pydicom.uid.ExplicitVRLittleEndian
file_meta = pydicom.dataset.FileMetaDataset()
file_meta.MediaStorageSOPClassUID = pydicom.uid.MRImageStorage
file_meta.MediaStorageSOPInstanceUID = pydicom.uid.generate_uid()
file_meta.TransferSyntaxUID = pydicom.uid.ExplicitVRLittleEndian

ds = Dataset()
ds.file_meta = meta

ds.is_little_endian = True
ds.is_implicit_VR = False
ds = pydicom.dataset.Dataset()
ds.file_meta = file_meta

ds.Modality = "MR"
ds.SOPInstanceUID = pydicom.uid.generate_uid()
Expand Down Expand Up @@ -184,7 +179,7 @@ def encode_multipart_related(fields, boundary=None):


def is_version_at_least(version_string: str, expected_major: int, expected_minor: int, expected_patch: Optional[int] = None) -> bool:
if version_string == "mainline":
if version_string.startswith("mainline"):
return True

split_version = version_string.split(".")
Expand Down
12 changes: 2 additions & 10 deletions orthanc_api_client/helpers_internal.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,8 @@
from pydicom.filebase import DicomFileLike
from pydicom import dcmwrite
from io import BytesIO


def write_dataset_to_bytes(dataset) -> bytes:
# create a buffer
with BytesIO() as buffer:
# create a DicomFileLike object that has some properties of DataSet
memory_dataset = DicomFileLike(buffer)
# write the dataset to the DicomFileLike object
dcmwrite(memory_dataset, dataset)
# to read from the object, you have to rewind it
memory_dataset.seek(0)
# read the contents as bytes
return memory_dataset.read()
dataset.save_as(buffer)
return buffer.getvalue()
88 changes: 86 additions & 2 deletions orthanc_api_client/resources/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,6 +301,59 @@ def modify_bulk(self, orthanc_ids: List[str] = [], replace_tags: Any = {}, remov


def modify_bulk_async(self, orthanc_ids: List[str] = [], replace_tags: Any = {}, remove_tags: List[str] = [], keep_tags: List[str] = [], delete_original: bool = True, force: bool = False, transcode: Optional[str] = None, permissive: bool = False) -> Job:
    """
    Starts a bulk 'modify' job on the given resources and returns the Job
    immediately, without waiting for its completion.
    """
    forwarded = dict(
        orthanc_ids=orthanc_ids,
        replace_tags=replace_tags,
        remove_tags=remove_tags,
        keep_tags=keep_tags,
        delete_original=delete_original,
        force=force,
        transcode=transcode,
        permissive=permissive,
    )
    return self._modify_bulk_async(operation="modify", **forwarded)

def _modify_bulk(self, operation: str, orthanc_ids: List[str] = [], replace_tags: Any = {}, remove_tags: List[str] = [], keep_tags: List[str] = [], delete_original: bool = True, force: bool = False, transcode: Optional[str] = None, permissive: bool = False) -> Tuple[List[str], List[str], List[str], List[str]]:
    """
    Runs a bulk 'modify' or 'anonymize' job synchronously and collects the ids
    of the resources created by the job.

    returns a tuple with:
    - the list of modified instances ids
    - the list of modified series ids
    - the list of modified studies ids
    - the list of modified patients ids

    raises OrthancApiException if the job does not complete successfully.
    """
    modified_instances_ids = []
    modified_series_ids = []
    modified_studies_ids = []
    modified_patients_ids = []

    job = self._modify_bulk_async(
        operation=operation,
        orthanc_ids=orthanc_ids,
        replace_tags=replace_tags,
        remove_tags=remove_tags,
        keep_tags=keep_tags,
        delete_original=delete_original,
        force=force,
        transcode=transcode,
        permissive=permissive  # fix: was not forwarded, so the caller's 'permissive' flag was silently ignored
    )

    job.wait_completed()

    if job.info.status == JobStatus.SUCCESS and "Resources" in job.content:
        # dispatch each resource id reported by the job into the list matching its level
        for r in job.content.get("Resources"):
            if r.get("Type") == "Instance":
                modified_instances_ids.append(r.get("ID"))
            elif r.get("Type") == "Series":
                modified_series_ids.append(r.get("ID"))
            elif r.get("Type") == "Study":
                modified_studies_ids.append(r.get("ID"))
            elif r.get("Type") == "Patient":
                modified_patients_ids.append(r.get("ID"))
        return modified_instances_ids, modified_series_ids, modified_studies_ids, modified_patients_ids
    else:
        raise api_exceptions.OrthancApiException(msg=f"Error while {'modifying' if operation == 'modify' else 'anonymizing'} bulk {self._get_level()}, job failed {json.dumps(job.info.content)}")

def _modify_bulk_async(self, operation: str, orthanc_ids: List[str] = [], replace_tags: Any = {}, remove_tags: List[str] = [], keep_tags: List[str] = [], delete_original: bool = True, force: bool = False, transcode: Optional[str] = None, permissive: bool = False) -> Job:
query = {
"Force": force,
"Level": self._get_level(),
Expand All @@ -321,14 +374,45 @@ def modify_bulk_async(self, orthanc_ids: List[str] = [], replace_tags: Any = {},
query['KeepSource'] = False

r = self._api_client.post(
endpoint=f"/tools/bulk-modify",
endpoint=f"/tools/bulk-{operation}",
json=query)

if r.status_code == 200 and "ID" in r.json():
return Job(api_client=self._api_client, orthanc_id=r.json()['ID'])
else:
raise HttpError(http_status_code=r.status_code, msg="Error in bulk-modify", url=r.url, request_response=r)
raise HttpError(http_status_code=r.status_code, msg=f"Error in bulk-{operation}", url=r.url, request_response=r)


def anonymize_bulk(self, orthanc_ids: List[str] = [], replace_tags: Any = {}, remove_tags: List[str] = [], keep_tags: List[str] = [], delete_original: bool = False, force: bool = False, transcode: Optional[str] = None, permissive: bool = False) -> Tuple[List[str], List[str], List[str], List[str]]:
    """
    Runs a bulk 'anonymize' job synchronously.

    returns a tuple with:
    - the list of anonymized instances ids
    - the list of anonymized series ids
    - the list of anonymized studies ids
    - the list of anonymized patients ids
    """
    forwarded = dict(
        orthanc_ids=orthanc_ids,
        replace_tags=replace_tags,
        remove_tags=remove_tags,
        keep_tags=keep_tags,
        delete_original=delete_original,
        force=force,
        transcode=transcode,
        permissive=permissive,
    )
    return self._modify_bulk(operation="anonymize", **forwarded)

def anonymize_bulk_async(self, orthanc_ids: List[str] = [], replace_tags: Any = {}, remove_tags: List[str] = [], keep_tags: List[str] = [], delete_original: bool = False, force: bool = False, transcode: Optional[str] = None, permissive: bool = False) -> Job:
    """
    Starts a bulk 'anonymize' job on the given resources and returns the Job
    immediately, without waiting for its completion.
    """
    forwarded = dict(
        orthanc_ids=orthanc_ids,
        replace_tags=replace_tags,
        remove_tags=remove_tags,
        keep_tags=keep_tags,
        delete_original=delete_original,
        force=force,
        transcode=transcode,
        permissive=permissive,
    )
    return self._modify_bulk_async(operation="anonymize", **forwarded)

def print_daily_stats(self, from_date: datetime.date = None, to_date: datetime.date = None):
if self._url_segment == "patients":
Expand Down
15 changes: 15 additions & 0 deletions release-notes.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,18 @@
v 0.17.0
========

- Added `OrthancApiClient.instances.anonymize_bulk` and `OrthancApiClient.instances.anonymize_bulk_async`

v 0.16.3
========

- Avoid pydicom warning when generating test files

v 0.16.2
========

- `o.is_orthanc_version_at_least()` and `o.is_plugin_version_at_least()` now support "mainline-commitId" patterns

v 0.16.1
========

Expand Down
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
# For a discussion on single-sourcing the version across setup.py and the
# project code, see
# https://packaging.python.org/guides/single-sourcing-package-version/
version='0.16.1', # Required
version='0.17.0', # Required

# This is a one-line description or tagline of what your project does. This
# corresponds to the "Summary" metadata field:
Expand Down Expand Up @@ -134,7 +134,7 @@
# https://packaging.python.org/discussions/install-requires-vs-requirements/
install_requires=[
'requests>=2.31.0',
'pydicom>=3.0.0',
'pydicom>=3.0.1',
'StrEnum>=0.4.15'
],

Expand Down
2 changes: 1 addition & 1 deletion tests/docker-setup/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
version: "3"
services:
orthanc-a:
image: orthancteam/orthanc:24.2.3
image: orthancteam/orthanc:24.10.1
ports: ["10042:8042"]
environment:
VERBOSE_STARTUP: "true"
Expand Down
4 changes: 2 additions & 2 deletions tests/docker-setup/orthanc-b/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
FROM orthancteam/orthanc:24.2.3
FROM orthancteam/orthanc:24.10.1

RUN pip install pydicom
RUN pip install --break-system-packages pydicom==3.0.1

RUN mkdir /scripts
COPY plugin.py /scripts
31 changes: 16 additions & 15 deletions tests/docker-setup/orthanc-b/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,25 +6,15 @@

from io import BytesIO

from pydicom import dcmread, dcmwrite
from pydicom.filebase import DicomFileLike


TOKEN = orthanc.GenerateRestApiAuthorizationToken()


def write_dataset_to_bytes(dataset):
# create a buffer
with BytesIO() as buffer:
# create a DicomFileLike object that has some properties of DataSet
memory_dataset = DicomFileLike(buffer)
# write the dataset to the DicomFileLike object
dcmwrite(memory_dataset, dataset)
# to read from the object, you have to rewind it
memory_dataset.seek(0)
# read the contents as bytes
return memory_dataset.read()

dataset.save_as(buffer)
return buffer.getvalue()


def get_api_token(output, uri, **request):
Expand All @@ -43,15 +33,26 @@ def worklist_callback(answers, query, issuerAet, calledAet):
json_tags = json.loads(orthanc.DicomBufferToJson(dicom, orthanc.DicomToJsonFormat.FULL, orthanc.DicomToJsonFlags.NONE, 0))

dataset = pydicom.dataset.Dataset()
dataset.is_little_endian = True
dataset.is_implicit_VR = False

file_meta = pydicom.dataset.FileMetaDataset()

# Set the FileMeta attributes
file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.31'
file_meta.MediaStorageSOPInstanceUID = generate_uid()
file_meta.ImplementationClassUID = '1.2.840.10008.5.1.4.1.1.2'
file_meta.TransferSyntaxUID = pydicom.uid.ExplicitVRLittleEndian
dataset.file_meta = file_meta

# dataset.TransferSyntaxUID = pydicom.uid.ExplicitVRLittleEndian
dataset.AccessionNumber = 'A123456'
dataset.StudyInstanceUID = '1.2.3.4'
dataset.PatientName = 'PatientName'
dataset.PatientID = 'PatientID'
dataset.PatientBirthDate = '20220208'
dataset.PatientSex = 'O'

# dataset.is_little_endian = True
# dataset.is_implicit_VR = False
# dataset.file_meta.TransferSyntaxUID = pydicom.uid.ExplicitVRLittleEndian
dataset_bytes = write_dataset_to_bytes(dataset)

answers.WorklistAddAnswer(query, dataset_bytes)
Expand Down
56 changes: 56 additions & 0 deletions tests/test_api_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import subprocess
import logging
import datetime
import uuid

from orthanc_api_client import OrthancApiClient, generate_test_dicom_file, ChangeType, ResourceType, Study, Job, JobStatus, JobType, InstancesSet, LabelsConstraint, LogLevel, RemoteJob
from orthanc_api_client.helpers import *
import orthanc_api_client.exceptions as api_exceptions
Expand Down Expand Up @@ -937,6 +939,59 @@ def test_modify_bulk_instances(self):
self.assertNotIn(instances_ids[0], modified_instances_ids) # make sure the new ids are different from the original ones


def test_anonymize_bulk_series(self):
    # Anonymize two series from the same study in one bulk call and check that
    # Orthanc groups both anonymized series under a single new study.
    self.oa.delete_all_content()

    self.oa.upload_folder(here / "stimuli/MR/Brain")
    series_ids = self.oa.series.get_all_ids()

    # the Brain fixture is a single study
    self.assertEqual(1, len(self.oa.studies.get_all_ids()))

    _, modified_series_ids, __, ___ = self.oa.series.anonymize_bulk(
        orthanc_ids=series_ids,
        delete_original=False,
        keep_tags=["SeriesDescription", "StudyDescription"]
    )

    self.assertEqual(2, len(modified_series_ids))
    tags1 = self.oa.series.get_tags(modified_series_ids[0])
    tags2 = self.oa.series.get_tags(modified_series_ids[1])
    # both anonymized series must share the same patient/study level tags
    self.assertEqual(tags1.get("PatientName"), tags2.get("PatientName"))
    self.assertEqual(tags1.get("StudyDescription"), tags2.get("StudyDescription"))
    self.assertEqual(tags1.get("StudyInstanceUID"), tags2.get("StudyInstanceUID"))
    # SeriesDescription was listed in keep_tags, so it still matches one of the originals
    self.assertIn(tags1.get("SeriesDescription"), ["sT2W/FLAIR", "T1/3D/FFE/C"])
    # make sure both series are in the same anonymized study (the original study is still in Orthanc)
    self.assertEqual(2, len(self.oa.studies.get_all_ids()))


def test_anonymize_bulk_study(self):
    # Anonymize two unrelated studies in one bulk call, forcing the same
    # PatientID/PatientName so both end up under a single anonymized patient.
    self.oa.delete_all_content()

    self.oa.upload_folder(here / "stimuli/MR/Brain")
    self.oa.upload_file(here / "stimuli/CT_small.dcm")

    self.assertEqual(2, len(self.oa.studies.get_all_ids()))

    _, __, modified_studies_ids, ___ = self.oa.series.anonymize_bulk(
        orthanc_ids=self.oa.studies.get_all_ids(),
        delete_original=False,
        keep_tags=["SeriesDescription", "StudyDescription"],
        replace_tags={
            "PatientID": str(uuid.uuid4()),  # orthanc does not put all studies in the same patient -> you must do it manually
            "PatientName": f"Anonymized " + str(uuid.uuid4())
        },
        force=True  # required to replace PatientID
    )

    self.assertEqual(2, len(modified_studies_ids))
    tags1 = self.oa.studies.get_tags(modified_studies_ids[0])
    tags2 = self.oa.studies.get_tags(modified_studies_ids[1])
    # both studies share the forced patient identity ...
    self.assertEqual(tags1.get("PatientName"), tags2.get("PatientName"))
    # ... but keep their own (kept) StudyDescription and get distinct new StudyInstanceUIDs
    self.assertNotEqual(tags1.get("StudyDescription"), tags2.get("StudyDescription"))
    self.assertNotEqual(tags1.get("StudyInstanceUID"), tags2.get("StudyInstanceUID"))
    # make sure both studies are in the same anonymized patient (the original patients are still in Orthanc)
    self.assertEqual(3, len(self.oa.patients.get_all_ids()))

def test_asyncio(self):
self.oa.delete_all_content()

Expand Down Expand Up @@ -1490,6 +1545,7 @@ def test_version(self):
self.assertFalse(is_version_at_least("1.1", 1, 2, 3))

self.assertTrue(is_version_at_least("mainline", 1, 2, 3)) # mainline is always bigger than any version number !!!
self.assertTrue(is_version_at_least("mainline-548748", 1, 2, 3)) # mainline is always bigger than any version number !!!

self.assertTrue(self.oa.is_orthanc_version_at_least(1, 9, 0))
self.assertTrue(self.oa.is_plugin_version_at_least("dicom-web", 1, 5))
Expand Down

0 comments on commit 06af77b

Please sign in to comment.