Commit
merge develop
julien-louis committed Dec 3, 2024
2 parents 9c1ba7f + 8294ff5 commit 2ad2b72
Showing 184 changed files with 2,358 additions and 1,047 deletions.
16 changes: 8 additions & 8 deletions bootstrap.sh
@@ -96,10 +96,10 @@ if [ -n "$build" ] ; then
mkdir -p /tmp/home
docker run --rm -t -i -v "$PWD:/src" -u "`id -u`:`id -g`" -e HOME="/src/tmp/home" \
-e MAVEN_OPTS="-Dmaven.repo.local=/src/tmp/home/.m2/repository" \
-w /src "$DEV_IMG" sh -c 'git config --global --add safe.directory /src && cd shanoir-ng-parent && mvn clean install -DskipTests'
-w /src "$DEV_IMG" sh -c 'cd shanoir-ng-parent && mvn clean install -DskipTests'

# 3. build the docker images
docker compose build
docker compose -f docker-compose-dev.yml build
fi

if [ -n "$deploy" ] ; then
@@ -136,14 +136,14 @@ if [ -n "$deploy" ] ; then
# 2. keycloak-database + keycloak
if [ -n "$keycloak" ] ; then
step "init: keycloak-database"
docker compose up -d keycloak-database
docker compose -f docker-compose-dev.yml up -d keycloak-database
wait_tcp_ready keycloak-database 3306

step "init: keycloak"
docker compose run --rm -e SHANOIR_MIGRATION=init keycloak

step "start: keycloak"
docker compose up -d keycloak
docker compose -f docker-compose-dev.yml up -d keycloak
docker-compose/common/oneshot --pgrp '\| *' \
' INFO \[io.quarkus\] .* Keycloak .* started in [0-9]*' \
-- docker compose logs --no-color --follow keycloak >/dev/null
@@ -156,7 +156,7 @@ if [ -n "$deploy" ] ; then
for infra_ms_dcm4chee in ldap dcm4chee-database dcm4chee-arc
do
step "start: $infra_ms_dcm4chee infrastructure microservices dcm4chee"
docker compose up -d "$infra_ms_dcm4chee"
docker compose -f docker-compose-dev.yml up -d "$infra_ms_dcm4chee"
done
fi

@@ -165,7 +165,7 @@ if [ -n "$deploy" ] ; then
for infra_ms in rabbitmq solr
do
step "start: $infra_ms infrastructure microservice"
docker compose up -d "$infra_ms"
docker compose -f docker-compose-dev.yml up -d "$infra_ms"
done

# 5. Shanoir-NG microservices
@@ -175,10 +175,10 @@ if [ -n "$deploy" ] ; then
step "init: $ms microservice"
docker compose run --rm -e SHANOIR_MIGRATION=init "$ms"
step "start: $ms microservice"
docker compose up -d "$ms"
docker compose -f docker-compose-dev.yml up -d "$ms"
done

# 6. nginx
step "start: nginx"
docker compose up -d nginx
docker compose -f docker-compose-dev.yml up -d nginx
fi
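
Two changes run through bootstrap.sh: the in-container maven build drops the git safe.directory workaround, and every docker compose call now passes -f docker-compose-dev.yml. Without -f, docker compose falls back to the default docker-compose.yml shown below, which pins released ghcr.io images; naming the dev compose file explicitly keeps the bootstrap on the development stack (presumably the locally built images).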
22 changes: 11 additions & 11 deletions docker-compose.yml
@@ -17,7 +17,7 @@ services:
#
keycloak-database:
container_name: "${SHANOIR_PREFIX}keycloak-database"
image: "ghcr.io/fli-iam/shanoir-ng/keycloak-database:ng_v2.3.1"
image: "ghcr.io/fli-iam/shanoir-ng/keycloak-database:NG_v2.5.0"
environment:
- MYSQL_DATABASE=keycloak
ulimits:
@@ -43,7 +43,7 @@ services:
- SHANOIR_KEYCLOAK_USER
- SHANOIR_KEYCLOAK_PASSWORD
- SHANOIR_ALLOWED_ADMIN_IPS
image: "ghcr.io/fli-iam/shanoir-ng/keycloak:ng_v2.3.1"
image: "ghcr.io/fli-iam/shanoir-ng/keycloak:NG_v2.5.0"
volumes:
- "keycloak-logs:/opt/keycloak/data/log"
networks:
@@ -70,7 +70,7 @@ services:
#
database:
container_name: "${SHANOIR_PREFIX}database"
image: "ghcr.io/fli-iam/shanoir-ng/database:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/database:NG_v2.5.0"
command: --max_allowed_packet 20000000
env_file:
- ./docker-compose/database/variables.env
@@ -91,7 +91,7 @@ services:
#
users:
container_name: "${SHANOIR_PREFIX}users"
image: "ghcr.io/fli-iam/shanoir-ng/users:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/users:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -121,7 +121,7 @@ services:
#
studies:
container_name: "${SHANOIR_PREFIX}studies"
image: "ghcr.io/fli-iam/shanoir-ng/studies:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/studies:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -146,7 +146,7 @@ services:
#
import:
container_name: "${SHANOIR_PREFIX}import"
image: "ghcr.io/fli-iam/shanoir-ng/import:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/import:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -174,7 +174,7 @@ services:
#
datasets:
container_name: "${SHANOIR_PREFIX}datasets"
image: "ghcr.io/fli-iam/shanoir-ng/datasets:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/datasets:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -202,7 +202,7 @@ services:

preclinical:
container_name: "${SHANOIR_PREFIX}preclinical"
image: "ghcr.io/fli-iam/shanoir-ng/preclinical:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/preclinical:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -227,7 +227,7 @@ services:
#
nifti-conversion:
container_name: "${SHANOIR_PREFIX}nifti-conversion"
image: "ghcr.io/fli-iam/shanoir-ng/nifti-conversion:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/nifti-conversion:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -248,7 +248,7 @@ services:
#
solr:
container_name: "${SHANOIR_PREFIX}solr"
image: "ghcr.io/fli-iam/shanoir-ng/solr:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/solr:NG_v2.5.0"
environment:
- SOLR_LOG_LEVEL=SEVERE
volumes:
@@ -321,7 +321,7 @@ services:
#
nginx:
container_name: shanoir-ng-nginx
image: "ghcr.io/fli-iam/shanoir-ng/nginx:ng_v2.4.1"
image: "ghcr.io/fli-iam/shanoir-ng/nginx:NG_v2.5.0"
environment:
- SHANOIR_PREFIX
- SHANOIR_URL_SCHEME
@@ -0,0 +1 @@
ALTER TABLE shanoir_metadata ADD COLUMN sorting_index int(11);
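
This migration adds a sorting_index column to shanoir_metadata; it pairs with the sortingIndex field added to the Solr schema below, so the same value is both persisted in MySQL and available to Solr.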
1 change: 1 addition & 0 deletions docker-compose/solr/core/schema.xml
@@ -461,6 +461,7 @@
<field name="studyName" type="lowercase"/>
<field name="subjectName" type="lowercase"/>
<field name="subjectId" type="plong"/>
<field name="sortingIndex" type="pint"/>
<field name="subjectType" type="lowercase"/>
<field name="tags" type="strings"/>
<field name="processed" type="boolean" indexed="true" stored="true" required="true" />
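
With the field typed as pint (Solr's point-based integer) and assuming the pint field type enables docValues, search results can be ordered on it, e.g. by appending sort=sortingIndex asc to a select query; that query is an illustrative example, not part of this commit.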
@@ -66,14 +66,15 @@ public void anonymize(ArrayList<File> dicomFiles, String profile) throws Excepti
tagsToDeleteForManufacturer = AnonymizationRulesSingleton.getInstance().getTagsToDeleteForManufacturer();
// init here for multi-threading reasons
Map<String, String> seriesInstanceUIDs = new HashMap<>();
Map<String, String> frameOfReferenceUIDs = new HashMap<>();
Map<String, String> studyInstanceUIDs = new HashMap<>();
Map<String, String> studyIds = new HashMap<>();
LOG.debug("anonymize : totalAmount={}", totalAmount);
int current = 0;
for (int i = 0; i < dicomFiles.size(); ++i) {
final File file = dicomFiles.get(i);
// Perform the anonymization
performAnonymization(file, anonymizationMap, false, "", "", seriesInstanceUIDs, studyInstanceUIDs, studyIds);
performAnonymization(file, anonymizationMap, false, "", "", seriesInstanceUIDs, frameOfReferenceUIDs, studyInstanceUIDs, studyIds);
current++;
final int currentPercent = current * 100 / totalAmount;
LOG.debug("anonymize : anonymization current percent= {} %", currentPercent);
@@ -99,14 +100,15 @@ public void anonymizeForShanoir(ArrayList<File> dicomFiles, String profile, Stri

// init here for multi-threading reasons
Map<String, String> seriesInstanceUIDs = new HashMap<>();
Map<String, String> frameOfReferenceUIDs = new HashMap<>();
Map<String, String> studyInstanceUIDs = new HashMap<>();
Map<String, String> studyIds = new HashMap<>();
LOG.debug("anonymize : totalAmount={}", totalAmount);
int current = 0;
for (int i = 0; i < dicomFiles.size(); ++i) {
final File file = dicomFiles.get(i);
// Perform the anonymization
performAnonymization(file, anonymizationMap, true, patientName, patientID, seriesInstanceUIDs, studyInstanceUIDs, studyIds);
performAnonymization(file, anonymizationMap, true, patientName, patientID, seriesInstanceUIDs, frameOfReferenceUIDs, studyInstanceUIDs, studyIds);
current++;
final int currentPercent = current * 100 / totalAmount;
LOG.debug("anonymize : anonymization current percent= {} %", currentPercent);
@@ -156,7 +158,7 @@ private void anonymizePatientMetaData(Attributes attributes, String patientName,
* @throws Exception
*/
public void performAnonymization(final File dicomFile, Map<String, String> anonymizationMap, boolean isShanoirAnonymization,
String patientName, String patientID, Map<String, String> seriesInstanceUIDs,
String patientName, String patientID, Map<String, String> seriesInstanceUIDs, Map<String, String> frameOfReferenceUIDs,
Map<String, String> studyInstanceUIDs, Map<String, String> studyIds) throws Exception {
DicomInputStream din = null;
DicomOutputStream dos = null;
@@ -213,18 +215,17 @@ public void performAnonymization(final File dicomFile, Map<String, String> anony
anonymizeTag(tagInt, action, datasetAttributes);
// even: public tags
} else if (anonymizationMap.containsKey(tagString)) {
if (tagInt == Tag.SOPInstanceUID) {
anonymizeSOPInstanceUID(tagInt, datasetAttributes, mediaStorageSOPInstanceUIDGenerated);
} else if (tagInt == Tag.SeriesInstanceUID) {
anonymizeSeriesInstanceUID(tagInt, datasetAttributes, seriesInstanceUIDs);
} else if (tagInt == Tag.StudyInstanceUID) {
anonymizeStudyInstanceUID(tagInt, datasetAttributes, studyInstanceUIDs);
} else if (tagInt == Tag.StudyID) {
anonymizeStudyId(tagInt, datasetAttributes, studyIds);
} else {
final String action = anonymizationMap.get(tagString);
anonymizeTag(tagInt, action, datasetAttributes);
}
switch (tagInt) {
case Tag.SOPInstanceUID -> anonymizeSOPInstanceUID(tagInt, datasetAttributes, mediaStorageSOPInstanceUIDGenerated);
case Tag.SeriesInstanceUID -> anonymizeUID(tagInt, datasetAttributes, seriesInstanceUIDs);
case Tag.FrameOfReferenceUID -> anonymizeUID(tagInt, datasetAttributes, frameOfReferenceUIDs);
case Tag.StudyInstanceUID -> anonymizeUID(tagInt, datasetAttributes, studyInstanceUIDs);
case Tag.StudyID -> anonymizeStudyId(tagInt, datasetAttributes, studyIds);
default -> {
final String action = anonymizationMap.get(tagString);
anonymizeTag(tagInt, action, datasetAttributes);
}
}
} else {
if (0x50000000 <= tagInt && tagInt <= 0x50FFFFFF) {
final String action = anonymizationMap.get(CURVE_DATA_TAGS);
@@ -350,46 +351,6 @@ private void anonymizeSOPInstanceUID(int tagInt, Attributes attributes, String m
anonymizeTagAccordingToVR(attributes, tagInt, mediaStorageSOPInstanceUID);
}

private void anonymizeSeriesInstanceUID(int tagInt, Attributes attributes, Map<String, String> seriesInstanceUIDs) {
String value;
if (seriesInstanceUIDs != null && seriesInstanceUIDs.size() != 0
&& seriesInstanceUIDs.get(attributes.getString(tagInt)) != null) {
value = seriesInstanceUIDs.get(attributes.getString(tagInt));
} else {
UIDGeneration generator = new UIDGeneration();
String newUID = null;
try {
newUID = generator.getNewUID();
} catch (Exception e) {
LOG.error(e.getMessage());
}
value = newUID;
seriesInstanceUIDs.put(attributes.getString(tagInt), value);
}
anonymizeTagAccordingToVR(attributes, tagInt, value);
}

private void anonymizeStudyInstanceUID(int tagInt, Attributes attributes, Map<String, String> studyInstanceUIDs) {
String value;
if (studyInstanceUIDs != null && studyInstanceUIDs.size() != 0
&& studyInstanceUIDs.get(attributes.getString(tagInt)) != null) {
value = studyInstanceUIDs.get(attributes.getString(tagInt));
LOG.debug("Existing StudyInstanceUID reused: {}", value);
} else {
UIDGeneration generator = new UIDGeneration();
String newUID = null;
try {
newUID = generator.getNewUID();
} catch (Exception e) {
LOG.error(e.getMessage());
}
value = newUID;
LOG.info("New StudyInstanceUID generated for DICOM study/exam: {}", newUID);
studyInstanceUIDs.put(attributes.getString(tagInt), value);
}
anonymizeTagAccordingToVR(attributes, tagInt, value);
}

private void anonymizeStudyId(int tagInt, Attributes attributes, Map<String, String> studyIds) {
String value;
if (studyIds != null && studyIds.size() != 0 && studyIds.get(attributes.getString(tagInt)) != null) {
@@ -523,4 +484,30 @@ else if (vr.equals(VR.AE) || vr.equals(VR.AS) || vr.equals(VR.CS) || vr.equals(V
// VR.OD = Other Double String
}

private void anonymizeUID(int tagInt, Attributes attributes, Map<String, String> UIDs) {
String value;
if (UIDs != null && UIDs.size() != 0
&& UIDs.get(attributes.getString(tagInt)) != null) {
value = UIDs.get(attributes.getString(tagInt));
// we only log for the StudyInstanceUID tag
if(Tag.StudyInstanceUID == tagInt) {
LOG.debug("Existing StudyInstanceUID reused: {}", value);
}
} else {
UIDGeneration generator = new UIDGeneration();
String newUID = null;
try {
newUID = generator.getNewUID();
} catch (Exception e) {
LOG.error(e.getMessage());
}
value = newUID;
if(Tag.StudyInstanceUID == tagInt) {
LOG.info("New StudyInstanceUID generated for DICOM study/exam: {}", newUID);
}
UIDs.put(attributes.getString(tagInt), value);
}
anonymizeTagAccordingToVR(attributes, tagInt, value);
}

}
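
The former per-tag helpers anonymizeSeriesInstanceUID and anonymizeStudyInstanceUID are folded into the single map-backed anonymizeUID above, which now also covers FrameOfReferenceUID. The same lookup-or-generate pattern could be written more compactly with computeIfAbsent; a hypothetical tightening, not what the commit ships:

    private String replacementUID(Map<String, String> uids, String originalUID) {
        // reuse the replacement generated earlier for this original UID, so every
        // file sharing a series/study/frame-of-reference UID stays consistent
        return uids.computeIfAbsent(originalUID, k -> {
            try {
                return new UIDGeneration().getNewUID();
            } catch (Exception e) {
                LOG.error(e.getMessage());
                // unlike the original put(), computeIfAbsent does not cache a null
                return null;
            }
        });
    }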
3 changes: 2 additions & 1 deletion shanoir-ng-datasets/pom.xml
@@ -149,6 +149,7 @@
<profiles>
<profile>${spring.active.profile}</profile>
</profiles>
<classifier>exec</classifier>
</configuration>
<executions>
<execution>
@@ -168,7 +169,7 @@
<phase>package</phase>
<configuration>
<target>
<copy file="target/${project.build.finalName}.jar"
<copy file="target/${project.build.finalName}-exec.jar"
tofile="${basedir}/../docker-compose/datasets/${project.artifactId}.jar" />
</target>
</configuration>
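
Adding the exec classifier makes spring-boot-maven-plugin write the repackaged, runnable jar as ${project.build.finalName}-exec.jar while leaving the plain ${project.build.finalName}.jar in its original library layout; the ant copy step is updated to ship the -exec artifact into the docker-compose build context.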
@@ -312,21 +312,15 @@ public void createDatasetAcquisition(final String studyStr) {


/**
* Receives a shanoirEvent as a json object, concerning a subject deletion
* @param eventAsString the task as a json string.
*/
@RabbitListener(bindings = @QueueBinding(
key = ShanoirEventType.DELETE_SUBJECT_EVENT,
value = @Queue(value = RabbitMQConfiguration.DELETE_SUBJECT_QUEUE, durable = "true"),
exchange = @Exchange(value = RabbitMQConfiguration.EVENTS_EXCHANGE, ignoreDeclarationExceptions = "true",
autoDelete = "false", durable = "true", type=ExchangeTypes.TOPIC)), containerFactory = "singleConsumerFactory"
)
* Receives the id of a deleted subject as a plain string
* @param subjectIdAsString a string of the subject's id
*/
@RabbitListener(queues = RabbitMQConfiguration.DELETE_SUBJECT_QUEUE, containerFactory = "singleConsumerFactory")
@Transactional
public void deleteSubject(String eventAsString) throws AmqpRejectAndDontRequeueException {
public void deleteSubject(String subjectIdAsString) throws AmqpRejectAndDontRequeueException {
SecurityContextUtil.initAuthenticationContext("ROLE_ADMIN");
try {
ShanoirEvent event = objectMapper.readValue(eventAsString, ShanoirEvent.class);
Long subjectId = Long.valueOf(event.getObjectId());
Long subjectId = Long.valueOf(subjectIdAsString);
Set<Long> studyIds = new HashSet<>();

// Inverse order to remove copied examination before its source (if copied)
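
The listener now binds to the existing queue by name instead of redeclaring the exchange binding inline, and its payload shrinks from a serialized ShanoirEvent to the bare subject id. A hypothetical sketch of the matching producer side (not part of this diff; names assumed):

    import org.springframework.amqp.rabbit.core.RabbitTemplate;

    public class SubjectDeletionPublisher {

        private final RabbitTemplate rabbitTemplate;

        public SubjectDeletionPublisher(RabbitTemplate rabbitTemplate) {
            this.rabbitTemplate = rabbitTemplate;
        }

        public void publishSubjectDeletion(Long subjectId) {
            // the message body is the plain id string, parsed by the
            // listener with Long.valueOf(subjectIdAsString)
            rabbitTemplate.convertAndSend(RabbitMQConfiguration.DELETE_SUBJECT_QUEUE,
                    subjectId.toString());
        }
    }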
Expand Down Expand Up @@ -477,7 +471,7 @@ public void copyDatasetsToStudy(final String data) {
Long dsCount = datasetRepository.countDatasetsBySourceIdAndStudyId(datasetParentId, studyId);
Dataset datasetParent = datasetService.findById(datasetParentId);

if (datasetParent.getSourceId() != null) {
if (datasetParent.getSource() != null) {
LOG.info("[CopyDatasets] Selected dataset is a copy, please pick the original dataset.");
countCopy++;
} else if (dsCount != 0) {
Expand All @@ -502,7 +496,8 @@ public void copyDatasetsToStudy(final String data) {
event.setStatus(ShanoirEvent.SUCCESS);
event.setProgress(1.0f);
eventService.publishEvent(event);
solrService.indexDatasets(newDatasets);
if (newDatasets.size() > 0)
solrService.indexDatasets(newDatasets);

} catch (Exception e) {
if (event != null) {
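
A note on the copyDatasetsToStudy hunk above: the copy check now tests datasetParent.getSource() != null (presumably the source entity reference rather than a raw id), and the new guard skips the Solr round-trip when no datasets were actually copied; the more idiomatic spelling would be !newDatasets.isEmpty().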