2558 : API for pipeline execution analysis
DuckflipXYZ committed Jan 22, 2025
1 parent ebb3569 commit ba34ed8
Showing 6 changed files with 214 additions and 12 deletions.
5 changes: 5 additions & 0 deletions shanoir-ng-datasets/pom.xml
@@ -136,6 +136,11 @@
<version>3.2.2</version>
</dependency>

<dependency>
<groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>5.7.1</version>
</dependency>
</dependencies>

<build>
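The new opencsv 5.7.1 dependency provides the CSVWriter used further down in ProcessingDownloaderServiceImpl. A minimal, self-contained sketch of the CSVWriter API as used there — class name and values are illustrative only:

```java
import com.opencsv.CSVWriter;

import java.io.IOException;
import java.io.StringWriter;

public class CsvWriterSketch {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        // try-with-resources flushes and closes the writer; the target here is
        // an in-memory buffer, so closing it is harmless
        try (CSVWriter writer = new CSVWriter(out)) {
            writer.writeNext(new String[]{"Dataset", "DatasetId"});
            writer.writeNext(new String[]{"t1_mprage", "42"}); // illustrative values
        }
        System.out.print(out); // each array becomes one quoted, comma-separated line
    }
}
```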
DatasetSecurityService.java
@@ -971,6 +971,38 @@ public boolean hasRightOnExaminations(List<Long> examinationIds, String rightStr
return true;
}


/**
* Check that the connected user has the given right on the given data.
*
* @param ids the data ids
* @param dataType the type of the data: "study", "subject", "examination", "acquisition" or "dataset"
* @param rightStr the right
* @return true or false
* @throws EntityNotFoundException if the data type is not supported
*/
public boolean hasRightOnDatas(List<Long> ids, String dataType, String rightStr) throws EntityNotFoundException {
if (isAdmin()) {
return true;
}
switch (dataType) {
case "study":
return hasRightOnStudies(ids, rightStr);
case "subject":
return hasRightOnSubjects(ids, rightStr);
case "examination":
return hasRightOnExaminations(ids, rightStr);
case "acquisition":
return hasRightOnAcquisitions(ids, rightStr);
case "dataset":
return hasRightOnDatasets(ids, rightStr);

default:
throw new EntityNotFoundException("Data type " + dataType + " not supported");

}
}

private boolean isAdmin() {
return KeycloakUtil.getTokenRoles().contains(ROLE_ADMIN);
}
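hasRightOnDatas is meant to be invoked from SpEL in a @PreAuthorize expression, as the new endpoint below does. A hedged sketch of that wiring on a hypothetical endpoint (the controller name and path are not part of the commit):

```java
import java.util.List;

import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class DataRightsSketchController { // hypothetical, for illustration only
    // '@datasetSecurityService' resolves the Spring bean exposing hasRightOnDatas;
    // #dataIds and #dataType bind to the method parameters by name.
    @PreAuthorize("hasRole('ADMIN') or @datasetSecurityService.hasRightOnDatas(#dataIds, #dataType, 'CAN_SEE_ALL')")
    @PostMapping("/sketch/rights-check")
    public void check(@RequestBody List<Long> dataIds, @RequestParam String dataType) {
        // intentionally empty: the security annotation is the point
    }
}
```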
DatasetProcessingApi.java
@@ -130,9 +130,9 @@ ResponseEntity<Void> updateDatasetProcessing(
@ApiResponse(responseCode = "404", description = "no dataset found"),
@ApiResponse(responseCode = "500", description = "unexpected error")})
@PostMapping(value = "/massiveDownloadByProcessingIds")
@PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.HasRightOnEveryDatasetOfProcessings(#processingIds, 'CAN_DOWNLOAD'))")
@PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.HasRightOnDatasetOfProcessings(#processingIds, 'CAN_DOWNLOAD'))")
void massiveDownloadByProcessingIds(
@Parameter(description = "id of the processing", required = true) @Valid
@Parameter(description = "ids of the processing", required = true) @Valid
@RequestBody List<Long> processingIds,
@Parameter(description = "outputs to extract") @Valid
@RequestParam(value = "resultOnly", defaultValue = "false") boolean resultOnly, HttpServletResponse response) throws RestServiceException;
@@ -147,11 +147,28 @@ void massiveDownloadByProcessingIds(
@PostMapping(value = "/massiveDownloadProcessingByExaminationIds")
@PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.hasRightOnExaminations(#examinationIds, 'CAN_DOWNLOAD'))")
void massiveDownloadProcessingByExaminationIds(
@Parameter(description = "id of the examination", required = true) @Valid
@Parameter(description = "ids of the examination", required = true) @Valid
@RequestBody List<Long> examinationIds,
@Parameter(description = "comment of the desired processings") @Valid
@RequestParam(value = "processingComment", required = false) String processingComment,
@Parameter(description = "outputs to extract") @Valid
@RequestParam(value = "resultOnly", defaultValue = "false") boolean resultOnly, HttpServletResponse response) throws RestServiceException;

@Operation(summary = "downloadPipelineDatas", description = "Return a .csv file describing datas used for pipeline executions and their results. ")
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "csv file"),
@ApiResponse(responseCode = "401", description = "unauthorized"),
@ApiResponse(responseCode = "403", description = "forbidden"),
@ApiResponse(responseCode = "404", description = "no datas found"),
@ApiResponse(responseCode = "500", description = "unexpected error")})
@PostMapping(value = "/downloadPipelineDatas")
@PreAuthorize("hasRole('ADMIN') or (hasAnyRole('EXPERT', 'USER') and @datasetSecurityService.hasRightOnDatas(#ids, #dataType, 'CAN_SEE_ALL'))")
void downloadPipelineDatas(
@Parameter(description = "ids ", required = true) @Valid
@RequestBody List<Long> dataIds,
@Parameter(description = "data type", required = true) @Valid
@RequestParam String dataType,
@Parameter(description = "pipeline identifier", required = true) @Valid
@RequestParam String pipelineIdentifier, HttpServletResponse response) throws RestServiceException, IOException;

}
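A hedged sketch of a client call to the new endpoint; the base URL, token, path prefix, and parameter values are placeholders and may differ in a real deployment. The response body is a zip archive wrapping the CSV:

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class DownloadPipelineDatasClient { // hypothetical client, not in the commit
    public static void main(String[] args) throws Exception {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.setBearerAuth("<access-token>"); // placeholder Keycloak token
        HttpEntity<List<Long>> request = new HttpEntity<>(List.of(1L, 2L), headers);
        // host and query values are placeholders; the endpoint path is from the commit
        ResponseEntity<byte[]> response = new RestTemplate().exchange(
                "https://shanoir.example/datasetProcessing/downloadPipelineDatas"
                        + "?dataType=study&pipelineIdentifier=my_pipeline",
                HttpMethod.POST, request, byte[].class);
        Files.write(Path.of("pipeline_datas.zip"), response.getBody());
    }
}
```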
DatasetProcessingApiController.java
@@ -23,21 +23,32 @@
import org.shanoir.ng.dataset.dto.mapper.DatasetMapper;
import org.shanoir.ng.dataset.model.Dataset;
import org.shanoir.ng.dataset.model.DatasetExpressionFormat;
import org.shanoir.ng.dataset.repository.DatasetRepository;
import org.shanoir.ng.dataset.service.DatasetService;
import org.shanoir.ng.datasetacquisition.model.DatasetAcquisition;
import org.shanoir.ng.datasetacquisition.repository.DatasetAcquisitionRepository;
import org.shanoir.ng.examination.model.Examination;
import org.shanoir.ng.examination.repository.ExaminationRepository;
import org.shanoir.ng.examination.service.ExaminationService;
import org.shanoir.ng.processing.dto.DatasetProcessingDTO;
import org.shanoir.ng.processing.dto.mapper.DatasetProcessingMapper;
import org.shanoir.ng.processing.model.DatasetProcessing;
import org.shanoir.ng.processing.model.DatasetProcessingType;
import org.shanoir.ng.processing.repository.DatasetProcessingRepository;
import org.shanoir.ng.processing.service.DatasetProcessingService;
import org.shanoir.ng.processing.service.ProcessingDownloaderServiceImpl;
import org.shanoir.ng.shared.error.FieldErrorMap;
import org.shanoir.ng.shared.exception.*;
import org.shanoir.ng.shared.model.Study;
import org.shanoir.ng.shared.model.Subject;
import org.shanoir.ng.shared.repository.StudyRepository;
import org.shanoir.ng.shared.repository.SubjectRepository;
import org.shanoir.ng.utils.KeycloakUtil;
import org.shanoir.ng.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
@@ -75,6 +86,11 @@ public class DatasetProcessingApiController implements DatasetProcessingApi {
@Autowired
private ExaminationService examinationService;

@Autowired
@Lazy
private DatasetProcessingRepository processingRepository;


public DatasetProcessingApiController(){

}
@@ -228,4 +244,30 @@ public void massiveDownloadProcessingByExaminationIds(
}
processingDownloaderService.massiveDownloadByExaminations(examinationList, processingComment, resultOnly, "dcm" , response, false, null);
}

@Override
public void downloadPipelineDatas(List<Long> dataIds, String dataType, String pipelineIdentifier, HttpServletResponse response) throws IOException {
List<Long> processingIds = new ArrayList<>();
switch (dataType) {
case "study":
processingIds = processingRepository.findAllIdsByStudyIds(dataIds);
break;
case "subject":
processingIds = processingRepository.findAllIdsBySubjectIds(dataIds);
break;
case "examination":
processingIds = processingRepository.findAllIdsByExaminationIds(dataIds);
break;
case "acquisition":
processingIds = processingRepository.findAllIdsByAcquisitionIds(dataIds);
break;
case "dataset":
processingIds = processingRepository.findAllIdsByInputDatasets_Ids(dataIds);
break;
}
processingIds = processingRepository.filterIdsByIdentifier(processingIds, pipelineIdentifier);
if(!processingIds.isEmpty()) {
processingDownloaderService.downloadPipelineDatas(processingIds, pipelineIdentifier, response);
}
}
}
DatasetProcessingRepository.java
@@ -31,30 +31,99 @@ public interface DatasetProcessingRepository extends CrudRepository<DatasetProce
* Find dataset processing by name.
*
* @param comment comment.
- * @return a dataset processing.
+ * @return a dataset processing (Optional).
*/
Optional<DatasetProcessing> findByComment(String comment);

/**
* Find all processings that are linked to the given dataset through the INPUT_OF_DATASET_PROCESSING table
*
* @param datasetId
- * @return
+ * @return list of processings
*/
List<DatasetProcessing> findAllByInputDatasets_Id(Long datasetId);

- List<DatasetProcessing> findAllByParentId(Long id);
/**
* Find all processing ids that are linked to the given datasets through the INPUT_OF_DATASET_PROCESSING table
*
* @param datasetIds
* @return list of processing ids
*/
@Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
"INNER JOIN input_of_dataset_processing as input ON processing.id=input.processing_id " +
"WHERE input.dataset_id IN (:datasetIds)", nativeQuery = true)
List<Long> findAllIdsByInputDatasets_Ids(List<Long> datasetIds);

/**
* Find all processings that have the given processing as parent
*
* @param id the parent processing id
* @return list of processings
*/
List<DatasetProcessing> findAllByParentId(Long id);

/**
* Find all processing ids that are linked to the given examinations
*
* @param examinationIds
- * @return
+ * @return list of processing ids
*/
@Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
"INNER JOIN input_of_dataset_processing as input ON processing.id=input.processing_id " +
"INNER JOIN dataset as dataset ON dataset.id=input.dataset_id " +
"INNER JOIN dataset_acquisition as acquisition ON acquisition.id=dataset.dataset_acquisition_id " +
"WHERE acquisition.examination_id IN (:examinationIds)", nativeQuery = true)
List<Long> findAllIdsByExaminationIds(List<Long> examinationIds);

/**
* Find all processing ids that are linked to the given studies
*
* @param studyIds
* @return list of processing ids
*/
@Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
"INNER JOIN input_of_dataset_processing as input ON processing.id=input.processing_id " +
"INNER JOIN dataset as dataset ON dataset.id=input.dataset_id " +
"INNER JOIN dataset_acquisition as acquisition ON acquisition.id=dataset.dataset_acquisition_id " +
"INNER JOIN examination as examination ON examination.id = acquisition.examination_id " +
"WHERE examination.study_id IN (:studyIds)", nativeQuery = true)
List<Long> findAllIdsByStudyIds(List<Long> studyIds);

/**
* Find all processing ids that are linked to the given acquisitions
*
* @param acquisitionIds
* @return list of processing ids
*/
@Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
"INNER JOIN input_of_dataset_processing as input ON processing.id=input.processing_id " +
"INNER JOIN dataset as dataset ON dataset.id=input.dataset_id " +
"WHERE dataset.dataset_acquisition_id IN (:acquisitionIds)", nativeQuery = true)
List<Long> findAllIdsByAcquisitionIds(List<Long> acquisitionIds);

/**
* Find all processing ids that are linked to the given subjects
*
* @param subjectIds
* @return list of processing ids
*/
@Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
"INNER JOIN input_of_dataset_processing as input ON processing.id=input.processing_id " +
"INNER JOIN dataset as dataset ON dataset.id=input.dataset_id " +
"INNER JOIN dataset_acquisition as acquisition ON acquisition.id=dataset.dataset_acquisition_id " +
"INNER JOIN examination as examination ON examination.id = acquisition.examination_id " +
"WHERE examination.subject_id IN (:subjectIds)", nativeQuery = true)
List<Long> findAllIdsBySubjectIds(List<Long> subjectIds);

/**
* Filter the given processings by a specific pipelineIdentifier, matched against the processing comment with SQL LIKE
*
* @param processingIds list of processing ids to filter
* @param pipelineIdentifier the pipeline identifier to filter on
* @return list of processing ids
*/
@Query(value="SELECT DISTINCT processing.id FROM dataset_processing as processing " +
"WHERE processing.comment LIKE :pipelineIdentifier " +
"AND processing.id IN (:processingIds)", nativeQuery = true)
List<Long> filterIdsByIdentifier(List<Long> processingIds, String pipelineIdentifier);
}
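Note that filterIdsByIdentifier compares the comment column with SQL LIKE, so without % wildcards it behaves as an equality test. A hedged usage sketch chaining it with one of the new queries (the helper class is hypothetical):

```java
import java.util.List;

import org.shanoir.ng.processing.repository.DatasetProcessingRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class PipelineFilterSketch { // hypothetical helper, for illustration only
    @Autowired
    private DatasetProcessingRepository processingRepository;

    public List<Long> processingIdsForStudies(List<Long> studyIds, String pipelineIdentifier) {
        List<Long> candidates = processingRepository.findAllIdsByStudyIds(studyIds);
        // LIKE without '%' acts as equality on the comment column; pass
        // "%" + pipelineIdentifier + "%" for a contains-match instead.
        return processingRepository.filterIdsByIdentifier(candidates, pipelineIdentifier);
    }
}
```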
ProcessingDownloaderServiceImpl.java
@@ -2,30 +2,29 @@

import jakarta.servlet.http.HttpServletResponse;
import org.apache.solr.common.util.Pair;
import org.shanoir.ng.dataset.modality.BidsDataset;
import org.shanoir.ng.dataset.modality.EegDataset;
import com.opencsv.CSVWriter;
import org.shanoir.ng.dataset.model.Dataset;
import org.shanoir.ng.dataset.model.DatasetExpressionFormat;
import org.shanoir.ng.dataset.service.DatasetDownloaderServiceImpl;
import org.shanoir.ng.datasetacquisition.model.DatasetAcquisition;
import org.shanoir.ng.download.DatasetDownloadError;
import org.shanoir.ng.download.WADODownloaderService;
import org.shanoir.ng.examination.model.Examination;
import org.shanoir.ng.processing.model.DatasetProcessing;
import org.shanoir.ng.processing.model.DatasetProcessingType;
import org.shanoir.ng.processing.repository.DatasetProcessingRepository;
import org.shanoir.ng.shared.event.ShanoirEvent;
import org.shanoir.ng.shared.event.ShanoirEventType;
import org.shanoir.ng.shared.exception.ErrorModel;
import org.shanoir.ng.shared.exception.RestServiceException;
import org.shanoir.ng.shared.model.Study;
import org.shanoir.ng.shared.model.Subject;
import org.shanoir.ng.utils.DatasetFileUtils;
import org.shanoir.ng.utils.KeycloakUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.net.URL;
import java.io.OutputStreamWriter;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
@@ -157,4 +156,42 @@ private String shapeForPath(String path){
}
return path;
}


public void downloadPipelineDatas(List<Long> processingIds, String pipelineIdentifier, HttpServletResponse response) throws IOException {
try (ZipOutputStream zipOutputStream = new ZipOutputStream(response.getOutputStream())) {
ZipEntry zipEntry = new ZipEntry("processing_" + shapeForPath(pipelineIdentifier) + "_datas.csv");
zipEntry.setTime(System.currentTimeMillis());
zipOutputStream.putNextEntry(zipEntry);

String[] header = { "Study", "Subject", "Examination", "Acquisition", "AcquisitionId", "Dataset", "DatasetId", "Error file", "Results file" };
CSVWriter writer = new CSVWriter(new OutputStreamWriter(zipOutputStream));
writer.writeNext(header);
for(DatasetProcessing processing : datasetProcessingService.findAllById(processingIds)){
for(Dataset input : processing.getInputDatasets()) {
writer.writeNext(buildLineForCsv(processing, input));
}
}
// flush rather than close: closing the CSVWriter would close the underlying ZipOutputStream before the entry is finalized
writer.flush();
zipOutputStream.closeEntry();
}
}

private String[] buildLineForCsv(DatasetProcessing processing, Dataset input) {
DatasetAcquisition datasetAcquisition = input.getDatasetAcquisition();
Examination examination = Objects.nonNull(datasetAcquisition) ? datasetAcquisition.getExamination() : null;
Subject subject = Objects.nonNull(examination) ? examination.getSubject() : null;
Study study = Objects.nonNull(examination) ? examination.getStudy() : null;

String studyName = Objects.nonNull(study) ? study.getName() : null;
String subjectName = Objects.nonNull(subject) ? subject.getName() : null;
String examinationId = Objects.nonNull(examination) ? examination.getId().toString() : "";
String acquisitionName = Objects.nonNull(datasetAcquisition) ? (Objects.nonNull(datasetAcquisition.getStudyCard()) ? datasetAcquisition.getStudyCard().getName() : "") : "";
String acquisitionId = Objects.nonNull(datasetAcquisition) ? datasetAcquisition.getId().toString() : "";
String inputName = input.getName();
String inputId = input.getId().toString();
String errorFile = processing.getOutputDatasets().stream().anyMatch(it->Objects.equals(it.getName(), "error.yaml")) ? "true" : "false";
String resultsFile = processing.getOutputDatasets().stream().anyMatch(it->Objects.equals(it.getName(), "results.yaml")) ? "true" : "false";
return new String[]{studyName, subjectName, examinationId, acquisitionName, acquisitionId, inputName, inputId, errorFile, resultsFile};
}
}
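downloadPipelineDatas streams the zip straight to response.getOutputStream() without setting response headers. A hedged sketch of headers a caller could set beforehand so browsers save the stream as a zip file (not part of the commit; the helper is hypothetical):

```java
import jakarta.servlet.http.HttpServletResponse;

// hypothetical helper, for illustration only; to be called before delegating to
// processingDownloaderService.downloadPipelineDatas(processingIds, pipelineIdentifier, response)
final class ZipResponseHeaders {
    static void set(HttpServletResponse response) {
        // mark the streamed bytes as a downloadable zip archive
        response.setContentType("application/zip");
        response.setHeader("Content-Disposition", "attachment; filename=pipeline_datas.zip");
    }
}
```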
