Merge pull request #168 from EBISPOT/develop
Develop
sajo-ebi authored Nov 16, 2022
2 parents 7ea58b0 + 9e33605 commit f3f2c8e
Showing 3 changed files with 23 additions and 5 deletions.
4 changes: 2 additions & 2 deletions .gitlab-ci.yml
@@ -106,7 +106,7 @@ deploy-fallback:
- mkdir -p /root/.kube
- echo ${PFALLBACK_KUBECONFIG} | base64 -d > /root/.kube/config
- helm init --stable-repo-url https://charts.helm.sh/stable
- helm delete --purge gwas-curation-service || true
#- helm delete --purge gwas-curation-service || true
- helm install --name gwas-curation-service --set k8Namespace=gwas,replicaCount=1,image.env.envName=prod-fallback,image.repository=$BACKEND_REGISTRY_IMAGE,image.tag=$CI_COMMIT_SHA,image.env.dbUser=gwasdepo,image.env.secretsName=prod-secrets,image.env.secretsKey=db-backend,image.env.rabbitSecretsName=prod-secrets,image.env.rabbitSecretsKey=rabbitmq-password ./k8chart/ --wait
environment:
name: prod
@@ -121,7 +121,7 @@ deploy-live:
- mkdir -p /root/.kube
- echo ${PLIVE_KUBECONFIG} | base64 -d > /root/.kube/config
- helm init --stable-repo-url https://charts.helm.sh/stable
- helm delete --purge gwas-curation-service || true
#- helm delete --purge gwas-curation-service || true
- helm install --name gwas-curation-service --set k8Namespace=gwas,replicaCount=1,image.env.envName=prod,image.repository=$BACKEND_REGISTRY_IMAGE,image.tag=$CI_COMMIT_SHA,image.env.dbUser=gwasdepo,image.env.secretsName=prod-secrets,image.env.secretsKey=db-backend,image.env.rabbitSecretsName=prod-secrets,image.env.rabbitSecretsKey=rabbitmq-password ./k8chart/ --wait
environment:
name: prod
StudyIngestConsumer.java
@@ -29,7 +29,11 @@ public class StudyIngestConsumer {
@RabbitListener(queues = { DepositionCurationConstants.QUEUE_NAME_SANDBOX,
DepositionCurationConstants.QUEUE_NAME_PROD } )
public void listen(StudyDto studyDto) {
log.info("Consuming message for"+studyDto.getSubmissionId()+":"+studyDto.getAccession());
studySolrIndexerService.syncSolrWithStudies(studyDto);
try {
log.info("Consuming message for" + studyDto.getSubmissionId() + ":" + studyDto.getAccession());
studySolrIndexerService.syncSolrWithStudies(studyDto);
} catch(Exception ex) {
log.error("Error in consuming message"+ex.getMessage(),ex);
}
}
}
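For context, the change above wraps the listener body in a try/catch so a failing message is logged instead of propagating out of the @RabbitListener method. Below is a minimal sketch of the same pattern, assuming the project's StudyDto and indexing service (their packages are omitted here); the queue name is a placeholder for DepositionCurationConstants.QUEUE_NAME_SANDBOX / QUEUE_NAME_PROD used in the real code.

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.stereotype.Component;

// Illustrative consumer: processing errors are caught and logged so they do not
// escape the listener method. StudyDto is the project's DTO (import omitted).
@Component
public class ExampleStudyConsumer {

    private static final Logger log = LoggerFactory.getLogger(ExampleStudyConsumer.class);

    // "example-study-queue" is a placeholder queue name.
    @RabbitListener(queues = "example-study-queue")
    public void listen(StudyDto studyDto) {
        try {
            log.info("Consuming message for {}:{}", studyDto.getSubmissionId(), studyDto.getAccession());
            // Delegate to the indexing service here, e.g.
            // studySolrIndexerService.syncSolrWithStudies(studyDto);
        } catch (Exception ex) {
            log.error("Error consuming message: {}", ex.getMessage(), ex);
        }
    }
}
```

Catching the exception trades a logged failure for the default requeue-on-exception behaviour, which can otherwise redeliver a poison message indefinitely.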
@@ -1,6 +1,7 @@
package uk.ac.ebi.spot.gwas.curation.service.impl;

//import com.querydsl.core.types.Predicate;
import com.mongodb.bulk.BulkWriteResult;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -345,6 +346,8 @@ public Study getStudyByAccession(String accessionId, String submissionId) {

@Override
public UploadReportWrapper updateMultiTraitsForStudies(List<MultiTraitStudyMappingDto> multiTraitStudyMappingDtos, String submissionId) {
log.info("Inside updateMultiTraitsForStudies()");

Set<String> shortForms = new HashSet<>();
Set<String> backgroundShortForms = new HashSet<>();
Set<String> reportedTraits = new HashSet<>();
@@ -353,6 +356,9 @@ public UploadReportWrapper updateMultiTraitsForStudies(List<MultiTraitStudyMappi
backgroundShortForms.addAll(Arrays.asList(StringUtils.deleteWhitespace(multiTraitStudyMappingDto.getBackgroundEfoShortForm()).split("\\|")));
reportedTraits.add(multiTraitStudyMappingDto.getReportedTrait());
});



Map<String, EfoTrait> retrievedEfoTraits = efoTraitRepository.findByShortFormIn(shortForms).collect(Collectors.toMap(EfoTrait::getShortForm, e -> e));
Map<String, EfoTrait> retrievedBackgroundEfoTraits = efoTraitRepository.findByShortFormIn(backgroundShortForms).collect(Collectors.toMap(EfoTrait::getShortForm, e -> e));
Map<String, DiseaseTrait> retrievedReportedTraits = diseaseTraitRepository.findByTraitIgnoreCaseIn(reportedTraits).collect(Collectors.toMap(DiseaseTrait::getTrait, d -> d));
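The hunk above collects every pipe-delimited EFO short form from the upload into sets before querying the repositories once per set. Here is a self-contained sketch of that split-and-collect pattern, with invented short forms and a plain stream standing in for the efoTraitRepository.findByShortFormIn call.

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

public class TraitLookupSketch {

    public static void main(String[] args) {
        // Each uploaded row may carry several short forms separated by '|'.
        List<String> uploadedColumns = Arrays.asList("EFO_0001360 | EFO_0000400", "EFO_0004340");

        Set<String> shortForms = new HashSet<>();
        for (String column : uploadedColumns) {
            // Same normalisation as the diff: drop whitespace, then split on the pipe.
            shortForms.addAll(Arrays.asList(StringUtils.deleteWhitespace(column).split("\\|")));
        }

        // The real code builds this map from efoTraitRepository.findByShortFormIn(shortForms);
        // a plain stream stands in for the repository here.
        Map<String, String> retrievedEfoTraits = shortForms.stream()
                .collect(Collectors.toMap(Function.identity(), sf -> "trait-for-" + sf));

        retrievedEfoTraits.forEach((shortForm, trait) ->
                System.out.println(shortForm + " -> " + trait));
    }
}
```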
Expand All @@ -370,10 +376,12 @@ public UploadReportWrapper updateMultiTraitsForStudies(List<MultiTraitStudyMappi
}
else {
if(!multiTraitStudyMappingDto.getStudyTag().trim().equalsIgnoreCase(study.getStudyTag())) {
//log.info("Inside invalidStudyTag block()"+study.getStudyTag());
invalidStudyTag = true;
}

if (!invalidStudyTag) {
//log.info("Inside validStudyTag block()"+study.getStudyTag());
String efoTraitComments = "";
HashSet<String> newStudyEfos = new HashSet<>(Arrays.asList(StringUtils.deleteWhitespace(multiTraitStudyMappingDto.getEfoTraitShortForm().trim()).split("\\|")));
ArrayList<String> studyEfoTraitsIds = new ArrayList<>();
@@ -412,7 +420,10 @@ public UploadReportWrapper updateMultiTraitsForStudies(List<MultiTraitStudyMappi

String reportedTraitComments = "";
if (retrievedReportedTraits.containsKey(multiTraitStudyMappingDto.getReportedTrait().trim())) {
//log.info("Reported Trait in file:"+multiTraitStudyMappingDto.getReportedTrait().trim());
//log.info("GCST currently being analysed:"+study.getAccession());
DiseaseTrait diseaseTrait = retrievedReportedTraits.get(multiTraitStudyMappingDto.getReportedTrait().trim());
//log.info("Disease Trait Id being assigned :"+diseaseTrait.getId());
study.setDiseaseTrait(diseaseTrait.getId());
studiesToSave.put(study.getId(), study);
reportedTraitComments = reportedTraitComments.concat("Reported trait set to: " + diseaseTrait.getTrait());
@@ -430,7 +441,9 @@ public UploadReportWrapper updateMultiTraitsForStudies(List<MultiTraitStudyMappi
});
BulkOperations bulkOps = mongoTemplate.bulkOps(BulkOperations.BulkMode.UNORDERED, Study.class);
for (Study study: studiesToSave.values()) {
Query query = new Query().addCriteria(new Criteria("id ").is(study.getId()));
//log.info("Study GCST which are bulkuploaded -:"+study.getAccession());
//log.info("Disease Trait Id being assigned to study is :"+study.getDiseaseTrait());
Query query = new Query().addCriteria(new Criteria("id").is(study.getId()));
Update update = new Update()
.set("efoTraits", study.getEfoTraits())
.set("backgroundEfoTraits", study.getBackgroundEfoTraits())
@@ -440,6 +453,7 @@ public UploadReportWrapper updateMultiTraitsForStudies(List<MultiTraitStudyMappi
}
if (!studiesToSave.isEmpty()) {
bulkOps.execute();
//log.info("Bule write Result"+bulkWriteResult.getUpserts());
}
uploadReportWrapper.setUploadReport(fileHandler.serializePojoToTsv(report));
return uploadReportWrapper;
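The final hunks above build one unordered BulkOperations batch per call to updateMultiTraitsForStudies and execute it once. Below is a minimal sketch of that pattern, assuming the project's Study document class (import omitted) and only the two field updates visible in the diff; the BulkWriteResult type is the one this commit starts importing, and the logging of its counts is illustrative rather than taken from the commit.

```java
import com.mongodb.bulk.BulkWriteResult;
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.util.Collection;

// Sketch of the unordered bulk-update pattern; Study stands for the project's
// document class and the field names mirror the ones visible in the diff.
public class StudyBulkUpdateSketch {

    private final MongoTemplate mongoTemplate;

    public StudyBulkUpdateSketch(MongoTemplate mongoTemplate) {
        this.mongoTemplate = mongoTemplate;
    }

    public void saveTraitMappings(Collection<Study> studiesToSave) {
        if (studiesToSave.isEmpty()) {
            return; // executing an empty bulk operation would throw
        }
        BulkOperations bulkOps = mongoTemplate.bulkOps(BulkOperations.BulkMode.UNORDERED, Study.class);
        for (Study study : studiesToSave) {
            Query query = new Query().addCriteria(Criteria.where("id").is(study.getId()));
            Update update = new Update()
                    .set("efoTraits", study.getEfoTraits())
                    .set("backgroundEfoTraits", study.getBackgroundEfoTraits());
                    // further .set(...) calls as in the service method
            bulkOps.updateOne(query, update);
        }
        // execute() returns the driver's BulkWriteResult, which the new import
        // in this commit makes available for logging match/modified counts.
        BulkWriteResult result = bulkOps.execute();
        System.out.println("Matched: " + result.getMatchedCount()
                + ", modified: " + result.getModifiedCount());
    }
}
```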
