[ALS-5755] Switch time series processor to ISO timestamps
- Make service that does this
- Isolate time series logic a bit more
- Tests
Luke Sikina committed Jan 23, 2024
1 parent c6d8296 commit 1919f66
Showing 5 changed files with 63 additions and 7 deletions.
File 1 of 5
@@ -672,7 +672,7 @@ protected PhenoCube getCube(String path) {
* Useful for federated pic-sure's where there are fewer
* guarantees about concept paths.
*/
- protected Optional<PhenoCube<?>> nullableGetCube(String path) {
+ public Optional<PhenoCube<?>> nullableGetCube(String path) {
try {
return Optional.ofNullable(store.get(path));
} catch (InvalidCacheLoadException | ExecutionException e) {
File 2 of 5: TimeSeriesConversionService.java (new file)
@@ -0,0 +1,18 @@
package edu.harvard.hms.dbmi.avillach.hpds.processing.timeseries;

import org.springframework.stereotype.Service;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.TimeZone;

@Service
public class TimeSeriesConversionService {

    public String toISOString(Long unixTimeStamp) {
        Instant instant = Instant.ofEpochMilli(unixTimeStamp);
        return DateTimeFormatter.ISO_INSTANT.format(instant);
    }
}
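The conversion treats the stored timestamp as epoch milliseconds and formats it with DateTimeFormatter.ISO_INSTANT, which always renders UTC with a trailing Z. A quick illustration of the expected output (a sketch, not part of this commit; the sample values are arbitrary):

TimeSeriesConversionService service = new TimeSeriesConversionService();
service.toISOString(0L);             // "1970-01-01T00:00:00Z"
service.toISOString(1706000400000L); // "2024-01-23T09:00:00Z"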
File 3 of 5: TimeseriesProcessor.java
@@ -1,9 +1,13 @@
- package edu.harvard.hms.dbmi.avillach.hpds.processing;
+ package edu.harvard.hms.dbmi.avillach.hpds.processing.timeseries;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;

+ import edu.harvard.hms.dbmi.avillach.hpds.processing.AbstractProcessor;
+ import edu.harvard.hms.dbmi.avillach.hpds.processing.AsyncResult;
+ import edu.harvard.hms.dbmi.avillach.hpds.processing.HpdsProcessor;
+ import edu.harvard.hms.dbmi.avillach.hpds.processing.QueryProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -36,14 +40,16 @@ public class TimeseriesProcessor implements HpdsProcessor {
private Logger log = LoggerFactory.getLogger(QueryProcessor.class);

private AbstractProcessor abstractProcessor;
+ private final TimeSeriesConversionService conversionService;

private final String ID_CUBE_NAME;
private final int ID_BATCH_SIZE;
private final int CACHE_SIZE;

@Autowired
- public TimeseriesProcessor(AbstractProcessor abstractProcessor) {
+ public TimeseriesProcessor(AbstractProcessor abstractProcessor, TimeSeriesConversionService conversionService) {
this.abstractProcessor = abstractProcessor;
+ this.conversionService = conversionService;
// todo: handle these via spring annotations
CACHE_SIZE = Integer.parseInt(System.getProperty("CACHE_SIZE", "100"));
ID_BATCH_SIZE = Integer.parseInt(System.getProperty("ID_BATCH_SIZE", "0"));
@@ -115,14 +121,24 @@ private void addDataForConcepts(Collection<String> pathList, Set<String> exporte
if (cube.isStringType()) {
KeyAndValue<String> keyAndValue = (KeyAndValue) kvObj;
// "PATIENT_NUM","CONCEPT_PATH","NVAL_NUM","TVAL_CHAR","TIMESTAMP"
String[] entryData = { keyAndValue.getKey().toString(), conceptPath, "", keyAndValue.getValue(),
keyAndValue.getTimestamp().toString() };
String[] entryData = {
keyAndValue.getKey().toString(),
conceptPath,
"",
keyAndValue.getValue(),
conversionService.toISOString(keyAndValue.getTimestamp())
};
dataEntries.add(entryData);
} else { // numeric
KeyAndValue<Double> keyAndValue = (KeyAndValue) kvObj;
// "PATIENT_NUM","CONCEPT_PATH","NVAL_NUM","TVAL_CHAR","TIMESTAMP"
- String[] entryData = { keyAndValue.getKey().toString(), conceptPath,
- keyAndValue.getValue().toString(), "", keyAndValue.getTimestamp().toString() };
+ String[] entryData = {
+     keyAndValue.getKey().toString(),
+     conceptPath,
+     keyAndValue.getValue().toString(),
+     "",
+     conversionService.toISOString(keyAndValue.getTimestamp())
+ };
dataEntries.add(entryData);
}
//batch exports so we don't take double memory (valuesForKeys + dataEntries could be a lot of data points)
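The visible effect is limited to the exported TIMESTAMP column: the old code wrote the raw numeric timestamp via toString(), while the conversion service now writes an ISO-8601 instant. A rough before/after sketch of one numeric row, following the column order in the comment above (the patient number, concept path, and value are made-up illustrative data, not from this commit):

// before: 123, \demo\concept\path\, 98.6, "", 1706000400000
// after:  123, \demo\concept\path\, 98.6, "", 2024-01-23T09:00:00Z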
File 4 of 5: TimeSeriesConversionServiceTest.java (new file)
@@ -0,0 +1,21 @@
package edu.harvard.hms.dbmi.avillach.hpds.processing.timeseries;

import org.junit.Test;

import java.util.TimeZone;

import static org.junit.Assert.assertEquals;


public class TimeSeriesConversionServiceTest {

    TimeSeriesConversionService subject = new TimeSeriesConversionService();

    @Test
    public void shouldConvertToIsoString() {
        String actual = subject.toISOString(0L);
        String expected = "1970-01-01T00:00:00Z";

        assertEquals(expected, actual);
    }
}
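A side note on this test (not part of the commit): because ISO_INSTANT formats an Instant, the output is always UTC and is unaffected by the JVM default time zone, so an assertion like the sketch below would pass wherever the code runs. The New_York zone is an arbitrary example.

TimeZone.setDefault(TimeZone.getTimeZone("America/New_York")); // arbitrary non-UTC default
assertEquals("1970-01-01T00:00:00Z", subject.toISOString(0L)); // still rendered in UTC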
File 5 of 5
@@ -7,6 +7,7 @@
import java.util.function.Predicate;
import java.util.stream.Collectors;

+ import edu.harvard.hms.dbmi.avillach.hpds.processing.timeseries.TimeseriesProcessor;
import edu.harvard.hms.dbmi.avillach.hpds.service.util.QueryDecorator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;