diff --git a/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/BucketIndexBySample.java b/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/BucketIndexBySample.java
index acd4f856..46723234 100644
--- a/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/BucketIndexBySample.java
+++ b/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/BucketIndexBySample.java
@@ -100,7 +100,7 @@ public BucketIndexBySample(VariantStore variantStore, String storageDir) throws
 			}
 		});
 	} catch (IOException e) {
-		log.error("Error getting bucket", e);
+		throw new UncheckedIOException(e);
 	}
 
 	// For each patient set the patientBucketCharMask entry to 0 or 1 if they have a variant in the bucket.
@@ -149,7 +149,7 @@ public void run() {
 			}catch(NumberFormatException e) {
 				log.error("NFE caught for " + patientId, e);
 			} catch (IOException e) {
-				log.error("Error writing patient bucket masks", e);
+				throw new UncheckedIOException(e);
 			}
 			processedPatients[0] += 1;
 		});
@@ -178,9 +178,8 @@ public Collection<String> filterVariantSetForPatientSet(Set<String> variantSet,
 		try {
 			return patientBucketMasks.get(patientNum);
 		} catch (IOException e) {
-			log.error("Error getting mask for patient", e);
+			throw new UncheckedIOException(e);
 		}
-		return _defaultMask;
 	}).collect(Collectors.toList());
 	for(BigInteger patientMask : patientBucketmasksForSet) {
 		patientBucketMask = patientMask.or(patientBucketMask);
diff --git a/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/CompressedIndex.java b/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/CompressedIndex.java
index 7fa12ab9..eb8705b9 100644
--- a/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/CompressedIndex.java
+++ b/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/CompressedIndex.java
@@ -1,10 +1,6 @@
 package edu.harvard.hms.dbmi.avillach.hpds.data.genotype;
 
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.Serializable;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -64,7 +60,7 @@ public TreeMap<Double, TreeSet<String>> buildContinuousValuesMap(FileBackedByteIn
 				continuousValueMap.put(DoubleValue, currentValues);
 				setMinAndMax(DoubleValue);
 			}catch(NumberFormatException e3) {
-				System.out.println("Unable to parse value : " + value.trim());
+				log.info("Unable to parse value : " + value.trim());
 			}
 		}
 	}
@@ -117,7 +113,7 @@ public void buildIndex(TreeMap<Double, TreeSet<String>> continuousValueMap) {
 				gzos.close();
 				compressed = baos.toByteArray();
 			} catch (IOException e) {
-				log.error("Error writing range map", e);
+				throw new UncheckedIOException(e);
 			}
 			return compressed;
 		})
@@ -176,9 +172,9 @@ private TreeMap<Double, TreeSet<String>> retrieveRangeMap(Range range) {
 	){
 		continousValueMap = (TreeMap<Double, TreeSet<String>>)ois.readObject();
 	} catch (IOException e) {
-		log.error("Error reading range map from file", e);
+		throw new UncheckedIOException(e);
 	} catch (ClassNotFoundException e) {
-		log.error("Error deserializing range map", e);
+		throw new RuntimeException(e);
 	}
 	return continousValueMap;
 }
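
Note: the hunks above all apply one pattern. An IOException caught inside a stream lambda is rethrown as java.io.UncheckedIOException instead of being logged and swallowed. A checked exception cannot propagate out of Stream.map/forEach, so wrapping is the standard idiom. A minimal self-contained sketch (the lookup method is hypothetical, not code from this repository):

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.util.List;
    import java.util.stream.Collectors;

    public class UncheckedWrapExample {
        // Hypothetical checked-exception lookup, standing in for patientBucketMasks.get(...)
        static String lookup(int id) throws IOException {
            if (id < 0) throw new IOException("bad id " + id);
            return "mask-" + id;
        }

        public static void main(String[] args) {
            List<String> masks = List.of(1, 2, 3).stream()
                    .map(id -> {
                        try {
                            return lookup(id);                 // checked exception cannot escape the lambda...
                        } catch (IOException e) {
                            throw new UncheckedIOException(e); // ...so rethrow unchecked, preserving the cause
                        }
                    })
                    .collect(Collectors.toList());
            System.out.println(masks);
        }
    }

The failure now surfaces at the terminal operation (collect) instead of producing a silently incomplete result, which is also why the dead return _defaultMask; fallback above could be deleted outright.
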
diff --git a/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/VariantStore.java b/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/VariantStore.java
index 8629f09b..4a566427 100644
--- a/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/VariantStore.java
+++ b/data/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/VariantStore.java
@@ -86,8 +86,7 @@ public Map countVariants() {
 				|| masks.heterozygousNoCallMask != null || masks.homozygousNoCallMask != null ? 1 : 0;
 			}));
 		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
+			throw new UncheckedIOException(e);
 		}
 	});
 }
@@ -137,8 +136,7 @@ public void open() {
 			try {
 				fbbis.open();
 			} catch (FileNotFoundException e) {
-				// TODO Auto-generated catch block
-				e.printStackTrace();
+				throw new UncheckedIOException(e);
 			}
 		}
 	}));
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/util/HideAnnotationCategoryValue.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/util/HideAnnotationCategoryValue.java
index 7d6f9823..3a5074f0 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/util/HideAnnotationCategoryValue.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/genotype/util/HideAnnotationCategoryValue.java
@@ -1,7 +1,6 @@
 package edu.harvard.hms.dbmi.avillach.hpds.data.genotype.util;
 
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -20,40 +19,27 @@ public class HideAnnotationCategoryValue {
 	protected static LoadingCache<String, PhenoCube> store;
 
-	protected static TreeMap<String, ColumnMeta> metaStoreSource;
-
-	protected static TreeSet<Integer> allIds;
-
-	public static void main(String[] args) throws ClassNotFoundException, FileNotFoundException, IOException {
+	public static void main(String[] args) throws ClassNotFoundException, IOException {
 		String infoStoreToModify = args[0];
 		String valueToScrub = args[1];
 
 		String infoStoreFilename = "/opt/local/hpds/all/" + infoStoreToModify.trim();
-		try (
-			FileInputStream fis = new FileInputStream(infoStoreFilename);
-			GZIPInputStream gis = new GZIPInputStream(fis);
-			ObjectInputStream ois = new ObjectInputStream(gis)
-		){
-			FileBackedByteIndexedInfoStore infoStore = (FileBackedByteIndexedInfoStore) ois.readObject();
-			infoStore.getAllValues().keys().remove(valueToScrub);
-			try(
-				FileOutputStream fos = new FileOutputStream(infoStoreFilename);
-				GZIPOutputStream gos = new GZIPOutputStream(fos);
-				ObjectOutputStream oos = new ObjectOutputStream(gos);
-			){
-				oos.writeObject(infoStore);
-				oos.flush();oos.close();
-			}
-		} catch (FileNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (ClassNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
+
+		FileInputStream fis = new FileInputStream(infoStoreFilename);
+		GZIPInputStream gis = new GZIPInputStream(fis);
+		ObjectInputStream ois = new ObjectInputStream(gis);
+
+		FileBackedByteIndexedInfoStore infoStore = (FileBackedByteIndexedInfoStore) ois.readObject();
+		infoStore.getAllValues().keys().remove(valueToScrub);
+		ois.close();
+
+		FileOutputStream fos = new FileOutputStream(infoStoreFilename);
+		GZIPOutputStream gos = new GZIPOutputStream(fos);
+		ObjectOutputStream oos = new ObjectOutputStream(gos);
+
+		oos.writeObject(infoStore);
+		oos.flush();
+		oos.close();
 	}
 }
\ No newline at end of file
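
With the catch blocks gone, HideAnnotationCategoryValue now fails fast (main declares the checked exceptions), but the flattened version no longer closes its streams if readObject() throws. If deterministic cleanup matters for this utility, try-with-resources keeps both properties. A sketch under those assumptions (Object stands in for FileBackedByteIndexedInfoStore, and the modify step is elided):

    import java.io.*;
    import java.util.zip.GZIPInputStream;
    import java.util.zip.GZIPOutputStream;

    public class RewriteGzippedObjectExample {
        public static void main(String[] args) throws IOException, ClassNotFoundException {
            String filename = args[0];
            Object store;
            try (ObjectInputStream ois = new ObjectInputStream(
                    new GZIPInputStream(new FileInputStream(filename)))) {
                store = ois.readObject();  // stream is closed even if this throws
            }
            // ... modify the deserialized store here ...
            try (ObjectOutputStream oos = new ObjectOutputStream(
                    new GZIPOutputStream(new FileOutputStream(filename)))) {
                oos.writeObject(store);    // closing oos finishes the GZIP stream and flushes
            }
        }
    }

Reading fully and closing before reopening the same path for writing also avoids holding two handles on one file.
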
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/DumpSourceCSV.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/DumpSourceCSV.java
index 3db25f9c..585204d8 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/DumpSourceCSV.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/DumpSourceCSV.java
@@ -1,12 +1,6 @@
 package edu.harvard.hms.dbmi.avillach.hpds.data.phenotype.util;
 
-import java.io.ByteArrayInputStream;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.RandomAccessFile;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.Set;
 import java.util.TreeMap;
@@ -68,10 +62,10 @@ public static void main(String[] args) throws ClassNotFoundException, FileNotFou
 				cubeLines.add(line);
 			}
 			writer.printRecords(cubeLines);
-		}catch(ExecutionException e) {
-			e.printStackTrace();
+		} catch(ExecutionException e) {
+			throw new RuntimeException(e);
 		} catch (IOException e) {
-			e.printStackTrace();
+			throw new UncheckedIOException(e);
 		}
 	});
 	writer.flush();
@@ -90,7 +84,6 @@ protected static Object[] loadMetadata() {
 		Set<Integer> allIds = (TreeSet<Integer>) objectInputStream.readObject();
 		return new Object[] {metastoreScrubbed, allIds};
 	} catch (IOException | ClassNotFoundException e) {
-		e.printStackTrace();
-		throw new RuntimeException("Could not load metastore");
+		throw new RuntimeException("Could not load metastore", e);
 	}
 }
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/FixCategoricalConcepts.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/FixCategoricalConcepts.java
index 683565de..579d8a16 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/FixCategoricalConcepts.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/FixCategoricalConcepts.java
@@ -125,7 +125,7 @@ private static void processRecord(final PhenoCube[] currentConcept, List
 			store.allIds.add(Integer.parseInt(record.get(PATIENT_NUM)));
 		}
 	} catch (ExecutionException e) {
-		e.printStackTrace();
+		log.error("Error processing record", e);
 	}
 }
 
@@ -174,10 +174,9 @@ protected static Object[] loadMetadata() {
 		Set<Integer> allIds = (TreeSet<Integer>) objectInputStream.readObject();
 		return new Object[] {metastoreScrubbed, allIds};
 	} catch (IOException | ClassNotFoundException e) {
-		e.printStackTrace();
 		log.warn("************************************************");
 		log.warn("************************************************");
-		log.warn("Could not load metastore");
+		log.warn("Could not load metastore", e);
 		log.warn("If you meant to include phenotype data of any kind, please check that the file /opt/local/source/columnMeta.javabin exists and is readable by the service.");
 		log.warn("************************************************");
 		log.warn("************************************************");
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RekeyDataset.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RekeyDataset.java
index 76c8f80f..6d28bca8 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RekeyDataset.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RekeyDataset.java
@@ -100,10 +100,9 @@ protected static Object[] loadMetadata() {
 		Set<Integer> allIds = (TreeSet<Integer>) objectInputStream.readObject();
 		return new Object[] {metastoreScrubbed, allIds};
 	} catch (IOException | ClassNotFoundException e) {
-		e.printStackTrace();
 		log.warn("************************************************");
 		log.warn("************************************************");
log.warn("Could not load metastore"); + log.warn("Could not load metastore", e); log.warn("If you meant to include phenotype data of any kind, please check that the file /opt/local/source/columnMeta.javabin exists and is readable by the service."); log.warn("************************************************"); log.warn("************************************************"); diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemapIds.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemapIds.java index d06bbc25..d2bd3b76 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemapIds.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemapIds.java @@ -177,10 +177,9 @@ protected static Object[] loadMetadata() { Set allIds = (TreeSet) objectInputStream.readObject(); return new Object[] {metastoreScrubbed, allIds}; } catch (IOException | ClassNotFoundException e) { - e.printStackTrace(); log.warn("************************************************"); log.warn("************************************************"); - log.warn("Could not load metastore"); + log.warn("Could not load metastore", e); log.warn("If you meant to include phenotype data of any kind, please check that the file /opt/local/source/columnMeta.javabin exists and is readable by the service."); log.warn("************************************************"); log.warn("************************************************"); diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemoveConceptFromMetadata.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemoveConceptFromMetadata.java index c440db69..3a4c2efd 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemoveConceptFromMetadata.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RemoveConceptFromMetadata.java @@ -78,7 +78,6 @@ protected static TreeMap removeMetadata(Path filePath) { return metastore; } catch (IOException | ClassNotFoundException e) { - e.printStackTrace(); throw new RuntimeException("Could not load metastore"); } } diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RenameCategories.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RenameCategories.java index d94a1f6e..1711370b 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RenameCategories.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/data/phenotype/util/RenameCategories.java @@ -49,8 +49,7 @@ public static void main(String[] args) throws ClassNotFoundException, FileNotFou try { loadingStoreTarget.store.put(key, store.get(key)); } catch (ExecutionException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + throw new RuntimeException(e); } }); } @@ -67,7 +66,6 @@ protected static Object[] loadMetadata() { Set allIds = (TreeSet) objectInputStream.readObject(); return new Object[] {metastoreScrubbed, allIds}; } catch (IOException | ClassNotFoundException e) { - e.printStackTrace(); throw new RuntimeException("Could not load metastore"); } } diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/MultialleleCounter.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/MultialleleCounter.java index 13575e33..758e772f 100644 --- 
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/MultialleleCounter.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/MultialleleCounter.java
index 13575e33..758e772f 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/MultialleleCounter.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/MultialleleCounter.java
@@ -1,9 +1,6 @@
 package edu.harvard.hms.dbmi.avillach.hpds.etl.genotype;
 
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.ObjectInputStream;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
@@ -46,8 +43,7 @@ public static void main(String[] args) throws ClassNotFoundException, FileNotFou
 				}
 			}
 		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
+			throw new UncheckedIOException(e);
 		}
 		System.out.println("Completed bucket : " + offsetBucket);
 	});
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/VariantCounter.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/VariantCounter.java
index 7e14ab4c..baee8fd6 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/VariantCounter.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/genotype/VariantCounter.java
@@ -1,9 +1,6 @@
 package edu.harvard.hms.dbmi.avillach.hpds.etl.genotype;
 
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.ObjectInputStream;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
@@ -34,8 +31,7 @@ public static void main(String[] args) throws ClassNotFoundException, FileNotFou
 			}
 		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
+			throw new UncheckedIOException(e);
 		}
 	});
 	System.out.println(contig + "," + countOfVariants[0]);
diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/CSVLoader.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/CSVLoader.java
index 2f0dfbba..e254b72b 100644
--- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/CSVLoader.java
+++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/CSVLoader.java
@@ -106,7 +106,8 @@ private static void processRecord(final PhenoCube[] currentConcept, CSVRecord re
 			store.allIds.add(patientId);
 		}
 	} catch (ExecutionException e) {
-		e.printStackTrace();
+		// todo: do we really want to ignore this?
+		log.error("Error processing record", e);
 	}
 }
+ log.error("Error processing record", e); } } } diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/LoadingStore.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/LoadingStore.java index 3deda37d..9977b97c 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/LoadingStore.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/LoadingStore.java @@ -54,20 +54,17 @@ public void onRemoval(RemovalNotification arg0) { columnMeta.setMax(max); } ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); - try { - - ObjectOutputStream out = new ObjectOutputStream(byteStream); - out.writeObject(arg0.getValue()); - out.flush(); - out.close(); - } catch (IOException e) { - e.printStackTrace(); - } + + ObjectOutputStream out = new ObjectOutputStream(byteStream); + out.writeObject(arg0.getValue()); + out.flush(); + out.close(); + allObservationsStore.write(Crypto.encryptData(byteStream.toByteArray())); columnMeta.setAllObservationsLength(allObservationsStore.getFilePointer()); metadataMap.put(columnMeta.getName(), columnMeta); - } catch (IOException e1) { - e1.printStackTrace(); + } catch (IOException e) { + throw new UncheckedIOException(e); } } diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SQLLoader.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SQLLoader.java index d9900b65..01860baa 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SQLLoader.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SQLLoader.java @@ -20,12 +20,16 @@ import edu.harvard.hms.dbmi.avillach.hpds.data.phenotype.PhenoCube; import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowCallbackHandler; import org.springframework.jdbc.datasource.DriverManagerDataSource; public class SQLLoader { + private static final Logger log = LoggerFactory.getLogger(SQLLoader.class); + private static final SimpleDateFormat ORACLE_DATE_FORMAT = new SimpleDateFormat("dd-MMM-yy"); static JdbcTemplate template; @@ -157,8 +161,7 @@ public void processRow(ResultSet arg0) throws SQLException { try { Thread.sleep(5000); } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); + log.error("Thread interrupted", e); } // stillProcessingRecords[0] = false; // chunkWriteEx.shutdown(); @@ -223,11 +226,9 @@ private static void processRecord(final PhenoCube[] currentConcept, ResultSet ar currentConcept[0].add(patientId, isAlpha ? value : Double.parseDouble(value), arg0.getDate(DATETIME)); store.allIds.add(patientId); } - } catch (ExecutionException e) { - e.printStackTrace(); - } catch (SQLException e2) { - // TODO Auto-generated catch block - e2.printStackTrace(); + } catch (ExecutionException | SQLException e) { + // todo: do we really want to ignore these? 
+ log.error("Exception processing record", e); } } } \ No newline at end of file diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoader.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoader.java index 65d99c83..8fbf5fc6 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoader.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoader.java @@ -45,7 +45,7 @@ public class SequentialLoader { private static long processedRecords = 0; - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws IOException, ClassNotFoundException { Crypto.loadDefaultKey(); @@ -75,26 +75,17 @@ public static void main(String[] args) throws IOException { //load each into observation store for(String filename : inputFiles) { log.info("Loading file " + filename); - try { - if(filename.toLowerCase().endsWith("sql")) { - loadSqlFile(filename); - } else if(filename.toLowerCase().endsWith("csv")){ - loadCsvFile(filename); - } - }catch (Exception e) { - log.warn("Exception loading " + filename + " ", e); + if(filename.toLowerCase().endsWith("sql")) { + loadSqlFile(filename); + } else if(filename.toLowerCase().endsWith("csv")){ + loadCsvFile(filename); } } //then complete, which will compact, sort, and write out the data in the final place - try { - log.info("found a total of " + processedRecords + " entries"); - store.saveStore(); - store.dumpStats(); - } catch (ClassNotFoundException e) { - System.out.println("Class error: " + e.getLocalizedMessage()); - e.printStackTrace(); - } + log.info("found a total of " + processedRecords + " entries"); + store.saveStore(); + store.dumpStats(); } private static List readFileList() throws IOException { @@ -201,7 +192,8 @@ private static void processRecord(final PhenoCube[] currentConcept, PhenoRecord log.info("Loaded " + processedRecords + " records"); } } catch (ExecutionException e) { - e.printStackTrace(); + // todo: do we really want to ignore this? 
+ log.error("Error processing record", e); } } } diff --git a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoadingStore.java b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoadingStore.java index efc0bc21..affe54c4 100644 --- a/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoadingStore.java +++ b/etl/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/etl/phenotype/SequentialLoadingStore.java @@ -44,7 +44,7 @@ public SequentialLoadingStore() { try { allObservationsTemp = new RandomAccessFile(OBS_TEMP_FILENAME, "rw"); } catch (FileNotFoundException e) { - e.printStackTrace(); + throw new UncheckedIOException(e); } } @@ -131,7 +131,7 @@ private void write(ColumnMeta columnMeta, PhenoCube cube) throws IOException { out.writeObject(cube); out.flush(); allObservationsStore.write(Crypto.encryptData(byteStream.toByteArray())); } catch (IOException e) { - e.printStackTrace(); + throw new UncheckedIOException(e); } columnMeta.setAllObservationsLength(allObservationsStore.getFilePointer()); } @@ -220,7 +220,6 @@ public void dumpStats() { log.info("Total Number of Observations : " + totalNumberOfObservations); } catch (IOException | ClassNotFoundException e) { - e.printStackTrace(); throw new RuntimeException("Could not load metastore"); } } diff --git a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/AbstractProcessor.java b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/AbstractProcessor.java index e73ea8b2..f0e57a58 100644 --- a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/AbstractProcessor.java +++ b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/AbstractProcessor.java @@ -128,8 +128,10 @@ public AbstractProcessor(PhenotypeMetaStore phenotypeMetaStore, VariantService v FileBackedByteIndexedInfoStore infoStore = (FileBackedByteIndexedInfoStore) ois.readObject(); infoStores.put(filename.replace("_infoStore.javabin", ""), infoStore); ois.close(); - } catch (IOException | ClassNotFoundException e) { - e.printStackTrace(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); } }); } diff --git a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java index 8e9a4878..246880b4 100644 --- a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java +++ b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java @@ -124,54 +124,46 @@ public Map> runCategoryCrossCounts(Query query) { TreeSet baseQueryPatientSet = abstractProcessor.getPatientSubsetForQuery(query); query.getRequiredFields().parallelStream().forEach(concept -> { Map varCount = new TreeMap<>();; - try { - TreeMap> categoryMap = abstractProcessor.getCube(concept).getCategoryMap(); - //We do not have all the categories (aka variables) for required fields, so we need to get them and - // then ensure that our base patient set, which is filtered down by our filters. Which may include - // not only other required filters, but categorical filters, numerical filters, or genomic filters. - // We then need to get the amount a patients for each category and map that to the concept path. 
diff --git a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java
index 8e9a4878..246880b4 100644
--- a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java
+++ b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/CountProcessor.java
@@ -124,54 +124,46 @@ public Map<String, Map<String, Integer>> runCategoryCrossCounts(Query query) {
 	TreeSet<Integer> baseQueryPatientSet = abstractProcessor.getPatientSubsetForQuery(query);
 	query.getRequiredFields().parallelStream().forEach(concept -> {
 		Map<String, Integer> varCount = new TreeMap<>();;
-		try {
-			TreeMap<String, TreeSet<Integer>> categoryMap = abstractProcessor.getCube(concept).getCategoryMap();
-			//We do not have all the categories (aka variables) for required fields, so we need to get them and
-			// then ensure that our base patient set, which is filtered down by our filters. Which may include
-			// not only other required filters, but categorical filters, numerical filters, or genomic filters.
-			// We then need to get the amount a patients for each category and map that to the concept path.
-			categoryMap.forEach((String category, TreeSet<Integer> patientSet)->{
-				//If all the patients are in the base then no need to loop, this would always be true for single
-				// filter queries.
-				if (baseQueryPatientSet.containsAll(patientSet)) {
-					varCount.put(category, patientSet.size());
-				} else {
-					for (Integer patient : patientSet) {
-						if (baseQueryPatientSet.contains(patient)) {
-							// If we have a patient in the base set, we add 1 to the count.
-							// We are only worried about patients in the base set
-							varCount.put(category, varCount.getOrDefault(category, 1) + 1);
-						} else {
-							// If we don't have a patient in the base set, we add 0 to the count.
-							// This is necessary because we need to ensure that all categories are included in the
-							// map, even if they have a count of 0. This is because we are displaying the counts
-							// in a table (or other form).
-							varCount.put(category, varCount.getOrDefault(category, 0));
-						}
-					}
-				}
-			});
-			categoryCounts.put(concept, varCount);
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
+		TreeMap<String, TreeSet<Integer>> categoryMap = abstractProcessor.getCube(concept).getCategoryMap();
+		//We do not have all the categories (aka variables) for required fields, so we need to get them and
+		// then intersect each with our base patient set, which is filtered down by our filters: not only
+		// other required filters, but categorical filters, numerical filters, or genomic filters.
+		// We then need to get the number of patients for each category and map that to the concept path.
+		categoryMap.forEach((String category, TreeSet<Integer> patientSet)->{
+			//If all the patients are in the base then no need to loop, this would always be true for single
+			// filter queries.
+			if (baseQueryPatientSet.containsAll(patientSet)) {
+				varCount.put(category, patientSet.size());
+			} else {
+				for (Integer patient : patientSet) {
+					if (baseQueryPatientSet.contains(patient)) {
+						// If we have a patient in the base set, we add 1 to the count.
+						// We are only worried about patients in the base set
+						varCount.put(category, varCount.getOrDefault(category, 1) + 1);
+					} else {
+						// If we don't have a patient in the base set, we add 0 to the count.
+						// This is necessary because we need to ensure that all categories are included in the
+						// map, even if they have a count of 0. This is because we are displaying the counts
+						// in a table (or other form).
+						varCount.put(category, varCount.getOrDefault(category, 0));
+					}
+				}
+			}
+		});
+		categoryCounts.put(concept, varCount);
 	});
 
 	//For categoryFilters we need to ensure the variables included in the filter are the ones included in our count
 	//map. Then we make sure that the patients who have that variable are also in our base set.
 	query.getCategoryFilters().entrySet().parallelStream().forEach(categoryFilterEntry-> {
 		Map<String, Integer> varCount;
-		try {
-			TreeMap<String, TreeSet<Integer>> categoryMap = abstractProcessor.getCube(categoryFilterEntry.getKey()).getCategoryMap();
-			varCount = new TreeMap<>();
-			categoryMap.forEach((String category, TreeSet<Integer> patientSet)->{
-				if (Arrays.asList(categoryFilterEntry.getValue()).contains(category)) {
-					varCount.put(category, Sets.intersection(patientSet, baseQueryPatientSet).size());
-				}
-			});
-			categoryCounts.put(categoryFilterEntry.getKey(), varCount);
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
+		TreeMap<String, TreeSet<Integer>> categoryMap = abstractProcessor.getCube(categoryFilterEntry.getKey()).getCategoryMap();
+		varCount = new TreeMap<>();
+		categoryMap.forEach((String category, TreeSet<Integer> patientSet)->{
+			if (Arrays.asList(categoryFilterEntry.getValue()).contains(category)) {
+				varCount.put(category, Sets.intersection(patientSet, baseQueryPatientSet).size());
+			}
+		});
+		categoryCounts.put(categoryFilterEntry.getKey(), varCount);
 	});
 	return categoryCounts;
 }
@@ -226,7 +218,7 @@ public Map<String, Object> runVariantCount(Query query) {
 	try {
 		response.put("count", abstractProcessor.getVariantList(query).size());
 	} catch (IOException e) {
-		e.printStackTrace();
+		log.error("Error processing query", e);
 		response.put("count", "0");
 		response.put("message", "An unexpected error occurred while processing the query, please contact us to let us know using the Contact Us option in the Help menu.");
 	}
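
One detail worth knowing about the category-filter branch above: Guava's Sets.intersection() returns an unmodifiable view, not a copy. Its size() iterates the first set and probes the second, so per the Guava javadoc it is cheaper to pass the smaller set first. A small self-contained illustration:

    import java.util.Set;
    import java.util.TreeSet;
    import com.google.common.collect.Sets;

    public class IntersectionCountExample {
        public static void main(String[] args) {
            Set<Integer> categoryPatients = new TreeSet<>(Set.of(1, 2, 3, 42));
            Set<Integer> basePatients = new TreeSet<>(Set.of(2, 42, 99));

            // A lazy view: nothing is copied, and each size() call re-iterates.
            int overlap = Sets.intersection(categoryPatients, basePatients).size();
            System.out.println(overlap); // 2
        }
    }
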
diff --git a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/ResultStoreStream.java b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/ResultStoreStream.java
index c2004436..5bb23d81 100644
--- a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/ResultStoreStream.java
+++ b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/ResultStoreStream.java
@@ -1,13 +1,6 @@
 package edu.harvard.hms.dbmi.avillach.hpds.processing;
 
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.TreeMap;
@@ -140,7 +133,7 @@ public void close() {
 	try {
 		in.close();
 	} catch (IOException e) {
-		e.printStackTrace();
+		throw new UncheckedIOException(e);
 	}
 }
 
@@ -162,8 +155,7 @@ public void open() {
 				writer.printRecord(header);
 				firstRow[0] = false;
 			} catch (IOException e) {
-				// TODO Auto-generated catch block
-				e.printStackTrace();
+				throw new UncheckedIOException(e);
 			}
 		}else {
 			ArrayList records = new ArrayList();
@@ -192,8 +184,7 @@ public void open() {
 			try {
 				writer.printRecord(records);
 			} catch (IOException e) {
-				// TODO Auto-generated catch block
-				e.printStackTrace();
+				throw new UncheckedIOException(e);
 			}
 		}
 	});
@@ -207,8 +198,7 @@ public void open() {
 	} catch (FileNotFoundException e) {
 		throw new RuntimeException("temp file for result not found : " + tempFile.getAbsolutePath());
 	} catch (IOException e) {
-		// TODO Auto-generated catch block
-		e.printStackTrace();
+		throw new UncheckedIOException(e);
 	}
 }
diff --git a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/VariantService.java b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/VariantService.java
index f8d01fdc..616d6f0f 100644
--- a/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/VariantService.java
+++ b/processing/src/main/java/edu/harvard/hms/dbmi/avillach/hpds/processing/VariantService.java
@@ -190,8 +190,7 @@ public void run() {
 				System.arraycopy(variantIndexBucket, 0, _varaiantIndex2, (_i * VARIANT_INDEX_BLOCK_SIZE), variantIndexBucket.length);
 				log.info("loaded " + (_i * VARIANT_INDEX_BLOCK_SIZE) + " block");
 			} catch (IOException e) {
-				// TODO Auto-generated catch block
-				e.printStackTrace();
+				throw new UncheckedIOException(e);
 			}
 		}
 	});
diff --git a/processing/src/test/java/edu/harvard/hms/dbmi/avillach/hpds/Testing504MVariants.java b/processing/src/test/java/edu/harvard/hms/dbmi/avillach/hpds/Testing504MVariants.java
index fe61650d..7a6d63e8 100644
--- a/processing/src/test/java/edu/harvard/hms/dbmi/avillach/hpds/Testing504MVariants.java
+++ b/processing/src/test/java/edu/harvard/hms/dbmi/avillach/hpds/Testing504MVariants.java
@@ -1,6 +1,7 @@
 package edu.harvard.hms.dbmi.avillach.hpds;
 
 import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.math.BigInteger;
 import java.nio.ByteBuffer;
 import java.util.Random;
@@ -39,8 +40,7 @@ private BigInteger generateRandomBitmask() {
 			buf.putInt(x);
 			out.write(buf.array());
 		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
+			throw new UncheckedIOException(e);
 		}
 	}
 	mask = new BigInteger(out.toByteArray());
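
Taken together, these changes move I/O failures from log-and-continue to fail-fast. A caller that still needs to react to an I/O problem can catch the wrapper at a boundary; UncheckedIOException overrides getCause() to return the IOException directly. A minimal sketch with a hypothetical loader:

    import java.io.IOException;
    import java.io.UncheckedIOException;

    public class BoundaryHandlingExample {
        // Hypothetical stand-in for any of the now fail-fast calls in this changeset
        static void loadIndex() {
            throw new UncheckedIOException(new IOException("disk unplugged"));
        }

        public static void main(String[] args) {
            try {
                loadIndex();
            } catch (UncheckedIOException e) {
                IOException cause = e.getCause(); // typed as IOException, no cast needed
                System.err.println("I/O failure at the boundary: " + cause.getMessage());
            }
        }
    }
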