Skip to content

Commit

Permalink
Revert "[ALS-5755] Switch time series processor to ISO timestamps"
Browse files Browse the repository at this point in the history
This reverts commit 7772255.
  • Loading branch information
Luke-Sikina authored Jul 1, 2024
1 parent 76b763a commit 7225a85
Show file tree
Hide file tree
Showing 5 changed files with 7 additions and 60 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -672,7 +672,7 @@ protected PhenoCube getCube(String path) {
 * Useful for federated PIC-SURE networks, where there are fewer
 * guarantees about concept paths.
*/
public Optional<PhenoCube<?>> nullableGetCube(String path) {
protected Optional<PhenoCube<?>> nullableGetCube(String path) {
try {
return Optional.ofNullable(store.get(path));
} catch (InvalidCacheLoadException | ExecutionException e) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,9 @@
package edu.harvard.hms.dbmi.avillach.hpds.processing.timeseries;
package edu.harvard.hms.dbmi.avillach.hpds.processing;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;

import edu.harvard.hms.dbmi.avillach.hpds.processing.AbstractProcessor;
import edu.harvard.hms.dbmi.avillach.hpds.processing.AsyncResult;
import edu.harvard.hms.dbmi.avillach.hpds.processing.HpdsProcessor;
import edu.harvard.hms.dbmi.avillach.hpds.processing.QueryProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -40,16 +36,14 @@ public class TimeseriesProcessor implements HpdsProcessor {
private Logger log = LoggerFactory.getLogger(QueryProcessor.class);

private AbstractProcessor abstractProcessor;
private final TimeSeriesConversionService conversionService;

private final String ID_CUBE_NAME;
private final int ID_BATCH_SIZE;
private final int CACHE_SIZE;

@Autowired
public TimeseriesProcessor(AbstractProcessor abstractProcessor, TimeSeriesConversionService conversionService) {
public TimeseriesProcessor(AbstractProcessor abstractProcessor) {
this.abstractProcessor = abstractProcessor;
this.conversionService = conversionService;
// todo: handle these via spring annotations
CACHE_SIZE = Integer.parseInt(System.getProperty("CACHE_SIZE", "100"));
ID_BATCH_SIZE = Integer.parseInt(System.getProperty("ID_BATCH_SIZE", "0"));
Expand Down Expand Up @@ -121,24 +115,14 @@ private void addDataForConcepts(Collection<String> pathList, Set<String> exporte
if (cube.isStringType()) {
KeyAndValue<String> keyAndValue = (KeyAndValue) kvObj;
// "PATIENT_NUM","CONCEPT_PATH","NVAL_NUM","TVAL_CHAR","TIMESTAMP"
String[] entryData = {
keyAndValue.getKey().toString(),
conceptPath,
"",
keyAndValue.getValue(),
conversionService.toISOString(keyAndValue.getTimestamp())
};
String[] entryData = { keyAndValue.getKey().toString(), conceptPath, "", keyAndValue.getValue(),
keyAndValue.getTimestamp().toString() };
dataEntries.add(entryData);
} else { // numeric
KeyAndValue<Double> keyAndValue = (KeyAndValue) kvObj;
// "PATIENT_NUM","CONCEPT_PATH","NVAL_NUM","TVAL_CHAR","TIMESTAMP"
String[] entryData = {
keyAndValue.getKey().toString(),
conceptPath,
keyAndValue.getValue().toString(),
"",
conversionService.toISOString(keyAndValue.getTimestamp())
};
String[] entryData = { keyAndValue.getKey().toString(), conceptPath,
keyAndValue.getValue().toString(), "", keyAndValue.getTimestamp().toString() };
dataEntries.add(entryData);
}
//batch exports so we don't take double memory (valuesForKeys + dataEntries could be a lot of data points)
Expand Down

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import java.util.function.Predicate;
import java.util.stream.Collectors;

import edu.harvard.hms.dbmi.avillach.hpds.processing.timeseries.TimeseriesProcessor;
import edu.harvard.hms.dbmi.avillach.hpds.service.util.QueryDecorator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down

0 comments on commit 7225a85

Please sign in to comment.