Merge branch 'master' into dependabot/maven/geotools.version-31.3
jfbischoff authored Aug 13, 2024
2 parents 799bf54 + 33ad29f commit 1cd0c59
Showing 45 changed files with 1,178 additions and 258 deletions.
@@ -413,6 +413,8 @@ public static void runWithDefaults(Class<? extends MATSimApplication> clazz, Str

if (ApplicationUtils.isRunFromDesktop() && args.length == 0) {

System.setProperty("MATSIM_GUI_DESKTOP", "true");

if (defaultArgs.length > 0) {
String value = String.join(ARGS_DELIMITER, defaultArgs);
System.setProperty("MATSIM_GUI_ARGS", value);
@@ -40,6 +40,13 @@ public Integer call() throws Exception {

Gui gui = f.get();

// Set the current working directory to be used in the GUI when run from the command line.
// If the GUI is started from the desktop, the working directory is not overwritten.

// The assumption is that a user starting something from the command line expects the working directory to remain the same.
if (!System.getProperty("MATSIM_GUI_DESKTOP", "false").equals("true"))
gui.setWorkingDirectory(new File(""));

while (gui.isShowing())
Thread.sleep(250);

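Taken together, the two hunks above let the desktop launcher flag itself through a system property, so the GUI only resets the working directory for command-line starts. Below is a minimal sketch of the consuming side for MATSIM_GUI_ARGS; it is not part of this commit, and the tab character stands in for the real ARGS_DELIMITER constant defined in MATSimApplication.

// Hypothetical sketch: reading back the properties set by MATSimApplication.runWithDefaults().
boolean desktopStart = Boolean.parseBoolean(System.getProperty("MATSIM_GUI_DESKTOP", "false"));
String joinedArgs = System.getProperty("MATSIM_GUI_ARGS", "");
// "\t" is only a placeholder assumption for ARGS_DELIMITER.
String[] defaultArgs = joinedArgs.isEmpty() ? new String[0] : joinedArgs.split("\t");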
@@ -13,11 +13,14 @@
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.matsim.api.core.v01.Coord;
import org.matsim.application.avro.XYTData;
import org.matsim.core.config.Config;
import org.matsim.core.utils.io.IOUtils;
import org.matsim.core.utils.misc.Time;
import tech.tablesaw.api.*;
import tech.tablesaw.io.csv.CsvReadOptions;

@@ -29,7 +32,8 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.Collectors;

import static org.geotools.gml3.v3_2.GML.coordinateSystem;

/**
* Merges noise data from multiple files into one file.
@@ -44,17 +48,16 @@ final class MergeNoiseOutput {
*/
private static final boolean CREATE_CSV_FILES = false;

private final String[] inputPath;
private final Path outputDirectory;
private final String crs;
private final String[] labels = {"immission", "emission"};
private final int minTime = 3600;
private int maxTime = 24 * 3600;

MergeNoiseOutput(String[] inputPath, Path outputDirectory, String crs) {
this.inputPath = inputPath;
this.outputDirectory = outputDirectory;
this.crs = crs;
private final Map<String,Float> totalReceiverPointValues = new HashMap<>();

MergeNoiseOutput(Path path, String coordinateSystem ) {
this.outputDirectory = path;
this.crs = coordinateSystem;
}

/**
@@ -90,25 +93,9 @@ public void setMaxTime(int maxTime) {
* Merges noise data from multiple files into one file.
*/
public void run() {

// Loop over all paths
for (int i = 0; i < labels.length; i++) {

// Select the correct method based on the label
switch (labels[i]) {
case "immission" -> {
if (CREATE_CSV_FILES) {
mergeImmissionsCSV(inputPath[i], labels[i]);
} else {
mergeImissions(inputPath[i], labels[i]);
}

}
case "emission" -> mergeEmissions(inputPath[i], labels[i]);
default -> log.warn("Unknown path: " + inputPath[i]);
}

}
mergeReceiverPointData(outputDirectory + "/immissions/", "immission");
mergeReceiverPointData(outputDirectory + "/damages_receiverPoint/", "damages_receiverPoint");
mergeLinkData(outputDirectory.toString() + "/emissions/", "emission");
}

/**
@@ -118,6 +105,7 @@ public void run() {
* @param output
*/
private void writeAvro(XYTData xytData, File output) {
log.info(String.format("Start writing avro file to %s", output.toString() ));
DatumWriter<XYTData> datumWriter = new SpecificDatumWriter<>(XYTData.class);
try (DataFileWriter<XYTData> dataFileWriter = new DataFileWriter<>(datumWriter)) {
dataFileWriter.setCodec(CodecFactory.deflateCodec(9));
@@ -128,7 +116,7 @@ private void writeAvro(XYTData xytData, File output) {
}
}
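The per-hour and per-day files produced by writeAvro can be read back with Avro's specific reader. A minimal sketch, not part of this commit, assuming the generated XYTData class and the standard Avro imports (DataFileReader, DatumReader, SpecificDatumReader) plus java.io are available:

// Hypothetical read-back of a merged file, e.g. "immission_per_hour.avro".
DatumReader<XYTData> datumReader = new SpecificDatumReader<>(XYTData.class);
try (DataFileReader<XYTData> fileReader = new DataFileReader<>(new File("immission_per_hour.avro"), datumReader)) {
	while (fileReader.hasNext()) {
		XYTData xyt = fileReader.next();
		System.out.println(xyt.getCrs() + ": " + xyt.getTimestamps().size() + " time bins");
	}
} catch (IOException e) {
	throw new UncheckedIOException(e);
}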

private void mergeEmissions(String pathParameter, String label) {
private void mergeLinkData(String pathParameter, String label) {
log.info("Merging emissions data for label {}", label);
Object2DoubleMap<String> mergedData = new Object2DoubleOpenHashMap<>();
Table csvOutputMerged = Table.create(TextColumn.create("Link Id"), DoubleColumn.create("value"));
@@ -143,9 +131,8 @@ private void mergeEmissions(String pathParameter, String label) {
.separator(';').build());

for (Row row : table) {
// index for Noise Emission xx:xx:xx -> 7
String linkId = row.getString("Link Id");
double value = row.getDouble(7);
double value = row.getDouble(row.columnCount() - 1);
mergedData.mergeDouble(linkId, value, Double::max);

}
@@ -165,38 +152,49 @@
}

/**
* Merges the immissions data
* Merges receiverPoint data (written by {@link org.matsim.contrib.noise.NoiseWriter})
*
* @param pathParameter path to the immissions data
* @param label label for the immissions data
* @param outputDir path to the receiverPoint data
* @param label label for the receiverPoint data (which kind of data)
*/
private void mergeImissions(String pathParameter, String label) {
private void mergeReceiverPointData(String outputDir, String label) {

// data per time step, maps coord to value
Int2ObjectMap<Object2FloatMap<FloatFloatPair>> data = new Int2ObjectOpenHashMap<>();

// Loop over all files
//TODO could be adjusted to time bin size from noise config group
String substrToCapitalize = null;
for (int time = minTime; time <= maxTime; time += 3600) {

String path = pathParameter + label + "_" + round(time, 1) + ".csv";
String timeDataFile = outputDir + label + "_" + round(time, 1) + ".csv";

Object2FloatOpenHashMap<FloatFloatPair> values = new Object2FloatOpenHashMap<>();

if (!Files.exists(Path.of(path))) {
log.warn("File {} does not exist", path);
if (!Files.exists(Path.of(timeDataFile))) {
log.warn("File {} does not exist", timeDataFile);
continue;
}

// Read the file
Table table = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path))
.columnTypesPartial(Map.of("x", ColumnType.FLOAT, "y", ColumnType.FLOAT, "Receiver Point Id", ColumnType.INTEGER, "t", ColumnType.DOUBLE))
// we need "damages_receiverPoint" -> "Damages 01:00:00" and "immission" -> "Immission 01:00:00"
substrToCapitalize = label.contains("_") ? label.substring(0, label.lastIndexOf("_")) : label;
String valueHeader = StringUtils.capitalize(substrToCapitalize) + " " + Time.writeTime(time, Time.TIMEFORMAT_HHMMSS);

// Read the data file
Table dataTable = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(timeDataFile))
.columnTypesPartial(Map.of("x", ColumnType.FLOAT,
"y", ColumnType.FLOAT,
"Receiver Point Id", ColumnType.INTEGER,
"t", ColumnType.DOUBLE,
valueHeader, ColumnType.DOUBLE))
.sample(false)
.separator(';').build());

// Loop over all rows in the file
for (Row row : table) {
// Loop over all rows in the data file
for (Row row : dataTable) {
float x = row.getFloat("x");
float y = row.getFloat("y");
float value = (float) row.getDouble(1); // 1
float value = (float) row.getDouble(valueHeader);
FloatFloatPair coord = FloatFloatPair.of(x, y);
values.put(coord, value);
}
@@ -232,7 +230,7 @@ private void mergeImissions(String pathParameter, String label) {
}
}

xytHourData.setData(Map.of("imissions", raw));
xytHourData.setData(Map.of(label, raw));
xytHourData.setCrs(crs);

File out = outputDirectory.getParent().resolve(label + "_per_hour.avro").toFile();
@@ -254,15 +252,18 @@
xytDayData.setTimestamps(List.of(0));
xytDayData.setXCoords(xCoords);
xytDayData.setYCoords(yCoords);
xytDayData.setData(Map.of("imissions", raw));
xytDayData.setData(Map.of(label, raw));
xytDayData.setCrs(crs);

File outDay = outputDirectory.getParent().resolve(label + "_per_day.avro").toFile();

writeAvro(xytDayData, outDay);
//cache the overall sum
this.totalReceiverPointValues.put(substrToCapitalize, raw.stream().reduce(0f, Float::sum));
}

// Merges the immissions data

@Deprecated
private void mergeImmissionsCSV(String pathParameter, String label) {
log.info("Merging immissions data for label {}", label);
@@ -278,7 +279,10 @@ private void mergeImmissionsCSV(String pathParameter, String label) {

// Read the file
Table table = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path))
.columnTypesPartial(Map.of("x", ColumnType.DOUBLE, "y", ColumnType.DOUBLE, "Receiver Point Id", ColumnType.INTEGER, "t", ColumnType.DOUBLE))
.columnTypesPartial(Map.of("x", ColumnType.DOUBLE,
"y", ColumnType.DOUBLE,
"Receiver Point Id", ColumnType.INTEGER,
"t", ColumnType.DOUBLE))
.sample(false)
.separator(';').build());

@@ -319,4 +323,7 @@ private void mergeImmissionsCSV(String pathParameter, String label) {
log.info("Merged noise data written to {} ", outPerDay);

}
public Map<String, Float> getTotalReceiverPointValues() {
return totalReceiverPointValues;
}
}
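For context, a sketch of how the reworked class could be driven from the same package; it is not part of this commit, and the output path and CRS below are placeholder assumptions.

// Hypothetical caller; the directory is expected to contain the per-time-bin CSVs
// under immissions/, damages_receiverPoint/ and emissions/.
MergeNoiseOutput merger = new MergeNoiseOutput(Path.of("output/noise-analysis"), "EPSG:25832");
merger.setMaxTime(24 * 3600);
merger.run();
// New in this commit: overall sums per data kind (e.g. "immission", "damages").
Map<String, Float> totals = merger.getTotalReceiverPointValues();
totals.forEach((kind, total) -> System.out.println(kind + " total: " + total));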