diff --git a/contribs/application/pom.xml b/contribs/application/pom.xml index 118a3b9c0c2..95ea86036d9 100644 --- a/contribs/application/pom.xml +++ b/contribs/application/pom.xml @@ -1,4 +1,5 @@ - + org.matsim @@ -20,12 +21,21 @@ + + 1.11.3 + + tech.tablesaw tablesaw-core + + org.apache.avro + avro + ${avro.version} + org.matsim.contrib @@ -140,4 +150,27 @@ + + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + ${project.basedir}/src/main/avro/ + ${project.basedir}/src/main/java/ + + + + + + + diff --git a/contribs/application/src/main/avro/xyt.avsc b/contribs/application/src/main/avro/xyt.avsc new file mode 100644 index 00000000000..0d56d538c99 --- /dev/null +++ b/contribs/application/src/main/avro/xyt.avsc @@ -0,0 +1,47 @@ +{ + "namespace": "org.matsim.application.avro", + "type": "record", + "name": "XYTData", + "fields": [ + { + "name": "crs", + "type": "string", + "doc": "Coordinate reference system" + }, + { + "name": "xCoords", + "doc": "List of x coordinates", + "type": { + "type": "array", + "items": "float" + } + }, + { + "name": "yCoords", + "doc": "List of y coordinates", + "type": { + "type": "array", + "items": "float" + } + }, + { + "name": "timestamps", + "doc": "List of timestamps in seconds", + "type": { + "type": "array", + "items": "int" + } + }, + { + "name": "data", + "doc": "XYT data for each dataset. The key is the name, the data is stored in one chunk of data.", + "type": { + "type": "map", + "values": { + "type": "array", + "items": "float" + } + } + } + ] +} \ No newline at end of file diff --git a/contribs/application/src/main/java/org/matsim/application/analysis/emissions/AirPollutionAnalysis.java b/contribs/application/src/main/java/org/matsim/application/analysis/emissions/AirPollutionAnalysis.java index 4c193d6b83c..f8210c4aa15 100644 --- a/contribs/application/src/main/java/org/matsim/application/analysis/emissions/AirPollutionAnalysis.java +++ b/contribs/application/src/main/java/org/matsim/application/analysis/emissions/AirPollutionAnalysis.java @@ -142,6 +142,7 @@ private Config prepareConfig() { config.transit().setTransitScheduleFile(ApplicationUtils.matchInput("transitSchedule", input.getRunDirectory()).toAbsolutePath().toString()); config.transit().setVehiclesFile(ApplicationUtils.matchInput("transitVehicles", input.getRunDirectory()).toAbsolutePath().toString()); config.plans().setInputFile(null); + config.facilities().setInputFile(null); config.eventsManager().setNumberOfThreads(null); config.eventsManager().setEstimatedNumberOfEvents(null); config.global().setNumberOfThreads(1); @@ -337,8 +338,8 @@ private void writeTimeDependentRaster(Network network, Config config, EmissionsO Coord coord = raster.getCoordForIndex(xi, yi); double value = timeBin.getValue().get(Pollutant.CO2_TOTAL).getValueByIndex(xi, yi); -// if (value == 0) -// continue; + if (value == 0) + continue; printer.print(timeBin.getStartTime()); printer.print(coord.getX()); diff --git a/contribs/application/src/main/java/org/matsim/application/analysis/noise/MergeNoiseOutput.java b/contribs/application/src/main/java/org/matsim/application/analysis/noise/MergeNoiseOutput.java new file mode 100644 index 00000000000..aef1ec23d05 --- /dev/null +++ b/contribs/application/src/main/java/org/matsim/application/analysis/noise/MergeNoiseOutput.java @@ -0,0 +1,322 @@ +package org.matsim.application.analysis.noise; + +import it.unimi.dsi.fastutil.floats.FloatArrayList; +import it.unimi.dsi.fastutil.floats.FloatFloatPair; +import 
it.unimi.dsi.fastutil.floats.FloatList; +import it.unimi.dsi.fastutil.ints.Int2ObjectMap; +import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; +import it.unimi.dsi.fastutil.objects.Object2DoubleMap; +import it.unimi.dsi.fastutil.objects.Object2DoubleOpenHashMap; +import it.unimi.dsi.fastutil.objects.Object2FloatMap; +import it.unimi.dsi.fastutil.objects.Object2FloatOpenHashMap; +import org.apache.avro.file.CodecFactory; +import org.apache.avro.file.DataFileWriter; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.matsim.api.core.v01.Coord; +import org.matsim.application.avro.XYTData; +import org.matsim.core.utils.io.IOUtils; +import tech.tablesaw.api.*; +import tech.tablesaw.io.csv.CsvReadOptions; + +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.*; +import java.util.stream.Collectors; + +/** + * Merges noise data from multiple files into one file. + */ +final class MergeNoiseOutput { + + private static final Logger log = LogManager.getLogger(MergeNoiseOutput.class); + + + /** + * If true, a CSV file is created for immissions. Deprecated, this code will be removed. + */ + private static final boolean CREATE_CSV_FILES = false; + + private final String[] inputPath; + private final Path outputDirectory; + private final String crs; + private final String[] labels = {"immission", "emission"}; + private final int minTime = 3600; + private int maxTime = 24 * 3600; + + MergeNoiseOutput(String[] inputPath, Path outputDirectory, String crs) { + this.inputPath = inputPath; + this.outputDirectory = outputDirectory; + this.crs = crs; + } + + /** + * Rounds a value to a given precision. + * + * @param value value to round + * @param precision number of decimal places + * @return rounded value + */ + private static double round(double value, int precision) { + return BigDecimal.valueOf(value).setScale(precision, RoundingMode.HALF_UP).doubleValue(); + } + + /** + * Returns the maximum time. + * + * @return maxTime value + */ + public int getMaxTime() { + return maxTime; + } + + /** + * Sets the maximum time. + * + * @param maxTime value + */ + public void setMaxTime(int maxTime) { + this.maxTime = maxTime; + } + + /** + * Merges noise data from multiple files into one file. + */ + public void run() { + + // Loop over all paths + for (int i = 0; i < labels.length; i++) { + + // Select the correct method based on the label + switch (labels[i]) { + case "immission" -> { + if (CREATE_CSV_FILES) { + mergeImmissionsCSV(inputPath[i], labels[i]); + } else { + mergeImissions(inputPath[i], labels[i]); + } + + } + case "emission" -> mergeEmissions(inputPath[i], labels[i]); + default -> log.warn("Unknown path: " + inputPath[i]); + } + + } + } + + /** + * Writes the given data to the given file. 
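+	 * The records are written as an Avro container file, compressed with the deflate codec (level 9).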
+ * + * @param xytData + * @param output + */ + private void writeAvro(XYTData xytData, File output) { + DatumWriter datumWriter = new SpecificDatumWriter<>(XYTData.class); + try (DataFileWriter dataFileWriter = new DataFileWriter<>(datumWriter)) { + dataFileWriter.setCodec(CodecFactory.deflateCodec(9)); + dataFileWriter.create(xytData.getSchema(), IOUtils.getOutputStream(IOUtils.getFileUrl(output.toString()), false)); + dataFileWriter.append(xytData); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private void mergeEmissions(String pathParameter, String label) { + log.info("Merging emissions data for label {}", label); + Object2DoubleMap mergedData = new Object2DoubleOpenHashMap<>(); + Table csvOutputMerged = Table.create(TextColumn.create("Link Id"), DoubleColumn.create("value")); + + for (double time = minTime; time <= maxTime; time += 3600.) { + String path = pathParameter + label + "_" + this.round(time, 1) + ".csv"; + + // Read the file + Table table = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path)) + .columnTypesPartial(Map.of("Link Id", ColumnType.TEXT)) + .sample(false) + .separator(';').build()); + + for (Row row : table) { + // index for Noise Emission xx:xx:xx -> 7 + String linkId = row.getString("Link Id"); + double value = row.getDouble(7); + mergedData.mergeDouble(linkId, value, Double::max); + + } + } + + for (Object2DoubleMap.Entry entry : mergedData.object2DoubleEntrySet()) { + if (entry.getDoubleValue() >= 0.0) { + Row writeRow = csvOutputMerged.appendRow(); + writeRow.setString("Link Id", entry.getKey()); + writeRow.setDouble("value", entry.getDoubleValue()); + } + } + + File out = outputDirectory.getParent().resolve(label + "_per_day.csv").toFile(); + csvOutputMerged.write().csv(out); + log.info("Merged noise data written to {} ", out); + } + + /** + * Merges the immissions data + * + * @param pathParameter path to the immissions data + * @param label label for the immissions data + */ + private void mergeImissions(String pathParameter, String label) { + + // data per time step, maps coord to value + Int2ObjectMap> data = new Int2ObjectOpenHashMap<>(); + + // Loop over all files + for (int time = minTime; time <= maxTime; time += 3600) { + + String path = pathParameter + label + "_" + round(time, 1) + ".csv"; + Object2FloatOpenHashMap values = new Object2FloatOpenHashMap<>(); + + if (!Files.exists(Path.of(path))) { + log.warn("File {} does not exist", path); + continue; + } + + // Read the file + Table table = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path)) + .columnTypesPartial(Map.of("x", ColumnType.FLOAT, "y", ColumnType.FLOAT, "Receiver Point Id", ColumnType.INTEGER, "t", ColumnType.DOUBLE)) + .sample(false) + .separator(';').build()); + + // Loop over all rows in the file + for (Row row : table) { + float x = row.getFloat("x"); + float y = row.getFloat("y"); + float value = (float) row.getDouble(1); // 1 + FloatFloatPair coord = FloatFloatPair.of(x, y); + values.put(coord, value); + } + + data.put(time, values); + } + + // hour data + XYTData xytHourData = new XYTData(); + + xytHourData.setTimestamps(data.keySet().intStream().boxed().toList()); + List xCoords = data.values().stream().flatMap(m -> m.keySet().stream().map(FloatFloatPair::firstFloat)).distinct().sorted().toList(); + List yCoords = data.values().stream().flatMap(m -> m.keySet().stream().map(FloatFloatPair::secondFloat)).distinct().sorted().toList(); + + xytHourData.setXCoords(xCoords); + xytHourData.setYCoords(yCoords); + 
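+
+		// Flatten the values into a single list in timestamp-major order: for each timestamp,
+		// all x coordinates are iterated and, for each x, all y coordinates. Coordinates without
+		// a value in a time step are stored as 0. Positive values are additionally summed per
+		// coordinate in perDay for the per-day output written below.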
+ FloatList raw = new FloatArrayList(); + + Object2FloatMap perDay = new Object2FloatOpenHashMap<>(); + + for (Integer ts : xytHourData.getTimestamps()) { + Object2FloatMap d = data.get((int) ts); + + for (Float x : xytHourData.getXCoords()) { + for (Float y : xytHourData.getYCoords()) { + FloatFloatPair coord = FloatFloatPair.of(x, y); + float v = d.getOrDefault(coord, 0); + raw.add(v); + if (v > 0) + perDay.mergeFloat(coord, v, Float::sum); + } + } + } + + xytHourData.setData(Map.of("imissions", raw)); + xytHourData.setCrs(crs); + + File out = outputDirectory.getParent().resolve(label + "_per_hour.avro").toFile(); + + writeAvro(xytHourData, out); + + raw = new FloatArrayList(); + // day data + XYTData xytDayData = new XYTData(); + + for (Float x : xytHourData.getXCoords()) { + for (Float y : xytHourData.getYCoords()) { + FloatFloatPair coord = FloatFloatPair.of(x, y); + float v = perDay.getOrDefault(coord, 0); + raw.add(v); + } + } + + xytDayData.setTimestamps(List.of(0)); + xytDayData.setXCoords(xCoords); + xytDayData.setYCoords(yCoords); + xytDayData.setData(Map.of("imissions", raw)); + xytDayData.setCrs(crs); + + File outDay = outputDirectory.getParent().resolve(label + "_per_day.avro").toFile(); + + writeAvro(xytDayData, outDay); + } + + // Merges the immissions data + @Deprecated + private void mergeImmissionsCSV(String pathParameter, String label) { + log.info("Merging immissions data for label {}", label); + Object2DoubleMap mergedData = new Object2DoubleOpenHashMap<>(); + + Table csvOutputPerHour = Table.create(DoubleColumn.create("time"), DoubleColumn.create("x"), DoubleColumn.create("y"), DoubleColumn.create("value")); + Table csvOutputMerged = Table.create(DoubleColumn.create("time"), DoubleColumn.create("x"), DoubleColumn.create("y"), DoubleColumn.create("value")); + + // Loop over all files + for (double time = minTime; time <= maxTime; time += 3600.) 
{ + + String path = pathParameter + label + "_" + round(time, 1) + ".csv"; + + // Read the file + Table table = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path)) + .columnTypesPartial(Map.of("x", ColumnType.DOUBLE, "y", ColumnType.DOUBLE, "Receiver Point Id", ColumnType.INTEGER, "t", ColumnType.DOUBLE)) + .sample(false) + .separator(';').build()); + + // Loop over all rows in the file + for (Row row : table) { + double x = row.getDouble("x"); + double y = row.getDouble("y"); + Coord coord = new Coord(x, y); + double value = row.getDouble(1); // 1 + + mergedData.mergeDouble(coord, value, Double::max); + + Row writeRow = csvOutputPerHour.appendRow(); + writeRow.setDouble("time", time); + writeRow.setDouble("x", coord.getX()); + writeRow.setDouble("y", coord.getY()); + writeRow.setDouble("value", value); + } + } + + // Create the merged data + for (Object2DoubleMap.Entry entry : mergedData.object2DoubleEntrySet()) { + Row writeRow = csvOutputMerged.appendRow(); + writeRow.setDouble("time", 0.0); + writeRow.setDouble("x", entry.getKey().getX()); + writeRow.setDouble("y", entry.getKey().getY()); + writeRow.setDouble("value", entry.getDoubleValue()); + } + + // Write the merged data (per hour) to a file + File out = outputDirectory.getParent().resolve(label + "_per_hour.csv").toFile(); + csvOutputPerHour.write().csv(out); + log.info("Merged noise data written to {} ", out); + + // Write the merged data (per day) to a file + File outPerDay = outputDirectory.getParent().resolve(label + "_per_day.csv").toFile(); + csvOutputMerged.write().csv(outPerDay); + log.info("Merged noise data written to {} ", outPerDay); + + } +} diff --git a/contribs/application/src/main/java/org/matsim/application/analysis/noise/NoiseAnalysis.java b/contribs/application/src/main/java/org/matsim/application/analysis/noise/NoiseAnalysis.java index 3e99b052194..8238eec25da 100644 --- a/contribs/application/src/main/java/org/matsim/application/analysis/noise/NoiseAnalysis.java +++ b/contribs/application/src/main/java/org/matsim/application/analysis/noise/NoiseAnalysis.java @@ -1,13 +1,14 @@ package org.matsim.application.analysis.noise; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.locationtech.jts.geom.Envelope; import org.matsim.api.core.v01.Coord; import org.matsim.api.core.v01.Scenario; +import org.matsim.application.ApplicationUtils; +import org.matsim.application.CommandSpec; import org.matsim.application.MATSimAppCommand; -import org.matsim.application.options.CrsOptions; +import org.matsim.application.options.InputOptions; +import org.matsim.application.options.OutputOptions; import org.matsim.application.options.ShpOptions; -import org.matsim.contrib.noise.MergeNoiseCSVFile; import org.matsim.contrib.noise.NoiseConfigGroup; import org.matsim.contrib.noise.NoiseOfflineCalculation; import org.matsim.contrib.noise.ProcessNoiseImmissions; @@ -17,142 +18,108 @@ import org.matsim.core.utils.geometry.CoordinateTransformation; import picocli.CommandLine; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; +import java.nio.file.Path; +import java.util.Objects; +import java.util.Set; @CommandLine.Command( - name = "noise-analysis", - description = "Noise analysis", - mixinStandardHelpOptions = true, - showDefaultValues = true + name = "noise-analysis", + description = "Noise analysis", + mixinStandardHelpOptions = true, + showDefaultValues = true +) +@CommandSpec( + requireRunDirectory = true, + produces = { + 
"emission_per_day.csv", + "immission_per_day.%s", + "immission_per_hour.%s" + } ) public class NoiseAnalysis implements MATSimAppCommand { - private static final Logger log = LogManager.getLogger(NoiseAnalysis.class); - @CommandLine.Option(names = "--directory", description = "Path to run directory", required = true) - private String runDirectory; + @CommandLine.Mixin + private final InputOptions input = InputOptions.ofCommand(NoiseAnalysis.class); + @CommandLine.Mixin + private final OutputOptions output = OutputOptions.ofCommand(NoiseAnalysis.class); - @CommandLine.Option(names = "--runId", description = "Pattern to match runId.", defaultValue = "") - private String runId; + @CommandLine.Mixin + private final ShpOptions shp = new ShpOptions(); - @CommandLine.Option(names = "--receiver-point-gap", description = "The gap between analysis points in meter", - defaultValue = "250") - private double receiverPointGap; + @CommandLine.Option(names = "--consider-activities", split = ",", description = "Considered activities for noise calculation", defaultValue = "h,w,home,work") + private Set considerActivities; @CommandLine.Option(names = "--noise-barrier", description = "Path to the noise barrier File", defaultValue = "") private String noiseBarrierFile; - @CommandLine.Mixin - private CrsOptions crs = new CrsOptions(); - - @CommandLine.Mixin - private ShpOptions shp = new ShpOptions(); - - public static void main(String[] args) { new NoiseAnalysis().execute(args); } @Override public Integer call() throws Exception { - Config config = ConfigUtils.createConfig(new NoiseConfigGroup()); - - if (crs.getInputCRS() == null || crs.getInputCRS().isBlank()) { - log.error("Input CRS must be set [--input-crs]"); - return 2; - } + Config config = prepareConfig(); - if (shp.getShapeFile() == null) { - log.error("Shp file is always required [--shp]"); - return 2; - } - - if (!runDirectory.endsWith("/")) runDirectory = runDirectory + "/"; - - config.global().setCoordinateSystem(crs.getInputCRS()); - config.controller().setRunId(runId); - if (!runId.equals("")) { - config.network().setInputFile(runDirectory + runId + ".output_network.xml.gz"); - config.plans().setInputFile(runDirectory + runId + ".output_plans.xml.gz"); - } else { - config.network().setInputFile(runDirectory + "output_network.xml.gz"); - config.plans().setInputFile(runDirectory + "output_plans.xml.gz"); - } - config.controller().setOutputDirectory(runDirectory); + config.controller().setOutputDirectory(input.getRunDirectory().toString()); // adjust the default noise parameters NoiseConfigGroup noiseParameters = ConfigUtils.addOrGetModule(config, NoiseConfigGroup.class); - noiseParameters.setReceiverPointGap(receiverPointGap); - noiseParameters.setConsideredActivitiesForReceiverPointGridArray(new String[]{"h", "w", "home", "work"}); - noiseParameters.setConsideredActivitiesForDamageCalculationArray(new String[]{"h", "w", "home", "work"}); + noiseParameters.setConsideredActivitiesForReceiverPointGridArray(considerActivities.toArray(String[]::new)); + noiseParameters.setConsideredActivitiesForDamageCalculationArray(considerActivities.toArray(String[]::new)); if (shp.getShapeFile() != null) { - CoordinateTransformation ct = shp.createInverseTransformation(crs.getInputCRS()); - double maxX = Double.MIN_VALUE; // Initialization with the opposite min/max - double maxY = Double.MIN_VALUE; - double minX = Double.MAX_VALUE; - double minY = Double.MAX_VALUE; - List coords = Arrays.stream(shp.getGeometry().getCoordinates()). - map(c -> new Coord(c.x, c.y)). 
- collect(Collectors.toList()); - for (Coord coord : coords) { - ct.transform(coord); - double x = coord.getX(); - double y = coord.getY(); - if (x > maxX) { - maxX = x; - } - - if (x < minX) { - minX = x; - } - - if (y > maxY) { - maxY = y; - } - - if (y < minY) { - minY = y; - } - } - noiseParameters.setReceiverPointsGridMinX(minX); - noiseParameters.setReceiverPointsGridMinY(minY); - noiseParameters.setReceiverPointsGridMaxX(maxX); - noiseParameters.setReceiverPointsGridMaxY(maxY); + CoordinateTransformation ct = shp.createInverseTransformation(config.global().getCoordinateSystem()); + + Envelope bbox = shp.getGeometry().getEnvelopeInternal(); + + Coord minCoord = ct.transform(new Coord(bbox.getMinX(), bbox.getMinY())); + Coord maxCoord = ct.transform(new Coord(bbox.getMaxX(), bbox.getMaxY())); + + noiseParameters.setReceiverPointsGridMinX(minCoord.getX()); + noiseParameters.setReceiverPointsGridMinY(minCoord.getY()); + noiseParameters.setReceiverPointsGridMaxX(maxCoord.getX()); + noiseParameters.setReceiverPointsGridMaxY(maxCoord.getY()); } noiseParameters.setNoiseComputationMethod(NoiseConfigGroup.NoiseComputationMethod.RLS19); - if (!noiseBarrierFile.equals("")) { - noiseParameters.setNoiseBarriersSourceCRS(crs.getInputCRS()); + if (!Objects.equals(noiseBarrierFile, "")) { + noiseParameters.setNoiseBarriersSourceCRS(config.global().getCoordinateSystem()); noiseParameters.setConsiderNoiseBarriers(true); noiseParameters.setNoiseBarriersFilePath(noiseBarrierFile); } - // ... Scenario scenario = ScenarioUtils.loadScenario(config); - String outputDirectory = runDirectory + "analysis/"; - NoiseOfflineCalculation noiseCalculation = new NoiseOfflineCalculation(scenario, outputDirectory); - noiseCalculation.run(); + String outputFilePath = output.getPath().getParent() == null ? "." 
: output.getPath().getParent().toString(); + NoiseOfflineCalculation noiseCalculation = new NoiseOfflineCalculation(scenario, outputFilePath); + outputFilePath += "/noise-analysis"; + noiseCalculation.run(); - String outputFilePath = outputDirectory + "noise-analysis/"; - ProcessNoiseImmissions process = new ProcessNoiseImmissions(outputFilePath + "immissions/", outputFilePath + "receiverPoints/receiverPoints.csv", noiseParameters.getReceiverPointGap()); + ProcessNoiseImmissions process = new ProcessNoiseImmissions(outputFilePath + "/immissions/", outputFilePath + "/receiverPoints/receiverPoints.csv", noiseParameters.getReceiverPointGap()); process.run(); - final String[] labels = {"immission", "consideredAgentUnits", "damages_receiverPoint"}; - final String[] workingDirectories = {outputFilePath + "/immissions/", outputFilePath + "/consideredAgentUnits/", outputFilePath + "/damages_receiverPoint/"}; + final String[] paths = {outputFilePath + "/immissions/", outputFilePath + "/emissions/"}; + MergeNoiseOutput mergeNoiseOutput = new MergeNoiseOutput(paths, Path.of(outputFilePath), config.global().getCoordinateSystem()); + mergeNoiseOutput.run(); - MergeNoiseCSVFile merger = new MergeNoiseCSVFile(); - merger.setReceiverPointsFile(outputFilePath + "receiverPoints/receiverPoints.csv"); - merger.setOutputDirectory(outputFilePath); - merger.setTimeBinSize(noiseParameters.getTimeBinSizeNoiseComputation()); - merger.setWorkingDirectory(workingDirectories); - merger.setLabel(labels); - merger.run(); return 0; } + private Config prepareConfig() { + Config config = ConfigUtils.loadConfig(ApplicationUtils.matchInput("config.xml", input.getRunDirectory()).toAbsolutePath().toString(), new NoiseConfigGroup()); + + config.vehicles().setVehiclesFile(ApplicationUtils.matchInput("vehicles", input.getRunDirectory()).toAbsolutePath().toString()); + config.network().setInputFile(ApplicationUtils.matchInput("network", input.getRunDirectory()).toAbsolutePath().toString()); + config.transit().setTransitScheduleFile(null); + config.transit().setVehiclesFile(null); + config.plans().setInputFile(ApplicationUtils.matchInput("plans", input.getRunDirectory()).toAbsolutePath().toString()); + config.facilities().setInputFile(null); + config.eventsManager().setNumberOfThreads(null); + config.eventsManager().setEstimatedNumberOfEvents(null); + config.global().setNumberOfThreads(1); + + return config; + } } diff --git a/contribs/application/src/main/java/org/matsim/application/avro/XYTData.java b/contribs/application/src/main/java/org/matsim/application/avro/XYTData.java new file mode 100644 index 00000000000..1838a9cae83 --- /dev/null +++ b/contribs/application/src/main/java/org/matsim/application/avro/XYTData.java @@ -0,0 +1,863 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.matsim.application.avro; + +import org.apache.avro.generic.GenericArray; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class XYTData extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -7545707257116531193L; + + + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"XYTData\",\"namespace\":\"org.matsim.application.avro\",\"fields\":[{\"name\":\"crs\",\"type\":\"string\",\"doc\":\"Coordinate reference system\"},{\"name\":\"xCoords\",\"type\":{\"type\":\"array\",\"items\":\"float\"},\"doc\":\"List of x coordinates\"},{\"name\":\"yCoords\",\"type\":{\"type\":\"array\",\"items\":\"float\"},\"doc\":\"List of y coordinates\"},{\"name\":\"timestamps\",\"type\":{\"type\":\"array\",\"items\":\"int\"},\"doc\":\"List of timestamps in seconds\"},{\"name\":\"data\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":\"float\"}},\"doc\":\"XYT data for each dataset. The key is the name, the data is stored in one chunk of data.\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this XYTData to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a XYTData from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a XYTData instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static XYTData fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + /** Coordinate reference system */ + private java.lang.CharSequence crs; + /** List of x coordinates */ + private java.util.List xCoords; + /** List of y coordinates */ + private java.util.List yCoords; + /** List of timestamps in seconds */ + private java.util.List timestamps; + /** XYT data for each dataset. The key is the name, the data is stored in one chunk of data. */ + private java.util.Map> data; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public XYTData() {} + + /** + * All-args constructor. + * @param crs Coordinate reference system + * @param xCoords List of x coordinates + * @param yCoords List of y coordinates + * @param timestamps List of timestamps in seconds + * @param data XYT data for each dataset. 
The key is the name, the data is stored in one chunk of data. + */ + public XYTData(java.lang.CharSequence crs, java.util.List xCoords, java.util.List yCoords, java.util.List timestamps, java.util.Map> data) { + this.crs = crs; + this.xCoords = xCoords; + this.yCoords = yCoords; + this.timestamps = timestamps; + this.data = data; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } + + @Override + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return crs; + case 1: return xCoords; + case 2: return yCoords; + case 3: return timestamps; + case 4: return data; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: crs = (java.lang.CharSequence)value$; break; + case 1: xCoords = (java.util.List)value$; break; + case 2: yCoords = (java.util.List)value$; break; + case 3: timestamps = (java.util.List)value$; break; + case 4: data = (java.util.Map>)value$; break; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'crs' field. + * @return Coordinate reference system + */ + public java.lang.CharSequence getCrs() { + return crs; + } + + + /** + * Sets the value of the 'crs' field. + * Coordinate reference system + * @param value the value to set. + */ + public void setCrs(java.lang.CharSequence value) { + this.crs = value; + } + + /** + * Gets the value of the 'xCoords' field. + * @return List of x coordinates + */ + public java.util.List getXCoords() { + return xCoords; + } + + + /** + * Sets the value of the 'xCoords' field. + * List of x coordinates + * @param value the value to set. + */ + public void setXCoords(java.util.List value) { + this.xCoords = value; + } + + /** + * Gets the value of the 'yCoords' field. + * @return List of y coordinates + */ + public java.util.List getYCoords() { + return yCoords; + } + + + /** + * Sets the value of the 'yCoords' field. + * List of y coordinates + * @param value the value to set. + */ + public void setYCoords(java.util.List value) { + this.yCoords = value; + } + + /** + * Gets the value of the 'timestamps' field. + * @return List of timestamps in seconds + */ + public java.util.List getTimestamps() { + return timestamps; + } + + + /** + * Sets the value of the 'timestamps' field. + * List of timestamps in seconds + * @param value the value to set. + */ + public void setTimestamps(java.util.List value) { + this.timestamps = value; + } + + /** + * Gets the value of the 'data' field. + * @return XYT data for each dataset. The key is the name, the data is stored in one chunk of data. + */ + public java.util.Map> getData() { + return data; + } + + + /** + * Sets the value of the 'data' field. + * XYT data for each dataset. The key is the name, the data is stored in one chunk of data. + * @param value the value to set. + */ + public void setData(java.util.Map> value) { + this.data = value; + } + + /** + * Creates a new XYTData RecordBuilder. 
+ * @return A new XYTData RecordBuilder + */ + public static org.matsim.application.avro.XYTData.Builder newBuilder() { + return new org.matsim.application.avro.XYTData.Builder(); + } + + /** + * Creates a new XYTData RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new XYTData RecordBuilder + */ + public static org.matsim.application.avro.XYTData.Builder newBuilder(org.matsim.application.avro.XYTData.Builder other) { + if (other == null) { + return new org.matsim.application.avro.XYTData.Builder(); + } else { + return new org.matsim.application.avro.XYTData.Builder(other); + } + } + + /** + * Creates a new XYTData RecordBuilder by copying an existing XYTData instance. + * @param other The existing instance to copy. + * @return A new XYTData RecordBuilder + */ + public static org.matsim.application.avro.XYTData.Builder newBuilder(org.matsim.application.avro.XYTData other) { + if (other == null) { + return new org.matsim.application.avro.XYTData.Builder(); + } else { + return new org.matsim.application.avro.XYTData.Builder(other); + } + } + + /** + * RecordBuilder for XYTData instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + /** Coordinate reference system */ + private java.lang.CharSequence crs; + /** List of x coordinates */ + private java.util.List xCoords; + /** List of y coordinates */ + private java.util.List yCoords; + /** List of timestamps in seconds */ + private java.util.List timestamps; + /** XYT data for each dataset. The key is the name, the data is stored in one chunk of data. */ + private java.util.Map> data; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(org.matsim.application.avro.XYTData.Builder other) { + super(other); + if (isValidValue(fields()[0], other.crs)) { + this.crs = data().deepCopy(fields()[0].schema(), other.crs); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.xCoords)) { + this.xCoords = data().deepCopy(fields()[1].schema(), other.xCoords); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.yCoords)) { + this.yCoords = data().deepCopy(fields()[2].schema(), other.yCoords); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + if (isValidValue(fields()[3], other.timestamps)) { + this.timestamps = data().deepCopy(fields()[3].schema(), other.timestamps); + fieldSetFlags()[3] = other.fieldSetFlags()[3]; + } + if (isValidValue(fields()[4], other.data)) { + this.data = data().deepCopy(fields()[4].schema(), other.data); + fieldSetFlags()[4] = other.fieldSetFlags()[4]; + } + } + + /** + * Creates a Builder by copying an existing XYTData instance + * @param other The existing instance to copy. 
+ */ + private Builder(org.matsim.application.avro.XYTData other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.crs)) { + this.crs = data().deepCopy(fields()[0].schema(), other.crs); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.xCoords)) { + this.xCoords = data().deepCopy(fields()[1].schema(), other.xCoords); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.yCoords)) { + this.yCoords = data().deepCopy(fields()[2].schema(), other.yCoords); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.timestamps)) { + this.timestamps = data().deepCopy(fields()[3].schema(), other.timestamps); + fieldSetFlags()[3] = true; + } + if (isValidValue(fields()[4], other.data)) { + this.data = data().deepCopy(fields()[4].schema(), other.data); + fieldSetFlags()[4] = true; + } + } + + /** + * Gets the value of the 'crs' field. + * Coordinate reference system + * @return The value. + */ + public java.lang.CharSequence getCrs() { + return crs; + } + + + /** + * Sets the value of the 'crs' field. + * Coordinate reference system + * @param value The value of 'crs'. + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder setCrs(java.lang.CharSequence value) { + validate(fields()[0], value); + this.crs = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'crs' field has been set. + * Coordinate reference system + * @return True if the 'crs' field has been set, false otherwise. + */ + public boolean hasCrs() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'crs' field. + * Coordinate reference system + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder clearCrs() { + crs = null; + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'xCoords' field. + * List of x coordinates + * @return The value. + */ + public java.util.List getXCoords() { + return xCoords; + } + + + /** + * Sets the value of the 'xCoords' field. + * List of x coordinates + * @param value The value of 'xCoords'. + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder setXCoords(java.util.List value) { + validate(fields()[1], value); + this.xCoords = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'xCoords' field has been set. + * List of x coordinates + * @return True if the 'xCoords' field has been set, false otherwise. + */ + public boolean hasXCoords() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'xCoords' field. + * List of x coordinates + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder clearXCoords() { + xCoords = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'yCoords' field. + * List of y coordinates + * @return The value. + */ + public java.util.List getYCoords() { + return yCoords; + } + + + /** + * Sets the value of the 'yCoords' field. + * List of y coordinates + * @param value The value of 'yCoords'. + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder setYCoords(java.util.List value) { + validate(fields()[2], value); + this.yCoords = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'yCoords' field has been set. + * List of y coordinates + * @return True if the 'yCoords' field has been set, false otherwise. 
+ */ + public boolean hasYCoords() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'yCoords' field. + * List of y coordinates + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder clearYCoords() { + yCoords = null; + fieldSetFlags()[2] = false; + return this; + } + + /** + * Gets the value of the 'timestamps' field. + * List of timestamps in seconds + * @return The value. + */ + public java.util.List getTimestamps() { + return timestamps; + } + + + /** + * Sets the value of the 'timestamps' field. + * List of timestamps in seconds + * @param value The value of 'timestamps'. + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder setTimestamps(java.util.List value) { + validate(fields()[3], value); + this.timestamps = value; + fieldSetFlags()[3] = true; + return this; + } + + /** + * Checks whether the 'timestamps' field has been set. + * List of timestamps in seconds + * @return True if the 'timestamps' field has been set, false otherwise. + */ + public boolean hasTimestamps() { + return fieldSetFlags()[3]; + } + + + /** + * Clears the value of the 'timestamps' field. + * List of timestamps in seconds + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder clearTimestamps() { + timestamps = null; + fieldSetFlags()[3] = false; + return this; + } + + /** + * Gets the value of the 'data' field. + * XYT data for each dataset. The key is the name, the data is stored in one chunk of data. + * @return The value. + */ + public java.util.Map> getData() { + return data; + } + + + /** + * Sets the value of the 'data' field. + * XYT data for each dataset. The key is the name, the data is stored in one chunk of data. + * @param value The value of 'data'. + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder setData(java.util.Map> value) { + validate(fields()[4], value); + this.data = value; + fieldSetFlags()[4] = true; + return this; + } + + /** + * Checks whether the 'data' field has been set. + * XYT data for each dataset. The key is the name, the data is stored in one chunk of data. + * @return True if the 'data' field has been set, false otherwise. + */ + public boolean hasData() { + return fieldSetFlags()[4]; + } + + + /** + * Clears the value of the 'data' field. + * XYT data for each dataset. The key is the name, the data is stored in one chunk of data. + * @return This builder. + */ + public org.matsim.application.avro.XYTData.Builder clearData() { + data = null; + fieldSetFlags()[4] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public XYTData build() { + try { + XYTData record = new XYTData(); + record.crs = fieldSetFlags()[0] ? this.crs : (java.lang.CharSequence) defaultValue(fields()[0]); + record.xCoords = fieldSetFlags()[1] ? this.xCoords : (java.util.List) defaultValue(fields()[1]); + record.yCoords = fieldSetFlags()[2] ? this.yCoords : (java.util.List) defaultValue(fields()[2]); + record.timestamps = fieldSetFlags()[3] ? this.timestamps : (java.util.List) defaultValue(fields()[3]); + record.data = fieldSetFlags()[4] ? 
this.data : (java.util.Map>) defaultValue(fields()[4]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeString(this.crs); + + long size0 = this.xCoords.size(); + out.writeArrayStart(); + out.setItemCount(size0); + long actualSize0 = 0; + for (java.lang.Float e0: this.xCoords) { + actualSize0++; + out.startItem(); + out.writeFloat(e0); + } + out.writeArrayEnd(); + if (actualSize0 != size0) + throw new java.util.ConcurrentModificationException("Array-size written was " + size0 + ", but element count was " + actualSize0 + "."); + + long size1 = this.yCoords.size(); + out.writeArrayStart(); + out.setItemCount(size1); + long actualSize1 = 0; + for (java.lang.Float e1: this.yCoords) { + actualSize1++; + out.startItem(); + out.writeFloat(e1); + } + out.writeArrayEnd(); + if (actualSize1 != size1) + throw new java.util.ConcurrentModificationException("Array-size written was " + size1 + ", but element count was " + actualSize1 + "."); + + long size2 = this.timestamps.size(); + out.writeArrayStart(); + out.setItemCount(size2); + long actualSize2 = 0; + for (java.lang.Integer e2: this.timestamps) { + actualSize2++; + out.startItem(); + out.writeInt(e2); + } + out.writeArrayEnd(); + if (actualSize2 != size2) + throw new java.util.ConcurrentModificationException("Array-size written was " + size2 + ", but element count was " + actualSize2 + "."); + + long size3 = this.data.size(); + out.writeMapStart(); + out.setItemCount(size3); + long actualSize3 = 0; + for (java.util.Map.Entry> e3: this.data.entrySet()) { + actualSize3++; + out.startItem(); + out.writeString(e3.getKey()); + java.util.List v3 = e3.getValue(); + long size4 = v3.size(); + out.writeArrayStart(); + out.setItemCount(size4); + long actualSize4 = 0; + for (java.lang.Float e4: v3) { + actualSize4++; + out.startItem(); + out.writeFloat(e4); + } + out.writeArrayEnd(); + if (actualSize4 != size4) + throw new java.util.ConcurrentModificationException("Array-size written was " + size4 + ", but element count was " + actualSize4 + "."); + } + out.writeMapEnd(); + if (actualSize3 != size3) + throw new java.util.ConcurrentModificationException("Map-size written was " + size3 + ", but element count was " + actualSize3 + "."); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.crs = in.readString(this.crs instanceof Utf8 ? 
(Utf8)this.crs : null); + + long size0 = in.readArrayStart(); + java.util.List a0 = this.xCoords; + if (a0 == null) { + a0 = new SpecificData.Array((int)size0, SCHEMA$.getField("xCoords").schema()); + this.xCoords = a0; + } else a0.clear(); + SpecificData.Array ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array)a0 : null); + for ( ; 0 < size0; size0 = in.arrayNext()) { + for ( ; size0 != 0; size0--) { + java.lang.Float e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readFloat(); + a0.add(e0); + } + } + + long size1 = in.readArrayStart(); + java.util.List a1 = this.yCoords; + if (a1 == null) { + a1 = new SpecificData.Array((int)size1, SCHEMA$.getField("yCoords").schema()); + this.yCoords = a1; + } else a1.clear(); + SpecificData.Array ga1 = (a1 instanceof SpecificData.Array ? (SpecificData.Array)a1 : null); + for ( ; 0 < size1; size1 = in.arrayNext()) { + for ( ; size1 != 0; size1--) { + java.lang.Float e1 = (ga1 != null ? ga1.peek() : null); + e1 = in.readFloat(); + a1.add(e1); + } + } + + long size2 = in.readArrayStart(); + java.util.List a2 = this.timestamps; + if (a2 == null) { + a2 = new SpecificData.Array((int)size2, SCHEMA$.getField("timestamps").schema()); + this.timestamps = a2; + } else a2.clear(); + SpecificData.Array ga2 = (a2 instanceof SpecificData.Array ? (SpecificData.Array)a2 : null); + for ( ; 0 < size2; size2 = in.arrayNext()) { + for ( ; size2 != 0; size2--) { + java.lang.Integer e2 = (ga2 != null ? ga2.peek() : null); + e2 = in.readInt(); + a2.add(e2); + } + } + + long size3 = in.readMapStart(); + java.util.Map> m3 = this.data; // Need fresh name due to limitation of macro system + if (m3 == null) { + m3 = new java.util.HashMap>((int)size3); + this.data = m3; + } else m3.clear(); + for ( ; 0 < size3; size3 = in.mapNext()) { + for ( ; size3 != 0; size3--) { + java.lang.CharSequence k3 = null; + k3 = in.readString(k3 instanceof Utf8 ? (Utf8)k3 : null); + java.util.List v3 = null; + long size4 = in.readArrayStart(); + java.util.List a4 = v3; + if (a4 == null) { + a4 = new SpecificData.Array((int)size4, SCHEMA$.getField("data").schema().getValueType()); + v3 = a4; + } else a4.clear(); + SpecificData.Array ga4 = (a4 instanceof SpecificData.Array ? (SpecificData.Array)a4 : null); + for ( ; 0 < size4; size4 = in.arrayNext()) { + for ( ; size4 != 0; size4--) { + java.lang.Float e4 = (ga4 != null ? ga4.peek() : null); + e4 = in.readFloat(); + a4.add(e4); + } + } + m3.put(k3, v3); + } + } + + } else { + for (int i = 0; i < 5; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.crs = in.readString(this.crs instanceof Utf8 ? (Utf8)this.crs : null); + break; + + case 1: + long size0 = in.readArrayStart(); + java.util.List a0 = this.xCoords; + if (a0 == null) { + a0 = new SpecificData.Array((int)size0, SCHEMA$.getField("xCoords").schema()); + this.xCoords = a0; + } else a0.clear(); + SpecificData.Array ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array)a0 : null); + for ( ; 0 < size0; size0 = in.arrayNext()) { + for ( ; size0 != 0; size0--) { + java.lang.Float e0 = (ga0 != null ? ga0.peek() : null); + e0 = in.readFloat(); + a0.add(e0); + } + } + break; + + case 2: + long size1 = in.readArrayStart(); + java.util.List a1 = this.yCoords; + if (a1 == null) { + a1 = new SpecificData.Array((int)size1, SCHEMA$.getField("yCoords").schema()); + this.yCoords = a1; + } else a1.clear(); + SpecificData.Array ga1 = (a1 instanceof SpecificData.Array ? 
(SpecificData.Array)a1 : null); + for ( ; 0 < size1; size1 = in.arrayNext()) { + for ( ; size1 != 0; size1--) { + java.lang.Float e1 = (ga1 != null ? ga1.peek() : null); + e1 = in.readFloat(); + a1.add(e1); + } + } + break; + + case 3: + long size2 = in.readArrayStart(); + java.util.List a2 = this.timestamps; + if (a2 == null) { + a2 = new SpecificData.Array((int)size2, SCHEMA$.getField("timestamps").schema()); + this.timestamps = a2; + } else a2.clear(); + SpecificData.Array ga2 = (a2 instanceof SpecificData.Array ? (SpecificData.Array)a2 : null); + for ( ; 0 < size2; size2 = in.arrayNext()) { + for ( ; size2 != 0; size2--) { + java.lang.Integer e2 = (ga2 != null ? ga2.peek() : null); + e2 = in.readInt(); + a2.add(e2); + } + } + break; + + case 4: + long size3 = in.readMapStart(); + java.util.Map> m3 = this.data; // Need fresh name due to limitation of macro system + if (m3 == null) { + m3 = new java.util.HashMap>((int)size3); + this.data = m3; + } else m3.clear(); + for ( ; 0 < size3; size3 = in.mapNext()) { + for ( ; size3 != 0; size3--) { + java.lang.CharSequence k3 = null; + k3 = in.readString(k3 instanceof Utf8 ? (Utf8)k3 : null); + java.util.List v3 = null; + long size4 = in.readArrayStart(); + java.util.List a4 = v3; + if (a4 == null) { + a4 = new SpecificData.Array((int)size4, SCHEMA$.getField("data").schema().getValueType()); + v3 = a4; + } else a4.clear(); + SpecificData.Array ga4 = (a4 instanceof SpecificData.Array ? (SpecificData.Array)a4 : null); + for ( ; 0 < size4; size4 = in.arrayNext()) { + for ( ; size4 != 0; size4--) { + java.lang.Float e4 = (ga4 != null ? ga4.peek() : null); + e4 = in.readFloat(); + a4.add(e4); + } + } + m3.put(k3, v3); + } + } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/contribs/application/src/main/java/org/matsim/application/prepare/pt/CreateTransitScheduleFromGtfs.java b/contribs/application/src/main/java/org/matsim/application/prepare/pt/CreateTransitScheduleFromGtfs.java index 0c14745c671..f65bffae57a 100644 --- a/contribs/application/src/main/java/org/matsim/application/prepare/pt/CreateTransitScheduleFromGtfs.java +++ b/contribs/application/src/main/java/org/matsim/application/prepare/pt/CreateTransitScheduleFromGtfs.java @@ -13,6 +13,7 @@ import org.matsim.application.MATSimAppCommand; import org.matsim.application.options.CrsOptions; import org.matsim.application.options.ShpOptions; +import org.matsim.contrib.emissions.HbefaVehicleCategory; import org.matsim.contrib.gtfs.GtfsConverter; import org.matsim.contrib.gtfs.TransitSchedulePostProcessTools; import org.matsim.core.config.ConfigUtils; @@ -42,10 +43,10 @@ * @author rakow */ @CommandLine.Command( - name = "transit-from-gtfs", - description = "Create transit schedule and vehicles from GTFS data and merge pt network into existing network", - showDefaultValues = true, - mixinStandardHelpOptions = true + name = "transit-from-gtfs", + description = "Create transit schedule and vehicles from GTFS data and merge pt network into existing network", + showDefaultValues = true, + mixinStandardHelpOptions = true ) public class CreateTransitScheduleFromGtfs implements MATSimAppCommand { @@ -102,8 +103,8 @@ public Integer call() throws Exception { // Output files File scheduleFile = new File(output, name + "-transitSchedule.xml.gz"); File networkPTFile = new File(output, - FilenameUtils.getName(IOUtils.resolveFileOrResource(networkFile).getPath()) - .replace(".xml", "-with-pt.xml")); + 
FilenameUtils.getName(IOUtils.resolveFileOrResource(networkFile).getPath()) + .replace(".xml", "-with-pt.xml")); File transitVehiclesFile = new File(output, name + "-transitVehicles.xml.gz"); Scenario scenario = ScenarioUtils.createScenario(ConfigUtils.createConfig()); @@ -118,13 +119,13 @@ public Integer call() throws Exception { log.info("Converting {} at date {}", gtfsFile, date); GtfsConverter.Builder converter = GtfsConverter.newBuilder() - .setScenario(scenario) - .setTransform(ct) - .setDate(date) // use first date for all sets - .setFeed(gtfsFile) - //.setIncludeAgency(agency -> agency.equals("rbg-70")) - .setIncludeStop(createFilter(i)) - .setMergeStops(mergeStops); + .setScenario(scenario) + .setTransform(ct) + .setDate(date) // use first date for all sets + .setFeed(gtfsFile) + //.setIncludeAgency(agency -> agency.equals("rbg-70")) + .setIncludeStop(createFilter(i)) + .setMergeStops(mergeStops); if (prefixes.size() > 0) { String prefix = prefixes.get(i); @@ -216,6 +217,9 @@ private static Scenario getScenarioWithPseudoPtNetworkAndTransitVehicles(Network VehicleUtils.setDoorOperationMode(reRbVehicleType, VehicleType.DoorOperationMode.serial); // first finish boarding, then start alighting VehicleUtils.setAccessTime(reRbVehicleType, 1.0 / 10.0); // 1s per boarding agent, distributed on 10 doors VehicleUtils.setEgressTime(reRbVehicleType, 1.0 / 10.0); // 1s per alighting agent, distributed on 10 doors + + addHbefaMapping(reRbVehicleType, HbefaVehicleCategory.NON_HBEFA_VEHICLE); + scenario.getTransitVehicles().addVehicleType(reRbVehicleType); } VehicleType sBahnVehicleType = vehicleFactory.createVehicleType(Id.create("S-Bahn_veh_type", VehicleType.class)); @@ -237,6 +241,9 @@ private static Scenario getScenarioWithPseudoPtNetworkAndTransitVehicles(Network VehicleUtils.setAccessTime(uBahnVehicleType, 1.0 / 18.0); // 1s per boarding agent, distributed on 6*3 doors VehicleUtils.setEgressTime(uBahnVehicleType, 1.0 / 18.0); // 1s per alighting agent, distributed on 6*3 doors scenario.getTransitVehicles().addVehicleType(uBahnVehicleType); + + addHbefaMapping(uBahnVehicleType, HbefaVehicleCategory.NON_HBEFA_VEHICLE); + } VehicleType tramVehicleType = vehicleFactory.createVehicleType(Id.create("Tram_veh_type", VehicleType.class)); { @@ -291,7 +298,7 @@ private static Scenario getScenarioWithPseudoPtNetworkAndTransitVehicles(Network gtfsTransitType = Integer.parseInt((String) line.getAttributes().getAttribute("gtfs_route_type")); } catch (NumberFormatException e) { log.error("unknown transit mode! Line id was " + line.getId().toString() + - "; gtfs route type was " + line.getAttributes().getAttribute("gtfs_route_type")); + "; gtfs route type was " + line.getAttributes().getAttribute("gtfs_route_type")); throw new RuntimeException("unknown transit mode"); } @@ -357,7 +364,7 @@ private static Scenario getScenarioWithPseudoPtNetworkAndTransitVehicles(Network break; default: log.warn("unknown transit mode! Line id was " + line.getId().toString() + - "; gtfs route type was " + line.getAttributes().getAttribute("gtfs_route_type")); + "; gtfs route type was " + line.getAttributes().getAttribute("gtfs_route_type")); lineVehicleType = ptVehicleType; } @@ -393,13 +400,13 @@ private static Scenario getScenarioWithPseudoPtNetworkAndTransitVehicles(Network // if there is no departure offset set (or infinity), it is the last stop of the line, // so we don't need to care about the stop duration double stopDuration = routeStop.getDepartureOffset().isDefined() ? 
- routeStop.getDepartureOffset().seconds() - routeStop.getArrivalOffset().seconds() : minStopTime; + routeStop.getDepartureOffset().seconds() - routeStop.getArrivalOffset().seconds() : minStopTime; // ensure arrival at next stop early enough to allow for 30s stop duration -> time for passengers to board / alight // if link freespeed had been set such that the pt veh arrives exactly on time, but departure tiome is identical // with arrival time the pt vehicle would have been always delayed // Math.max to avoid negative values of travelTime double travelTime = Math.max(1, routeStop.getArrivalOffset().seconds() - lastDepartureOffset - 1.0 - - (stopDuration >= minStopTime ? 0 : (minStopTime - stopDuration))); + (stopDuration >= minStopTime ? 0 : (minStopTime - stopDuration))); Link link = network.getLinks().get(routeStop.getStopFacility().getLinkId()); increaseLinkFreespeedIfLower(link, link.getLength() / travelTime); lastDepartureOffset = routeStop.getDepartureOffset().seconds(); @@ -419,6 +426,14 @@ private static Scenario getScenarioWithPseudoPtNetworkAndTransitVehicles(Network return scenario; } + private static void addHbefaMapping(VehicleType vehicleType, HbefaVehicleCategory category) { + EngineInformation carEngineInformation = vehicleType.getEngineInformation(); + VehicleUtils.setHbefaVehicleCategory(carEngineInformation, String.valueOf(category)); + VehicleUtils.setHbefaTechnology(carEngineInformation, "average"); + VehicleUtils.setHbefaSizeClass(carEngineInformation, "average"); + VehicleUtils.setHbefaEmissionsConcept(carEngineInformation, "average"); + } + private static void increaseLinkFreespeedIfLower(Link link, double newFreespeed) { if (link.getFreespeed() < newFreespeed) { link.setFreespeed(newFreespeed); diff --git a/contribs/emissions/src/main/java/org/matsim/contrib/emissions/OsmHbefaMapping.java b/contribs/emissions/src/main/java/org/matsim/contrib/emissions/OsmHbefaMapping.java index e57105326ca..4938d75e993 100644 --- a/contribs/emissions/src/main/java/org/matsim/contrib/emissions/OsmHbefaMapping.java +++ b/contribs/emissions/src/main/java/org/matsim/contrib/emissions/OsmHbefaMapping.java @@ -97,11 +97,8 @@ private void put(String s, Hbefa hbefa) { @Override public String determineHebfaType(Link link) { - String roadTypeAttribute = (String) link.getAttributes().getAttribute(NetworkUtils.TYPE); - Object allowedSpeedAttribute = link.getAttributes().getAttribute(NetworkUtils.ALLOWED_SPEED); - - String roadType = StringUtils.isBlank(roadTypeAttribute) ? "unclassified" : roadTypeAttribute; - double allowedSpeed = allowedSpeedAttribute != null ? 
(double) allowedSpeedAttribute : link.getFreespeed(); + String roadType = NetworkUtils.getHighwayType(link); + double allowedSpeed = NetworkUtils.getAllowedSpeed(link); return getHEBFAtype(roadType, allowedSpeed); } diff --git a/contribs/emissions/src/main/java/org/matsim/contrib/emissions/analysis/FastEmissionGridAnalyzer.java b/contribs/emissions/src/main/java/org/matsim/contrib/emissions/analysis/FastEmissionGridAnalyzer.java index 0e407e1e3c8..b036c3d9f72 100644 --- a/contribs/emissions/src/main/java/org/matsim/contrib/emissions/analysis/FastEmissionGridAnalyzer.java +++ b/contribs/emissions/src/main/java/org/matsim/contrib/emissions/analysis/FastEmissionGridAnalyzer.java @@ -239,8 +239,11 @@ static Raster rasterizeNetwork(final Network network, final TObjectDoubleMap { var link = network.getLinks().get(linkId); - var numberOfCells = rasterizeLink(link, 0, raster); - rasterizeLink(link, value / numberOfCells / cellArea, raster); + // If the link does not exist in the network, we ignore it + if (link != null) { + var numberOfCells = rasterizeLink(link, 0, raster); + rasterizeLink(link, value / numberOfCells / cellArea, raster); + } return true; }); return raster; @@ -260,11 +263,14 @@ static Raster rasterizeNetwork(Network network, Map, Double> emissions, for (var entry : emissions.entrySet()) { var link = network.getLinks().get(entry.getKey()); - var value = entry.getValue(); - // first count number of cells - var numberOfCells = rasterizeLink(link, 0, raster); - // second pass for actually writing the emission values - rasterizeLink(link, value / numberOfCells / cellArea, raster); + // If the link does not exist in the network, we ignore it + if (link != null) { + var value = entry.getValue(); + // first count number of cells + var numberOfCells = rasterizeLink(link, 0, raster); + // second pass for actually writing the emission values + rasterizeLink(link, value / numberOfCells / cellArea, raster); + } } return raster; } diff --git a/contribs/noise/src/main/java/org/matsim/contrib/noise/MergeNoiseCSVFile.java b/contribs/noise/src/main/java/org/matsim/contrib/noise/MergeNoiseCSVFile.java index b2ee2c02ed2..d052f341cfe 100644 --- a/contribs/noise/src/main/java/org/matsim/contrib/noise/MergeNoiseCSVFile.java +++ b/contribs/noise/src/main/java/org/matsim/contrib/noise/MergeNoiseCSVFile.java @@ -18,7 +18,7 @@ * *********************************************************************** */ /** - * + * */ package org.matsim.contrib.noise; @@ -42,7 +42,6 @@ /** * @author ikaddoura - * */ public final class MergeNoiseCSVFile { @@ -53,17 +52,19 @@ public final class MergeNoiseCSVFile { private double timeBinSize = 3600.; private double endTime = 24. * 3600.; private String separator = ";"; - private double threshold = -1. 
; - private OutputFormat outputFormat = OutputFormat.xyt ; - + private double threshold = -1.; + private OutputFormat outputFormat = OutputFormat.xyt; + private String outputDirectory = null; private String[] workingDirectories = null; private String[] labels = null; - + private String receiverPointsFile = null; private String networkFile = null; - public static enum OutputFormat { xyt1t2t3etc, xyt } ; + public static enum OutputFormat {xyt1t2t3etc, xyt} + + ; private Map>> label2time2rp2value = new HashMap<>(); private Map, Coord> rp2Coord = new HashMap, Coord>(); @@ -73,7 +74,7 @@ public static void main(String[] args) { MergeNoiseCSVFile readNoiseFile = new MergeNoiseCSVFile(); readNoiseFile.run(); } - + public final void setThreshold(double threshold) { this.threshold = threshold; } @@ -129,10 +130,10 @@ public final void run() { // lv final. kai readValues(); - + if (receiverPointsFile != null) readReceiverPoints(); if (networkFile != null) loadNetwork(); - + if (receiverPointsFile != null) { writeFileReceiverPoint(); } else if (networkFile != null) { @@ -149,89 +150,89 @@ private void loadNetwork() { } private void writeFileLink() { - int lineCounter = 0 ; + int lineCounter = 0; String outputFile = this.outputDirectory; for (int i = 0; i < this.labels.length; i++) { - outputFile = outputFile + this.labels[i] + "_"; + outputFile = outputFile + this.labels[i] + "_"; } outputFile = outputFile + "merged_" + this.outputFormat.toString() + ".csv.gz"; - try ( BufferedWriter bw = IOUtils.getBufferedWriter(outputFile) ) { + try (BufferedWriter bw = IOUtils.getBufferedWriter(outputFile)) { // so-called "try-with-resources". Kai - log.info(" Writing merged file to " + outputFile + "...") ; + log.info(" Writing merged file to " + outputFile + "..."); // write headers - switch( this.outputFormat ) { - // yy should probably become different classes. kai - case xyt1t2t3etc: - bw.write("Id"); - for (String label : this.label2time2rp2value.keySet()) { - for (double time = startTime; time <= endTime; time = time + timeBinSize) { - bw.write(";" + label + "_" + Time.writeTime(time, Time.TIMEFORMAT_HHMMSS)); + switch (this.outputFormat) { + // yy should probably become different classes. 
kai + case xyt1t2t3etc: + bw.write("Id"); + for (String label : this.label2time2rp2value.keySet()) { + for (double time = startTime; time <= endTime; time = time + timeBinSize) { + bw.write(";" + label + "_" + Time.writeTime(time, Time.TIMEFORMAT_HHMMSS)); + } } - } - break; - case xyt: - bw.write("Id;time"); - for (String label : this.label2time2rp2value.keySet()) { - bw.write(";" + label); - } - break; - default: - throw new RuntimeException("not implemented") ; + break; + case xyt: + bw.write("Id;time"); + for (String label : this.label2time2rp2value.keySet()) { + bw.write(";" + label); + } + break; + default: + throw new RuntimeException("not implemented"); } bw.newLine(); // fill table - switch( this.outputFormat ) { - case xyt1t2t3etc: + switch (this.outputFormat) { + case xyt1t2t3etc: - for (Id rp : this.network.getLinks().keySet()) { - bw.write(rp.toString()); + for (Id rp : this.network.getLinks().keySet()) { + bw.write(rp.toString()); - for (String label : this.label2time2rp2value.keySet()) { - for (double time = startTime; time <= endTime; time = time + timeBinSize) { - bw.write(";" + this.label2time2rp2value.get(label).get(time).get(rp.toString())); + for (String label : this.label2time2rp2value.keySet()) { + for (double time = startTime; time <= endTime; time = time + timeBinSize) { + bw.write(";" + this.label2time2rp2value.get(label).get(time).get(rp.toString())); + } } + bw.newLine(); } - bw.newLine(); - } - break; - case xyt: + break; + case xyt: - for (Id rp : this.network.getLinks().keySet()) { + for (Id rp : this.network.getLinks().keySet()) { - for (double time = startTime; time <= endTime; time = time + timeBinSize) { + for (double time = startTime; time <= endTime; time = time + timeBinSize) { - boolean writeThisLine = false; - String lineToWrite = rp.toString(); + boolean writeThisLine = false; + String lineToWrite = rp.toString(); - for (String label : this.label2time2rp2value.keySet()) { - double value = this.label2time2rp2value.get(label).get(time).get(rp.toString()); - if (value > this.threshold) { - writeThisLine = true; + for (String label : this.label2time2rp2value.keySet()) { + double value = this.label2time2rp2value.get(label).get(time).get(rp.toString()); + if (value > this.threshold) { + writeThisLine = true; + } + lineToWrite = lineToWrite + ";" + value; } - lineToWrite = lineToWrite + ";" + value; - } - // only write the line if at least one value is larger than threshold - if (writeThisLine) { - bw.write(lineToWrite); - bw.newLine(); - lineCounter ++ ; - if (lineCounter % 10000 == 0.) { - log.info("# " + lineCounter); + // only write the line if at least one value is larger than threshold + if (writeThisLine) { + bw.write(lineToWrite); + bw.newLine(); + lineCounter++; + if (lineCounter % 10000 == 0.) 
{ + log.info("# " + lineCounter); + } } } } - } - break ; - default: - throw new RuntimeException("not implemented") ; + break; + default: + throw new RuntimeException("not implemented"); } bw.close(); @@ -244,92 +245,92 @@ private void writeFileLink() { } private void writeFileReceiverPoint() { - int lineCounter = 0 ; + int lineCounter = 0; String outputFile = this.outputDirectory; for (int i = 0; i < this.labels.length; i++) { - outputFile = outputFile + this.labels[i] + "_"; + outputFile = outputFile + this.labels[i] + "_"; } outputFile = outputFile + "merged_" + this.outputFormat.toString() + ".csv.gz"; - try ( BufferedWriter bw = IOUtils.getBufferedWriter(outputFile) ) { + try (BufferedWriter bw = IOUtils.getBufferedWriter(outputFile)) { // so-called "try-with-resources". Kai - log.info(" Writing merged file to " + outputFile + "...") ; + log.info(" Writing merged file to " + outputFile + "..."); // write headers - switch( this.outputFormat ) { - // yy should probably become different classes. kai - case xyt1t2t3etc: - bw.write("Receiver Point Id;x;y"); - for (String label : this.label2time2rp2value.keySet()) { - for (double time = startTime; time <= endTime; time = time + timeBinSize) { - bw.write(";" + label + "_" + Time.writeTime(time, Time.TIMEFORMAT_HHMMSS)); + switch (this.outputFormat) { + // yy should probably become different classes. kai + case xyt1t2t3etc: + bw.write("Receiver Point Id;x;y"); + for (String label : this.label2time2rp2value.keySet()) { + for (double time = startTime; time <= endTime; time = time + timeBinSize) { + bw.write(";" + label + "_" + Time.writeTime(time, Time.TIMEFORMAT_HHMMSS)); + } } - } - break; - case xyt: - bw.write("Receiver Point Id;x;y;time"); - for (String label : this.label2time2rp2value.keySet()) { - bw.write(";" + label); - } - break; - default: - throw new RuntimeException("not implemented") ; + break; + case xyt: + bw.write("Receiver Point Id;x;y;time"); + for (String label : this.label2time2rp2value.keySet()) { + bw.write(";" + label); + } + break; + default: + throw new RuntimeException("not implemented"); } bw.newLine(); // fill table - switch( this.outputFormat ) { - case xyt1t2t3etc: + switch (this.outputFormat) { + case xyt1t2t3etc: - for (Id rp : this.rp2Coord.keySet()) { - bw.write(rp.toString() + ";" + rp2Coord.get(rp).getX() + ";" + rp2Coord.get(rp).getY()); + for (Id rp : this.rp2Coord.keySet()) { + bw.write(rp.toString() + ";" + rp2Coord.get(rp).getX() + ";" + rp2Coord.get(rp).getY()); - for (String label : this.label2time2rp2value.keySet()) { - for (double time = startTime; time <= endTime; time = time + timeBinSize) { - if (this.label2time2rp2value.get(label).get(time).get(rp.toString()) == null) { - throw new RuntimeException("null!"); + for (String label : this.label2time2rp2value.keySet()) { + for (double time = startTime; time <= endTime; time = time + timeBinSize) { + if (this.label2time2rp2value.get(label).get(time).get(rp.toString()) == null) { + throw new RuntimeException("null!"); + } + bw.write(";" + this.label2time2rp2value.get(label).get(time).get(rp.toString())); } - bw.write(";" + this.label2time2rp2value.get(label).get(time).get(rp.toString())); } + bw.newLine(); } - bw.newLine(); - } - break; - case xyt: + break; + case xyt: - for (Id rp : this.rp2Coord.keySet()) { + for (Id rp : this.rp2Coord.keySet()) { - for (double time = startTime; time <= endTime; time = time + timeBinSize) { + for (double time = startTime; time <= endTime; time = time + timeBinSize) { - boolean writeThisLine = false; - String 
lineToWrite = rp.toString() + ";" + rp2Coord.get(rp).getX() + ";" + rp2Coord.get(rp).getY() + ";" + String.valueOf(time); + boolean writeThisLine = false; + String lineToWrite = rp.toString() + ";" + rp2Coord.get(rp).getX() + ";" + rp2Coord.get(rp).getY() + ";" + String.valueOf(time); - for (String label : this.label2time2rp2value.keySet()) { - double value = this.label2time2rp2value.get(label).get(time).get(rp.toString()); - if (value > this.threshold) { - writeThisLine = true; + for (String label : this.label2time2rp2value.keySet()) { + double value = this.label2time2rp2value.get(label).get(time).get(rp.toString()); + if (value > this.threshold) { + writeThisLine = true; + } + lineToWrite = lineToWrite + ";" + value; } - lineToWrite = lineToWrite + ";" + value; - } - // only write the line if at least one value is larger than threshold - if (writeThisLine) { - bw.write(lineToWrite); - bw.newLine(); - lineCounter ++ ; - if (lineCounter % 10000 == 0.) { - log.info("# " + lineCounter); + // only write the line if at least one value is larger than threshold + if (writeThisLine) { + bw.write(lineToWrite); + bw.newLine(); + lineCounter++; + if (lineCounter % 10000 == 0.) { + log.info("# " + lineCounter); + } } } } - } - break ; - default: - throw new RuntimeException("not implemented") ; + break; + default: + throw new RuntimeException("not implemented"); } bw.close(); @@ -356,7 +357,7 @@ private void readReceiverPoints() { log.info("Reading receiver points file..."); try { - while( (line = br.readLine()) != null){ + while ((line = br.readLine()) != null) { if (lineCounter % 10000 == 0.) { log.info("# " + lineCounter); @@ -367,16 +368,20 @@ private void readReceiverPoints() { double x = 0; double y = 0; - for(int i = 0; i < columns.length; i++){ - - switch(i){ - case 0: rpId = Id.create(columns[i], ReceiverPoint.class); - break; - case 1: x = Double.valueOf(columns[i]); - break; - case 2: y = Double.valueOf(columns[i]); - break; - default: throw new RuntimeException("More than three columns. Aborting..."); + for (int i = 0; i < columns.length; i++) { + + switch (i) { + case 0: + rpId = Id.create(columns[i], ReceiverPoint.class); + break; + case 1: + x = Double.valueOf(columns[i]); + break; + case 2: + y = Double.valueOf(columns[i]); + break; + default: + throw new RuntimeException("More than three columns. Aborting..."); } } lineCounter++; @@ -405,7 +410,7 @@ private void readValues() { double valueSumTimeBin = 0.; String fileName = workingDirectory + label + "_" + Double.toString(time) + ".csv"; - try ( BufferedReader br = IOUtils.getBufferedReader(fileName) ) { + try (BufferedReader br = IOUtils.getBufferedReader(fileName)) { // this will automagically use the *.gz version if a non-gzipped version does not exist. kai, jan'15 String line = br.readLine(); @@ -428,7 +433,7 @@ private void readValues() { rp = columns[column]; } else if (column == 1) { value = Double.valueOf(columns[column]); - valueSumTimeBin+=value; + valueSumTimeBin += value; } else { // throw new RuntimeException("More than two columns. 
Aborting..."); } @@ -442,8 +447,8 @@ private void readValues() { } catch (NumberFormatException | IOException e) { e.printStackTrace(); } - - totalValueSum+=valueSumTimeBin; + + totalValueSum += valueSumTimeBin; log.info("total sum of all values: " + totalValueSum); } @@ -452,7 +457,7 @@ private void readValues() { } public void setNetworkFile(String networkfile) { - this.networkFile = networkfile; + this.networkFile = networkfile; } } diff --git a/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseConfigGroup.java b/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseConfigGroup.java index 8b5736f744f..cd7b2a500ab 100644 --- a/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseConfigGroup.java +++ b/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseConfigGroup.java @@ -18,7 +18,7 @@ * *********************************************************************** */ /** - * + * */ package org.matsim.contrib.noise; @@ -41,12 +41,11 @@ /** * Provides the parameters required to build a simple grid with some basic spatial functionality. * Provides the parameters required to compute noise emissions, immissions and damages. - * - * @author ikaddoura * + * @author ikaddoura */ public final class NoiseConfigGroup extends ReflectiveConfigGroup { - + public static final String GROUP_NAME = "noise"; private static final String RECEIVER_POINT_GAP = "receiverPointGap"; private static final String TRANSFORMATION_FACTORY = "transformationFactory"; @@ -91,24 +90,24 @@ public final class NoiseConfigGroup extends ReflectiveConfigGroup { public NoiseConfigGroup() { super(GROUP_NAME); } - + private static final Logger log = LogManager.getLogger(NoiseConfigGroup.class); - + private double receiverPointGap = 250.; private String[] consideredActivitiesForReceiverPointGrid = {"home", "work"}; private String[] consideredActivitiesForDamageCalculation = {"home", "work"}; - + private double receiverPointsGridMinX = 0.; private double receiverPointsGridMinY = 0.; private double receiverPointsGridMaxX = 0.; private double receiverPointsGridMaxY = 0.; - + private String receiverPointsCSVFile = null; private String receiverPointsCSVFileCoordinateSystem = TransformationFactory.DHDN_SoldnerBerlin; - - private static double annualCostRateEws = (85.0/(1.95583)) * (Math.pow(1.02, (2014-1995))); - private double annualCostRate = annualCostRateEws ; + + private static double annualCostRateEws = (85.0 / (1.95583)) * (Math.pow(1.02, (2014 - 1995))); + private double annualCostRate = annualCostRateEws; // -- 1st term is EWS value (85DM/"dB(A) above German threshold value") converted to Euro // -- 2nd term is 2 pct inflation @@ -119,200 +118,201 @@ public NoiseConfigGroup() { private int writeOutputIteration = 10; private boolean useActualSpeedLevel = true; private boolean allowForSpeedsOutsideTheValidRange = false; - + private boolean throwNoiseEventsAffected = true; private boolean computeNoiseDamages = true; private boolean internalizeNoiseDamages = true; // throw money events based on caused noise cost private boolean computeAvgNoiseCostPerLinkAndTime = true; - private boolean computeCausingAgents = true; + private boolean computeCausingAgents = true; private boolean throwNoiseEventsCaused = true; private boolean computePopulationUnits = true; - public enum NoiseAllocationApproach { - AverageCost, MarginalCost - } + public enum NoiseAllocationApproach { + AverageCost, MarginalCost + } + private NoiseAllocationApproach noiseAllocationApproach = NoiseAllocationApproach.AverageCost; - - private String[] 
hgvIdPrefixes = { "lkw" }; + + private String[] hgvIdPrefixes = {"lkw", "truck", "freight"}; private Set busIdIdentifier = new HashSet<>(); private Set> tunnelLinkIDs = new HashSet<>(); private Set networkModesToIgnore = new HashSet<>(); - + private double noiseTollFactor = 1.0; private boolean considerNoiseBarriers = false; - private String noiseBarriersFilePath = null; - private String noiseBarriersSourceCrs = null; + private String noiseBarriersFilePath = null; + private String noiseBarriersSourceCrs = null; + + private boolean useDEM = false; + private String demFile = null; - private boolean useDEM = false; - private String demFile = null; + public enum NoiseComputationMethod { + RLS90, RLS19 + } - public enum NoiseComputationMethod { - RLS90,RLS19 - } + private NoiseComputationMethod noiseComputationMethod = NoiseComputationMethod.RLS90; + + // ######################################################################################################## - private NoiseComputationMethod noiseComputationMethod = NoiseComputationMethod.RLS90; - - // ######################################################################################################## - @Override public Map getComments() { Map comments = super.getComments(); - - comments.put(RECEIVER_POINT_GAP, RECEIVER_POINT_GAP_CMT) ; - comments.put(TRANSFORMATION_FACTORY, "coordinate system; so far only tested for 'TransformationFactory.DHDN_GK4'" ) ; - comments.put(CONSIDERED_ACTIVITIES_FOR_DAMAGE_CALCULATION, "Specifies the activity types that are considered when computing noise damages (= the activities at which being exposed to noise results in noise damages). A list of the exact activity types, e.g. 'home,work_8hours,work_4hours', the prefixes 'home*,work*' or both, e.g. 'home,work*'.\"" ) ; - comments.put(CONSIDERED_ACTIVITIES_FOR_RECEIVER_POINT_GRID, "Creates a grid of noise receiver points which contains all agents' activity locations of the specified types. A list of the exact activity types, e.g. 'home,work_8hours,work_4hours', the prefixes 'home*,work*' or both, e.g. 'home,work*'.\"" ) ; + + comments.put(RECEIVER_POINT_GAP, RECEIVER_POINT_GAP_CMT); + comments.put(TRANSFORMATION_FACTORY, "coordinate system; so far only tested for 'TransformationFactory.DHDN_GK4'"); + comments.put(CONSIDERED_ACTIVITIES_FOR_DAMAGE_CALCULATION, "Specifies the activity types that are considered when computing noise damages (= the activities at which being exposed to noise results in noise damages). A list of the exact activity types, e.g. 'home,work_8hours,work_4hours', the prefixes 'home*,work*' or both, e.g. 'home,work*'.\""); + comments.put(CONSIDERED_ACTIVITIES_FOR_RECEIVER_POINT_GRID, "Creates a grid of noise receiver points which contains all agents' activity locations of the specified types. A list of the exact activity types, e.g. 'home,work_8hours,work_4hours', the prefixes 'home*,work*' or both, e.g. 'home,work*'.\""); comments.put(RECEIVER_POINTS_GRID_MIN_X, "Specifies a boundary coordinate min/max x/y value of the receiver point grid. " - + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " - + "(see parameter 'consideredActivitiesForReceiverPointGrid')." 
) ; + + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " + + "(see parameter 'consideredActivitiesForReceiverPointGrid')."); comments.put(RECEIVER_POINTS_GRID_MAX_X, "Specifies a boundary coordinate min/max x/y value of the receiver point grid. " - + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " - + "(see parameter 'consideredActivitiesForReceiverPointGrid')." ) ; + + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " + + "(see parameter 'consideredActivitiesForReceiverPointGrid')."); comments.put(RECEIVER_POINTS_GRID_MIN_Y, "Specifies a boundary coordinate min/max x/y value of the receiver point grid. " - + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " - + "(see parameter 'consideredActivitiesForReceiverPointGrid')." ) ; + + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " + + "(see parameter 'consideredActivitiesForReceiverPointGrid')."); comments.put(RECEIVER_POINTS_GRID_MAX_Y, "Specifies a boundary coordinate min/max x/y value of the receiver point grid. " - + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " - + "(see parameter 'consideredActivitiesForReceiverPointGrid')." ) ; + + "0.0 means the boundary coordinates are ignored and the grid is created based on the agents' activity coordinates of the specified activity types " + + "(see parameter 'consideredActivitiesForReceiverPointGrid')."); comments.put(RECEIVER_POINTS_CSV_FILE, "A csv file which provides the ReceiverPoint coordinates (first column: id, second column: x-coordinate, third column: y-coordinate, separator: ',')"); comments.put(RECEIVER_POINTS_CSV_FILE_COORDINATE_SYSTEM, "The coordinate reference system of the provided ReceiverPoint csv file."); - comments.put(ANNUAL_COST_RATE, "annual noise cost rate [in EUR per exposed pulation unit]; following the German EWS approach" ) ; - comments.put(TIME_BIN_SIZE_NOISE_COMPUTATION, "Specifies the temporal resolution, i.e. the time bin size [in seconds] to compute noise levels." ) ; - comments.put(SCALE_FACTOR, "Set to '1.' for a 100 percent sample size. Set to '10.' for a 10 percent sample size. Set to '100.' for a 1 percent sample size." ) ; - comments.put(RELEVANT_RADIUS, "Specifies the radius [in coordinate units] around each receiver point links are taken into account." ) ; - comments.put(TUNNEL_LINK_ID_FILE, "Specifies a csv file which contains all tunnel link IDs." ) ; - comments.put(TUNNEL_LINK_IDS, "Specifies the tunnel link IDs. Will be ignored in case a the tunnel link IDs are provided as file (see parameter 'tunnelLinkIdFile')." ) ; - - comments.put(WRITE_OUTPUT_ITERATION, WRITE_OUTPUT_ITERATION_CMT) ; - comments.put(USE_ACTUAL_SPEED_LEVEL, "Set to 'true' if the actual speed level should be used to compute noise levels. Set to 'false' if the freespeed level should be used to compute noise levels." ) ; - comments.put(ALLOW_FOR_SPEEDS_OUTSIDE_THE_VALID_RANGE, "Set to 'true' if speed levels below 30 km/h or above 80 km/h (HGV) / 130 km/h (car) should be used to compute noise levels. 
Set to 'false' if speed levels outside of the valid range should not be used to compute noise levels (recommended)." ) ; - - comments.put(THROW_NOISE_EVENTS_AFFECTED, "Set to 'true' if noise events (providing information about the affected agent) should be thrown. Otherwise set to 'false'." ) ; - comments.put(COMPUTE_NOISE_DAMAGES, "Set to 'true' if noise damages should be computed. Otherwise set to 'false'." ) ; - comments.put(COMPUTE_CAUSING_AGENTS, "Set to 'true' if the noise damages should be traced back and a causing agent should be identified. Otherwise set to 'false'." ) ; - comments.put(THROW_NOISE_EVENTS_CAUSED, "Set to 'true' if noise events (providing information about the causing agent) should be thrown. Otherwise set to 'false'." ) ; - comments.put(COMPUTE_POPULATION_UNITS, "Set to 'true' if population densities should be computed. Otherwise set to 'false'." ) ; - comments.put(INTERNALIZE_NOISE_DAMAGES, "Set to 'true' if money events should be thrown based on the caused noise damages. Otherwise set to 'false'." ) ; + comments.put(ANNUAL_COST_RATE, "annual noise cost rate [in EUR per exposed pulation unit]; following the German EWS approach"); + comments.put(TIME_BIN_SIZE_NOISE_COMPUTATION, "Specifies the temporal resolution, i.e. the time bin size [in seconds] to compute noise levels."); + comments.put(SCALE_FACTOR, "Set to '1.' for a 100 percent sample size. Set to '10.' for a 10 percent sample size. Set to '100.' for a 1 percent sample size."); + comments.put(RELEVANT_RADIUS, "Specifies the radius [in coordinate units] around each receiver point links are taken into account."); + comments.put(TUNNEL_LINK_ID_FILE, "Specifies a csv file which contains all tunnel link IDs."); + comments.put(TUNNEL_LINK_IDS, "Specifies the tunnel link IDs. Will be ignored in case a the tunnel link IDs are provided as file (see parameter 'tunnelLinkIdFile')."); + + comments.put(WRITE_OUTPUT_ITERATION, WRITE_OUTPUT_ITERATION_CMT); + comments.put(USE_ACTUAL_SPEED_LEVEL, "Set to 'true' if the actual speed level should be used to compute noise levels. Set to 'false' if the freespeed level should be used to compute noise levels."); + comments.put(ALLOW_FOR_SPEEDS_OUTSIDE_THE_VALID_RANGE, "Set to 'true' if speed levels below 30 km/h or above 80 km/h (HGV) / 130 km/h (car) should be used to compute noise levels. Set to 'false' if speed levels outside of the valid range should not be used to compute noise levels (recommended)."); + + comments.put(THROW_NOISE_EVENTS_AFFECTED, "Set to 'true' if noise events (providing information about the affected agent) should be thrown. Otherwise set to 'false'."); + comments.put(COMPUTE_NOISE_DAMAGES, "Set to 'true' if noise damages should be computed. Otherwise set to 'false'."); + comments.put(COMPUTE_CAUSING_AGENTS, "Set to 'true' if the noise damages should be traced back and a causing agent should be identified. Otherwise set to 'false'."); + comments.put(THROW_NOISE_EVENTS_CAUSED, "Set to 'true' if noise events (providing information about the causing agent) should be thrown. Otherwise set to 'false'."); + comments.put(COMPUTE_POPULATION_UNITS, "Set to 'true' if population densities should be computed. Otherwise set to 'false'."); + comments.put(INTERNALIZE_NOISE_DAMAGES, "Set to 'true' if money events should be thrown based on the caused noise damages. 
Otherwise set to 'false'."); comments.put(COMPUTE_AVG_NOISE_COST_PER_LINK_AND_TIME, "Set to 'true' if average noise cost per link and time bin should be computed (required by the default noise travel distutility uesed for routing)." - + "Set to 'false' if you use your own statistics for your own travel disutility." ) ; + + "Set to 'false' if you use your own statistics for your own travel disutility."); - comments.put(HGV_ID_PREFIXES, "Specifies the HGV (heavy goods vehicles, trucks) ID prefix." ) ; - comments.put(BUS_ID_IDENTIFIER, "Specifies the public transit vehicle ID identifiers. Buses are treated as HGV, other public transit vehicles are neglected." ) ; + comments.put(HGV_ID_PREFIXES, "Specifies the HGV (heavy goods vehicles, trucks) ID prefix."); + comments.put(BUS_ID_IDENTIFIER, "Specifies the public transit vehicle ID identifiers. Buses are treated as HGV, other public transit vehicles are neglected."); - comments.put(NOISE_TOLL_FACTOR, "To be used for sensitivity analysis. Default: 1.0 (= the parameter has no effect)" ) ; + comments.put(NOISE_TOLL_FACTOR, "To be used for sensitivity analysis. Default: 1.0 (= the parameter has no effect)"); comments.put(CONSIDER_NOISE_BARRIERS, "Set to 'true' if noise barriers / building shielding should be considered. Otherwise set to 'false'."); - comments.put(NOISE_BARRIERS_GEOJSON_FILE, "Path to the geojson file for noise barriers."); - comments.put(NOISE_BARRIERS_SOURCE_CRS, "Source coordinate reference system of noise barriers geojson file."); + comments.put(NOISE_BARRIERS_GEOJSON_FILE, "Path to the geojson file for noise barriers."); + comments.put(NOISE_BARRIERS_SOURCE_CRS, "Source coordinate reference system of noise barriers geojson file."); comments.put(USE_DEM, "Set to 'true' if a DEM (digital elevation model) should be used for road gradients. Otherwise set to 'false'."); comments.put(DEM_FILE, "Path to the geoTiff file of the DEM."); comments.put(NETWORK_MODES_TO_IGNORE, "Specifies the network modes to be excluded from the noise computation, e.g. 'bike'."); - comments.put(NOISE_COMPUTATION_METHOD, "Specifies the computation method of different guidelines: " + Arrays.toString(NoiseComputationMethod.values())); + comments.put(NOISE_COMPUTATION_METHOD, "Specifies the computation method of different guidelines: " + Arrays.toString(NoiseComputationMethod.values())); return comments; } // ######################################################################################################## - + @Override protected void checkConsistency(Config config) { this.checkGridParametersForConsistency(); this.checkNoiseParametersForConsistency(config); - if ( Math.abs( this.getAnnualCostRate() - annualCostRateEws ) < 0.001 ) { - log.warn( "you are using old EWS noise annual cost rates; please go through https://ec.europa" + - ".eu/transport/themes/sustainable/studies/sustainable_en to find mor modern values. kai/Ihab, dec'19" ); + if (Math.abs(this.getAnnualCostRate() - annualCostRateEws) < 0.001) { + log.warn("you are using old EWS noise annual cost rates; please go through https://ec.europa" + + ".eu/transport/themes/sustainable/studies/sustainable_en to find mor modern values. 
kai/Ihab, dec'19"); } } - + private void checkGridParametersForConsistency() { - + List consideredActivitiesForReceiverPointGridList = new ArrayList<>(); List consideredActivitiesForDamagesList = new ArrayList<>(); Collections.addAll(consideredActivitiesForDamagesList, consideredActivitiesForDamageCalculation); consideredActivitiesForReceiverPointGridList.addAll(Arrays.asList(consideredActivitiesForReceiverPointGrid)); - + if (this.receiverPointGap == 0.) { throw new RuntimeException("The receiver point gap is 0. Aborting..."); } - + if (consideredActivitiesForReceiverPointGridList.size() == 0 && this.receiverPointsGridMinX == 0. && this.receiverPointsGridMinY == 0. && this.receiverPointsGridMaxX == 0. && receiverPointsGridMaxY == 0.) { throw new RuntimeException("NEITHER providing a considered activity type for the minimum and maximum coordinates of the receiver point grid area " - + "NOR providing receiver point grid minimum and maximum coordinates. Aborting..."); + + "NOR providing receiver point grid minimum and maximum coordinates. Aborting..."); } } private void checkNoiseParametersForConsistency(Config config) { - + if (this.internalizeNoiseDamages) { - + // required for internalization if (this.computeCausingAgents == false) { log.warn("Inconsistent parameters will be adjusted:"); this.setComputeCausingAgents(true); } - + // required for internalization, i.e. the scoring if (this.throwNoiseEventsCaused == false) { log.warn("Inconsistent parameters will be adjusted:"); this.setThrowNoiseEventsCaused(true); } - + } - - if (this.computeCausingAgents - || this.internalizeNoiseDamages - || this.throwNoiseEventsAffected - || this.throwNoiseEventsCaused - ) { - + + if (this.computeCausingAgents + || this.internalizeNoiseDamages + || this.throwNoiseEventsAffected + || this.throwNoiseEventsCaused + ) { + // required if (this.computeNoiseDamages == false) { log.warn("Inconsistent parameters will be adjusted:"); this.setComputeNoiseDamages(true); } - + if (this.computePopulationUnits == false) { log.warn("Inconsistent parameters will be adjusted:"); this.setComputePopulationUnits(true); } } - + if (this.computeNoiseDamages) { - - // required + + // required if (this.computePopulationUnits == false) { log.warn("Inconsistent parameters will be adjusted:"); this.setComputePopulationUnits(true); } } - + if (this.throwNoiseEventsCaused) { - + // required if (this.computeCausingAgents == false) { log.warn("Inconsistent parameters will be adjusted:"); this.setComputeCausingAgents(true); } } - + if (this.useActualSpeedLevel && this.allowForSpeedsOutsideTheValidRange) { log.warn("Using the actual vehicle speeds for the noise computation may result in very low speed levels due to congestion." - + " The RLS computation approach defines a range of valid speed levels: for cars: 30-130 km/h; for HGV: 30-80 km/h." - + " 20 km/h or 10 km/h may still result in an 'okay' estimate of the traffic noise. However, 1 km/h or lower speeds will definitly make no sense." - + " It is therefore recommended not to use speeds outside of the range of valid parameters!"); + + " The RLS computation approach defines a range of valid speed levels: for cars: 30-130 km/h; for HGV: 30-80 km/h." + + " 20 km/h or 10 km/h may still result in an 'okay' estimate of the traffic noise. However, 1 km/h or lower speeds will definitly make no sense." 
+ + " It is therefore recommended not to use speeds outside of the range of valid parameters!"); } - if(this.considerNoiseBarriers) { - if(this.noiseBarriersFilePath == null || "".equals(this.noiseBarriersFilePath)) { - log.warn("Cannot consider noise barriers without a specified file path to the geojson file of barriers / buildings."); - this.considerNoiseBarriers = false; - } - } + if (this.considerNoiseBarriers) { + if (this.noiseBarriersFilePath == null || "".equals(this.noiseBarriersFilePath)) { + log.warn("Cannot consider noise barriers without a specified file path to the geojson file of barriers / buildings."); + this.considerNoiseBarriers = false; + } + } } // ######################################################################################################## @@ -321,7 +321,7 @@ private void checkNoiseParametersForConsistency(Config config) { public double getReceiverPointGap() { return receiverPointGap; } - + /** * @param receiverPointGap -- {@value #RECEIVER_POINT_GAP_CMT} */ @@ -384,10 +384,10 @@ public void setConsideredActivitiesForReceiverPointGridArray(String[] considered for (int i = 0; i < consideredActivitiesForReceiverPointGrid.length; i++) { log.info(consideredActivitiesForReceiverPointGrid[i]); } - this.consideredActivitiesForReceiverPointGrid = consideredActivitiesForReceiverPointGrid; + this.consideredActivitiesForReceiverPointGrid = consideredActivitiesForReceiverPointGrid; } - - public String[] getConsideredActivitiesForDamageCalculationArray() { + + public String[] getConsideredActivitiesForDamageCalculationArray() { return consideredActivitiesForDamageCalculation; } @@ -409,18 +409,18 @@ public void setConsideredActivitiesForReceiverPointGrid(String consideredActivit this.setConsideredActivitiesForReceiverPointGridArray(CollectionUtils.stringToArray(consideredActivitiesForReceiverPointGridString)); } - @StringGetter( CONSIDERED_ACTIVITIES_FOR_DAMAGE_CALCULATION ) + @StringGetter(CONSIDERED_ACTIVITIES_FOR_DAMAGE_CALCULATION) public String getConsideredActivitiesForDamageCalculation() { return CollectionUtils.arrayToString(consideredActivitiesForDamageCalculation); } - @StringSetter( CONSIDERED_ACTIVITIES_FOR_DAMAGE_CALCULATION ) - public void setConsideredActivitiesForDamageCalculation(String consideredActivitiesForSpatialFunctionalityString) { + @StringSetter(CONSIDERED_ACTIVITIES_FOR_DAMAGE_CALCULATION) + public void setConsideredActivitiesForDamageCalculation(String consideredActivitiesForSpatialFunctionalityString) { this.setConsideredActivitiesForDamageCalculationArray(CollectionUtils.stringToArray(consideredActivitiesForSpatialFunctionalityString)); } - + // ### - + @StringGetter(THROW_NOISE_EVENTS_AFFECTED) public boolean isThrowNoiseEventsAffected() { return throwNoiseEventsAffected; @@ -447,53 +447,53 @@ public void setThrowNoiseEventsCaused(boolean throwNoiseEventsCaused) { public boolean isComputeCausingAgents() { return computeCausingAgents; } - + @StringSetter(COMPUTE_CAUSING_AGENTS) public void setComputeCausingAgents(boolean computeCausingAgents) { log.info("Allocating the noise damages to the causing agents: " + computeCausingAgents); this.computeCausingAgents = computeCausingAgents; } - - @StringSetter( ANNUAL_COST_RATE ) + + @StringSetter(ANNUAL_COST_RATE) public void setAnnualCostRate(double annualCostRate) { log.info("setting the annual cost rate to " + annualCostRate); this.annualCostRate = annualCostRate; } - @StringSetter( TIME_BIN_SIZE_NOISE_COMPUTATION ) + @StringSetter(TIME_BIN_SIZE_NOISE_COMPUTATION) public void 
setTimeBinSizeNoiseComputation(double timeBinSizeNoiseComputation) { log.info("setting the time bin size for the computation of noise to " + timeBinSizeNoiseComputation); this.timeBinSizeNoiseComputation = timeBinSizeNoiseComputation; } - @StringSetter( SCALE_FACTOR ) + @StringSetter(SCALE_FACTOR) public void setScaleFactor(double scaleFactor) { log.info("setting the scale factor to " + scaleFactor); this.scaleFactor = scaleFactor; } - @StringSetter( RELEVANT_RADIUS ) + @StringSetter(RELEVANT_RADIUS) public void setRelevantRadius(double relevantRadius) { log.info("setting the radius of relevant links around each receiver point to " + relevantRadius); this.relevantRadius = relevantRadius; } - - @StringGetter( ANNUAL_COST_RATE ) + + @StringGetter(ANNUAL_COST_RATE) public double getAnnualCostRate() { return annualCostRate; } - - @StringGetter( TIME_BIN_SIZE_NOISE_COMPUTATION ) + + @StringGetter(TIME_BIN_SIZE_NOISE_COMPUTATION) public double getTimeBinSizeNoiseComputation() { return timeBinSizeNoiseComputation; } - - @StringGetter( SCALE_FACTOR ) + + @StringGetter(SCALE_FACTOR) public double getScaleFactor() { return scaleFactor; } - - @StringGetter( RELEVANT_RADIUS ) + + @StringGetter(RELEVANT_RADIUS) public double getRelevantRadius() { return relevantRadius; } @@ -535,11 +535,11 @@ public void setNoiseAllocationApproach(NoiseAllocationApproach noiseAllocationAp public int getWriteOutputIteration() { return writeOutputIteration; } - + /** * @param writeOutputIteration -- {@value #WRITE_OUTPUT_ITERATION_CMT} */ - @StringSetter( WRITE_OUTPUT_ITERATION ) + @StringSetter(WRITE_OUTPUT_ITERATION) public void setWriteOutputIteration(int writeOutputIteration) { log.info("Writing output every " + writeOutputIteration + " iteration."); this.writeOutputIteration = writeOutputIteration; @@ -550,7 +550,7 @@ public void setTunnelLinkIdFile(String tunnelLinkIdFile) { log.info("setting file which contains the tunnel link Ids to " + tunnelLinkIdFile + "."); this.tunnelLinkIdFile = tunnelLinkIdFile; } - + @StringGetter(TUNNEL_LINK_ID_FILE) public String getTunnelLinkIdFile() { return tunnelLinkIdFile; @@ -560,7 +560,7 @@ public String getTunnelLinkIdFile() { public boolean isUseActualSpeedLevel() { return useActualSpeedLevel; } - + @StringSetter(USE_ACTUAL_SPEED_LEVEL) public void setUseActualSpeedLevel(boolean useActualSpeedLevel) { log.info("Using the actual speed level for noise calculation: " + useActualSpeedLevel); @@ -582,7 +582,7 @@ public void setComputePopulationUnits(boolean computePopulationUnits) { public boolean isAllowForSpeedsOutsideTheValidRange() { return allowForSpeedsOutsideTheValidRange; } - + @StringSetter(ALLOW_FOR_SPEEDS_OUTSIDE_THE_VALID_RANGE) public void setAllowForSpeedsOutsideTheValidRange(boolean allowForSpeedsOutsideTheValidRange) { log.info("Allowing for speeds above or below the valid range (cars: 30-130 km/h; HGV: 30-80 km/h): " + allowForSpeedsOutsideTheValidRange); @@ -590,14 +590,14 @@ public void setAllowForSpeedsOutsideTheValidRange(boolean allowForSpeedsOutsideT } // ####### - + @StringGetter(HGV_ID_PREFIXES) private String getHgvIdPrefixes() { return CollectionUtils.arrayToString(hgvIdPrefixes); } @StringSetter(HGV_ID_PREFIXES) - public void setHgvIdPrefixes(String hgvIdPrefixes) { + public void setHgvIdPrefixes(String hgvIdPrefixes) { this.setHgvIdPrefixesArray(CollectionUtils.stringToArray(hgvIdPrefixes)); } @@ -607,26 +607,26 @@ private String getTunnelLinkIDs() { } @StringSetter(TUNNEL_LINK_IDS) - public void setTunnelLinkIDs(String tunnelLinkIDs) { + public void 
setTunnelLinkIDs(String tunnelLinkIDs) { this.setTunnelLinkIDsSet(stringToLinkIdSet(tunnelLinkIDs)); } - + @StringGetter(RECEIVER_POINTS_CSV_FILE) public String getReceiverPointsCSVFile() { return receiverPointsCSVFile; } - @StringSetter( RECEIVER_POINTS_CSV_FILE ) + @StringSetter(RECEIVER_POINTS_CSV_FILE) public void setReceiverPointsCSVFile(String receiverPointsGridCSVFile) { this.receiverPointsCSVFile = receiverPointsGridCSVFile; } - - @StringGetter( RECEIVER_POINTS_CSV_FILE_COORDINATE_SYSTEM ) + + @StringGetter(RECEIVER_POINTS_CSV_FILE_COORDINATE_SYSTEM) public String getReceiverPointsCSVFileCoordinateSystem() { return receiverPointsCSVFileCoordinateSystem; } - @StringSetter( RECEIVER_POINTS_CSV_FILE_COORDINATE_SYSTEM ) + @StringSetter(RECEIVER_POINTS_CSV_FILE_COORDINATE_SYSTEM) public void setReceiverPointsCSVFileCoordinateSystem(String receiverPointsCSVFileCoordinateSystem) { this.receiverPointsCSVFileCoordinateSystem = receiverPointsCSVFileCoordinateSystem; } @@ -640,7 +640,7 @@ public void setTunnelLinkIDsSet(Set> tunnelLinkIDs) { log.info("setting tunnel link IDs to " + tunnelLinkIDs.toString()); this.tunnelLinkIDs = tunnelLinkIDs; } - + public String[] getHgvIdPrefixesArray() { return hgvIdPrefixes; } @@ -648,17 +648,17 @@ public String[] getHgvIdPrefixesArray() { public Set> getTunnelLinkIDsSet() { return tunnelLinkIDs; } - + @StringGetter(BUS_ID_IDENTIFIER) private String getBusIdPrefixes() { return CollectionUtils.setToString(busIdIdentifier); } @StringSetter(BUS_ID_IDENTIFIER) - public void setBusIdIdentifiers(String busIdPrefixes) { + public void setBusIdIdentifiers(String busIdPrefixes) { this.setBusIdIdentifierSet(CollectionUtils.stringToSet(busIdPrefixes)); } - + public Set getBusIdIdentifierSet() { return busIdIdentifier; } @@ -667,13 +667,13 @@ public void setBusIdIdentifierSet(Set busIdPrefixes) { log.info("setting the bus Id identifiers to : " + busIdPrefixes.toString()); this.busIdIdentifier = busIdPrefixes; } - + @StringGetter(NETWORK_MODES_TO_IGNORE) public String getNetworkModesToIgnore() { return CollectionUtils.setToString(networkModesToIgnore); } - @StringSetter(NETWORK_MODES_TO_IGNORE) + @StringSetter(NETWORK_MODES_TO_IGNORE) public void setNetworkModesToIgnore(String networkModesToIgnore) { this.setNetworkModesToIgnoreSet(CollectionUtils.stringToSet(networkModesToIgnore)); } @@ -686,8 +686,8 @@ public void setNetworkModesToIgnoreSet(Set networkModesToIgnore) { log.info("setting the network modes to ignore to : " + networkModesToIgnore.toString()); this.networkModesToIgnore = networkModesToIgnore; } - - private String linkIdSetToString (Set> linkIds) { + + private String linkIdSetToString(Set> linkIds) { String linkIdsString = null; boolean first = true; for (Id id : linkIds) { @@ -700,7 +700,7 @@ private String linkIdSetToString (Set> linkIds) { } return linkIdsString; } - + private Set> stringToLinkIdSet(String linkIds) { if (linkIds == null) { return Collections.emptySet(); @@ -750,15 +750,15 @@ public void setConsiderNoiseBarriers(boolean considerNoiseBarriers) { this.considerNoiseBarriers = considerNoiseBarriers; } - @StringGetter(NOISE_BARRIERS_GEOJSON_FILE) - public String getNoiseBarriersFilePath() { - return this.noiseBarriersFilePath; - } + @StringGetter(NOISE_BARRIERS_GEOJSON_FILE) + public String getNoiseBarriersFilePath() { + return this.noiseBarriersFilePath; + } - @StringSetter(NOISE_BARRIERS_GEOJSON_FILE) - public void setNoiseBarriersFilePath(String noiseBarriersFilePath) { - this.noiseBarriersFilePath = noiseBarriersFilePath; - } + 
@StringSetter(NOISE_BARRIERS_GEOJSON_FILE) + public void setNoiseBarriersFilePath(String noiseBarriersFilePath) { + this.noiseBarriersFilePath = noiseBarriersFilePath; + } @StringGetter(USE_DEM) public boolean isUseDEM() { @@ -780,17 +780,17 @@ public void setDEMFilePath(String demFilePath) { this.demFile = demFilePath; } - @StringGetter(NOISE_BARRIERS_SOURCE_CRS) - public String getNoiseBarriersSourceCRS() { - return this.noiseBarriersSourceCrs; - } + @StringGetter(NOISE_BARRIERS_SOURCE_CRS) + public String getNoiseBarriersSourceCRS() { + return this.noiseBarriersSourceCrs; + } - @StringSetter(NOISE_BARRIERS_SOURCE_CRS) - public void setNoiseBarriersSourceCRS(String noiseBarriersSourceCrs) { - this.noiseBarriersSourceCrs = noiseBarriersSourceCrs; - } + @StringSetter(NOISE_BARRIERS_SOURCE_CRS) + public void setNoiseBarriersSourceCRS(String noiseBarriersSourceCrs) { + this.noiseBarriersSourceCrs = noiseBarriersSourceCrs; + } - @StringGetter(NOISE_COMPUTATION_METHOD) + @StringGetter(NOISE_COMPUTATION_METHOD) public NoiseComputationMethod getNoiseComputationMethod() { return this.noiseComputationMethod; } diff --git a/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseOfflineCalculation.java b/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseOfflineCalculation.java index 8f54b37c270..a167572a846 100644 --- a/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseOfflineCalculation.java +++ b/contribs/noise/src/main/java/org/matsim/contrib/noise/NoiseOfflineCalculation.java @@ -126,7 +126,7 @@ public void run() { if (this.scenario.getConfig().controller().getRunId() == null || this.scenario.getConfig().controller().getRunId().equals("")) { eventsFile = this.scenario.getConfig().controller().getOutputDirectory() + "output_events.xml.gz"; } else { - eventsFile = this.scenario.getConfig().controller().getOutputDirectory() + this.scenario.getConfig().controller().getRunId() + ".output_events.xml.gz"; + eventsFile = this.scenario.getConfig().controller().getOutputDirectory() + "/" + this.scenario.getConfig().controller().getRunId() + ".output_events.xml.gz"; } reader.readFile(eventsFile); log.info("Reading events file... 
Done."); diff --git a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/DefaultDashboardProvider.java b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/DefaultDashboardProvider.java index 144a2c27c13..d7befe82fc0 100644 --- a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/DefaultDashboardProvider.java +++ b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/DefaultDashboardProvider.java @@ -1,6 +1,7 @@ package org.matsim.simwrapper; import org.matsim.contrib.emissions.utils.EmissionsConfigGroup; +import org.matsim.contrib.noise.NoiseConfigGroup; import org.matsim.core.config.Config; import org.matsim.core.config.ConfigUtils; import org.matsim.simwrapper.dashboard.*; @@ -30,6 +31,10 @@ public List getDashboards(Config config, SimWrapper simWrapper) { result.add(new EmissionsDashboard()); } + if (ConfigUtils.hasModule(config, NoiseConfigGroup.class)) { + result.add(new NoiseDashboard()); + } + return result; } diff --git a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/EmissionsDashboard.java b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/EmissionsDashboard.java index 2a8d0d12e60..9f167447922 100644 --- a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/EmissionsDashboard.java +++ b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/EmissionsDashboard.java @@ -57,6 +57,9 @@ public void configure(Header header, Layout layout) { viz.opacity = 0.2; viz.maxHeight = 100; viz.projection = "EPSG:25832"; + viz.zoom = data.context().mapZoomLevel; + viz.center = data.context().getCenter(); + viz.setColorRamp("greenRed", 10, false); viz.file = data.compute(AirPollutionAnalysis.class, "emissions_grid_per_day.csv"); }); @@ -70,6 +73,9 @@ public void configure(Header header, Layout layout) { viz.opacity = 0.2; viz.maxHeight = 100; viz.projection = "EPSG:25832"; + viz.zoom = data.context().mapZoomLevel; + viz.center = data.context().getCenter(); + viz.setColorRamp("greenRed", 10, false); viz.file = data.compute(AirPollutionAnalysis.class, "emissions_grid_per_hour.csv"); }); diff --git a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/NoiseDashboard.java b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/NoiseDashboard.java new file mode 100644 index 00000000000..2d2f3a5070b --- /dev/null +++ b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/dashboard/NoiseDashboard.java @@ -0,0 +1,82 @@ +package org.matsim.simwrapper.dashboard; + +import org.matsim.application.analysis.noise.NoiseAnalysis; +import org.matsim.application.prepare.network.CreateGeoJsonNetwork; +import org.matsim.simwrapper.Dashboard; +import org.matsim.simwrapper.Header; +import org.matsim.simwrapper.Layout; +import org.matsim.simwrapper.viz.ColorScheme; +import org.matsim.simwrapper.viz.GridMap; +import org.matsim.simwrapper.viz.MapPlot; + +/** + * Shows emission in the scenario. + */ +public class NoiseDashboard implements Dashboard { + + private double minDb = 40; + private double maxDb = 80; + + /** + * Set the min and max values for the noise map. 
+ */ + public NoiseDashboard withMinMaxDb(double minDb, double maxDb) { + this.minDb = minDb; + this.maxDb = maxDb; + return this; + } + + @Override + public void configure(Header header, Layout layout) { + + header.title = "Noise"; + header.description = "Shows the noise footprint and spatial distribution."; + + layout.row("aggregate noise") + .el(GridMap.class, (viz, data) -> { + viz.title = "Noise Immissions (Grid)"; + viz.description = "Aggregate Noise Immissions per day"; + viz.height = 12.0; + viz.cellSize = 250; + viz.opacity = 0.2; + viz.maxHeight = 20; + viz.center = data.context().getCenter(); + viz.zoom = data.context().mapZoomLevel; + viz.setColorRamp(new double[]{40, 50, 60}, new String[]{"#1175b3", "#95c7df", "#f4a986", "#cc0c27"}); + viz.file = data.computeWithPlaceholder(NoiseAnalysis.class, "immission_per_day.%s", "avro"); + }) + .el(MapPlot.class, (viz, data) -> { + viz.title = "Noise Emissions (Link)"; + viz.description = "Aggregate Noise Emissions per day"; + viz.height = 12.0; + viz.center = data.context().getCenter(); + viz.zoom = data.context().mapZoomLevel; + viz.minValue = minDb; + viz.maxValue = maxDb; + viz.setShape(data.compute(CreateGeoJsonNetwork.class, "network.geojson", "--with-properties"), "id"); + viz.addDataset("noise", data.compute(NoiseAnalysis.class, "emission_per_day.csv")); + viz.display.lineColor.dataset = "noise"; + viz.display.lineColor.columnName = "value"; + viz.display.lineColor.join = "Link Id"; + viz.display.lineColor.fixedColors = new String[]{"#1175b3", "#95c7df", "#f4a986", "#cc0c27"}; + viz.display.lineColor.setColorRamp(ColorScheme.RdYlBu, 4, true, "45, 55, 65"); + viz.display.lineWidth.dataset = "noise"; + viz.display.lineWidth.columnName = "value"; + viz.display.lineWidth.scaleFactor = 8d; + viz.display.lineWidth.join = "Link Id"; + }); + layout.row("hourly noise") + .el(GridMap.class, (viz, data) -> { + viz.title = "Hourly Noise Immissions (Grid)"; + viz.description = "Noise Immissions per hour"; + viz.height = 12.0; + viz.cellSize = 250; + viz.opacity = 0.2; + viz.maxHeight = 20; + viz.center = data.context().getCenter(); + viz.zoom = data.context().mapZoomLevel; + viz.setColorRamp(new double[]{40, 50, 60}, new String[]{"#1175b3", "#95c7df", "#f4a986", "#cc0c27"}); + viz.file = data.computeWithPlaceholder(NoiseAnalysis.class, "immission_per_hour.%s", "avro"); + }); + } +} diff --git a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/GridMap.java b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/GridMap.java index cdb56165c69..08e19a3bd37 100644 --- a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/GridMap.java +++ b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/GridMap.java @@ -9,6 +9,18 @@ */ public class GridMap extends Viz { + /** + * The zoom level of the map. + */ + @JsonProperty(required = false) + public Double zoom; + + /** + * The center of the map. + */ + @JsonProperty(required = false) + public double[] center; + /** * The filepath containing the data. */ @@ -39,6 +51,12 @@ public class GridMap extends Viz { @JsonProperty(required = false) public Integer maxHeight; + /** + * The valueColumn defines the column with the values. The default value is `value`. + */ + @JsonProperty(required = false) + public String valueColumn; + private Map colorRamp; public GridMap() { @@ -56,6 +74,11 @@ public GridMap setColorRamp(String ramp) { /** * Sets the full color ramps settings. 
*/ + public GridMap setColorRamp(double[] breakpoints, String[] colors) { + colorRamp = Map.of("breakpoints", breakpoints, "fixedColors", colors); + return this; + } + public GridMap setColorRamp(String ramp, int steps, boolean reverse) { colorRamp = Map.of("ramp", ramp, "reverse", reverse, "steps", steps); return this; diff --git a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/MapPlot.java b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/MapPlot.java index d5255a0227f..ac675cc6381 100644 --- a/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/MapPlot.java +++ b/contribs/simwrapper/src/main/java/org/matsim/simwrapper/viz/MapPlot.java @@ -14,6 +14,8 @@ public final class MapPlot extends Viz { public Double zoom; public Display display = new Display(); + public Double minValue; + public Double maxValue; @JsonProperty(required = true) private Object shapes; private Map datasets = new HashMap<>(); @@ -79,6 +81,8 @@ public static final class DisplaySettings { public String join; public Double scaleFactor; + @JsonProperty() + public String[] fixedColors; private Map colorRamp; @@ -93,6 +97,11 @@ public DisplaySettings setColorRamp(String ramp) { /** * Sets the full color ramps settings. */ + public DisplaySettings setColorRamp(String ramp, int steps, boolean reverse, String breakpoints) { + colorRamp = Map.of("ramp", ramp, "reverse", reverse, "steps", steps, "breakpoints", breakpoints); + return this; + } + public DisplaySettings setColorRamp(String ramp, int steps, boolean reverse) { colorRamp = Map.of("ramp", ramp, "reverse", reverse, "steps", steps); return this; diff --git a/contribs/simwrapper/src/test/java/org/matsim/simwrapper/dashboard/NoiseDashboardTests.java b/contribs/simwrapper/src/test/java/org/matsim/simwrapper/dashboard/NoiseDashboardTests.java new file mode 100644 index 00000000000..58c3182b5ba --- /dev/null +++ b/contribs/simwrapper/src/test/java/org/matsim/simwrapper/dashboard/NoiseDashboardTests.java @@ -0,0 +1,44 @@ +package org.matsim.simwrapper.dashboard; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.matsim.application.MATSimApplication; +import org.matsim.core.config.Config; +import org.matsim.core.config.ConfigUtils; +import org.matsim.core.controler.Controler; +import org.matsim.core.utils.io.IOUtils; +import org.matsim.examples.ExamplesUtils; +import org.matsim.simwrapper.SimWrapper; +import org.matsim.simwrapper.SimWrapperConfigGroup; +import org.matsim.simwrapper.TestScenario; +import org.matsim.testcases.MatsimTestUtils; + + +import java.net.URL; + +public class NoiseDashboardTests { + + + @RegisterExtension + private MatsimTestUtils utils = new MatsimTestUtils(); + + @Test + void generate() { + Config config = TestScenario.loadConfig(utils); + + config.global().setCoordinateSystem("EPSG:25832"); + + SimWrapperConfigGroup simWrapperConfigGroup = ConfigUtils.addOrGetModule(config, SimWrapperConfigGroup.class); + + URL kelheim = ExamplesUtils.getTestScenarioURL("kelheim"); + + simWrapperConfigGroup.defaultParams().shp = IOUtils.extendUrl(kelheim, "area/area.shp").toString(); + + + SimWrapper sw = SimWrapper.create(config).addDashboard(new NoiseDashboard()); + Controler controler = MATSimApplication.prepare(new TestScenario(sw), config); + + + controler.run(); + } +}
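Usage sketch (not part of this patch): besides the automatic registration in DefaultDashboardProvider when a NoiseConfigGroup is present, the new dashboard can also be added explicitly with custom dB bounds via withMinMaxDb. The wiring below simply mirrors the generate() test above (TestScenario, MatsimTestUtils and the Kelheim example are the test fixtures used there); the 45/75 values are arbitrary illustration values, not defaults.

// Hedged sketch, following NoiseDashboardTests above.
Config config = TestScenario.loadConfig(utils);            // utils = MatsimTestUtils, as in the test
config.global().setCoordinateSystem("EPSG:25832");

// Add the dashboard explicitly with custom color bounds for the link emissions MapPlot
// (withMinMaxDb feeds viz.minValue / viz.maxValue of that panel).
SimWrapper sw = SimWrapper.create(config)
        .addDashboard(new NoiseDashboard().withMinMaxDb(45, 75));

Controler controler = MATSimApplication.prepare(new TestScenario(sw), config);
controler.run();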