Commit
Merge branch 'master' into scenario-cutout
Aleksander1234519 authored Oct 9, 2024
2 parents 39dda99 + c7df7e9 commit 8b3a831
Showing 94 changed files with 6,349 additions and 643 deletions.
@@ -181,7 +181,7 @@ private Config prepareConfig() {
config.transit().setTransitScheduleFile(null);
config.transit().setVehiclesFile(null);
config.plans().setInputFile(ApplicationUtils.matchInput("plans", input.getRunDirectory()).toAbsolutePath().toString());
config.facilities().setInputFile(null);
config.facilities().setInputFile(ApplicationUtils.matchInput("facilities", input.getRunDirectory()).toAbsolutePath().toString());
config.eventsManager().setNumberOfThreads(null);
config.eventsManager().setEstimatedNumberOfEvents(null);
//ts, aug '24: not sure if and why we need to set 1 thread
TripAnalysis.java
@@ -2,6 +2,8 @@

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.ints.IntSet;
import it.unimi.dsi.fastutil.objects.Object2IntLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2LongMap;
@@ -55,15 +57,15 @@ public class TripAnalysis implements MATSimAppCommand {
/**
* Attribute which relates this person to a reference person.
*/
public static String ATTR_REF_ID = "ref_id";
public static final String ATTR_REF_ID = "ref_id";
/**
* Person attribute that contains the reference modes of a person. Multiple modes are delimited by "-".
*/
public static String ATTR_REF_MODES = "ref_modes";
public static final String ATTR_REF_MODES = "ref_modes";
/**
* Person attribute containing its weight for analysis purposes.
*/
public static String ATTR_REF_WEIGHT = "ref_weight";
public static final String ATTR_REF_WEIGHT = "ref_weight";

@CommandLine.Mixin
private InputOptions input = InputOptions.ofCommand(TripAnalysis.class);
@@ -85,6 +87,10 @@ public class TripAnalysis implements MATSimAppCommand {
@CommandLine.Option(names = "--shp-filter", description = "Define how the shp file filtering should work", defaultValue = "home")
private LocationFilter filter;

@CommandLine.Option(names = "--person-filter", description = "Define which persons should be included into trip analysis. Map like: Attribute name (key), attribute value (value). " +
"The attribute needs to be contained by output_persons.csv. Persons who do not match all filters are filtered out.", split = ",")
private final Map<String, String> personFilters = new HashMap<>();

@CommandLine.Mixin
private ShpOptions shp;

@@ -146,6 +152,43 @@ public Integer call() throws Exception {
persons = persons.where(persons.textColumn("person").matchesRegex(matchId));
}

// filter persons according to person (attribute) filter
if (!personFilters.isEmpty()) {
IntSet generalFilteredRowIds = null;
for (Map.Entry<String, String> entry : personFilters.entrySet()) {
if (!persons.containsColumn(entry.getKey())) {
log.warn("Persons table does not contain column for filter attribute {}. Filter on {} will not be applied.", entry.getKey(), entry.getValue());
continue;
}
log.info("Using person filter for attribute {} and value {}", entry.getKey(), entry.getValue());

IntSet filteredRowIds = new IntOpenHashSet();

for (int i = 0; i < persons.rowCount(); i++) {
Row row = persons.row(i);
String value = row.getString(entry.getKey());
// remember this row if its attribute value matches the filter value
if (value.equals(entry.getValue())) {
filteredRowIds.add(i);
}
}

if (generalFilteredRowIds == null) {
// First filter: start with this filter's row ids
generalFilteredRowIds = filteredRowIds;
} else {
// Subsequent filters: keep only rows that also match this filter (intersection)
generalFilteredRowIds.retainAll(filteredRowIds);
}
}

if (generalFilteredRowIds != null) {
persons = persons.where(Selection.with(generalFilteredRowIds.intStream().toArray()));
}
}

log.info("Filtered {} out of {} persons", persons.rowCount(), total);

// Home filter by standard attribute
if (shp.isDefined() && filter == LocationFilter.home) {
Geometry geometry = shp.getGeometry();
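The new --person-filter option above is a picocli map option: each comma-separated key=value pair becomes a map entry, and a person is kept only if it matches every entry (the row-id sets of the individual filters are intersected). The following minimal, self-contained Java sketch mirrors that option declaration so the parsing can be tried in isolation; the demo class name and the attribute names/values are assumptions for illustration, not part of this commit.

import picocli.CommandLine;

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: mirrors the "--person-filter" declaration from TripAnalysis
// so the key=value parsing behaviour can be inspected on its own.
public class PersonFilterOptionDemo implements Runnable {

	@CommandLine.Option(names = "--person-filter", split = ",")
	private final Map<String, String> personFilters = new HashMap<>();

	public static void main(String[] args) {
		// attribute names and values are placeholders
		new CommandLine(new PersonFilterOptionDemo())
			.execute("--person-filter", "subpopulation=person,carAvail=always");
	}

	@Override
	public void run() {
		// prints each parsed filter entry on its own line
		personFilters.forEach((k, v) -> System.out.println(k + " = " + v));
	}
}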
CSVToAvroConverter.java (new file)
@@ -0,0 +1,139 @@
package org.matsim.application.avro;

import it.unimi.dsi.fastutil.objects.Object2FloatAVLTreeMap;
import it.unimi.dsi.fastutil.objects.Object2FloatSortedMap;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.matsim.core.utils.io.IOUtils;

import java.io.IOException;
import java.nio.file.Path;
import java.util.*;

public class CSVToAvroConverter {

public static void main(String[] args) throws IOException {
String projection = args.length > 2 ? args[2] : null;
String name = args.length > 3 ? args[3] : "Emissions";

XYTData avroData = readCSV(args[0], projection, name);
writeAvroFile(avroData, Path.of(args[1]));
}

/**
* Reads a CSV file, processes its data, and returns the corresponding Avro object.
*
* @param csvFilePath the path to the CSV file
* @param projection the projection (CRS)
* @param name        the name for the data series (default is "Emissions")
* @throws IOException if an error occurs during reading the file
*/
public static XYTData readCSV(String csvFilePath, String projection, String name) throws IOException {
List<CSVEntries> entries = new ArrayList<>();
List<Float> xCoords = new ArrayList<>();
List<Float> yCoords = new ArrayList<>();
List<Integer> timestamps = new ArrayList<>();
Object2FloatSortedMap<XYT> valuesMap = new Object2FloatAVLTreeMap<>(Comparator.comparing((XYT e) -> e.t)
.thenComparing(e -> e.x)
.thenComparing(e -> e.y));

try (CSVParser csvReader = new CSVParser(IOUtils.getBufferedReader(csvFilePath), CSVFormat.DEFAULT.builder()
.setCommentMarker('#').setSkipHeaderRecord(true).setHeader().build())) {

String comment = csvReader.getHeaderComment();

if (comment != null && (projection == null || projection.isEmpty())) {
projection = comment;
} else if (projection == null) {
projection = "";
}

for (CSVRecord record : csvReader) {
try {
int time = (int) Double.parseDouble(record.get(0));
float x = Float.parseFloat(record.get(1));
float y = Float.parseFloat(record.get(2));
float value = Float.parseFloat(record.get(3));

entries.add(new CSVEntries(time, x, y, value));

} catch (NumberFormatException e) {
System.out.println("Skipping invalid line: " + String.join(",", record));
}
}
}

// Sort entries by time -> x -> y
entries.sort(Comparator.comparing((CSVEntries e) -> e.time)
.thenComparing(e -> e.x)
.thenComparing(e -> e.y));

for (CSVEntries entry : entries) {
if (!xCoords.contains(entry.x)) {
xCoords.add(entry.x);
}
if (!yCoords.contains(entry.y)) {
yCoords.add(entry.y);
}
if (!timestamps.contains(entry.time)) {
timestamps.add(entry.time);
}

valuesMap.put(new XYT(entry.x, entry.y, entry.time), entry.value);
}

// Check if all combinations of x, y, and time exist
for (int time : timestamps) {
for (float x : xCoords) {
for (float y : yCoords) {
XYT key = new XYT(x, y, time);
if (!valuesMap.containsKey(key)) {
valuesMap.put(key, 0f);
}
}
}
}

// Create Avro data object
XYTData avroData = new XYTData();
avroData.setCrs(projection);
avroData.setXCoords(xCoords);
avroData.setYCoords(yCoords);
avroData.setTimestamps(timestamps);

List<Float> valuesList = new ArrayList<>(valuesMap.values());
Map<CharSequence, List<Float>> result = new HashMap<>();
result.put(name != null && !name.isEmpty() ? name : "Emissions", valuesList);

avroData.setData(result);

return avroData;
}

/**
* Writes the Avro data
*
* @param avroData the Avro data
* @param avroFile the path to the output Avro file
* @throws IOException if an error occurs during writing the file
*/
public static void writeAvroFile(XYTData avroData, Path avroFile) throws IOException {
DatumWriter<XYTData> datumWriter = new SpecificDatumWriter<>(XYTData.class);
try (DataFileWriter<XYTData> dataFileWriter = new DataFileWriter<>(datumWriter)) {
dataFileWriter.setCodec(CodecFactory.deflateCodec(9));
dataFileWriter.create(XYTData.getClassSchema(), avroFile.toFile());
dataFileWriter.append(avroData);
}
}

private record CSVEntries(int time, float x, float y, float value) {
}

private record XYT(float x, float y, float t) {
}
}
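A minimal usage sketch for the converter above, assuming it runs from the same org.matsim.application.avro package; the file names and the CRS string are placeholders, and the CSV is expected in the time,x,y,value column order read by readCSV.

package org.matsim.application.avro;

import java.io.IOException;
import java.nio.file.Path;

// Usage sketch (paths and CRS are placeholders): read a "time,x,y,value" CSV and
// write it out in the XYTData Avro layout produced by CSVToAvroConverter.
public class ConvertEmissionsCsvExample {

	public static void main(String[] args) throws IOException {
		XYTData data = CSVToAvroConverter.readCSV("emissions.csv", "EPSG:25832", "Emissions");
		CSVToAvroConverter.writeAvroFile(data, Path.of("emissions.avro"));
	}
}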
CleanPopulation.java
@@ -6,6 +6,7 @@
import org.matsim.api.core.v01.population.*;
import org.matsim.application.MATSimAppCommand;
import org.matsim.core.population.PopulationUtils;
import org.matsim.core.population.algorithms.PersonAlgorithm;
import org.matsim.core.population.algorithms.TripsToLegsAlgorithm;
import org.matsim.core.router.RoutingModeMainModeIdentifier;
import picocli.CommandLine;
@@ -24,7 +25,7 @@
mixinStandardHelpOptions = true,
showDefaultValues = true
)
public class CleanPopulation implements MATSimAppCommand {
public class CleanPopulation implements MATSimAppCommand, PersonAlgorithm {

private static final Logger log = LogManager.getLogger(CleanPopulation.class);

@@ -46,6 +47,10 @@ public class CleanPopulation implements MATSimAppCommand {
@CommandLine.Option(names = "--output", description = "Output file name", required = true)
private Path output;

// Using the analysis main mode identifier instead of the routing mode based one on purpose
// to be able to process older population files without any routing modes!
private final TripsToLegsAlgorithm trips2Legs = new TripsToLegsAlgorithm(new RoutingModeMainModeIdentifier());

public static void main(String[] args) {
System.exit(new CommandLine(new CleanPopulation()).execute(args));
}
@@ -63,43 +68,64 @@ public Integer call() throws Exception {
if (output.getParent() != null)
Files.createDirectories(output.getParent());

// Using the analysis main mode identifier instead of the routing mode based one on purpose
// to be able to process older population files without any routing modes!
TripsToLegsAlgorithm trips2Legs = new TripsToLegsAlgorithm(new RoutingModeMainModeIdentifier());

for (Person person : population.getPersons().values()) {
run(person);
}

PopulationUtils.writePopulation(population, output.toString());

return 0;
}

@Override
public void run(Person person) {
if (rmUnselected) {
removeUnselectedPlans(person);
}

if (rmUnselected) {
Plan selected = person.getSelectedPlan();
for (Plan plan : Lists.newArrayList(person.getPlans())) {
if (plan != selected)
person.removePlan(plan);
for (Plan plan : person.getPlans()) {
if (tripsToLegs)
trips2Legs.run(plan);

for (PlanElement el : plan.getPlanElements()) {
if (rmRoutes) {
removeRouteFromLeg(el);
}
}

for (Plan plan : person.getPlans()) {
if (tripsToLegs)
trips2Legs.run(plan);

for (PlanElement el : plan.getPlanElements()) {
if (rmRoutes) {
if (el instanceof Leg) {
((Leg) el).setRoute(null);
}
}

if (rmActivityLocations) {
if (el instanceof Activity) {
((Activity) el).setLinkId(null);
((Activity) el).setFacilityId(null);
}
}
if (rmActivityLocations) {
removeActivityLocation(el);
}
}
}
}

PopulationUtils.writePopulation(population, output.toString());
/**
* Remove link and facility information from activity.
*/
public static void removeActivityLocation(PlanElement el) {
if (el instanceof Activity act) {
act.setLinkId(null);
act.setFacilityId(null);
}
}

return 0;
/**
* Remove route information from leg.
*/
public static void removeRouteFromLeg(PlanElement el) {
if (el instanceof Leg leg) {
leg.setRoute(null);
}
}

/**
* Remove all unselected plans for given person.
*/
public static void removeUnselectedPlans(Person person) {
Plan selected = person.getSelectedPlan();
for (Plan plan : Lists.newArrayList(person.getPlans())) {
if (plan != selected)
person.removePlan(plan);
}
}
}
}
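Because CleanPopulation now implements PersonAlgorithm and exposes its cleaning steps as static helpers, the same logic can be reused outside the CLI, for example while iterating over an already loaded population. The sketch below is a hedged illustration: the input/output file names are placeholders and the import path of CleanPopulation is assumed from the usual application-contrib layout.

import org.matsim.api.core.v01.population.Person;
import org.matsim.api.core.v01.population.Population;
import org.matsim.application.prepare.population.CleanPopulation;
import org.matsim.core.population.PopulationUtils;

// Sketch only: apply the new static helpers person by person instead of running the command.
public class CleanPlansExample {

	public static void main(String[] args) {
		Population population = PopulationUtils.readPopulation("plans.xml.gz"); // placeholder path
		for (Person person : population.getPersons().values()) {
			CleanPopulation.removeUnselectedPlans(person);
			person.getPlans().forEach(plan ->
				plan.getPlanElements().forEach(el -> {
					CleanPopulation.removeRouteFromLeg(el);
					CleanPopulation.removeActivityLocation(el);
				}));
		}
		PopulationUtils.writePopulation(population, "plans-cleaned.xml.gz"); // placeholder path
	}
}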