Skip to content

Commit

Permalink
Apply changes by Black
Browse files Browse the repository at this point in the history
  • Loading branch information
evelyn9191 committed Mar 7, 2020
1 parent 44ca8d3 commit ca485c7
Show file tree
Hide file tree
Showing 95 changed files with 6,227 additions and 3,867 deletions.
20 changes: 8 additions & 12 deletions examples/example_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,24 +10,21 @@


# get elementary bus events (connections) taking place within a given time interval:
all_events = networks.temporal_network(g,
start_time_ut=start_ut,
end_time_ut=end_ut
)
all_events = networks.temporal_network(g, start_time_ut=start_ut, end_time_ut=end_ut)
print("Number of elementary PT events during rush hour in Kuopio: ", len(all_events))

# get elementary bus events (connections) taking place within a given time interval:
tram_events = networks.temporal_network(g,
start_time_ut=start_ut,
end_time_ut=end_ut,
route_type=route_types.TRAM
)
assert(len(tram_events) == 0) # there should be no trams in our example city (Kuopio, Finland)
tram_events = networks.temporal_network(
g, start_time_ut=start_ut, end_time_ut=end_ut, route_type=route_types.TRAM
)
assert len(tram_events) == 0 # there should be no trams in our example city (Kuopio, Finland)

# construct a networkx graph
print("\nConstructing a combined stop_to_stop_network")

graph = networks.combined_stop_to_stop_transit_network(g, start_time_ut=start_ut, end_time_ut=end_ut)
graph = networks.combined_stop_to_stop_transit_network(
g, start_time_ut=start_ut, end_time_ut=end_ut
)
print("Number of edges: ", len(graph.edges()))
print("Number of nodes: ", len(graph.nodes()))
print("Example edge: ", list(graph.edges(data=True))[0])
Expand All @@ -37,4 +34,3 @@
#################################################
# See also other functions in gtfspy.networks ! #
#################################################

16 changes: 12 additions & 4 deletions examples/example_filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,24 @@
# filter by time and 3 kilometers from the city center
week_start = G.get_weekly_extract_start_date()
week_end = week_start + datetime.timedelta(days=7)
fe = FilterExtract(G, filtered_database_path, start_date=week_start, end_date=week_end,
buffer_lat=62.8930796, buffer_lon=27.6671316, buffer_distance_km=3)
fe = FilterExtract(
G,
filtered_database_path,
start_date=week_start,
end_date=week_end,
buffer_lat=62.8930796,
buffer_lon=27.6671316,
buffer_distance_km=3,
)

fe.create_filtered_copy()
assert (os.path.exists(filtered_database_path))
assert os.path.exists(filtered_database_path)

G = GTFS(filtered_database_path)

# visualize the routes of the filtered database
from gtfspy import mapviz
from matplotlib import pyplot as plt

mapviz.plot_route_network_from_gtfs(G)
plt.show()
plt.show()
25 changes: 17 additions & 8 deletions examples/example_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,25 +7,31 @@

def load_or_import_example_gtfs(verbose=False):
imported_database_path = "test_db_kuopio.sqlite"
if not os.path.exists(imported_database_path): # reimport only if the imported database does not already exist
if not os.path.exists(
imported_database_path
): # reimport only if the imported database does not already exist
print("Importing gtfs zip file")
import_gtfs.import_gtfs(["data/gtfs_kuopio_finland.zip"], # input: list of GTFS zip files (or directories)
imported_database_path, # output: where to create the new sqlite3 database
print_progress=verbose, # whether to print progress when importing data
location_name="Kuopio")
import_gtfs.import_gtfs(
["data/gtfs_kuopio_finland.zip"], # input: list of GTFS zip files (or directories)
imported_database_path, # output: where to create the new sqlite3 database
print_progress=verbose, # whether to print progress when importing data
location_name="Kuopio",
)

# Note: this is an optional step, which is not necessary for many things.
print("Computing walking paths using OSM")
G = gtfs.GTFS(imported_database_path)
G.meta['download_date'] = "2017-03-15"
G.meta["download_date"] = "2017-03-15"

osm_path = "data/kuopio_extract_mapzen_2017_03_15.osm.pbf"

# when using with the Kuopio test data set,
# this should raise a warning due to no nearby OSM nodes for one of the stops.
osm_transfers.add_walk_distances_to_db_python(imported_database_path, osm_path)

print("Note: for large cities we have also a faster option for computing footpaths that uses Java.)")
print(
"Note: for large cities we have also a faster option for computing footpaths that uses Java.)"
)
dir_path = os.path.dirname(os.path.realpath(__file__))
java_path = os.path.join(dir_path, "../java_routing/")
print("Please see the contents of " + java_path + " for more details.")
Expand All @@ -35,7 +41,10 @@ def load_or_import_example_gtfs(verbose=False):

if verbose:
print("Location name:" + G.get_location_name()) # should print Kuopio
print("Time span of the data in unixtime: " + str(G.get_approximate_schedule_time_span_in_ut()))
print(
"Time span of the data in unixtime: "
+ str(G.get_approximate_schedule_time_span_in_ut())
)
# prints the time span in unix time
return G

Expand Down
2 changes: 1 addition & 1 deletion examples/example_map_visualization.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@

# ax_thumbnail.figure.savefig("test_thumbnail.jpg")

plt.show()
plt.show()
19 changes: 9 additions & 10 deletions examples/example_plot_trip_counts.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@
daily_trip_counts = G.get_trip_counts_per_day()
f, ax = plt.subplots()

datetimes = [date.to_pydatetime() for date in daily_trip_counts['date']]
trip_counts = daily_trip_counts['trip_counts']
datetimes = [date.to_pydatetime() for date in daily_trip_counts["date"]]
trip_counts = daily_trip_counts["trip_counts"]

ax.bar(datetimes, trip_counts)
ax.axvline(G.meta['download_date'], color="red")
ax.axvline(G.meta["download_date"], color="red")
threshold = 0.96
ax.axhline(trip_counts.max() * threshold, color="red")
ax.axvline(G.get_weekly_extract_start_date(weekdays_at_least_of_max=threshold), color="yellow")
Expand All @@ -24,18 +24,17 @@
G = GTFS(weekly_db_path)
f, ax = plt.subplots()
daily_trip_counts = G.get_trip_counts_per_day()
datetimes = [date.to_pydatetime() for date in daily_trip_counts['date']]
trip_counts = daily_trip_counts['trip_counts']
datetimes = [date.to_pydatetime() for date in daily_trip_counts["date"]]
trip_counts = daily_trip_counts["trip_counts"]
ax.bar(datetimes, trip_counts)

events = list(G.generate_routable_transit_events(0, G.get_approximate_schedule_time_span_in_ut()[0]))
min_ut = float('inf')
events = list(
G.generate_routable_transit_events(0, G.get_approximate_schedule_time_span_in_ut()[0])
)
min_ut = float("inf")
for e in events:
min_ut = min(e.dep_time_ut, min_ut)

print(G.get_approximate_schedule_time_span_in_ut())

plt.show()



58 changes: 32 additions & 26 deletions examples/example_temporal_distance_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@

import example_import
from gtfspy.routing.helpers import get_transit_connections, get_walk_network
from gtfspy.routing.multi_objective_pseudo_connection_scan_profiler import MultiObjectivePseudoCSAProfiler
from gtfspy.routing.multi_objective_pseudo_connection_scan_profiler import (
MultiObjectivePseudoCSAProfiler,
)
from gtfspy.routing.node_profile_analyzer_time_and_veh_legs import NodeProfileAnalyzerTimeAndVehLegs

G = example_import.load_or_import_example_gtfs()
Expand All @@ -14,12 +16,12 @@
to_stop_I = None
stop_dict = G.stops().to_dict("index")
for stop_I, data in stop_dict.items():
if data['name'] == from_stop_name:
if data["name"] == from_stop_name:
from_stop_I = stop_I
if data['name'] == to_stop_name:
if data["name"] == to_stop_name:
to_stop_I = stop_I
assert (from_stop_I is not None)
assert (to_stop_I is not None)
assert from_stop_I is not None
assert to_stop_I is not None

# The start and end times between which PT operations (and footpaths) are scanned:
ANALYSIS_START_TIME_UT = G.get_suitable_date_for_daily_extract(ut=True) + 10 * 3600
Expand All @@ -40,17 +42,18 @@
# gtfspy.osm_transfers.add_walk_distances_to_db_python(..., cutoff_distance_m=2000).



mpCSA = MultiObjectivePseudoCSAProfiler(connections,
targets=[to_stop_I],
start_time_ut=CONNECTION_SCAN_START_TIME_UT,
end_time_ut=CONNECTION_SCAN_END_TIME_UT,
transfer_margin=120, # seconds
walk_network=walk_network,
walk_speed=1.5, # meters per second
verbose=True,
track_vehicle_legs=True,
track_time=True)
mpCSA = MultiObjectivePseudoCSAProfiler(
connections,
targets=[to_stop_I],
start_time_ut=CONNECTION_SCAN_START_TIME_UT,
end_time_ut=CONNECTION_SCAN_END_TIME_UT,
transfer_margin=120, # seconds
walk_network=walk_network,
walk_speed=1.5, # meters per second
verbose=True,
track_vehicle_legs=True,
track_time=True,
)

mpCSA.run()
profiles = mpCSA.stop_profiles
Expand All @@ -60,19 +63,21 @@
direct_walk_duration = departure_stop_profile.get_walk_to_target_duration()
# This equals inf, if walking distance between the departure_stop (from_stop_I) and target_stop (to_stop_I)
# is longer than MAX_WALK_LENGTH
analyzer = NodeProfileAnalyzerTimeAndVehLegs(departure_stop_profile.get_final_optimal_labels(),
direct_walk_duration,
ANALYSIS_START_TIME_UT,
ANALYSIS_END_TIME_UT)
analyzer = NodeProfileAnalyzerTimeAndVehLegs(
departure_stop_profile.get_final_optimal_labels(),
direct_walk_duration,
ANALYSIS_START_TIME_UT,
ANALYSIS_END_TIME_UT,
)

# Print out results:
stop_dict = G.stops().to_dict("index")
print("Origin: ", stop_dict[from_stop_I])
print("Destination: ", stop_dict[to_stop_I])
print("Minimum temporal distance: ", analyzer.min_temporal_distance() / 60., " minutes")
print("Mean temporal distance: ", analyzer.mean_temporal_distance() / 60., " minutes")
print("Median temporal distance: ", analyzer.median_temporal_distance() / 60., " minutes")
print("Maximum temporal distance: ", analyzer.max_temporal_distance() / 60., " minutes")
print("Minimum temporal distance: ", analyzer.min_temporal_distance() / 60.0, " minutes")
print("Mean temporal distance: ", analyzer.mean_temporal_distance() / 60.0, " minutes")
print("Median temporal distance: ", analyzer.median_temporal_distance() / 60.0, " minutes")
print("Maximum temporal distance: ", analyzer.max_temporal_distance() / 60.0, " minutes")
# Note that the mean and max temporal distances have the value of `direct_walk_duration`,
# if there are no journey alternatives departing after (or at the same time as) `ANALYSIS_END_TIME_UT`.
# Thus, if you obtain a float('inf') value for some of the temporal distance measures, it could probably be
Expand All @@ -85,8 +90,9 @@

# use tex in plotting
rc("text", usetex=True)
fig1 = analyzer.plot_new_transfer_temporal_distance_profile(timezone=timezone_pytz,
format_string="%H:%M")
fig1 = analyzer.plot_new_transfer_temporal_distance_profile(
timezone=timezone_pytz, format_string="%H:%M"
)
fig2 = analyzer.plot_temporal_distance_pdf_horizontal(use_minutes=True)
print("Showing...")
plt.show()
68 changes: 43 additions & 25 deletions gtfspy/calc_transfers.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,16 +9,17 @@
from gtfspy.gtfs import GTFS
from gtfspy.util import wgs84_distance, wgs84_height, wgs84_width

create_stmt = ('CREATE TABLE IF NOT EXISTS main.stop_distances '
'(from_stop_I INT, '
' to_stop_I INT, '
' d INT, '
' d_walk INT, '
' min_transfer_time INT, '
' timed_transfer INT, '
'UNIQUE (from_stop_I, to_stop_I)'
')'
)
create_stmt = (
"CREATE TABLE IF NOT EXISTS main.stop_distances "
"(from_stop_I INT, "
" to_stop_I INT, "
" d INT, "
" d_walk INT, "
" min_transfer_time INT, "
" timed_transfer INT, "
"UNIQUE (from_stop_I, to_stop_I)"
")"
)


def bind_functions(conn):
Expand All @@ -36,11 +37,14 @@ def _get_geo_hash_precision(search_radius_in_km):
suggested_precision = precision
break
if suggested_precision is None:
raise RuntimeError("GeoHash cannot work with this large search radius (km): " + search_radius_in_km)
raise RuntimeError(
"GeoHash cannot work with this large search radius (km): " + search_radius_in_km
)
return suggested_precision


def calc_transfers(conn, threshold_meters=1000):
geohash_precision = _get_geo_hash_precision(threshold_meters / 1000.)
geohash_precision = _get_geo_hash_precision(threshold_meters / 1000.0)
geo_index = GeoGridIndex(precision=geohash_precision)
g = GTFS(conn)
stops = g.get_table("stops")
Expand All @@ -52,7 +56,9 @@ def calc_transfers(conn, threshold_meters=1000):
geo_index.add_point(stop_geopoint)
stop_geopoints.append(stop_geopoint)
for stop_geopoint in stop_geopoints:
nearby_stop_geopoints = geo_index.get_nearest_points_dirty(stop_geopoint, threshold_meters / 1000.0, "km")
nearby_stop_geopoints = geo_index.get_nearest_points_dirty(
stop_geopoint, threshold_meters / 1000.0, "km"
)
from_stop_I = int(stop_geopoint.ref)
from_lat = stop_geopoint.latitude
from_lon = stop_geopoint.longitude
Expand All @@ -71,32 +77,44 @@ def calc_transfers(conn, threshold_meters=1000):
distances.append(distance)

n_pairs = len(to_stop_Is)
from_stop_Is = [from_stop_I]*n_pairs
cursor.executemany('INSERT OR REPLACE INTO stop_distances VALUES (?, ?, ?, ?, ?, ?);',
zip(from_stop_Is, to_stop_Is, distances, [None]*n_pairs, [None]*n_pairs, [None]*n_pairs))
cursor.execute('CREATE INDEX IF NOT EXISTS idx_sd_fsid ON stop_distances (from_stop_I);')
from_stop_Is = [from_stop_I] * n_pairs
cursor.executemany(
"INSERT OR REPLACE INTO stop_distances VALUES (?, ?, ?, ?, ?, ?);",
zip(
from_stop_Is,
to_stop_Is,
distances,
[None] * n_pairs,
[None] * n_pairs,
[None] * n_pairs,
),
)
cursor.execute("CREATE INDEX IF NOT EXISTS idx_sd_fsid ON stop_distances (from_stop_I);")


def _export_transfers(conn, fname):
conn = GTFS(conn).conn
cur = conn.cursor()
cur.execute('SELECT S1.lat, S1.lon, S2.lat, S2.lon, SD.d '
'FROM stop_distances SD '
' LEFT JOIN stops S1 ON (SD.from_stop_I=S1.stop_I) '
' LEFT JOIN stops S2 ON (SD.to_stop_I =S2.stop_I)')
f = open(fname, 'w')
cur.execute(
"SELECT S1.lat, S1.lon, S2.lat, S2.lon, SD.d "
"FROM stop_distances SD "
" LEFT JOIN stops S1 ON (SD.from_stop_I=S1.stop_I) "
" LEFT JOIN stops S2 ON (SD.to_stop_I =S2.stop_I)"
)
f = open(fname, "w")
for row in cur:
print(' '.join(str(x) for x in row), file=f)
print(" ".join(str(x) for x in row), file=f)


def main():
import sys

cmd = sys.argv[1]
if cmd == 'calc':
if cmd == "calc":
dbname = sys.argv[2]
conn = GTFS(dbname).conn
calc_transfers(conn)
elif cmd == 'export':
elif cmd == "export":
_export_transfers(sys.argv[2], sys.argv[3])


Expand Down
4 changes: 1 addition & 3 deletions gtfspy/colormaps.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,11 +68,9 @@ def get_list_of_colors(values, observable_name=None):
colorvalues.append(colorvalue)
return colorvalues, norm, cmap


def createcolorbar(cmap, norm):
    """Carve space from the current axes and draw a colourbar for *cmap* scaled by *norm*."""
    # make_axes returns (axes, kwargs); only the axes object is needed here.
    bar_axes, _unused_kwargs = matplotlib.colorbar.make_axes(matplotlib.pyplot.gca())
    return matplotlib.colorbar.ColorbarBase(bar_axes, cmap=cmap, norm=norm)



Loading

0 comments on commit ca485c7

Please sign in to comment.