Commit

Merge pull request #322 from OpenTrafficCam/user-story/2774-add-cli-option-to-export-counts

user-story/2774-add-cli-option-to-export-counts
martinbaerwolff authored Sep 6, 2023
2 parents 5a4a14e + 4a2a2dc commit 3ddf9ba
Showing 13 changed files with 233 additions and 60 deletions.
4 changes: 2 additions & 2 deletions OTAnalytics/application/analysis/traffic_counting.py
@@ -169,8 +169,8 @@ def create_mode_tag(tag: str) -> Tag:

def create_timeslot_tag(start_of_time_slot: datetime, interval: timedelta) -> Tag:
end_of_time_slot = start_of_time_slot + interval
serialized_start = start_of_time_slot.strftime("%H:%M")
serialized_end = end_of_time_slot.strftime("%H:%M")
serialized_start = start_of_time_slot.strftime(r"%Y-%m-%d %H:%M:%S")
serialized_end = end_of_time_slot.strftime(r"%Y-%m-%d %H:%M:%S")
return MultiTag(
frozenset(
[
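
The change above widens the serialized time-slot boundaries from time-of-day only to full timestamps, so slots from different recording days stay distinct. A minimal sketch of the effect (the dates are illustrative, not taken from the commit):

    from datetime import datetime, timedelta

    start_of_time_slot = datetime(2023, 9, 6, 8, 0)
    interval = timedelta(minutes=15)
    end_of_time_slot = start_of_time_slot + interval

    # Old format: only the time of day survived serialization.
    print(start_of_time_slot.strftime("%H:%M"))               # 08:00
    # New format: date and time are both kept.
    print(start_of_time_slot.strftime(r"%Y-%m-%d %H:%M:%S"))  # 2023-09-06 08:00:00
    print(end_of_time_slot.strftime(r"%Y-%m-%d %H:%M:%S"))    # 2023-09-06 08:15:00
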
5 changes: 4 additions & 1 deletion OTAnalytics/application/config.py
@@ -7,10 +7,13 @@
"""The log save directory."""

GEOMETRY_CACHE_SIZE: int = 20000
DEFAULT_EVENTLIST_SAVE_NAME: str = "events"
DEFAULT_EVENTLIST_FILE_STEM: str = "events"
DEFAULT_EVENTLIST_FILE_TYPE: str = "otevents"
DEFAULT_COUNTS_FILE_STEM: str = "counts"
DEFAULT_COUNTS_FILE_TYPE: str = "csv"
DEFAULT_TRACK_FILE_TYPE: str = "ottrk"
DEFAULT_SECTIONS_FILE_TYPE: str = "otflow"
DEFAULT_COUNTING_INTERVAL_IN_MINUTES: int = 15
DEFAULT_TRACK_OFFSET: RelativeOffsetCoordinate = RelativeOffsetCoordinate(0.5, 0.5)

OS: str = platform.system()
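
For reference, a short sketch of how the new defaults combine into file names and the counting interval; this is purely illustrative, the actual composition happens in the CLI code further down:

    from OTAnalytics.application.config import (
        DEFAULT_COUNTING_INTERVAL_IN_MINUTES,
        DEFAULT_COUNTS_FILE_STEM,
        DEFAULT_COUNTS_FILE_TYPE,
        DEFAULT_EVENTLIST_FILE_STEM,
        DEFAULT_EVENTLIST_FILE_TYPE,
    )

    # Default artifact names derived from the constants added above.
    print(f"{DEFAULT_EVENTLIST_FILE_STEM}.{DEFAULT_EVENTLIST_FILE_TYPE}")  # events.otevents
    print(f"{DEFAULT_COUNTS_FILE_STEM}.{DEFAULT_COUNTS_FILE_TYPE}")        # counts.csv
    print(DEFAULT_COUNTING_INTERVAL_IN_MINUTES)                            # 15
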
26 changes: 25 additions & 1 deletion OTAnalytics/application/use_cases/track_repository.py
@@ -1,7 +1,12 @@
from pathlib import Path
from typing import Iterable

from OTAnalytics.domain.track import Track, TrackFileRepository, TrackRepository
from OTAnalytics.domain.track import (
Track,
TrackFileRepository,
TrackId,
TrackRepository,
)


class GetAllTracks:
@@ -18,6 +23,25 @@ def __call__(self) -> list[Track]:
return self._track_repository.get_all()


class GetAllTrackIds:
"""Get all track ids from the track repository.
Args:
track_repository (TrackRepository): the track repository to get the ids from.
"""

def __init__(self, track_repository: TrackRepository) -> None:
self._track_repository = track_repository

def __call__(self) -> Iterable[TrackId]:
"""Get all track ids from the track repository.
Returns:
Iterable[TrackId]: the track ids.
"""
return self._track_repository.get_all_ids()


class AddAllTracks:
"""Add tracks to the track repository.
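
A minimal usage sketch for the new GetAllTrackIds use case. It assumes a TrackRepository can be constructed without arguments and is populated elsewhere (neither is shown in this diff):

    from OTAnalytics.application.use_cases.track_repository import GetAllTrackIds
    from OTAnalytics.domain.track import TrackRepository

    track_repository = TrackRepository()  # assumed no-arg constructor; filled by a track parser elsewhere
    get_all_track_ids = GetAllTrackIds(track_repository)

    # The callable returns an Iterable[TrackId]; materialize it if a list is needed.
    track_ids = list(get_all_track_ids())
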
8 changes: 8 additions & 0 deletions OTAnalytics/domain/track.py
@@ -414,6 +414,14 @@ def get_all(self) -> list[Track]:
"""
return list(self._tracks.values())

def get_all_ids(self) -> Iterable[TrackId]:
"""Get all track ids in this repository.
Returns:
Iterable[TrackId]: the track ids.
"""
return self._tracks.keys()

def clear(self) -> None:
"""
Clear the repository and inform the observers about the empty repository.
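
One detail worth noting about the implementation above: dict.keys() returns a live view, so the returned iterable reflects later changes to the repository, and callers that need a snapshot should copy it. A plain-Python sketch of that behaviour:

    tracks = {"track-1": "first", "track-2": "second"}
    ids = tracks.keys()

    tracks["track-3"] = "third"
    print(list(ids))  # ['track-1', 'track-2', 'track-3'] (the view follows the dict)
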
19 changes: 19 additions & 0 deletions OTAnalytics/plugin_parser/export.py
@@ -5,6 +5,10 @@
from pandas import DataFrame

from OTAnalytics.application.analysis.traffic_counting import (
LEVEL_CLASSIFICATION,
LEVEL_END_TIME,
LEVEL_FLOW,
LEVEL_START_TIME,
Count,
Exporter,
ExporterFactory,
@@ -28,8 +32,23 @@ def __init__(self, output_file: str) -> None:
def export(self, counts: Count) -> None:
logger().info(f"Exporting counts {counts} to {self._output_file}")
dataframe = self.__create_data_frame(counts)
dataframe = self._set_column_order(dataframe)
dataframe.to_csv(self.__create_path(), index=False)

def _set_column_order(self, dataframe: DataFrame) -> DataFrame:
desired_columns_order = [
LEVEL_START_TIME,
LEVEL_END_TIME,
LEVEL_CLASSIFICATION,
LEVEL_FLOW,
]
dataframe = dataframe[
desired_columns_order
+ [col for col in dataframe.columns if col not in desired_columns_order]
]

return dataframe

def __create_data_frame(self, counts: Count) -> DataFrame:
transformed = counts.to_dict()
indexed: list[dict] = []
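
The reordering in _set_column_order puts the four level columns first and appends every remaining column in its original order. A toy illustration with made-up column names standing in for the LEVEL_* constants (their actual values are not shown in this diff):

    from pandas import DataFrame

    desired_columns_order = ["start time", "end time", "classification", "flow"]
    dataframe = DataFrame(columns=["count", "classification", "flow", "start time", "end time"])

    dataframe = dataframe[
        desired_columns_order
        + [col for col in dataframe.columns if col not in desired_columns_order]
    ]
    print(list(dataframe.columns))
    # ['start time', 'end time', 'classification', 'flow', 'count']
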
74 changes: 67 additions & 7 deletions OTAnalytics/plugin_ui/cli.py
@@ -3,19 +3,29 @@
from pathlib import Path
from typing import Iterable

from OTAnalytics.application.analysis.traffic_counting import ExportCounts
from OTAnalytics.application.analysis.traffic_counting_specification import (
CountingSpecificationDto,
)
from OTAnalytics.application.config import (
DEFAULT_COUNTING_INTERVAL_IN_MINUTES,
DEFAULT_COUNTS_FILE_STEM,
DEFAULT_COUNTS_FILE_TYPE,
DEFAULT_EVENTLIST_FILE_STEM,
DEFAULT_EVENTLIST_FILE_TYPE,
DEFAULT_EVENTLIST_SAVE_NAME,
DEFAULT_SECTIONS_FILE_TYPE,
DEFAULT_TRACK_FILE_TYPE,
)
from OTAnalytics.application.datastore import EventListParser, FlowParser, TrackParser
from OTAnalytics.application.logger import logger
from OTAnalytics.application.state import TracksMetadata
from OTAnalytics.application.use_cases.create_events import CreateEvents
from OTAnalytics.application.use_cases.flow_repository import AddFlow
from OTAnalytics.application.use_cases.section_repository import AddSection
from OTAnalytics.application.use_cases.track_repository import (
AddAllTracks,
ClearAllTracks,
GetAllTrackIds,
)
from OTAnalytics.domain.event import EventRepository
from OTAnalytics.domain.flow import Flow
@@ -123,8 +133,11 @@ def __init__(
event_list_parser: EventListParser,
event_repository: EventRepository,
add_section: AddSection,
add_flow: AddFlow,
create_events: CreateEvents,
export_counts: ExportCounts,
add_all_tracks: AddAllTracks,
get_all_track_ids: GetAllTrackIds,
clear_all_tracks: ClearAllTracks,
progressbar: ProgressbarBuilder,
) -> None:
@@ -136,8 +149,11 @@ def __init__(
self._event_list_parser = event_list_parser
self._event_repository = event_repository
self._add_section = add_section
self._add_flow = add_flow
self._create_events = create_events
self._export_counts = export_counts
self._add_all_tracks = add_all_tracks
self._get_all_track_ids = get_all_track_ids
self._clear_all_tracks = clear_all_tracks
self._progressbar = progressbar

@@ -149,7 +165,7 @@ def start(self) -> None:

sections, flows = self._parse_flows(sections_file)

self._run_analysis(ottrk_files, sections)
self._run_analysis(ottrk_files, sections, flows)

def _parse_flows(self, flow_file: Path) -> tuple[Iterable[Section], Iterable[Flow]]:
return self._flow_parser.parse(flow_file)
@@ -159,18 +175,24 @@ def _add_sections(self, sections: Iterable[Section]) -> None:
for section in sections:
self._add_section(section)

def _add_flows(self, flows: Iterable[Flow]) -> None:
"""Add flows to flow repository."""
for flow in flows:
self._add_flow(flow)

def _parse_tracks(self, track_files: list[Path]) -> None:
for track_file in self._progressbar(track_files, "Parsed track files", "files"):
tracks = self._track_parser.parse(track_file)
self._add_all_tracks(tracks)

def _run_analysis(
self, ottrk_files: set[Path], sections: Iterable[Section]
self, ottrk_files: set[Path], sections: Iterable[Section], flows: Iterable[Flow]
) -> None:
"""Run analysis."""
self._clear_all_tracks()
self._event_repository.clear()
self._add_sections(sections)
self._add_flows(flows)
ottrk_files_sorted: list[Path] = sorted(
ottrk_files, key=lambda file: str(file).lower()
)
@@ -180,11 +202,15 @@ def _run_analysis(
self._create_events()
logger().info("Event list created.")

save_path = self._determine_eventlist_save_path(ottrk_files_sorted[0])
event_list_output_file = self._determine_eventlist_save_path(
ottrk_files_sorted[0]
)
self._event_list_parser.serialize(
self._event_repository.get_all(), sections, save_path
self._event_repository.get_all(), sections, event_list_output_file
)
logger().info(f"Event list saved at '{save_path}'")
logger().info(f"Event list saved at '{event_list_output_file}'")

self._do_export_counts(event_list_output_file)

def _determine_eventlist_save_path(self, track_file: Path) -> Path:
"""Determine save path of eventlist.
@@ -202,7 +228,7 @@ def _determine_eventlist_save_path(self, track_file: Path) -> Path:
eventlist_file_name = self.cli_args.eventlist_filename
if eventlist_file_name == "":
return track_file.with_name(
f"{DEFAULT_EVENTLIST_SAVE_NAME}.{DEFAULT_EVENTLIST_FILE_TYPE}"
f"{DEFAULT_EVENTLIST_FILE_STEM}.{DEFAULT_EVENTLIST_FILE_TYPE}"
)

return track_file.with_name(
@@ -285,3 +311,37 @@ def _get_sections_file(file: str) -> Path:
)

return sections_file

def _do_export_counts(self, event_list_output_file: Path) -> None:
logger().info("Create counts ...")
tracks_metadata = TracksMetadata(self._add_all_tracks._track_repository)
tracks_metadata.notify_tracks(list(self._get_all_track_ids()))
start = tracks_metadata.first_detection_occurrence
end = tracks_metadata.last_detection_occurrence
modes = tracks_metadata.classifications
if start is None:
raise ValueError("start is None but has to be defined for exporting counts")
if end is None:
raise ValueError("end is None but has to be defined for exporting counts")
if modes is None:
raise ValueError("modes is None but has to be defined for exporting counts")
interval: int = DEFAULT_COUNTING_INTERVAL_IN_MINUTES
if event_list_output_file.stem == DEFAULT_EVENTLIST_FILE_STEM:
output_file_stem = DEFAULT_COUNTS_FILE_STEM
else:
output_file_stem = (
f"{event_list_output_file.stem}_{DEFAULT_COUNTS_FILE_STEM}"
)
output_file = event_list_output_file.with_stem(output_file_stem).with_suffix(
f".{DEFAULT_COUNTS_FILE_TYPE}"
)
counting_specification = CountingSpecificationDto(
start=start,
end=end,
modes=list(modes),
interval_in_minutes=interval,
output_file=str(output_file),
output_format="CSV",
)
self._export_counts.export(specification=counting_specification)
logger().info(f"Counts saved at {output_file}")
3 changes: 2 additions & 1 deletion OTAnalytics/plugin_ui/customtkinter_gui/dummy_viewmodel.py
@@ -37,6 +37,7 @@
MultipleSectionsSelected,
OTAnalyticsApplication,
)
from OTAnalytics.application.config import DEFAULT_COUNTING_INTERVAL_IN_MINUTES
from OTAnalytics.application.datastore import FlowParser, NoSectionsToSave
from OTAnalytics.application.logger import logger
from OTAnalytics.application.use_cases.config import MissingDate
@@ -1402,7 +1403,7 @@ def export_counts(self) -> None:
end = self._application._tracks_metadata.last_detection_occurrence
modes = list(self._application._tracks_metadata.classifications)
default_values: dict = {
INTERVAL: 15,
INTERVAL: DEFAULT_COUNTING_INTERVAL_IN_MINUTES,
START: start,
END: end,
EXPORT_FORMAT: default_format,
4 changes: 2 additions & 2 deletions OTAnalytics/plugin_ui/customtkinter_gui/frame_analysis.py
@@ -5,8 +5,8 @@

from OTAnalytics.adapter_ui.view_model import ViewModel
from OTAnalytics.application.config import (
DEFAULT_EVENTLIST_FILE_STEM,
DEFAULT_EVENTLIST_FILE_TYPE,
DEFAULT_EVENTLIST_SAVE_NAME,
)
from OTAnalytics.application.logger import logger
from OTAnalytics.plugin_ui.customtkinter_gui.constants import PADX, PADY, STICKY
@@ -92,7 +92,7 @@ def _save_eventlist(self) -> None:
title="Save event list file as",
filetypes=[("events file", "*.otevents")],
defaultextension=".otevents",
initialfile=f"{DEFAULT_EVENTLIST_SAVE_NAME}.{DEFAULT_EVENTLIST_FILE_TYPE}",
initialfile=f"{DEFAULT_EVENTLIST_FILE_STEM}.{DEFAULT_EVENTLIST_FILE_TYPE}",
)
if not file:
return
10 changes: 10 additions & 0 deletions OTAnalytics/plugin_ui/main_application.py
@@ -77,6 +77,7 @@
AddAllTracks,
ClearAllTracks,
GetAllTrackFiles,
GetAllTrackIds,
GetAllTracks,
)
from OTAnalytics.application.use_cases.track_to_video_repository import (
@@ -337,21 +338,27 @@ def start_cli(self, cli_args: CliArguments) -> None:
track_repository = self._create_track_repository()
track_file_repository = self._create_track_file_repository()
section_repository = self._create_section_repository()
flow_repository = self._create_flow_repository()
track_parser = self._create_track_parser(
track_repository, track_file_repository
)
flow_parser = self._create_flow_parser()
event_list_parser = self._create_event_list_parser()
event_repository = self._create_event_repository()
add_section = AddSection(section_repository)
add_flow = AddFlow(flow_repository)
add_events = AddEvents(event_repository)
get_all_tracks = GetAllTracks(track_repository)
get_all_track_ids = GetAllTrackIds(track_repository)
clear_all_events = ClearAllEvents(event_repository)
create_events = self._create_use_case_create_events(
section_repository, clear_all_events, get_all_tracks, add_events
)
add_all_tracks = AddAllTracks(track_repository)
clear_all_tracks = ClearAllTracks(track_repository)
export_counts = self._create_export_counts(
event_repository, flow_repository, track_repository
)
OTAnalyticsCli(
cli_args,
track_parser=track_parser,
@@ -360,7 +367,10 @@
event_repository=event_repository,
add_section=add_section,
create_events=create_events,
export_counts=export_counts,
add_all_tracks=add_all_tracks,
get_all_track_ids=get_all_track_ids,
add_flow=add_flow,
clear_all_tracks=clear_all_tracks,
progressbar=TqdmBuilder(),
).start()