diff --git a/conflowgen/__init__.py b/conflowgen/__init__.py index 9c43e1e6..2c204791 100644 --- a/conflowgen/__init__.py +++ b/conflowgen/__init__.py @@ -63,6 +63,9 @@ from conflowgen.analyses.container_flow_vehicle_type_adjustment_per_vehicle_analysis_report import \ ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysisReport +# Cache for analyses and previews +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache + # Specific classes for reports from conflowgen.reporting.output_style import DisplayAsMarkupLanguage, DisplayAsPlainText, DisplayAsMarkdown diff --git a/conflowgen/analyses/container_dwell_time_analysis.py b/conflowgen/analyses/container_dwell_time_analysis.py index 2f4a1999..822e42b7 100644 --- a/conflowgen/analyses/container_dwell_time_analysis.py +++ b/conflowgen/analyses/container_dwell_time_analysis.py @@ -7,6 +7,7 @@ from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.container import Container from conflowgen.analyses.abstract_analysis import AbstractAnalysis +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache class ContainerDwellTimeAnalysis(AbstractAnalysis): @@ -15,7 +16,7 @@ class ContainerDwellTimeAnalysis(AbstractAnalysis): The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) as it is the case with :class:`.ContainerDwellTimeAnalysisReport`. 
""" - + @DataSummariesCache.cache_result def get_container_dwell_times( self, container_delivered_by_vehicle_type: typing.Union[ @@ -25,8 +26,7 @@ def get_container_dwell_times( storage_requirement: typing.Union[ str, typing.Collection[StorageRequirement], StorageRequirement] = "all", start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> set[datetime.timedelta]: """ The containers are filtered according to the provided criteria. @@ -50,8 +50,6 @@ def get_container_dwell_times( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: A set of container dwell times. @@ -77,8 +75,8 @@ def get_container_dwell_times( container: Container for container in selected_containers: - container_enters_yard = container.get_arrival_time(use_cache=use_cache) - container_leaves_yard = container.get_departure_time(use_cache=use_cache) + container_enters_yard = container.get_arrival_time() + container_leaves_yard = container.get_departure_time() assert container_enters_yard < container_leaves_yard, "A container should enter the yard before leaving it" if start_date and container_enters_yard < start_date: continue diff --git a/conflowgen/analyses/container_dwell_time_analysis_report.py b/conflowgen/analyses/container_dwell_time_analysis_report.py index f281be88..ce494cd8 100644 --- a/conflowgen/analyses/container_dwell_time_analysis_report.py +++ b/conflowgen/analyses/container_dwell_time_analysis_report.py @@ -55,9 +55,6 @@ def get_report_as_text(self, **kwargs) -> str: Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. 
Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The report in text format (possibly spanning over several lines). @@ -124,9 +121,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The matplotlib axis of the histogram """ @@ -163,7 +157,6 @@ def _get_container_dwell_times(self, kwargs): storage_requirement = kwargs.pop("storage_requirement", "all") start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", True) assert len(kwargs) == 0, f"Keyword(s) {list(kwargs.keys())} have not been processed" container_dwell_times: set[datetime.timedelta] = self.analysis.get_container_dwell_times( @@ -171,8 +164,7 @@ def _get_container_dwell_times(self, kwargs): container_picked_up_by_vehicle_type=container_picked_up_by_vehicle_type, storage_requirement=storage_requirement, start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) return ( container_delivered_by_vehicle_type, container_dwell_times, container_picked_up_by_vehicle_type, diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py index 4346db82..01d81544 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import 
DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis @@ -17,10 +18,10 @@ class ContainerFlowAdjustmentByVehicleTypeAnalysis(AbstractAnalysis): """ @staticmethod + @DataSummariesCache.cache_result def get_initial_to_adjusted_outbound_flow( start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerVolumeFromOriginToDestination: """ When containers are generated, in order to obey the maximum dwell time, the vehicle type that is used for @@ -33,9 +34,6 @@ def get_initial_to_adjusted_outbound_flow( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. 
Returns: The data structure describes how often an initial outbound vehicle type had to be adjusted with which other @@ -64,9 +62,9 @@ def get_initial_to_adjusted_outbound_flow( # Iterate over all containers and count number of containers / used teu capacity container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue vehicle_type_initial = container.picked_up_by_initial vehicle_type_adjusted = container.picked_up_by diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py index 26e468e0..c14ed88d 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis import \ ContainerFlowAdjustmentByVehicleTypeAnalysis @@ -42,12 +43,11 @@ class ContainerFlowAdjustmentByVehicleTypeAnalysisSummary(ContainerFlowAdjustmen The analysis summary returns a data structure that can be used for generating reports (e.g., in text or as a figure) as it is the case with :class:`.ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport`. 
""" - + @DataSummariesCache.cache_result def get_summary( self, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerFlowAdjustedToVehicleType: """ Under certain circumstances (as explained in @@ -62,14 +62,10 @@ def get_summary( The earliest arriving container that is included. Consider all containers if :obj:`None`. end_date: The latest departing container that is included. Consider all containers if :obj:`None`. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. """ initial_to_adjusted_outbound_flow = self.get_initial_to_adjusted_outbound_flow( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) initial_to_adjusted_outbound_flow_in_teu = initial_to_adjusted_outbound_flow.teu diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py index 9549fc3b..c1fb92c1 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py @@ -43,9 +43,6 @@ def get_report_as_text( Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The report in text format (possibly spanning over several lines). @@ -82,9 +79,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. Defaults to ``None``. 
end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The matplotlib axis of the pie chart. @@ -117,11 +111,9 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: def _get_analysis(self, kwargs: dict) -> ContainerFlowAdjustedToVehicleType: start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", True) assert len(kwargs) == 0, f"Keyword(s) {kwargs.keys()} have not been processed" adjusted_to = self.analysis_summary.get_summary( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) return adjusted_to diff --git a/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py b/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py index 3367b870..20734fa7 100644 --- a/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py +++ b/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py @@ -4,6 +4,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis @@ -18,10 +19,10 @@ class ContainerFlowByVehicleTypeAnalysis(AbstractAnalysis): """ @staticmethod + @DataSummariesCache.cache_result def get_inbound_to_outbound_flow( start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerVolumeFromOriginToDestination: """ This is the overview of the generated inbound to outbound container flow by vehicle type. 
@@ -31,8 +32,6 @@ def get_inbound_to_outbound_flow( The earliest arriving container that is included. Consider all containers if :obj:`None`. end_date: The latest departing container that is included. Consider all containers if :obj:`None`. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. """ inbound_to_outbound_flow_in_containers: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]] = { vehicle_type_inbound: @@ -46,9 +45,9 @@ def get_inbound_to_outbound_flow( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue inbound_vehicle_type = container.delivered_by outbound_vehicle_type = container.picked_up_by diff --git a/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py b/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py index a93d8f35..0d5a0589 100644 --- a/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py +++ b/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py @@ -4,7 +4,7 @@ import datetime import typing - +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis @@ -17,14 +17,13 @@ class ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysis(AbstractAnalysis): The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) as it is the case with 
:class:`.ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysisReport`. """ - + @DataSummariesCache.cache_result def get_vehicle_type_adjustments_per_vehicle( self, initial_vehicle_type: ModeOfTransport | str | typing.Collection = "scheduled vehicles", adjusted_vehicle_type: ModeOfTransport | str | typing.Collection = "scheduled vehicles", start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> typing.Dict[VehicleIdentifier, int]: """ When containers are generated, in order to obey the maximum dwell time, the vehicle type that is used for @@ -45,9 +44,6 @@ def get_vehicle_type_adjustments_per_vehicle( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. Returns: The data structure describes how often an initial outbound vehicle type had to be adjusted over time in relation to the total container flows. 
@@ -73,9 +69,9 @@ def get_vehicle_type_adjustments_per_vehicle( container: Container for container in selected_containers: - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue vehicle_identifier = self._get_vehicle_identifier_for_vehicle_picking_up_the_container(container) @@ -104,14 +100,14 @@ def _get_vehicle_identifier_for_vehicle_picking_up_the_container(container: Cont if container.picked_up_by == ModeOfTransport.truck: vehicle_identifier = VehicleIdentifier( mode_of_transport=ModeOfTransport.truck, - vehicle_arrival_time=container.get_departure_time(use_cache=True), + vehicle_arrival_time=container.get_departure_time(), service_name=None, vehicle_name=None ) else: vehicle_identifier = VehicleIdentifier( mode_of_transport=container.picked_up_by, - vehicle_arrival_time=container.get_departure_time(use_cache=True), + vehicle_arrival_time=container.get_departure_time(), service_name=container.picked_up_by_large_scheduled_vehicle.schedule.service_name, vehicle_name=container.picked_up_by_large_scheduled_vehicle.vehicle_name ) diff --git a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py index d024ddd5..638eab61 100644 --- a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py +++ b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py @@ -6,6 +6,7 @@ import numpy as np +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity, ContainerVolumeByVehicleType from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ 
-26,10 +27,10 @@ def __init__(self, transportation_buffer: float): ) @staticmethod + @DataSummariesCache.cache_result def get_inbound_container_volumes_by_vehicle_type( start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerVolumeByVehicleType: """ This is the used capacity of all vehicles separated by vehicle type on their inbound journey in TEU. @@ -39,8 +40,6 @@ def get_inbound_container_volumes_by_vehicle_type( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. """ inbound_container_volume_in_teu: typing.Dict[ModeOfTransport, float] = { vehicle_type: 0 @@ -50,9 +49,9 @@ def get_inbound_container_volumes_by_vehicle_type( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue inbound_vehicle_type = container.delivered_by inbound_container_volume_in_teu[inbound_vehicle_type] += container.occupied_teu @@ -63,11 +62,11 @@ def get_inbound_container_volumes_by_vehicle_type( teu=inbound_container_volume_in_teu ) + @DataSummariesCache.cache_result def get_outbound_container_volume_by_vehicle_type( self, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> OutboundUsedAndMaximumCapacity: """ This is the used and the maximum capacity of all vehicles separated by vehicle type on their outbound 
journey @@ -81,8 +80,6 @@ def get_outbound_container_volume_by_vehicle_type( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: Both the used and maximum outbound capacities grouped by vehicle type. """ @@ -101,9 +98,9 @@ def get_outbound_container_volume_by_vehicle_type( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue outbound_vehicle_type: ModeOfTransport = container.picked_up_by outbound_actually_moved_container_volume_in_teu[outbound_vehicle_type] += container.occupied_teu diff --git a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py index 4c2d84e1..74f17e7c 100644 --- a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py +++ b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py @@ -43,8 +43,6 @@ def get_report_as_text(self, **kwargs) -> str: Only include containers that arrive after the given start time. end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: The report in text format spanning over several lines. @@ -82,8 +80,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. 
end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: The matplotlib axis of the bar chart. @@ -116,19 +112,16 @@ def _get_container_volumes_in_teu( ) start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", True) # gather data inbound_container_volume = self.analysis.get_inbound_container_volumes_by_vehicle_type( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) outbound_container_volume, outbound_maximum_container_volume = \ self.analysis.get_outbound_container_volume_by_vehicle_type( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) return inbound_container_volume.teu, outbound_container_volume.teu, outbound_maximum_container_volume.teu diff --git a/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py index 29669191..33626555 100644 --- a/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py +++ b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.descriptive_datatypes import VehicleIdentifier from conflowgen.domain_models.container import Container from conflowgen.domain_models.large_vehicle_schedule import Schedule @@ -34,6 +35,7 @@ def __init__(self, transportation_buffer: float): transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def get_inbound_and_outbound_capacity_of_each_vehicle( self, vehicle_type: typing.Any = "scheduled vehicles", diff --git a/conflowgen/analyses/modal_split_analysis.py 
b/conflowgen/analyses/modal_split_analysis.py index 0b5c5824..a933e9f2 100644 --- a/conflowgen/analyses/modal_split_analysis.py +++ b/conflowgen/analyses/modal_split_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis from conflowgen.analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis @@ -33,11 +34,11 @@ def __init__(self): super().__init__() self.container_flow_by_vehicle_type_analysis = ContainerFlowByVehicleTypeAnalysis() + @DataSummariesCache.cache_result def get_transshipment_and_hinterland_split( self, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> TransshipmentAndHinterlandSplit: """ Args: @@ -45,8 +46,6 @@ def get_transshipment_and_hinterland_split( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. 
Returns: The amount of containers in TEU dedicated for or coming from the hinterland versus the amount of containers @@ -54,8 +53,7 @@ def get_transshipment_and_hinterland_split( """ inbound_to_outbound_flows = self.container_flow_by_vehicle_type_analysis.get_inbound_to_outbound_flow( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) inbound_to_outbound_flow = inbound_to_outbound_flows.teu @@ -75,13 +73,13 @@ def get_transshipment_and_hinterland_split( hinterland_capacity=hinterland_capacity ) + @DataSummariesCache.cache_result def get_modal_split_for_hinterland_traffic( self, inbound: bool, outbound: bool, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> HinterlandModalSplit: """ Args: @@ -91,16 +89,13 @@ def get_modal_split_for_hinterland_traffic( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The modal split for the hinterland in TEU. """ inbound_to_outbound_flows = self.container_flow_by_vehicle_type_analysis.get_inbound_to_outbound_flow( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) inbound_to_outbound_flow_in_teu = inbound_to_outbound_flows.teu diff --git a/conflowgen/analyses/modal_split_analysis_report.py b/conflowgen/analyses/modal_split_analysis_report.py index 9971c3ae..48cb36af 100644 --- a/conflowgen/analyses/modal_split_analysis_report.py +++ b/conflowgen/analyses/modal_split_analysis_report.py @@ -37,9 +37,6 @@ def get_report_as_text( Only include containers that arrive after the given start time. Defaults to ``None```. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. 
- use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. """ ( modal_split_in_hinterland_traffic_both_directions, modal_split_in_hinterland_inbound_traffic, @@ -64,8 +61,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axes.Axes: Only include containers that arrive after the given start time. end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The matplotlib axes with all pie charts. @@ -87,20 +82,19 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axes.Axes: def _get_analysis_output(self, kwargs): start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", False) assert len(kwargs) == 0, f"Keyword(s) {kwargs.keys()} have not been processed" transshipment_and_hinterland_split = self.analysis.get_transshipment_and_hinterland_split( - start_date=start_date, end_date=end_date, use_cache=use_cache + start_date=start_date, end_date=end_date ) modal_split_in_hinterland_inbound_traffic = self.analysis.get_modal_split_for_hinterland_traffic( - inbound=True, outbound=False, start_date=start_date, end_date=end_date, use_cache=use_cache + inbound=True, outbound=False, start_date=start_date, end_date=end_date ) modal_split_in_hinterland_outbound_traffic = self.analysis.get_modal_split_for_hinterland_traffic( - inbound=False, outbound=True, start_date=start_date, end_date=end_date, use_cache=use_cache + inbound=False, outbound=True, start_date=start_date, end_date=end_date ) modal_split_in_hinterland_traffic_both_directions = self.analysis.get_modal_split_for_hinterland_traffic( - inbound=True, outbound=True, start_date=start_date, end_date=end_date, use_cache=use_cache + inbound=True, outbound=True, start_date=start_date, end_date=end_date ) return ( 
modal_split_in_hinterland_traffic_both_directions, modal_split_in_hinterland_inbound_traffic, diff --git a/conflowgen/analyses/quay_side_throughput_analysis.py b/conflowgen/analyses/quay_side_throughput_analysis.py index 997431e6..9c809776 100644 --- a/conflowgen/analyses/quay_side_throughput_analysis.py +++ b/conflowgen/analyses/quay_side_throughput_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.vehicle import LargeScheduledVehicle from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_week_based_time_window, \ @@ -24,13 +25,13 @@ class QuaySideThroughputAnalysis(AbstractAnalysis): } @classmethod + @DataSummariesCache.cache_result def get_throughput_over_time( cls, inbound: bool = True, outbound: bool = True, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> typing.Dict[datetime.date, float]: """ For each week, the containers crossing the quay are checked. Based on this, the required quay capacity in boxes @@ -46,9 +47,6 @@ def get_throughput_over_time( outbound: Whether to check for vessels which pick up a container on their outbound journey start_date: The earliest arriving container that is included. Consider all containers if :obj:`None`. end_date: The latest departing container that is included. Consider all containers if :obj:`None`. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. 
""" @@ -58,9 +56,9 @@ def get_throughput_over_time( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue if inbound: diff --git a/conflowgen/analyses/quay_side_throughput_analysis_report.py b/conflowgen/analyses/quay_side_throughput_analysis_report.py index 07a5ed67..38d36eb2 100644 --- a/conflowgen/analyses/quay_side_throughput_analysis_report.py +++ b/conflowgen/analyses/quay_side_throughput_analysis_report.py @@ -70,8 +70,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache (bool): - Returns: The matplotlib axis of the line chart over time. 
diff --git a/conflowgen/analyses/truck_gate_throughput_analysis.py b/conflowgen/analyses/truck_gate_throughput_analysis.py index e2d1d541..b60b6b78 100644 --- a/conflowgen/analyses/truck_gate_throughput_analysis.py +++ b/conflowgen/analyses/truck_gate_throughput_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_hour_based_time_window, get_hour_based_range from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -16,13 +17,13 @@ class TruckGateThroughputAnalysis(AbstractAnalysis): """ @classmethod + @DataSummariesCache.cache_result def get_throughput_over_time( cls, inbound: bool = True, outbound: bool = True, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> typing.Dict[datetime.datetime, float]: """ For each hour, the trucks entering through the truck gate are checked. Based on this, the required truck gate @@ -33,9 +34,6 @@ def get_throughput_over_time( outbound: Whether to check for trucks which pick up a container on their outbound journey start_date: When to start recording. Start with the earliest container if no date is provided. end_date: When to end recording. Stop with the latest container if no date is provided. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. 
""" assert (inbound or outbound), "At least one of the two must be checked for" @@ -52,7 +50,7 @@ def get_throughput_over_time( if inbound: mode_of_transport_at_container_arrival: ModeOfTransport = container.delivered_by if mode_of_transport_at_container_arrival == ModeOfTransport.truck: - time_of_entering = container.get_arrival_time(use_cache=use_cache) + time_of_entering = container.get_arrival_time() if ( (start_date is None or time_of_entering >= start_date) and (end_date is None or time_of_entering <= end_date) @@ -62,7 +60,7 @@ def get_throughput_over_time( if outbound: mode_of_transport_at_container_departure: ModeOfTransport = container.picked_up_by if mode_of_transport_at_container_departure == ModeOfTransport.truck: - time_of_leaving = container.get_departure_time(use_cache=use_cache) + time_of_leaving = container.get_departure_time() if ( (start_date is None or time_of_leaving >= start_date) and (end_date is None or time_of_leaving <= end_date) diff --git a/conflowgen/analyses/truck_gate_throughput_analysis_report.py b/conflowgen/analyses/truck_gate_throughput_analysis_report.py index d6cf88a1..6805370c 100644 --- a/conflowgen/analyses/truck_gate_throughput_analysis_report.py +++ b/conflowgen/analyses/truck_gate_throughput_analysis_report.py @@ -45,8 +45,6 @@ def get_report_as_text(self, **kwargs) -> str: Whether to check for trucks which deliver a container on their inbound journey outbound (bool): Whether to check for trucks which pick up a container on their outbound journey - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The report in text format. 
@@ -96,8 +94,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Whether to check for trucks which deliver a container on their inbound journey outbound (bool): Whether to check for trucks which pick up a container on their outbound journey - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. ax (matplotlib.axis.Axis): Which matplotlib axis to plot on. diff --git a/conflowgen/analyses/yard_capacity_analysis.py b/conflowgen/analyses/yard_capacity_analysis.py index fdb767ca..3c29e822 100644 --- a/conflowgen/analyses/yard_capacity_analysis.py +++ b/conflowgen/analyses/yard_capacity_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.container import Container from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_hour_based_time_window, get_hour_based_range @@ -15,11 +16,11 @@ class YardCapacityAnalysis(AbstractAnalysis): as it is the case with :class:`.YardCapacityAnalysisReport`. """ + @DataSummariesCache.cache_result def get_used_yard_capacity_over_time( self, storage_requirement: typing.Union[str, typing.Collection, StorageRequirement] = "all", - smoothen_peaks: bool = True, - use_cache: bool = True + smoothen_peaks: bool = True ) -> typing.Dict[datetime.datetime, float]: """ For each hour, the containers entering and leaving the yard are checked. Based on this, the required yard @@ -46,8 +47,6 @@ def get_used_yard_capacity_over_time( a collection of :class:`StorageRequirement` enum values (as a list, set, or similar), or a single :class:`StorageRequirement` enum value. smoothen_peaks: Whether to smoothen the peaks. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: A series of the used yard capacity in TEU over the time. 
""" @@ -62,8 +61,8 @@ def get_used_yard_capacity_over_time( for container in selected_containers: container_stays.append( ( - container.get_arrival_time(use_cache=use_cache), - container.get_departure_time(use_cache=use_cache), + container.get_arrival_time(), + container.get_departure_time(), container.occupied_teu ) ) diff --git a/conflowgen/api/container_dwell_time_distribution_manager.py b/conflowgen/api/container_dwell_time_distribution_manager.py index 2eceb30a..b3e82579 100644 --- a/conflowgen/api/container_dwell_time_distribution_manager.py +++ b/conflowgen/api/container_dwell_time_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ @@ -56,3 +57,4 @@ def set_container_dwell_time_distribution( self.container_dwell_time_distribution_repository.set_distributions( sanitized_distribution ) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/container_flow_generation_manager.py b/conflowgen/api/container_flow_generation_manager.py index 8783e0c4..ff4e3fab 100644 --- a/conflowgen/api/container_flow_generation_manager.py +++ b/conflowgen/api/container_flow_generation_manager.py @@ -2,6 +2,7 @@ import logging import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.application.repositories.container_flow_generation_properties_repository import \ ContainerFlowGenerationPropertiesRepository from conflowgen.flow_generator.container_flow_generation_service import \ @@ -51,6 +52,7 @@ def set_properties( self.container_flow_generation_properties_repository.set_container_flow_generation_properties( properties ) + DataSummariesCache.reset_cache() def get_properties(self) -> 
typing.Dict[str, typing.Union[str, datetime.date, float, int]]: """ diff --git a/conflowgen/api/container_length_distribution_manager.py b/conflowgen/api/container_length_distribution_manager.py index 2293069a..3277ab9a 100644 --- a/conflowgen/api/container_length_distribution_manager.py +++ b/conflowgen/api/container_length_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ ContainerLengthDistributionRepository @@ -41,3 +42,4 @@ def set_container_length_distribution( values_are_frequencies=True ) self.container_length_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/container_weight_distribution_manager.py b/conflowgen/api/container_weight_distribution_manager.py index 9e892827..2fcce369 100644 --- a/conflowgen/api/container_weight_distribution_manager.py +++ b/conflowgen/api/container_weight_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.container_weight_distribution_repository import \ ContainerWeightDistributionRepository @@ -41,3 +42,4 @@ def set_container_weight_distribution( values_are_frequencies=True ) self.container_weight_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/database_chooser.py b/conflowgen/api/database_chooser.py index f41774a4..2da4817e 100644 --- a/conflowgen/api/database_chooser.py +++ b/conflowgen/api/database_chooser.py @@ -3,6 +3,7 @@ from peewee import SqliteDatabase +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from 
conflowgen.database_connection.sqlite_database_connection import SqliteDatabaseConnection @@ -45,6 +46,7 @@ def load_existing_sqlite_database(self, file_name: str) -> None: if self.peewee_sqlite_db is not None: self._close_and_reset_db() self.peewee_sqlite_db = self.sqlite_database_connection.choose_database(file_name, create=False, reset=False) + DataSummariesCache.reset_cache() def create_new_sqlite_database( self, @@ -77,6 +79,7 @@ def create_new_sqlite_database( self.peewee_sqlite_db = self.sqlite_database_connection.choose_database( file_name, create=True, reset=overwrite, **seeder_options ) + DataSummariesCache.reset_cache() def close_current_connection(self) -> None: """ @@ -91,3 +94,4 @@ def _close_and_reset_db(self): self.logger.debug("Closing current database connection.") self.peewee_sqlite_db.close() self.peewee_sqlite_db = None + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/mode_of_transport_distribution_manager.py b/conflowgen/api/mode_of_transport_distribution_manager.py index de9497f8..41763a73 100644 --- a/conflowgen/api/mode_of_transport_distribution_manager.py +++ b/conflowgen/api/mode_of_transport_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -45,3 +46,4 @@ def set_mode_of_transport_distribution( self.mode_of_transport_distribution_repository.set_mode_of_transport_distributions( sanitized_distribution ) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/port_call_manager.py b/conflowgen/api/port_call_manager.py index ae55168b..5da8191d 100644 --- a/conflowgen/api/port_call_manager.py +++ b/conflowgen/api/port_call_manager.py @@ -2,6 +2,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import 
DataSummariesCache from conflowgen.domain_models.factories.schedule_factory import ScheduleFactory from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -101,6 +102,7 @@ def add_vehicle( next_destinations=next_destinations, vehicle_arrives_every_k_days=vehicle_arrives_every_k_days ) + DataSummariesCache.reset_cache() def has_schedule( self, diff --git a/conflowgen/api/storage_requirement_distribution_manager.py b/conflowgen/api/storage_requirement_distribution_manager.py index fa604503..6bf6c92c 100644 --- a/conflowgen/api/storage_requirement_distribution_manager.py +++ b/conflowgen/api/storage_requirement_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.data_types.container_length import ContainerLength @@ -43,3 +44,4 @@ def set_storage_requirement_distribution( values_are_frequencies=True ) self.storage_requirement_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/truck_arrival_distribution_manager.py b/conflowgen/api/truck_arrival_distribution_manager.py index d4a2ebff..7c1ac4b0 100644 --- a/conflowgen/api/truck_arrival_distribution_manager.py +++ b/conflowgen/api/truck_arrival_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.truck_arrival_distribution_repository import \ TruckArrivalDistributionRepository @@ -43,3 +44,4 @@ def set_truck_arrival_distribution(self, distribution: typing.Dict[int, float]) values_are_frequencies=True ) self.truck_arrival_distribution_repository.set_distribution(sanitized_distribution) + 
DataSummariesCache.reset_cache() diff --git a/conflowgen/data_summaries/__init__.py b/conflowgen/data_summaries/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/conflowgen/data_summaries/data_summaries_cache.py b/conflowgen/data_summaries/data_summaries_cache.py new file mode 100644 index 00000000..3c97b328 --- /dev/null +++ b/conflowgen/data_summaries/data_summaries_cache.py @@ -0,0 +1,60 @@ +# Decorator class for preview and analysis result caching +from functools import wraps + + +class DataSummariesCache: + """ + This class is used to cache the results of the data summaries (analyses and previews). This is useful when the + same data summary is requested multiple times, e.g., when generating a report. In this case, the data summary + computation is only performed once and the result is cached. The next time the same data summary is requested, the + cached result is returned instead of computing the data summary again. This can significantly speed up the report + generation process. + To use this class, simply decorate the data summary function with the :meth:`.DataSummariesCache.cache_result` + decorator. + The cache is automatically reset when input data changes or a new database is used. This can also be done manually + by calling :meth:`.DataSummariesCache.reset_cache`. + """ + + cached_results = {} + _hit_counter = {} # For internal testing purposes + + # Decorator function to accept function as argument, and return cached result if available or compute and cache + # result + @classmethod + def cache_result(cls, func): + """ + Decorator function to accept function as argument, and return cached result if available or compute and cache + result. 
+ """ + @wraps(func) + def wrapper(*args, **kwargs): + # Create key from function id, name and arguments + key = str(id(func)) + repr(args) + repr(kwargs) + + # Adjust hit counter + function_name = func.__name__ + if function_name not in cls._hit_counter: + cls._hit_counter[function_name] = 0 + cls._hit_counter[function_name] += 1 + + # Check if key exists in cache + if key in cls.cached_results: + return cls.cached_results[key] + + # If not, compute result + result = func(*args, **kwargs) + + # Cache new result + cls.cached_results[key] = result + return result + + return wrapper + + # Reset cache + @classmethod + def reset_cache(cls): + """ + Resets the cache. + """ + cls.cached_results = {} + cls._hit_counter = {} diff --git a/conflowgen/domain_models/container.py b/conflowgen/domain_models/container.py index 9ce58379..f5e6514b 100644 --- a/conflowgen/domain_models/container.py +++ b/conflowgen/domain_models/container.py @@ -4,6 +4,7 @@ from peewee import ForeignKeyField from peewee import IntegerField +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from .arrival_information import TruckArrivalInformationForDelivery, TruckArrivalInformationForPickup from .base_model import BaseModel from .data_types.container_length import CONTAINER_LENGTH_TO_OCCUPIED_TEU @@ -16,6 +17,21 @@ from ..domain_models.data_types.mode_of_transport import ModeOfTransport +class FaultyDataException(Exception): + def __init__(self, message): + self.message = message + super().__init__(self.message) + + +class NoPickupVehicleException(Exception): + def __init__(self, container, vehicle_type): + self.container = container + self.vehicle_type = vehicle_type + message = f"The container {self.container} is not picked up by any vehicle even though a vehicle of type " \ + f"{self.vehicle_type} should be there." 
+ super().__init__(message) + + class Container(BaseModel): """A representation of the physical container that is moved through the yard.""" id = AutoField() @@ -100,12 +116,8 @@ class Container(BaseModel): def occupied_teu(self) -> float: return CONTAINER_LENGTH_TO_OCCUPIED_TEU[self.length] - def get_arrival_time(self, use_cache: bool) -> datetime.datetime: - - if use_cache: - if self.cached_arrival_time is not None: - # noinspection PyTypeChecker - return self.cached_arrival_time + @DataSummariesCache.cache_result + def get_arrival_time(self) -> datetime.datetime: container_arrival_time: datetime.datetime if self.delivered_by == ModeOfTransport.truck: @@ -118,18 +130,14 @@ def get_arrival_time(self, use_cache: bool) -> datetime.datetime: large_scheduled_vehicle: LargeScheduledVehicle = self.delivered_by_large_scheduled_vehicle container_arrival_time = large_scheduled_vehicle.scheduled_arrival else: - raise Exception(f"Faulty data: {self}") + raise FaultyDataException(f"Faulty data: {self}") self.cached_arrival_time = container_arrival_time self.save() return container_arrival_time - def get_departure_time(self, use_cache: bool) -> datetime.datetime: - - if use_cache: - if self.cached_departure_time is not None: - # noinspection PyTypeChecker - return self.cached_departure_time + @DataSummariesCache.cache_result + def get_departure_time(self) -> datetime.datetime: container_departure_time: datetime.datetime if self.picked_up_by_truck is not None: @@ -143,8 +151,7 @@ def get_departure_time(self, use_cache: bool) -> datetime.datetime: vehicle: LargeScheduledVehicle = self.picked_up_by_large_scheduled_vehicle container_departure_time = vehicle.scheduled_arrival else: - raise Exception(f"The container {self} is not picked up by any vehicle even though a vehicle of type " - f"{self.picked_up_by} should be there.") + raise NoPickupVehicleException(self, self.picked_up_by) self.cached_departure_time = container_departure_time self.save() diff --git 
a/conflowgen/domain_models/vehicle.py b/conflowgen/domain_models/vehicle.py index da998bfe..6ad588ab 100644 --- a/conflowgen/domain_models/vehicle.py +++ b/conflowgen/domain_models/vehicle.py @@ -6,6 +6,7 @@ import datetime import uuid +from abc import abstractmethod from typing import Type from peewee import AutoField, BooleanField, CharField, ForeignKeyField, DateTimeField @@ -14,6 +15,7 @@ from conflowgen.domain_models.arrival_information import \ TruckArrivalInformationForDelivery, TruckArrivalInformationForPickup from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from .base_model import BaseModel from .data_types.mode_of_transport import ModeOfTransport @@ -105,6 +107,7 @@ class LargeScheduledVehicle(BaseModel): "ModeOfTransportDistribution as obviously the different information does not match." ) + @DataSummariesCache.cache_result def get_arrival_time(self) -> datetime.datetime: """ Returns: @@ -119,12 +122,14 @@ def __repr__(self): class AbstractLargeScheduledVehicle(BaseModel): @property + @abstractmethod def large_scheduled_vehicle(self) -> LargeScheduledVehicle: - raise Exception("You must pick one of the concrete subclasses, this is the common parent class.") + pass @staticmethod + @abstractmethod def get_mode_of_transport() -> ModeOfTransport: - raise Exception("You must pick one of the concrete subclasses, this is the common parent class.") + pass @staticmethod def map_mode_of_transport_to_class( diff --git a/conflowgen/flow_generator/container_flow_generation_service.py b/conflowgen/flow_generator/container_flow_generation_service.py index bdc84680..9a772284 100644 --- a/conflowgen/flow_generator/container_flow_generation_service.py +++ b/conflowgen/flow_generator/container_flow_generation_service.py @@ -18,6 +18,7 @@ TruckForExportContainersManager from conflowgen.flow_generator.truck_for_import_containers_manager import \ TruckForImportContainersManager 
+from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache class ContainerFlowGenerationService: @@ -73,6 +74,8 @@ def container_flow_data_exists() -> bool: return len(Container.select().limit(1)) == 1 def generate(self): + self.logger.info("Resetting preview and analysis cache...") + DataSummariesCache.reset_cache() self.logger.info("Remove previous data...") self.clear_previous_container_flow() self.logger.info("Reloading properties and distributions...") diff --git a/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py b/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py index 8c2dd5a0..8a5e6888 100644 --- a/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py +++ b/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py @@ -9,6 +9,7 @@ # noinspection PyProtectedMember from peewee import fn, JOIN, ModelSelect +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from ..domain_models.data_types.container_length import ContainerLength from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForDelivery @@ -24,11 +25,8 @@ class LargeScheduledVehicleForOnwardTransportationManager: - random_seed = 1 - use_cache = True - def __init__(self): self.seeded_random = random.Random(x=self.random_seed) self.logger = logging.getLogger("conflowgen") @@ -254,10 +252,11 @@ def _get_dwell_times(self, container: Container) -> Tuple[int, int]: return minimum_dwell_time_in_hours, maximum_dwell_time_in_hours + @DataSummariesCache.cache_result def _get_arrival_time_of_container(self, container: Container) -> datetime.datetime: """get container arrival from correct source """ - return container.get_arrival_time(use_cache=self.use_cache) + return container.get_arrival_time() def _find_alternative_mode_of_transportation( 
self, diff --git a/conflowgen/previews/container_flow_by_vehicle_type_preview.py b/conflowgen/previews/container_flow_by_vehicle_type_preview.py index 5779389d..ce2c7b6f 100644 --- a/conflowgen/previews/container_flow_by_vehicle_type_preview.py +++ b/conflowgen/previews/container_flow_by_vehicle_type_preview.py @@ -2,6 +2,7 @@ import datetime from typing import Dict +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ @@ -49,6 +50,7 @@ def __init__( transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -58,6 +60,7 @@ def hypothesize_with_mode_of_transport_distribution( ) self.mode_of_transport_distribution = mode_of_transport_distribution + @DataSummariesCache.cache_result def get_inbound_to_outbound_flow( self ) -> Dict[ModeOfTransport, Dict[ModeOfTransport, float]]: diff --git a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py index f8ab382b..d56b4d4b 100644 --- a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py +++ b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py @@ -3,6 +3,7 @@ from typing import Dict import numpy as np +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity, ContainerVolumeByVehicleType from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable from conflowgen.previews.abstract_preview import AbstractPreview @@ -49,6 
+50,7 @@ def __init__( self.mode_of_transport_distribution = ModeOfTransportDistributionRepository().get_distribution() + @DataSummariesCache.cache_result def _get_truck_capacity_for_export_containers( self, inbound_capacity_of_vehicles: Dict[ModeOfTransport, float] @@ -69,6 +71,7 @@ def _get_truck_capacity_for_export_containers( truck_capacity += number_of_containers_to_pick_up_by_truck_from_vehicle_type return truck_capacity + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -78,6 +81,7 @@ def hypothesize_with_mode_of_transport_distribution( ) self.mode_of_transport_distribution = mode_of_transport_distribution + @DataSummariesCache.cache_result def get_inbound_capacity_of_vehicles(self) -> ContainerVolumeByVehicleType: """ For the inbound capacity, first vehicles that adhere to a schedule are considered. Trucks, which are created @@ -110,6 +114,7 @@ def get_inbound_capacity_of_vehicles(self) -> ContainerVolumeByVehicleType: teu=inbound_capacity_in_teu ) + @DataSummariesCache.cache_result def get_outbound_capacity_of_vehicles(self) -> OutboundUsedAndMaximumCapacity: """ For the outbound capacity, both the used outbound capacity (estimated) and the maximum outbound capacity is diff --git a/conflowgen/previews/modal_split_preview.py b/conflowgen/previews/modal_split_preview.py index 7138c4a0..75266672 100644 --- a/conflowgen/previews/modal_split_preview.py +++ b/conflowgen/previews/modal_split_preview.py @@ -2,6 +2,7 @@ import datetime from typing import Dict +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.previews.container_flow_by_vehicle_type_preview import \ ContainerFlowByVehicleTypePreview @@ -58,6 +59,7 @@ def __init__( transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def 
hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -66,6 +68,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution ) + @DataSummariesCache.cache_result def get_transshipment_and_hinterland_split(self) -> TransshipmentAndHinterlandSplit: """ Returns: @@ -90,6 +93,7 @@ def get_transshipment_and_hinterland_split(self) -> TransshipmentAndHinterlandSp hinterland_capacity=hinterland_capacity ) + @DataSummariesCache.cache_result def get_modal_split_for_hinterland( self, inbound: bool, diff --git a/conflowgen/previews/truck_gate_throughput_preview.py b/conflowgen/previews/truck_gate_throughput_preview.py index 945fb61f..5714fdd6 100644 --- a/conflowgen/previews/truck_gate_throughput_preview.py +++ b/conflowgen/previews/truck_gate_throughput_preview.py @@ -5,6 +5,7 @@ from datetime import datetime from collections import namedtuple +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ InboundAndOutboundVehicleCapacityPreview from conflowgen.api.truck_arrival_distribution_manager import TruckArrivalDistributionManager @@ -34,6 +35,7 @@ def __init__(self, start_date: datetime.date, end_date: datetime.date, transport ) ) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]] @@ -44,6 +46,7 @@ def hypothesize_with_mode_of_transport_distribution( self.inbound_and_outbound_vehicle_capacity_preview.hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution) + @DataSummariesCache.cache_result def _get_total_trucks(self) -> typing.Tuple[int, int]: # Calculate the truck capacity for export containers using the inbound container capacities inbound_used_and_maximum_capacity = 
self.inbound_and_outbound_vehicle_capacity_preview. \ @@ -72,6 +75,7 @@ def _get_total_trucks(self) -> typing.Tuple[int, int]: return total_containers_transported_by_truck + @DataSummariesCache.cache_result def _get_number_of_trucks_per_week(self) -> typing.Tuple[float, float]: # Calculate average number of trucks per week num_weeks = (self.end_date - self.start_date).days / 7 @@ -84,6 +88,7 @@ def _get_number_of_trucks_per_week(self) -> typing.Tuple[float, float]: return total_weekly_trucks + @DataSummariesCache.cache_result def get_weekly_truck_arrivals(self, inbound: bool = True, outbound: bool = True) -> typing.Dict[int, int]: assert inbound or outbound, "At least one of inbound or outbound must be True" diff --git a/conflowgen/previews/vehicle_capacity_exceeded_preview.py b/conflowgen/previews/vehicle_capacity_exceeded_preview.py index f54f697b..9e003abb 100644 --- a/conflowgen/previews/vehicle_capacity_exceeded_preview.py +++ b/conflowgen/previews/vehicle_capacity_exceeded_preview.py @@ -2,6 +2,7 @@ import datetime from typing import Dict, NamedTuple +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.previews.container_flow_by_vehicle_type_preview import \ @@ -74,6 +75,7 @@ def __init__( transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -88,6 +90,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution ) + @DataSummariesCache.cache_result def compare( self ) -> Dict[ModeOfTransport, RequiredAndMaximumCapacityComparison]: diff --git a/conflowgen/tests/data_summaries/__init__.py b/conflowgen/tests/data_summaries/__init__.py new file mode 
100644 index 00000000..e69de29b diff --git a/conflowgen/tests/data_summaries/test_data_summaries_cache.py b/conflowgen/tests/data_summaries/test_data_summaries_cache.py new file mode 100644 index 00000000..e088b6ed --- /dev/null +++ b/conflowgen/tests/data_summaries/test_data_summaries_cache.py @@ -0,0 +1,515 @@ +import unittest +import datetime +from functools import wraps + +from conflowgen import ContainerLength, TruckArrivalDistributionManager, ModeOfTransport, TruckGateThroughputPreview +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestDataSummariesCache(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + self.now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 
0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 1, + ContainerLength.forty_feet: 0, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=self.now, + end_date=self.now + datetime.timedelta(weeks=2) + ) # mostly use default values + arrival_distribution = { + 3: .2, + 4: .8 + } + truck_arrival_distribution_manager = TruckArrivalDistributionManager() + truck_arrival_distribution_manager.set_truck_arrival_distribution(arrival_distribution) + self.preview = TruckGateThroughputPreview( + start_date=self.now.date(), + end_date=(self.now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + self.cache = DataSummariesCache() # This is technically incorrect usage of the cache as it should never be + # instantiated, but it's the easiest way to test it + + def test_sanity(self): + # Define a function to be decorated + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def my_function(n): + return n ** 2 + + # Test case 1: Call the 
decorated function with argument 5 + result = my_function(5) + self.assertEqual(result, 25, "Result of 5^2 should be 25") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "There should be one cached result") + self.assertEqual(list(DataSummariesCache.cached_results.values())[0], 25, "Cached result should be 25") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'my_function': 1}, "Hit counter should be 1") + + # Test case 2: Call the decorated function with argument 5 again + # This should retrieve the cached result from the previous call + result = my_function(5) + self.assertEqual(result, 25, "Result of 5^2 should be 25") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "There should be one cached result") + self.assertEqual(list(DataSummariesCache.cached_results.values())[0], 25, "Cached result should be 25") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'my_function': 2}, "Hit counter should be 2") + + # Test case 3: Call the decorated function with argument 10 + result = my_function(10) + self.assertEqual(result, 100, "Result of 10^2 should be 100") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "There should be two cached results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 100 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 100") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'my_function': 3}, "Hit counter should be 3") + + def test_with_preview(self): + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + preview = 
self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 7, "There should be 7 cached results") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1, '_get_total_trucks': 1, + '_get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 2, + 'get_outbound_capacity_of_vehicles': 1, + 'get_weekly_truck_arrivals': 1}, "Incorrect hit counter") + + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 12, 4: 48}, "Cached result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 7, "There should be 7 cached results") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1, '_get_total_trucks': 1, + '_get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 2, + 'get_outbound_capacity_of_vehicles': 1, + 'get_weekly_truck_arrivals': 2}, "Incorrect hit counter") + # Only get_weekly_truck_arrivals should be called again as the other functions are cached + + def test_with_adjusted_preview(self): + # Create a preview, adjust input distribution, then create another preview + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + 
vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 7, "There should be 7 cached results") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1, '_get_total_trucks': 1, + '_get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 2, + 'get_outbound_capacity_of_vehicles': 1, + 'get_weekly_truck_arrivals': 1}, "Incorrect hit counter") + + arrival_distribution = { + 3: .1, + 4: .4, + 5: .5 + } + truck_arrival_distribution_manager = TruckArrivalDistributionManager() + truck_arrival_distribution_manager.set_truck_arrival_distribution(arrival_distribution) + self.preview = TruckGateThroughputPreview( + start_date=self.now.date(), + end_date=(self.now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 6, 4: 24, 5: 30}, "New result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 7, "There should be 7 cached results, because " + "the preview was adjusted") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 6, 4: 24, 5: 30} in list(DataSummariesCache.cached_results.values()), + "Incorrect results cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + '_get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 2, + 
'get_outbound_capacity_of_vehicles': 1, + 'get_weekly_truck_arrivals': 1}, "Incorrect hit counter") + # Hit counter should be the same as before, because the preview was adjusted i.e. the cache was reset and then + # we re-ran the same functions + + def test_cache_reset(self): + @DataSummariesCache.cache_result + def increment_counter(counter): + return counter + 1 + + # Check initial state + self.assertEqual(len(DataSummariesCache.cached_results), 0, "Initial cache should be empty") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {}, "Initial hit counter should be empty") + + # Call the function and check cache and hit counter + result = increment_counter(5) + self.assertEqual(result, 6, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(6 in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'increment_counter': 1}, + "Hit counter should be 1 for 'increment_counter'") + + # Reset cache and check again + DataSummariesCache.reset_cache() + self.assertEqual(len(DataSummariesCache.cached_results), 0, "Cache should be empty after reset") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {}, "Hit counter should be empty after reset") + + def test_cache_with_different_function_args(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def add_numbers(a, b): + return a + b + + # Call the function with different arguments and check if the results are cached correctly + result1 = add_numbers(1, 2) + result2 = add_numbers(3, 4) + self.assertEqual(result1, 3, "Incorrect result returned") + self.assertEqual(result2, 7, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(3 in 
list(DataSummariesCache.cached_results.values()) and + 7 in list(DataSummariesCache.cached_results.values()), "Cached results should be 3 and 7") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add_numbers': 2}, + "Hit counter should be 2 for 'add_numbers'") + + # Call the function with the same arguments and check if the results are retrieved from the cache + result3 = add_numbers(1, 2) + self.assertEqual(result3, 3, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should still have two results") + self.assertTrue(3 in list(DataSummariesCache.cached_results.values()) and + 7 in list(DataSummariesCache.cached_results.values()), "Cached results should be 3 and 7") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add_numbers': 3}, + "Hit counter should be 3 for 'add_numbers'") + + def test_cache_with_different_functions(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def square(n): + return n ** 2 + + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def cube(n): + return n ** 3 + + # Call the functions and check if the results are cached correctly + result1 = square(5) + result2 = cube(5) + self.assertEqual(result1, 25, "Incorrect result returned") + self.assertEqual(result2, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'square': 1, 'cube': 1}, + "Hit counter should be 1 for both 'square' and 'cube'") + + # Call the functions again and check if the results are retrieved from the cache + result3 = square(5) + result4 = cube(5) + 
self.assertEqual(result3, 25, "Incorrect result returned") + self.assertEqual(result4, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should still have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'square': 2, 'cube': 2}, + "Hit counter should be 2 for both 'square' and 'cube'") + + def test_cache_with_no_args(self): + @DataSummariesCache.cache_result + def get_constant(): + return 42 + + # Call the function and check if the result is cached + constant1 = get_constant() + self.assertEqual(constant1, 42, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(42 in list(DataSummariesCache.cached_results.values()), "Cached result should be 42") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'get_constant': 1}, + "Hit counter should be 1 for 'get_constant'") + + # Call the function again and check if the result is retrieved from the cache + constant2 = get_constant() + self.assertEqual(constant2, 42, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should still have one result") + self.assertTrue(42 in list(DataSummariesCache.cached_results.values()), "Cached result should still be 42") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'get_constant': 2}, + "Hit counter should be 2 for 'get_constant'") + + def test_cache_with_default_args(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def power(n, p=2): + return n ** p + + # Call the function with and without default argument and check if the results are cached correctly + result1 = power(5) 
+ result2 = power(5, 3) + self.assertEqual(result1, 25, "Incorrect result returned") + self.assertEqual(result2, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'power': 2}, "Hit counter should be 2 for 'power'") + + # Call the function with the same arguments and check if the results are retrieved from the cache + result3 = power(5) + result4 = power(5, 3) + self.assertEqual(result3, 25, "Incorrect result returned") + self.assertEqual(result4, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should still have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'power': 4}, "Hit counter should be 4 for 'power'") + + def test_docstring_preservation(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def square(n): + """Return the square of a number.""" + return n ** 2 + + self.assertEqual(square.__doc__, "Return the square of a number.", "Docstring should be preserved") + + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def cube(n): + """Return the cube of a number.""" + return n ** 3 + + self.assertEqual(cube.__doc__, "Return the cube of a number.", "Docstring should be preserved") + + def test_cache_none(self): + @DataSummariesCache.cache_result + def return_none(): + return None + + self.assertEqual(return_none(), None, "Function should return None") + 
self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(None in list(DataSummariesCache.cached_results.values()), "None should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_none': 1}) + + def test_cache_float(self): + @DataSummariesCache.cache_result + def return_float(): + return 3.14 + + self.assertEqual(return_float(), 3.14, "Function should return float") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(3.14 in list(DataSummariesCache.cached_results.values()), "Float should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_float': 1}) + + def test_cache_string(self): + @DataSummariesCache.cache_result + def return_string(): + return "hello" + + self.assertEqual(return_string(), "hello", "Function should return string") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue("hello" in list(DataSummariesCache.cached_results.values()), "String should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_string': 1}) + + def test_cache_list(self): + @DataSummariesCache.cache_result + def return_list(): + return [1, 2, 3] + + self.assertEqual(return_list(), [1, 2, 3], "Function should return list") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue([1, 2, 3] in list(DataSummariesCache.cached_results.values()), "List should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_list': 1}) + + def test_cache_dictionary(self): + @DataSummariesCache.cache_result + def return_dictionary(): + return {"a": 1, "b": 2} + + self.assertEqual(return_dictionary(), {"a": 1, "b": 2}, "Function should return dictionary") 
+ self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue({"a": 1, "b": 2} in list(DataSummariesCache.cached_results.values()), "Dictionary should be " + "cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_dictionary': 1}) + + def test_cache_custom_object(self): + class CustomObject: + pass + + @DataSummariesCache.cache_result + def return_custom_object(): + return CustomObject() + + self.assertIsInstance(return_custom_object(), CustomObject, "Function should return custom object") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertIsInstance(list(DataSummariesCache.cached_results.values())[0], CustomObject, + "Function should return an instance of CustomObject") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_custom_object': 1}) + + def test_nested_decorator(self): + # pylint: disable=invalid-name + def simple_decorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs) + + return wrapper + + @DataSummariesCache.cache_result + @simple_decorator + # pylint: disable=invalid-name + def add(a, b): + """Adds two numbers.""" + return a + b + + # Initial call + result = add(1, 2) + self.assertEqual(result, 3, "Function should return the sum of the two arguments") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(3 in list(DataSummariesCache.cached_results.values()), "Result should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add': 1}, "Cache should have one hit") + + # Repeated call + result = add(1, 2) + self.assertEqual(result, 3, "Function should return the sum of the two arguments (from cache)") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should still have one result") + self.assertTrue(3 
in list(DataSummariesCache.cached_results.values()), "Result should still be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add': 2}, "Cache should have two hits") + + # Check function metadata + self.assertEqual(add.__name__, 'add', "Function name should be preserved") + self.assertEqual(add.__doc__.strip(), 'Adds two numbers.', "Docstring should be preserved") + + def test_class_methods(self): + class TestClass: + def __init__(self): + self.counter = 0 + + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def method(self, a, b): + """Adds two numbers and the instance counter.""" + self.counter = getattr(self, 'counter', 0) + 1 + return a + b + self.counter + + # Create instance and call method + instance = TestClass() + result = instance.method(1, 2) + self.assertEqual(result, 4, "Method should return the sum of the two arguments and the counter") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(4 in list(DataSummariesCache.cached_results.values()), "Result should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter['method'], 1) + + # Repeated call + result = instance.method(1, 2) + self.assertEqual(result, 4, "Method should return the cached result") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should still have one result") + self.assertTrue(4 in list(DataSummariesCache.cached_results.values()), "Result should still be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter['method'], 2) + + # Call with different instance + another_instance = TestClass() + result = another_instance.method(1, 2) + self.assertEqual(result, 4, + "Method should return the sum of the two arguments and the counter (from new instance)") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + 
self.assertTrue(4 in list(DataSummariesCache.cached_results.values()), "Both results should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter['method'], 3) diff --git a/conflowgen/tests/domain_models/test_container.py b/conflowgen/tests/domain_models/test_container.py index a8a1b61c..369f65cd 100644 --- a/conflowgen/tests/domain_models/test_container.py +++ b/conflowgen/tests/domain_models/test_container.py @@ -3,10 +3,11 @@ """ import unittest +from dataclasses import dataclass from peewee import IntegrityError -from conflowgen.domain_models.container import Container +from conflowgen.domain_models.container import Container, FaultyDataException, NoPickupVehicleException from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement @@ -101,3 +102,51 @@ def test_container_repr(self) -> None: "" ) + + def test_faulty_data_exception(self): + @dataclass + class BogusModeOfTransport: + value: int + name: str + + def __init__(self, value: int, name: str): + self.value = value + self.name = name + + mode_of_transport = BogusModeOfTransport(1, "Bogus") + + container = Container.create( + weight=10, + delivered_by=mode_of_transport, + picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard + ) + + with self.assertRaises(FaultyDataException): + container.get_arrival_time() + + def test_no_pickup_vehicle_exception(self): + @dataclass + class BogusModeOfTransport: + value: int + name: str + + def __init__(self, value: int, name: str): + self.value = value + self.name = name + + mode_of_transport = BogusModeOfTransport(1, "Bogus") + + container = Container.create( + weight=10, + 
delivered_by=ModeOfTransport.barge, + picked_up_by=mode_of_transport, + picked_up_by_initial=mode_of_transport, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard + ) + + with self.assertRaises(NoPickupVehicleException): + container.get_departure_time() diff --git a/conflowgen/tests/substitute_peewee_database.py b/conflowgen/tests/substitute_peewee_database.py index 863a6c55..550a9111 100644 --- a/conflowgen/tests/substitute_peewee_database.py +++ b/conflowgen/tests/substitute_peewee_database.py @@ -1,5 +1,6 @@ from peewee import SqliteDatabase +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.base_model import database_proxy @@ -12,4 +13,5 @@ def setup_sqlite_in_memory_db() -> SqliteDatabase: }) database_proxy.initialize(sqlite_db) sqlite_db.connect() + DataSummariesCache.reset_cache() return sqlite_db diff --git a/docs/api.rst b/docs/api.rst index 3ab9cc1f..b1cb0306 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -205,6 +205,9 @@ you can define them here and pass them as parameters to the aforementioned funct .. autoclass:: conflowgen.DisplayAsPlainText :members: +.. autoclass:: conflowgen.DataSummariesCache + :members: + Exporting data ==============