diff --git a/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py b/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py index b6ff3d92..5b5583fe 100644 --- a/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py +++ b/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py @@ -29,6 +29,7 @@ def get_truck_capacity_for_export_containers( Thus, this method accounts for both import and export. """ truck_capacity = 0 + vehicle_type: ModeOfTransport for vehicle_type in ModeOfTransport.get_scheduled_vehicles(): number_of_containers_delivered_to_terminal_by_vehicle_type = inbound_capacity_of_vehicles[vehicle_type] mode_of_transport_distribution_of_vehicle_type = \ @@ -50,16 +51,19 @@ def get_inbound_capacity_of_vehicles( depending on the outbound distribution, are created based on the assumptions of the further container flow generation process. """ - containers: Dict[ModeOfTransport, float] = { + inbound_container_volume_in_containers: Dict[ModeOfTransport, float] = { vehicle_type: 0 for vehicle_type in ModeOfTransport } - inbound_capacity_in_teu: Dict[ModeOfTransport, float] = { + inbound_container_volume_in_teu: Dict[ModeOfTransport, float] = { vehicle_type: 0 for vehicle_type in ModeOfTransport } + at_least_one_schedule_exists: bool = False + for schedule in Schedule.select(): + at_least_one_schedule_exists = True arrivals = create_arrivals_within_time_range( start_date, schedule.vehicle_arrives_at, @@ -67,23 +71,24 @@ def get_inbound_capacity_of_vehicles( schedule.vehicle_arrives_every_k_days, schedule.vehicle_arrives_at_time ) - total_capacity_moved_by_vessel = (len(arrivals) # number of vehicles that are planned - * schedule.average_moved_capacity) # TEU capacity of each vehicle - containers[schedule.vehicle_type] += total_capacity_moved_by_vessel / \ - (ContainerLengthDistributionRepository.get_teu_factor() * 20) - inbound_capacity_in_teu[schedule.vehicle_type] += total_capacity_moved_by_vessel - - inbound_capacity_in_teu[ModeOfTransport.truck] = \ - InboundAndOutboundVehicleCapacityCalculatorService.get_truck_capacity_for_export_containers( - inbound_capacity_in_teu - ) - containers[ModeOfTransport.truck] = \ - inbound_capacity_in_teu[ModeOfTransport.truck] / \ - (ContainerLengthDistributionRepository.get_teu_factor() * 20) + moved_inbound_volumes = (len(arrivals) # number of vehicles that are planned + * schedule.average_moved_capacity) # moved TEU capacity of each vehicle + inbound_container_volume_in_teu[schedule.vehicle_type] += moved_inbound_volumes + inbound_container_volume_in_containers[schedule.vehicle_type] += moved_inbound_volumes / \ + ContainerLengthDistributionRepository.get_teu_factor() + + if at_least_one_schedule_exists: + inbound_container_volume_in_teu[ModeOfTransport.truck] = \ + InboundAndOutboundVehicleCapacityCalculatorService.get_truck_capacity_for_export_containers( + inbound_container_volume_in_teu + ) + inbound_container_volume_in_containers[ModeOfTransport.truck] = \ + inbound_container_volume_in_teu[ModeOfTransport.truck] / \ + ContainerLengthDistributionRepository.get_teu_factor() return ContainerVolumeByVehicleType( - containers=containers, - teu=inbound_capacity_in_teu + containers=inbound_container_volume_in_containers, + teu=inbound_container_volume_in_teu ) @staticmethod @@ -130,10 +135,10 @@ def get_outbound_capacity_of_vehicles(start_date, end_date, transportation_buffe ) # If all container flows are 
balanced, only the average moved capacity is required
-                total_average_capacity_moved_by_vessel_in_teu = len(arrivals) * schedule.average_moved_capacity
-                outbound_used_capacity_in_teu[schedule.vehicle_type] += total_average_capacity_moved_by_vessel_in_teu
-                outbound_used_containers[schedule.vehicle_type] += total_average_capacity_moved_by_vessel_in_teu / \
-                    (ContainerLengthDistributionRepository.get_teu_factor() * 20)
+                container_volume_moved_by_vessels_in_teu = len(arrivals) * schedule.average_moved_capacity
+                outbound_used_capacity_in_teu[schedule.vehicle_type] += container_volume_moved_by_vessels_in_teu
+                outbound_used_containers[schedule.vehicle_type] += container_volume_moved_by_vessels_in_teu / \
+                    ContainerLengthDistributionRepository.get_teu_factor()

                # If there are unbalanced container flows, a vehicle departs with more containers than it delivered
                maximum_capacity_of_vehicle_in_teu = min(
@@ -143,7 +148,7 @@ def get_outbound_capacity_of_vehicles(start_date, end_date, transportation_buffe
            )
            total_maximum_capacity_moved_by_vessel = len(arrivals) * maximum_capacity_of_vehicle_in_teu
            outbound_maximum_capacity_in_teu[schedule.vehicle_type] += total_maximum_capacity_moved_by_vessel
            outbound_maximum_containers[schedule.vehicle_type] += total_maximum_capacity_moved_by_vessel / \
-                (ContainerLengthDistributionRepository.get_teu_factor() * 20)
+                ContainerLengthDistributionRepository.get_teu_factor()

        inbound_capacity = InboundAndOutboundVehicleCapacityCalculatorService.\
            get_inbound_capacity_of_vehicles(start_date, end_date)
@@ -153,7 +158,7 @@ def get_outbound_capacity_of_vehicles(start_date, end_date, transportation_buffe
        )
        outbound_used_containers[ModeOfTransport.truck] = \
            outbound_used_capacity_in_teu[ModeOfTransport.truck] / \
-            (ContainerLengthDistributionRepository.get_teu_factor() * 20)
+            ContainerLengthDistributionRepository.get_teu_factor()

        outbound_maximum_capacity_in_teu[ModeOfTransport.truck] = np.nan  # Trucks can always be added as required
        outbound_maximum_containers[ModeOfTransport.truck] = np.nan
diff --git a/conflowgen/descriptive_datatypes/__init__.py b/conflowgen/descriptive_datatypes/__init__.py
index 2549096a..82597c2a 100644
--- a/conflowgen/descriptive_datatypes/__init__.py
+++ b/conflowgen/descriptive_datatypes/__init__.py
@@ -26,17 +26,26 @@ class HinterlandModalSplit(typing.NamedTuple):
     truck_capacity: float


-class OutboundUsedAndMaximumCapacity(typing.NamedTuple):
+class ContainerVolume(typing.NamedTuple):
     """
-    This tuple keeps track of how much each vehicle type transports on the outbound journey and what the maximum
-    capacity is.
+    Several KPIs at container terminals can be both expressed in boxes and TEU.
     """
+    #: The container volume expressed in TEU
+    teu: float

-    #: The container volume that is actually transported, summarized by vehicle type.
-    used: ContainerVolumeByVehicleType
+    #: The container volume expressed in number of boxes
+    containers: float

-    #: The container volume that could be transported if all capacities had been used, summarized by vehicle type.
-    maximum: ContainerVolumeByVehicleType
+
+
+class InboundAndOutboundContainerVolume(typing.NamedTuple):
+    """
+    This tuple keeps track of both the inbound and the outbound container volume.
+    """
+    #: The container volume transported by vehicles on their inbound journey
+    inbound: ContainerVolume
+
+    #: The container volume transported by vehicles on their outbound journey
+    outbound: ContainerVolume


 class ContainerVolumeByVehicleType(typing.NamedTuple):
@@ -52,6 +61,20 @@ class ContainerVolumeByVehicleType(typing.NamedTuple):
     containers: typing.Optional[typing.Dict[ModeOfTransport, float]]


+class OutboundUsedAndMaximumCapacity(typing.NamedTuple):
+    """
+    This tuple keeps track of how much each vehicle type transports on the outbound journey and what the maximum
+    capacity is.
+    """
+
+    #: The container volume that is actually transported, summarized by vehicle type.
+    used: ContainerVolumeByVehicleType
+
+    #: The container volume that could be transported if all capacities had been used, summarized by vehicle type.
+    maximum: ContainerVolumeByVehicleType
+
+
+
 class ContainerVolumeFromOriginToDestination(typing.NamedTuple):
     """
     Several KPIs at container terminals can be both expressed in boxes per hour and TEU per hour (or a different time
diff --git a/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py b/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py
index 3bcd1f9f..d4df7c04 100644
--- a/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py
+++ b/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py
@@ -1,6 +1,7 @@
 import math
 from typing import Dict

+from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache
 from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution
 from conflowgen.domain_models.data_types.container_length import ContainerLength

@@ -55,6 +56,7 @@ def set_distribution(cls, container_lengths: Dict[ContainerLength, float]):
         ).save()

     @classmethod
+    @DataSummariesCache.cache_result
     def get_teu_factor(cls) -> float:
         """
         Calculates and returns the TEU factor based on the container length distribution.
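The hunks above consistently replace the former `(get_teu_factor() * 20)` denominator with a plain `get_teu_factor()` call when converting TEU volumes into box counts, and `get_teu_factor()` itself is now cached via `DataSummariesCache.cache_result`. The following standalone sketch is not ConFlowGen code; it only illustrates, under the assumption that the TEU factor is the weighted average TEU per box of the configured container length distribution, why dividing a TEU volume by that factor alone yields the box counts the new tests expect (an all-forty-foot fleet gives a factor of 2, so 150 TEU correspond to 75 boxes).

```python
# Illustrative sketch only -- not part of the patch. The TEU equivalents per length are assumptions.
TEU_PER_LENGTH = {20: 1.0, 40: 2.0, 45: 2.25}


def teu_factor(length_shares: dict) -> float:
    """Weighted average TEU per box for a container length distribution (shares sum to 1)."""
    return sum(share * TEU_PER_LENGTH[length] for length, share in length_shares.items())


def teu_to_boxes(volume_in_teu: float, length_shares: dict) -> float:
    """Convert a container volume given in TEU into the corresponding number of boxes."""
    return volume_in_teu / teu_factor(length_shares)


# All-forty-foot distribution, as configured in the new quay side throughput tests:
assert teu_factor({40: 1.0}) == 2.0
assert teu_to_boxes(150, {40: 1.0}) == 75.0
```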
diff --git a/conflowgen/previews/__init__.py b/conflowgen/previews/__init__.py index f636a7b1..46e5dd30 100644 --- a/conflowgen/previews/__init__.py +++ b/conflowgen/previews/__init__.py @@ -3,6 +3,7 @@ from .inbound_and_outbound_vehicle_capacity_preview_report import InboundAndOutboundVehicleCapacityPreviewReport from .container_flow_by_vehicle_type_preview_report import ContainerFlowByVehicleTypePreviewReport from .modal_split_preview_report import ModalSplitPreviewReport +from .quay_side_throughput_preview_report import QuaySideThroughputPreviewReport from .truck_gate_throughput_preview_report import TruckGateThroughputPreviewReport from .vehicle_capacity_exceeded_preview_report import VehicleCapacityUtilizationOnOutboundJourneyPreviewReport from ..reporting import AbstractReport @@ -15,6 +16,7 @@ VehicleCapacityUtilizationOnOutboundJourneyPreviewReport, ContainerFlowByVehicleTypePreviewReport, ModalSplitPreviewReport, + QuaySideThroughputPreviewReport, TruckGateThroughputPreviewReport ] diff --git a/conflowgen/previews/quay_side_throughput_preview.py b/conflowgen/previews/quay_side_throughput_preview.py new file mode 100644 index 00000000..c8fff37d --- /dev/null +++ b/conflowgen/previews/quay_side_throughput_preview.py @@ -0,0 +1,81 @@ +import typing +from abc import ABC +from datetime import datetime + +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.previews.container_flow_by_vehicle_type_preview import ContainerFlowByVehicleTypePreview +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable +from conflowgen.previews.abstract_preview import AbstractPreview +from conflowgen.descriptive_datatypes import InboundAndOutboundContainerVolume, ContainerVolume + + +class QuaySideThroughputPreview(AbstractPreview, ABC): + """ + This preview calculates the quayside throughput based on the schedules. + + The preview returns a data structure that can be used for generating reports (e.g., in text or as a figure). The + preview is intended to provide an estimate of the quayside throughput for the given inputs. 
+    """
+
+    QUAY_SIDE_VEHICLES = {
+        ModeOfTransport.deep_sea_vessel,
+        ModeOfTransport.feeder,
+        # barges are counted as hinterland here
+    }
+
+    def __init__(self, start_date: datetime.date, end_date: datetime.date, transportation_buffer: float):
+        super().__init__(start_date, end_date, transportation_buffer)
+        self.container_flow_by_vehicle_type = (
+            ContainerFlowByVehicleTypePreview(
+                self.start_date,
+                self.end_date,
+                self.transportation_buffer,
+            )
+        )
+
+    @DataSummariesCache.cache_result
+    def hypothesize_with_mode_of_transport_distribution(
+            self,
+            mode_of_transport_distribution: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]]
+    ):
+        validate_distribution_with_one_dependent_variable(
+            mode_of_transport_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True
+        )
+        self.container_flow_by_vehicle_type.hypothesize_with_mode_of_transport_distribution(
+            mode_of_transport_distribution
+        )
+
+
+    @DataSummariesCache.cache_result
+    def get_quay_side_throughput(self) -> InboundAndOutboundContainerVolume:
+        inbound_to_outbound_flow = self.container_flow_by_vehicle_type.get_inbound_to_outbound_flow()
+
+        quayside_inbound_container_volume_in_teu: int = 0
+        quayside_outbound_container_volume_in_teu: int = 0
+
+        inbound_vehicle_type: ModeOfTransport
+        outbound_vehicle_type: ModeOfTransport
+        for inbound_vehicle_type, to_outbound_flow in inbound_to_outbound_flow.items():
+            for outbound_vehicle_type, container_volume in to_outbound_flow.items():
+                if inbound_vehicle_type in self.QUAY_SIDE_VEHICLES:
+                    quayside_inbound_container_volume_in_teu += container_volume
+                if outbound_vehicle_type in self.QUAY_SIDE_VEHICLES:
+                    quayside_outbound_container_volume_in_teu += container_volume
+
+        teu_factor = ContainerLengthDistributionRepository().get_teu_factor()
+
+        result = InboundAndOutboundContainerVolume(
+            inbound=ContainerVolume(
+                teu=quayside_inbound_container_volume_in_teu,
+                containers=quayside_inbound_container_volume_in_teu / teu_factor
+            ),
+            outbound=ContainerVolume(
+                teu=quayside_outbound_container_volume_in_teu,
+                containers=quayside_outbound_container_volume_in_teu / teu_factor
+            )
+        )
+
+        return result
diff --git a/conflowgen/previews/quay_side_throughput_preview_report.py b/conflowgen/previews/quay_side_throughput_preview_report.py
new file mode 100644
index 00000000..cd80066f
--- /dev/null
+++ b/conflowgen/previews/quay_side_throughput_preview_report.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+from typing import Dict
+
+import pandas as pd
+
+from conflowgen.descriptive_datatypes import InboundAndOutboundContainerVolume
+from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport
+from conflowgen.previews.quay_side_throughput_preview import QuaySideThroughputPreview
+from conflowgen.reporting import AbstractReportWithMatplotlib
+
+
+class QuaySideThroughputPreviewReport(AbstractReportWithMatplotlib):
+    """
+    This preview report takes the data structure as generated by
+    :class:`.QuaySideThroughputPreview`
+    and creates a comprehensible representation for the user, either as text or as a graph.
+    """
+
+    report_description = """
+    This report previews the inbound and outbound traffic at the quay side.
+    This is only an estimate; additional restrictions (such as dwell time restrictions) might further
+    reduce the number of containers a vehicle can actually pick up for its outbound journey.
+ """ + + def __init__(self): + super().__init__() + self._df = None + self.preview = QuaySideThroughputPreview( + start_date=self.start_date, + end_date=self.end_date, + transportation_buffer=self.transportation_buffer + ) + + def hypothesize_with_mode_of_transport_distribution( + self, + mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] + ): + self.preview.hypothesize_with_mode_of_transport_distribution(mode_of_transport_distribution) + + def get_report_as_text( + self, **kwargs + ) -> str: + assert len(kwargs) == 0, f"No keyword arguments supported for {self.__class__.__name__}" + + quay_side_throughput = self._get_quay_side_throughput() + + # create string representation + report = "\n" + report += "discharged (in containers) " + report += "loaded (in containers)" + report += "\n" + + report += f"{int(round(quay_side_throughput.inbound.containers)):>26} " + report += f"{int(round(quay_side_throughput.outbound.containers)):>22}" + report += "\n" + + report += "(rounding errors might exist)\n" + return report + + def get_report_as_graph(self, **kwargs) -> object: + assert len(kwargs) == 0, f"No keyword arguments supported for {self.__class__.__name__}" + + quay_side_throughput = self._get_quay_side_throughput() + + series = pd.Series({ + "Number discharged containers": quay_side_throughput.inbound.containers, + "Number loaded containers": quay_side_throughput.outbound.containers + }, name="Quayside Throughput") + + ax = series.plot() + + return ax + + def _get_quay_side_throughput(self) -> InboundAndOutboundContainerVolume: + assert self.start_date is not None + assert self.end_date is not None + assert self.transportation_buffer is not None + self.preview.update( + start_date=self.start_date, + end_date=self.end_date, + transportation_buffer=self.transportation_buffer + ) + # gather data + quay_side_throughput = self.preview.get_quay_side_throughput() + return quay_side_throughput diff --git a/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py b/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py index 752a6a7f..d85557f5 100644 --- a/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py +++ b/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py @@ -107,134 +107,3 @@ def test_set_container_dwell_time_distributions(self): self.SAMPLE_DISTRIBUTION ) mock_method.assert_called_once_with(self.SAMPLE_DISTRIBUTION) - - def test_get_average_container_dwell_time_base_case(self): - one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( - vehicle_type=ModeOfTransport.feeder, - service_name="TestFeederService", - vehicle_arrives_at=one_week_later.date(), - vehicle_arrives_at_time=one_week_later.time(), - average_vehicle_capacity=300, - average_moved_capacity=300, - vehicle_arrives_every_k_days=-1 - ) - schedule.save() - - now = datetime.datetime.now() - average_container_dwell_time = self.container_dwell_time_distribution_manager.get_average_container_dwell_time( - start_date=now.date(), - end_date=(now + datetime.timedelta(weeks=2)).date(), - ) - - print("average_container_dwell_time: ", average_container_dwell_time) - self.assertEqual(average_container_dwell_time, 129.9408) - - def test_get_average_container_dwell_time_1_sd(self): - one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( - vehicle_type=ModeOfTransport.feeder, - service_name="TestFeederService", - 
vehicle_arrives_at=one_week_later.date(), - vehicle_arrives_at_time=one_week_later.time(), - average_vehicle_capacity=300, - average_moved_capacity=300, - vehicle_arrives_every_k_days=-1 - ) - schedule.save() - - # Container dwell time increase - container_dwell_time_increase = 1 - - container_dwell_time_distribution_manager = ContainerDwellTimeDistributionManager() - - original_dwell_time_distributions = container_dwell_time_distribution_manager.\ - get_container_dwell_time_distribution() - - new_container_dwell_time_distributions = {} - - for mode1, mode1_dict in original_dwell_time_distributions.items(): - new_mode1_dict = {} - for mode2, mode2_dict in mode1_dict.items(): - new_mode2_dict = {} - for requirement, distribution in mode2_dict.items(): - sd = distribution.variance ** 0.5 - new_average = distribution.average + sd * container_dwell_time_increase - new_maximum = new_average * 3 # Necessary to avoid average > max - # Create a new dictionary with updated average value - new_distribution_dict = { - "distribution_name": "lognormal", - "average_number_of_hours": new_average, - "variance": distribution.variance, # Keep variance same - "minimum_number_of_hours": distribution.minimum, # Keep minimum same - "maximum_number_of_hours": new_maximum, - } - new_mode2_dict[requirement] = new_distribution_dict - new_mode1_dict[mode2] = new_mode2_dict - new_container_dwell_time_distributions[mode1] = new_mode1_dict - - container_dwell_time_distribution_manager.set_container_dwell_time_distribution( - new_container_dwell_time_distributions) - - now = datetime.datetime.now() - average_container_dwell_time = self.container_dwell_time_distribution_manager.get_average_container_dwell_time( - start_date=now.date(), - end_date=(now + datetime.timedelta(weeks=2)).date(), - ) - - print("average_container_dwell_time: ", average_container_dwell_time) - self.assertEqual(average_container_dwell_time, 207.68489589654993) - - def test_get_average_container_dwell_time_2_sd(self): - one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( - vehicle_type=ModeOfTransport.feeder, - service_name="TestFeederService", - vehicle_arrives_at=one_week_later.date(), - vehicle_arrives_at_time=one_week_later.time(), - average_vehicle_capacity=300, - average_moved_capacity=300, - vehicle_arrives_every_k_days=-1 - ) - schedule.save() - - # Container dwell time increase - container_dwell_time_increase = 2 - - container_dwell_time_distribution_manager = ContainerDwellTimeDistributionManager() - - original_dwell_time_distributions = container_dwell_time_distribution_manager.\ - get_container_dwell_time_distribution() - - new_container_dwell_time_distributions = {} - - for mode1, mode1_dict in original_dwell_time_distributions.items(): - new_mode1_dict = {} - for mode2, mode2_dict in mode1_dict.items(): - new_mode2_dict = {} - for requirement, distribution in mode2_dict.items(): - sd = distribution.variance ** 0.5 - new_average = distribution.average + sd * container_dwell_time_increase - new_maximum = new_average * 3 # Necessary to avoid average > max - # Create a new dictionary with updated average value - new_distribution_dict = { - "distribution_name": "lognormal", - "average_number_of_hours": new_average, - "variance": distribution.variance, # Keep variance same - "minimum_number_of_hours": distribution.minimum, # Keep minimum same - "maximum_number_of_hours": new_maximum, - } - new_mode2_dict[requirement] = new_distribution_dict - new_mode1_dict[mode2] = new_mode2_dict - 
new_container_dwell_time_distributions[mode1] = new_mode1_dict - - container_dwell_time_distribution_manager.set_container_dwell_time_distribution( - new_container_dwell_time_distributions) - - now = datetime.datetime.now() - average_container_dwell_time = self.container_dwell_time_distribution_manager.get_average_container_dwell_time( - start_date=now.date(), - end_date=(now + datetime.timedelta(weeks=2)).date(), - ) - - self.assertEqual(average_container_dwell_time, 285.42899179309984) diff --git a/conflowgen/tests/data_summaries/test_data_summaries_cache.py b/conflowgen/tests/data_summaries/test_data_summaries_cache.py index fcae2a6a..993cc97f 100644 --- a/conflowgen/tests/data_summaries/test_data_summaries_cache.py +++ b/conflowgen/tests/data_summaries/test_data_summaries_cache.py @@ -136,29 +136,40 @@ def test_with_preview(self): ) preview = self.preview.get_weekly_truck_arrivals(True, True) self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") - self.assertEqual(len(DataSummariesCache.cached_results), 9, "There should be 9 cached results") + self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results") self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") # pylint: disable=protected-access - self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1, - '_get_total_trucks': 1, - 'get_truck_capacity_for_export_containers': 2, - 'get_inbound_capacity_of_vehicles': 3, - 'get_outbound_capacity_of_vehicles': 2, - 'get_weekly_truck_arrivals': 1}, "Incorrect hit counter") + self.assertDictEqual( + DataSummariesCache._hit_counter, + {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + 'get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 3, + 'get_outbound_capacity_of_vehicles': 2, + 'get_weekly_truck_arrivals': 1, + 'get_teu_factor': 5, + }, "Incorrect hit counter" + ) preview = self.preview.get_weekly_truck_arrivals(True, True) self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") - self.assertEqual(len(DataSummariesCache.cached_results), 9, "There should be 9 cached results") + self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results") self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") # pylint: disable=protected-access - self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1, - '_get_total_trucks': 1, - 'get_truck_capacity_for_export_containers': 2, - 'get_inbound_capacity_of_vehicles': 3, - 'get_outbound_capacity_of_vehicles': 2, - 'get_weekly_truck_arrivals': 2}, "Incorrect hit counter") + self.assertDictEqual( + DataSummariesCache._hit_counter, + {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + 'get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 3, + 'get_outbound_capacity_of_vehicles': 2, + 'get_weekly_truck_arrivals': 2, + 'get_teu_factor': 5, + }, + "Incorrect hit counter" + ) # Only get_weekly_truck_arrivals should be called again as the other functions are cached def test_with_adjusted_preview(self): @@ -175,16 +186,20 @@ def test_with_adjusted_preview(self): ) preview = self.preview.get_weekly_truck_arrivals(True, True) self.assertEqual(preview, {3: 
12, 4: 48}, "Uncached result is incorrect")
-        self.assertEqual(len(DataSummariesCache.cached_results), 9, "There should be 9 cached results")
+        self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
         self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
                         {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()),
                         "Incorrect results cached")
         # pylint: disable=protected-access
-        self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1,
-                                                           '_get_total_trucks': 1,
-                                                           'get_truck_capacity_for_export_containers': 2,
-                                                           'get_inbound_capacity_of_vehicles': 3,
-                                                           'get_outbound_capacity_of_vehicles': 2,
-                                                           'get_weekly_truck_arrivals': 1}, "Incorrect hit counter")
+        self.assertDictEqual(
+            DataSummariesCache._hit_counter,
+            {'_get_number_of_trucks_per_week': 1,
+             '_get_total_trucks': 1,
+             'get_truck_capacity_for_export_containers': 2,
+             'get_inbound_capacity_of_vehicles': 3,
+             'get_outbound_capacity_of_vehicles': 2,
+             'get_weekly_truck_arrivals': 1,
+             'get_teu_factor': 5,
+             }, "Incorrect hit counter")

         arrival_distribution = {
             3: .1,
@@ -200,18 +215,25 @@
         )
         preview = self.preview.get_weekly_truck_arrivals(True, True)
         self.assertEqual(preview, {3: 6, 4: 24, 5: 30}, "New result is incorrect")
-        self.assertEqual(len(DataSummariesCache.cached_results), 9, "There should be 9 cached results, because"
-                                                                    "the preview was adjusted")
+        self.assertEqual(
+            len(DataSummariesCache.cached_results), 10,
+            "There should be 10 cached results, because the preview was adjusted")
         self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
                         {3: 6, 4: 24, 5: 30} in list(DataSummariesCache.cached_results.values()),
                         "Incorrect results cached")
         # pylint: disable=protected-access
-        self.assertEqual(DataSummariesCache._hit_counter, {'_get_number_of_trucks_per_week': 1,
-                                                           '_get_total_trucks': 1,
-                                                           'get_truck_capacity_for_export_containers': 2,
-                                                           'get_inbound_capacity_of_vehicles': 3,
-                                                           'get_outbound_capacity_of_vehicles': 2,
-                                                           'get_weekly_truck_arrivals': 1}, "Incorrect hit counter")
+        self.assertDictEqual(
+            DataSummariesCache._hit_counter,
+            {'_get_number_of_trucks_per_week': 1,
+             '_get_total_trucks': 1,
+             'get_truck_capacity_for_export_containers': 2,
+             'get_inbound_capacity_of_vehicles': 3,
+             'get_outbound_capacity_of_vehicles': 2,
+             'get_weekly_truck_arrivals': 1,
+             'get_teu_factor': 5,
+             },
+            "Incorrect hit counter"
+        )
         # Hit counter should be the same as before, because the preview was adjusted i.e.
the cache was reset, and then # we re-ran the same functions diff --git a/conflowgen/tests/previews/test_quay_side_throughput_preview.py b/conflowgen/tests/previews/test_quay_side_throughput_preview.py new file mode 100644 index 00000000..32b8a9db --- /dev/null +++ b/conflowgen/tests/previews/test_quay_side_throughput_preview.py @@ -0,0 +1,119 @@ +import unittest +import datetime + +from conflowgen import ModeOfTransport, ContainerLength +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.previews.quay_side_throughput_preview import QuaySideThroughputPreview +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestQuaySideThroughputPreview(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 0, + ContainerLength.forty_feet: 1, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=now, + end_date=now + datetime.timedelta(weeks=2) + ) # mostly use default values + + self.preview = QuaySideThroughputPreview( + start_date=now.date(), + end_date=(now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + + def test_empty(self): + v = self.preview.get_quay_side_throughput() + vi = v.inbound + vo = v.outbound + vi_teu = vi.teu + vi_box = vi.containers + vo_teu = vo.teu + vo_box = vo.containers 
+ self.assertEqual(vi_teu, 0) + self.assertEqual(vi_box, 0) + self.assertEqual(vo_teu, 0) + self.assertEqual(vo_box, 0) + + def test_one_feeder(self): + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=150, + vehicle_arrives_every_k_days=-1 + ) + v = self.preview.get_quay_side_throughput() + vi = v.inbound + vo = v.outbound + vi_teu = vi.teu + vi_box = vi.containers + vo_teu = vo.teu + vo_box = vo.containers + self.assertAlmostEqual(vi_teu, 150) + self.assertAlmostEqual(vi_box, 75) + self.assertAlmostEqual(vo_teu, 58.5) + self.assertAlmostEqual(vo_box, 29.25) diff --git a/conflowgen/tests/previews/test_quay_side_throughput_preview_report.py b/conflowgen/tests/previews/test_quay_side_throughput_preview_report.py new file mode 100644 index 00000000..eb10e792 --- /dev/null +++ b/conflowgen/tests/previews/test_quay_side_throughput_preview_report.py @@ -0,0 +1,121 @@ +import datetime +import unittest + +from conflowgen import ModeOfTransport, ContainerLength +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.previews.quay_side_throughput_preview_report import QuaySideThroughputPreviewReport +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestQuaySideThroughputPreviewReport(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + 
ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 0, + ContainerLength.forty_feet: 1, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=now, + end_date=now + datetime.timedelta(weeks=2) + ) # mostly use default values + + self.preview_report = QuaySideThroughputPreviewReport() + + def test_report_with_no_schedules_as_graph(self): + """Not throwing an exception is sufficient""" + axes = self.preview_report.get_report_as_graph() + self.assertIsNotNone(axes) + + def test_report_with_schedules_as_graph(self): + """Not throwing an exception is sufficient for now""" + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=400, + average_moved_capacity=300, + vehicle_arrives_every_k_days=-1 + ) + axes = self.preview_report.get_report_as_graph() + self.assertIsNotNone(axes) + + def test_text_report(self): + # pylint: disable=protected-access + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=24000, + average_moved_capacity=24000 + ) + report = self.preview_report.get_report_as_text() + # flake8: noqa: W291 (ignore trailing whitespace in text report) + expected_report = \ + ''' +discharged (in containers) loaded (in containers) + 12000 4680 +(rounding errors might exist) +''' + self.assertEqual(report, expected_report) diff --git a/conflowgen/tests/previews/test_run_all_previews.py b/conflowgen/tests/previews/test_run_all_previews.py index a93d6374..abcd0a8f 100644 --- a/conflowgen/tests/previews/test_run_all_previews.py +++ b/conflowgen/tests/previews/test_run_all_previews.py @@ -26,7 +26,7 @@ def setUp(self) -> None: def test_with_no_data_as_text(self): with self.assertLogs('conflowgen', level='INFO') as context: run_all_previews(as_text=True) - self.assertEqual(len(context.output), 17) + self.assertEqual(len(context.output), 20) # Test only some entries. The detailed tests should be done in the unit test of the respective report. 
self.assertEqual( @@ -46,4 +46,4 @@ def test_with_no_data_as_graph(self): with unittest.mock.patch('matplotlib.pyplot.show'): with self.assertLogs('conflowgen', level='INFO') as context: run_all_previews(as_text=False, as_graph=True, static_graphs=True) - self.assertEqual(len(context.output), 13) + self.assertEqual(len(context.output), 15) diff --git a/conflowgen/tests/previews/test_truck_gate_throughput_preview.py b/conflowgen/tests/previews/test_truck_gate_throughput_preview.py index 201adb10..52447103 100644 --- a/conflowgen/tests/previews/test_truck_gate_throughput_preview.py +++ b/conflowgen/tests/previews/test_truck_gate_throughput_preview.py @@ -104,7 +104,9 @@ def test_get_total_trucks(self): # 300 TEU arrive by feeder # 300 TEU * 0.2 (from mode of transport distribution) = 60 TEU to be exported by truck # Only twenty-feet containers used, so 60 TEU = 60 trucks needed - self.assertEqual(total_trucks, (60, 60)) + self.assertAlmostEqual(total_trucks.inbound, 60) + self.assertAlmostEqual(total_trucks.outbound, 60) + self.assertAlmostEqual(sum(total_trucks), 120) def test_get_weekly_trucks(self): # pylint: disable=protected-access @@ -121,7 +123,9 @@ def test_get_weekly_trucks(self): weekly_trucks = self.preview._get_number_of_trucks_per_week() # 60 trucks total (from test_get_total_trucks above) # 60 trucks / 2 weeks = 30 trucks per week - self.assertEqual(weekly_trucks, (30, 30)) + self.assertAlmostEqual(weekly_trucks.inbound, 30) + self.assertAlmostEqual(weekly_trucks.outbound, 30) + self.assertAlmostEqual(sum(weekly_trucks), 60) def test_get_truck_distribution(self): # Test case 1 diff --git a/pytest.ini b/pytest.ini index 7fc6011c..2af2baa1 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,4 +3,4 @@ filterwarnings = ignore:Proactor event loop does not implement add_reader family of methods required for zmq.*:RuntimeWarning ignore:There is no current event loop:DeprecationWarning ignore:setDaemon\(\) is deprecated, set the daemon attribute instead:DeprecationWarning -addopts = -n auto +#addopts = -n auto
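As a closing illustration of the new preview's aggregation step: the nested loop in `QuaySideThroughputPreview.get_quay_side_throughput()` above counts deep-sea vessels and feeders as quay-side vehicles, while trucks, trains, and barges are treated as hinterland traffic. The sketch below is a self-contained, hypothetical rendering of that reduction with made-up TEU figures; it is not ConFlowGen code and only shows how an inbound-to-outbound flow matrix collapses into the two quay-side totals.

```python
# Hypothetical flow matrix in TEU (inbound vehicle -> outbound vehicle -> volume); all numbers are made up.
QUAY_SIDE_VEHICLES = {"deep_sea_vessel", "feeder"}  # barges count as hinterland, as in the preview

inbound_to_outbound_flow = {
    "feeder": {"truck": 30, "train": 60, "deep_sea_vessel": 45, "feeder": 15},
    "truck": {"feeder": 10, "deep_sea_vessel": 20},
}

quayside_inbound_teu = 0.0
quayside_outbound_teu = 0.0
for inbound_vehicle, to_outbound_flow in inbound_to_outbound_flow.items():
    for outbound_vehicle, volume_in_teu in to_outbound_flow.items():
        if inbound_vehicle in QUAY_SIDE_VEHICLES:
            quayside_inbound_teu += volume_in_teu    # discharged over the quay wall
        if outbound_vehicle in QUAY_SIDE_VEHICLES:
            quayside_outbound_teu += volume_in_teu   # loaded over the quay wall

assert quayside_inbound_teu == 150   # 30 + 60 + 45 + 15, everything delivered by the feeder
assert quayside_outbound_teu == 90   # 45 + 15 from the feeder plus 10 + 20 brought in by trucks
```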