diff --git a/.flake8 b/.flake8 index 3ae1956b..b6179913 100644 --- a/.flake8 +++ b/.flake8 @@ -2,6 +2,7 @@ exclude = .git, .venv, + venv, __pycache__, build, docs, diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index d607588f..38716d56 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -29,9 +29,12 @@ jobs: sudo apt-get update && sudo apt-get upgrade && sudo apt-get install pandoc texlive texlive-publishers texlive-science texlive-luatex latexmk - uses: actions/checkout@v3 - with: - lfs: 'true' - - run: git lfs pull + - run: | + curl -LJO "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/demo_poc.sqlite" + curl -LJO "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/demo_deham_cta.sqlite" + mkdir -p docs/notebooks/data/prepared_dbs + mv demo_poc.sqlite docs/notebooks/data/prepared_dbs/ + mv demo_deham_cta.sqlite docs/notebooks/data/prepared_dbs/ - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 diff --git a/.github/workflows/installation-from-remote.yaml b/.github/workflows/installation-from-remote.yaml index 717a865a..1bc585d0 100644 --- a/.github/workflows/installation-from-remote.yaml +++ b/.github/workflows/installation-from-remote.yaml @@ -3,6 +3,8 @@ name: Install from repository on: schedule: - cron: '42 23 * * 3' # every Wednesday at 23:42 + pull_request: + types: [opened, reopened, edited, synchronize] jobs: build-conda-on-windows: @@ -21,15 +23,18 @@ jobs: with: auto-update-conda: true auto-activate-base: true - activate-environment: "" + activate-environment: true + python-version: '3.10' - name: Install ConFlowGen run: | + conda info conda create -n test-install-conflowgen -c conda-forge conflowgen pytest - name: Run tests run: | conda activate test-install-conflowgen + conda install pillow=9.0.0 python -m pytest --pyargs conflowgen build-conda-on-linux: @@ -54,6 +59,7 @@ jobs: run: | conda init bash eval "$(conda shell.bash hook)" + 
conda info conda activate base conda create -n test-install-conflowgen -c conda-forge conflowgen pytest @@ -86,6 +92,7 @@ jobs: - name: Install ConFlowGen run: | python -m pip install conflowgen pytest + python -m pip show --verbose conflowgen - name: Run tests run: | diff --git a/.github/workflows/unittests.yaml b/.github/workflows/unittests.yaml index e56df5a0..eb289f22 100644 --- a/.github/workflows/unittests.yaml +++ b/.github/workflows/unittests.yaml @@ -20,11 +20,10 @@ jobs: uses: fkirc/skip-duplicate-actions@v5 - uses: actions/checkout@v2 - with: - lfs: 'true' - run: | - git lfs fetch -p -I '**/notebooks/data/prepared_dbs/demo_poc.sqlite' - git lfs checkout + curl -LJO "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/demo_poc.sqlite" + mkdir -p docs/notebooks/data/prepared_dbs + mv demo_poc.sqlite docs/notebooks/data/prepared_dbs/ - name: Set up Python 3.10 uses: actions/setup-python@v4 diff --git a/.gitignore b/.gitignore index 7811066e..37679273 100644 --- a/.gitignore +++ b/.gitignore @@ -62,3 +62,4 @@ examples/Python_Script/databases/ # Ignore local changes as they happen with every execution. If something changes, the commit must be forced. docs/notebooks/data/prepared_dbs/demo_poc.sqlite +conflowgen/data/tools/ diff --git a/.pylintrc b/.pylintrc index d8a9d089..2ec4e2d6 100644 --- a/.pylintrc +++ b/.pylintrc @@ -185,18 +185,17 @@ function-naming-style=snake_case #function-rgx= # Good variable names which should always be accepted, separated by a comma. 
-good-names=i, - j, - k, - ex, - Run, - _, - df, - ax, - x, - xs, - mu, - gs +good-names=i, # typical counter variable + j, # typical counter variable + k, # typical counter variable + _, # typical discard result indicator + df, # pandas DataFrame + ax, # matplotlib axis + gs, # matplotlib gridspec + x, # one element on the x axis + xs, # collection of elements on the x axis + mu, # mu of, e.g., a normal distribution + sd, # standard deviation of, e.g., a normal distribution # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted @@ -547,5 +546,5 @@ preferred-modules= # Exceptions that will emit a warning when being caught. Defaults to # "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception +overgeneral-exceptions=builtins.BaseException, + builtins.Exception diff --git a/Contributing.md b/Contributing.md index 08625164..a796b367 100644 --- a/Contributing.md +++ b/Contributing.md @@ -66,7 +66,7 @@ Linux users invoke `make html` instead. The landing page of the documentation is created at `/docs/_build/html/index.html`. It is advised to use a strict approach by using the additional argument `SPHINXOPTS="-W --keep-going` (see the corresponding -[GitHub CI pipeline](https://github.com/1kastner/conflowgen/blob/main/.github/workflows/docs.yaml#L38) +[GitHub CI pipeline](https://github.com/1kastner/conflowgen/blob/main/.github/workflows/docs.yaml) for reference). The invocation should be equivalent to `python -m sphinx -W --keep-going ./docs ./docs/_build`. 
diff --git a/conflowgen/__init__.py b/conflowgen/__init__.py index 84855c69..7eaeb02f 100644 --- a/conflowgen/__init__.py +++ b/conflowgen/__init__.py @@ -25,6 +25,10 @@ VehicleCapacityUtilizationOnOutboundJourneyPreviewReport from conflowgen.previews.modal_split_preview import ModalSplitPreview from conflowgen.previews.modal_split_preview_report import ModalSplitPreviewReport +from conflowgen.previews.truck_gate_throughput_preview import TruckGateThroughputPreview +from conflowgen.previews.truck_gate_throughput_preview_report import TruckGateThroughputPreviewReport +from conflowgen.previews.quay_side_throughput_preview import QuaySideThroughputPreview +from conflowgen.previews.quay_side_throughput_preview_report import QuaySideThroughputPreviewReport # Analyses and their reports from conflowgen.analyses.inbound_and_outbound_vehicle_capacity_analysis import \ @@ -61,6 +65,9 @@ from conflowgen.analyses.container_flow_vehicle_type_adjustment_per_vehicle_analysis_report import \ ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysisReport +# Cache for analyses and previews +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache + # Specific classes for reports from conflowgen.reporting.output_style import DisplayAsMarkupLanguage, DisplayAsPlainText, DisplayAsMarkdown @@ -85,11 +92,13 @@ from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import OutboundUsedAndMaximumCapacity from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ ContainerFlowAdjustedToVehicleType -from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandSplit, ContainerVolumeFromOriginToDestination +from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandSplit +from conflowgen.descriptive_datatypes import ContainerVolumeFromOriginToDestination from conflowgen.descriptive_datatypes import HinterlandModalSplit -from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis 
import \ - VehicleIdentifier +from conflowgen.descriptive_datatypes import UsedYardCapacityOverTime +from conflowgen.descriptive_datatypes import VehicleIdentifier from conflowgen.descriptive_datatypes import ContainerVolumeByVehicleType +from conflowgen.descriptive_datatypes import ContainersTransportedByTruck # Add metadata constants from .metadata import __version__ diff --git a/conflowgen/analyses/container_dwell_time_analysis.py b/conflowgen/analyses/container_dwell_time_analysis.py index 2f4a1999..822e42b7 100644 --- a/conflowgen/analyses/container_dwell_time_analysis.py +++ b/conflowgen/analyses/container_dwell_time_analysis.py @@ -7,6 +7,7 @@ from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.container import Container from conflowgen.analyses.abstract_analysis import AbstractAnalysis +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache class ContainerDwellTimeAnalysis(AbstractAnalysis): @@ -15,7 +16,7 @@ class ContainerDwellTimeAnalysis(AbstractAnalysis): The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) as it is the case with :class:`.ContainerDwellTimeAnalysisReport`. """ - + @DataSummariesCache.cache_result def get_container_dwell_times( self, container_delivered_by_vehicle_type: typing.Union[ @@ -25,8 +26,7 @@ def get_container_dwell_times( storage_requirement: typing.Union[ str, typing.Collection[StorageRequirement], StorageRequirement] = "all", start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> set[datetime.timedelta]: """ The containers are filtered according to the provided criteria. @@ -50,8 +50,6 @@ def get_container_dwell_times( Only include containers that arrive after the given start time. 
end_date: Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: A set of container dwell times. @@ -77,8 +75,8 @@ def get_container_dwell_times( container: Container for container in selected_containers: - container_enters_yard = container.get_arrival_time(use_cache=use_cache) - container_leaves_yard = container.get_departure_time(use_cache=use_cache) + container_enters_yard = container.get_arrival_time() + container_leaves_yard = container.get_departure_time() assert container_enters_yard < container_leaves_yard, "A container should enter the yard before leaving it" if start_date and container_enters_yard < start_date: continue diff --git a/conflowgen/analyses/container_dwell_time_analysis_report.py b/conflowgen/analyses/container_dwell_time_analysis_report.py index 628b48f6..ce494cd8 100644 --- a/conflowgen/analyses/container_dwell_time_analysis_report.py +++ b/conflowgen/analyses/container_dwell_time_analysis_report.py @@ -16,6 +16,9 @@ class ContainerDwellTimeAnalysisReport(AbstractReportWithMatplotlib): """ This analysis report takes the data structure as generated by :class:`.ContainerDwellTimeAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example ContainerDwellTimeAnalysisReport \ + `_. """ report_description = """ @@ -52,9 +55,6 @@ def get_report_as_text(self, **kwargs) -> str: Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The report in text format (possibly spanning over several lines). 
@@ -121,9 +121,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The matplotlib axis of the histogram """ @@ -160,7 +157,6 @@ def _get_container_dwell_times(self, kwargs): storage_requirement = kwargs.pop("storage_requirement", "all") start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", True) assert len(kwargs) == 0, f"Keyword(s) {list(kwargs.keys())} have not been processed" container_dwell_times: set[datetime.timedelta] = self.analysis.get_container_dwell_times( @@ -168,8 +164,7 @@ def _get_container_dwell_times(self, kwargs): container_picked_up_by_vehicle_type=container_picked_up_by_vehicle_type, storage_requirement=storage_requirement, start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) return ( container_delivered_by_vehicle_type, container_dwell_times, container_picked_up_by_vehicle_type, diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py index 4346db82..01d81544 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis @@ -17,10 +18,10 @@ class 
ContainerFlowAdjustmentByVehicleTypeAnalysis(AbstractAnalysis): """ @staticmethod + @DataSummariesCache.cache_result def get_initial_to_adjusted_outbound_flow( start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerVolumeFromOriginToDestination: """ When containers are generated, in order to obey the maximum dwell time, the vehicle type that is used for @@ -33,9 +34,6 @@ def get_initial_to_adjusted_outbound_flow( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. Returns: The data structure describes how often an initial outbound vehicle type had to be adjusted with which other @@ -64,9 +62,9 @@ def get_initial_to_adjusted_outbound_flow( # Iterate over all containers and count number of containers / used teu capacity container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue vehicle_type_initial = container.picked_up_by_initial vehicle_type_adjusted = container.picked_up_by diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py index 3d5417a7..ac774e55 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py @@ -15,6 +15,9 @@ class 
ContainerFlowAdjustmentByVehicleTypeAnalysisReport(AbstractReportWithPlotl This analysis report takes the data structure as generated by :class:`.ContainerFlowAdjustmentByVehicleTypeAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example ContainerFlowAdjustmentByVehicleTypeAnalysisReport \ + `_. """ report_description = """ @@ -125,21 +128,21 @@ def get_report_as_graph(self, **kwargs) -> plotly.graph_objs.Figure: data=[ plotly.graph_objs.Sankey( arrangement='perpendicular', - node=dict( - pad=15, - thickness=20, - line=dict( - color="black", - width=0.5 - ), - label=initial_labels + adjusted_labels, - color="dimgray", - ), - link=dict( - source=source_ids_with_duplication, - target=target_ids_with_duplication, - value=value - ) + node={ + 'pad': 15, + 'thickness': 20, + 'line': { + 'color': "black", + 'width': 0.5 + }, + 'label': initial_labels + adjusted_labels, + 'color': "dimgray", + }, + link={ + 'source': source_ids_with_duplication, + 'target': target_ids_with_duplication, + 'value': value + } ) ] ) diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py index 26e468e0..c14ed88d 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis import \ ContainerFlowAdjustmentByVehicleTypeAnalysis @@ -42,12 +43,11 @@ class ContainerFlowAdjustmentByVehicleTypeAnalysisSummary(ContainerFlowAdjustmen The analysis summary returns a 
data structure that can be used for generating reports (e.g., in text or as a figure) as it is the case with :class:`.ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport`. """ - + @DataSummariesCache.cache_result def get_summary( self, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerFlowAdjustedToVehicleType: """ Under certain circumstances (as explained in @@ -62,14 +62,10 @@ def get_summary( The earliest arriving container that is included. Consider all containers if :obj:`None`. end_date: The latest departing container that is included. Consider all containers if :obj:`None`. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. """ initial_to_adjusted_outbound_flow = self.get_initial_to_adjusted_outbound_flow( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) initial_to_adjusted_outbound_flow_in_teu = initial_to_adjusted_outbound_flow.teu diff --git a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py index f12d33d4..c1fb92c1 100644 --- a/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py @@ -17,6 +17,9 @@ class ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport(AbstractReportWi This analysis report takes the data structure as generated by :class:`.ContainerFlowAdjustmentByVehicleTypeAnalysisSummary` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport \ + `_. 
""" report_description = """ @@ -40,9 +43,6 @@ def get_report_as_text( Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The report in text format (possibly spanning over several lines). @@ -79,9 +79,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. Defaults to ``None``. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use internally cached values. Please set this to false if data are altered between analysis runs. - Defaults to ``True``. Returns: The matplotlib axis of the pie chart. @@ -114,11 +111,9 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: def _get_analysis(self, kwargs: dict) -> ContainerFlowAdjustedToVehicleType: start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", True) assert len(kwargs) == 0, f"Keyword(s) {kwargs.keys()} have not been processed" adjusted_to = self.analysis_summary.get_summary( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) return adjusted_to diff --git a/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py b/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py index 3367b870..20734fa7 100644 --- a/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py +++ b/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py @@ -4,6 +4,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from 
conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis @@ -18,10 +19,10 @@ class ContainerFlowByVehicleTypeAnalysis(AbstractAnalysis): """ @staticmethod + @DataSummariesCache.cache_result def get_inbound_to_outbound_flow( start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerVolumeFromOriginToDestination: """ This is the overview of the generated inbound to outbound container flow by vehicle type. @@ -31,8 +32,6 @@ def get_inbound_to_outbound_flow( The earliest arriving container that is included. Consider all containers if :obj:`None`. end_date: The latest departing container that is included. Consider all containers if :obj:`None`. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. """ inbound_to_outbound_flow_in_containers: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]] = { vehicle_type_inbound: @@ -46,9 +45,9 @@ def get_inbound_to_outbound_flow( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue inbound_vehicle_type = container.delivered_by outbound_vehicle_type = container.picked_up_by diff --git a/conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py b/conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py index 9b7aabb6..09e0c790 100644 --- a/conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py +++ b/conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py @@ -17,6 +17,9 @@ class 
ContainerFlowByVehicleTypeAnalysisReport(AbstractReportWithPlotly): """ This analysis report takes the data structure as generated by :class:`.ContainerFlowByVehicleTypeAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example ContainerFlowByVehicleTypeAnalysisReport \ + `_. """ report_description = """ @@ -155,21 +158,18 @@ def _plot_inbound_to_outbound_flow( data=[ plotly.graph_objects.Sankey( arrangement='perpendicular', - node=dict( - pad=15, - thickness=20, - line=dict( - color="black", - width=0.5 - ), - label=inbound_labels + outbound_labels, - color="dimgray", - ), - link=dict( - source=source_ids_with_duplication, - target=target_ids_with_duplication, - value=value - ) + node={ + "pad": 15, + "thickness": 20, + "line": {"color": "black", "width": 0.5}, + "label": inbound_labels + outbound_labels, + "color": "dimgray" + }, + link={ + "source": source_ids_with_duplication, + "target": target_ids_with_duplication, + "value": value + } ) ] ) diff --git a/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py b/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py index a93d8f35..0d5a0589 100644 --- a/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py +++ b/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis.py @@ -4,7 +4,7 @@ import datetime import typing - +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis @@ -17,14 +17,13 @@ class ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysis(AbstractAnalysis): The analysis returns a data structure that can be used for generating reports (e.g., in text or 
as a figure) as it is the case with :class:`.ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysisReport`. """ - + @DataSummariesCache.cache_result def get_vehicle_type_adjustments_per_vehicle( self, initial_vehicle_type: ModeOfTransport | str | typing.Collection = "scheduled vehicles", adjusted_vehicle_type: ModeOfTransport | str | typing.Collection = "scheduled vehicles", start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> typing.Dict[VehicleIdentifier, int]: """ When containers are generated, in order to obey the maximum dwell time, the vehicle type that is used for @@ -45,9 +44,6 @@ def get_vehicle_type_adjustments_per_vehicle( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. Returns: The data structure describes how often an initial outbound vehicle type had to be adjusted over time in relation to the total container flows. 
@@ -73,9 +69,9 @@ def get_vehicle_type_adjustments_per_vehicle( container: Container for container in selected_containers: - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue vehicle_identifier = self._get_vehicle_identifier_for_vehicle_picking_up_the_container(container) @@ -104,14 +100,14 @@ def _get_vehicle_identifier_for_vehicle_picking_up_the_container(container: Cont if container.picked_up_by == ModeOfTransport.truck: vehicle_identifier = VehicleIdentifier( mode_of_transport=ModeOfTransport.truck, - vehicle_arrival_time=container.get_departure_time(use_cache=True), + vehicle_arrival_time=container.get_departure_time(), service_name=None, vehicle_name=None ) else: vehicle_identifier = VehicleIdentifier( mode_of_transport=container.picked_up_by, - vehicle_arrival_time=container.get_departure_time(use_cache=True), + vehicle_arrival_time=container.get_departure_time(), service_name=container.picked_up_by_large_scheduled_vehicle.schedule.service_name, vehicle_name=container.picked_up_by_large_scheduled_vehicle.vehicle_name ) diff --git a/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis_report.py b/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis_report.py index d683a05f..3680f26d 100644 --- a/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis_report.py +++ b/conflowgen/analyses/container_flow_vehicle_type_adjustment_per_vehicle_analysis_report.py @@ -20,6 +20,9 @@ class ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysisReport(AbstractReportW This analysis report takes the data structure as generated by :class:`.ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysis` and creates a comprehensible representation for the user, 
either as text or as a graph. + The visual and table are expected to approximately look like in the + `example ContainerFlowVehicleTypeAdjustmentPerVehicleAnalysisReport \ + `_. """ report_description = """ diff --git a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py index d024ddd5..638eab61 100644 --- a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py +++ b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py @@ -6,6 +6,7 @@ import numpy as np +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity, ContainerVolumeByVehicleType from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -26,10 +27,10 @@ def __init__(self, transportation_buffer: float): ) @staticmethod + @DataSummariesCache.cache_result def get_inbound_container_volumes_by_vehicle_type( start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> ContainerVolumeByVehicleType: """ This is the used capacity of all vehicles separated by vehicle type on their inbound journey in TEU. @@ -39,8 +40,6 @@ def get_inbound_container_volumes_by_vehicle_type( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. 
""" inbound_container_volume_in_teu: typing.Dict[ModeOfTransport, float] = { vehicle_type: 0 @@ -50,9 +49,9 @@ def get_inbound_container_volumes_by_vehicle_type( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue inbound_vehicle_type = container.delivered_by inbound_container_volume_in_teu[inbound_vehicle_type] += container.occupied_teu @@ -63,11 +62,11 @@ def get_inbound_container_volumes_by_vehicle_type( teu=inbound_container_volume_in_teu ) + @DataSummariesCache.cache_result def get_outbound_container_volume_by_vehicle_type( self, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> OutboundUsedAndMaximumCapacity: """ This is the used and the maximum capacity of all vehicles separated by vehicle type on their outbound journey @@ -81,8 +80,6 @@ def get_outbound_container_volume_by_vehicle_type( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: Both the used and maximum outbound capacities grouped by vehicle type. 
""" @@ -101,9 +98,9 @@ def get_outbound_container_volume_by_vehicle_type( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue outbound_vehicle_type: ModeOfTransport = container.picked_up_by outbound_actually_moved_container_volume_in_teu[outbound_vehicle_type] += container.occupied_teu diff --git a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py index 815b6f5b..74f17e7c 100644 --- a/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py +++ b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py @@ -16,6 +16,9 @@ class InboundAndOutboundVehicleCapacityAnalysisReport(AbstractReportWithMatplotl """ This analysis report takes the data structure as generated by :class:`.InboundAndOutboundVehicleCapacityAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example InboundAndOutboundVehicleCapacityAnalysisReport \ + `_. """ report_description = """ @@ -40,8 +43,6 @@ def get_report_as_text(self, **kwargs) -> str: Only include containers that arrive after the given start time. end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: The report in text format spanning over several lines. @@ -79,8 +80,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. 
end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache: - Use internally cached values. Please set this to false if data are altered between analysis runs. Returns: The matplotlib axis of the bar chart. @@ -113,19 +112,16 @@ def _get_container_volumes_in_teu( ) start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", True) # gather data inbound_container_volume = self.analysis.get_inbound_container_volumes_by_vehicle_type( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) outbound_container_volume, outbound_maximum_container_volume = \ self.analysis.get_outbound_container_volume_by_vehicle_type( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) return inbound_container_volume.teu, outbound_container_volume.teu, outbound_maximum_container_volume.teu diff --git a/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py index 29669191..33626555 100644 --- a/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py +++ b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.descriptive_datatypes import VehicleIdentifier from conflowgen.domain_models.container import Container from conflowgen.domain_models.large_vehicle_schedule import Schedule @@ -34,6 +35,7 @@ def __init__(self, transportation_buffer: float): transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def get_inbound_and_outbound_capacity_of_each_vehicle( self, vehicle_type: typing.Any = "scheduled vehicles", diff --git 
a/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py index 92f76003..4e82ef4e 100644 --- a/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py +++ b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py @@ -15,10 +15,17 @@ from conflowgen.reporting.no_data_plot import no_data_graph +class UnsupportedPlotTypeException(Exception): + pass + + class InboundToOutboundVehicleCapacityUtilizationAnalysisReport(AbstractReportWithMatplotlib): """ This analysis report takes the data structure as generated by :class:`.InboundToOutboundCapacityUtilizationAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example InboundToOutboundVehicleCapacityUtilizationAnalysisReport \ + `_. """ report_description = """ @@ -165,7 +172,7 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.figure.Figure: self._plot_relative_values_over_time(ax=ax3) fig.tight_layout(pad=5.0) else: - raise Exception(f"Plot type '{plot_type}' is not supported.") + raise UnsupportedPlotTypeException(f"Plot type '{plot_type}' is not supported.") plt.legend( loc='lower left', diff --git a/conflowgen/analyses/modal_split_analysis.py b/conflowgen/analyses/modal_split_analysis.py index 0b5c5824..a933e9f2 100644 --- a/conflowgen/analyses/modal_split_analysis.py +++ b/conflowgen/analyses/modal_split_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.analyses.abstract_analysis import AbstractAnalysis from conflowgen.analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis @@ -33,11 +34,11 @@ 
def __init__(self): super().__init__() self.container_flow_by_vehicle_type_analysis = ContainerFlowByVehicleTypeAnalysis() + @DataSummariesCache.cache_result def get_transshipment_and_hinterland_split( self, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> TransshipmentAndHinterlandSplit: """ Args: @@ -45,8 +46,6 @@ def get_transshipment_and_hinterland_split( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The amount of containers in TEU dedicated for or coming from the hinterland versus the amount of containers @@ -54,8 +53,7 @@ def get_transshipment_and_hinterland_split( """ inbound_to_outbound_flows = self.container_flow_by_vehicle_type_analysis.get_inbound_to_outbound_flow( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) inbound_to_outbound_flow = inbound_to_outbound_flows.teu @@ -75,13 +73,13 @@ def get_transshipment_and_hinterland_split( hinterland_capacity=hinterland_capacity ) + @DataSummariesCache.cache_result def get_modal_split_for_hinterland_traffic( self, inbound: bool, outbound: bool, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> HinterlandModalSplit: """ Args: @@ -91,16 +89,13 @@ def get_modal_split_for_hinterland_traffic( Only include containers that arrive after the given start time. end_date: Only include containers that depart before the given end time. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The modal split for the hinterland in TEU. 
""" inbound_to_outbound_flows = self.container_flow_by_vehicle_type_analysis.get_inbound_to_outbound_flow( start_date=start_date, - end_date=end_date, - use_cache=use_cache + end_date=end_date ) inbound_to_outbound_flow_in_teu = inbound_to_outbound_flows.teu diff --git a/conflowgen/analyses/modal_split_analysis_report.py b/conflowgen/analyses/modal_split_analysis_report.py index fedb54a6..48cb36af 100644 --- a/conflowgen/analyses/modal_split_analysis_report.py +++ b/conflowgen/analyses/modal_split_analysis_report.py @@ -12,6 +12,9 @@ class ModalSplitAnalysisReport(AbstractReportWithMatplotlib): """ This analysis report takes the data structure as generated by :class:`.ModalSplitAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example ModalSplitAnalysisReport \ + `_. """ report_description = """ @@ -34,9 +37,6 @@ def get_report_as_text( Only include containers that arrive after the given start time. Defaults to ``None```. end_date (datetime.datetime): Only include containers that depart before the given end time. Defaults to ``None``. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. """ ( modal_split_in_hinterland_traffic_both_directions, modal_split_in_hinterland_inbound_traffic, @@ -61,8 +61,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axes.Axes: Only include containers that arrive after the given start time. end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The matplotlib axes with all pie charts. 
@@ -84,20 +82,19 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axes.Axes: def _get_analysis_output(self, kwargs): start_date = kwargs.pop("start_date", None) end_date = kwargs.pop("end_date", None) - use_cache = kwargs.pop("use_cache", False) assert len(kwargs) == 0, f"Keyword(s) {kwargs.keys()} have not been processed" transshipment_and_hinterland_split = self.analysis.get_transshipment_and_hinterland_split( - start_date=start_date, end_date=end_date, use_cache=use_cache + start_date=start_date, end_date=end_date ) modal_split_in_hinterland_inbound_traffic = self.analysis.get_modal_split_for_hinterland_traffic( - inbound=True, outbound=False, start_date=start_date, end_date=end_date, use_cache=use_cache + inbound=True, outbound=False, start_date=start_date, end_date=end_date ) modal_split_in_hinterland_outbound_traffic = self.analysis.get_modal_split_for_hinterland_traffic( - inbound=False, outbound=True, start_date=start_date, end_date=end_date, use_cache=use_cache + inbound=False, outbound=True, start_date=start_date, end_date=end_date ) modal_split_in_hinterland_traffic_both_directions = self.analysis.get_modal_split_for_hinterland_traffic( - inbound=True, outbound=True, start_date=start_date, end_date=end_date, use_cache=use_cache + inbound=True, outbound=True, start_date=start_date, end_date=end_date ) return ( modal_split_in_hinterland_traffic_both_directions, modal_split_in_hinterland_inbound_traffic, diff --git a/conflowgen/analyses/quay_side_throughput_analysis.py b/conflowgen/analyses/quay_side_throughput_analysis.py index 997431e6..9c809776 100644 --- a/conflowgen/analyses/quay_side_throughput_analysis.py +++ b/conflowgen/analyses/quay_side_throughput_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.domain_models.vehicle import LargeScheduledVehicle from 
conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_week_based_time_window, \ @@ -24,13 +25,13 @@ class QuaySideThroughputAnalysis(AbstractAnalysis): } @classmethod + @DataSummariesCache.cache_result def get_throughput_over_time( cls, inbound: bool = True, outbound: bool = True, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> typing.Dict[datetime.date, float]: """ For each week, the containers crossing the quay are checked. Based on this, the required quay capacity in boxes @@ -46,9 +47,6 @@ def get_throughput_over_time( outbound: Whether to check for vessels which pick up a container on their outbound journey start_date: The earliest arriving container that is included. Consider all containers if :obj:`None`. end_date: The latest departing container that is included. Consider all containers if :obj:`None`. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. 
""" @@ -58,9 +56,9 @@ def get_throughput_over_time( container: Container for container in Container.select(): - if start_date and container.get_arrival_time(use_cache=use_cache) < start_date: + if start_date and container.get_arrival_time() < start_date: continue - if end_date and container.get_departure_time(use_cache=use_cache) > end_date: + if end_date and container.get_departure_time() > end_date: continue if inbound: diff --git a/conflowgen/analyses/quay_side_throughput_analysis_report.py b/conflowgen/analyses/quay_side_throughput_analysis_report.py index 700fb752..38d36eb2 100644 --- a/conflowgen/analyses/quay_side_throughput_analysis_report.py +++ b/conflowgen/analyses/quay_side_throughput_analysis_report.py @@ -14,6 +14,9 @@ class QuaySideThroughputAnalysisReport(AbstractReportWithMatplotlib): """ This analysis report takes the data structure as generated by :class:`.QuaySideThroughputAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example QuaySideThroughputAnalysisReport \ + `_. """ report_description = """ @@ -67,8 +70,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Only include containers that arrive after the given start time. end_date (datetime.datetime): Only include containers that depart before the given end time. - use_cache (bool): - Returns: The matplotlib axis of the line chart over time. 
diff --git a/conflowgen/analyses/truck_gate_throughput_analysis.py b/conflowgen/analyses/truck_gate_throughput_analysis.py index e2d1d541..b60b6b78 100644 --- a/conflowgen/analyses/truck_gate_throughput_analysis.py +++ b/conflowgen/analyses/truck_gate_throughput_analysis.py @@ -3,6 +3,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.container import Container from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_hour_based_time_window, get_hour_based_range from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -16,13 +17,13 @@ class TruckGateThroughputAnalysis(AbstractAnalysis): """ @classmethod + @DataSummariesCache.cache_result def get_throughput_over_time( cls, inbound: bool = True, outbound: bool = True, start_date: typing.Optional[datetime.datetime] = None, - end_date: typing.Optional[datetime.datetime] = None, - use_cache: bool = True + end_date: typing.Optional[datetime.datetime] = None ) -> typing.Dict[datetime.datetime, float]: """ For each hour, the trucks entering through the truck gate are checked. Based on this, the required truck gate @@ -33,9 +34,6 @@ def get_throughput_over_time( outbound: Whether to check for trucks which pick up a container on their outbound journey start_date: When to start recording. Start with the earliest container if no date is provided. end_date: When to end recording. Stop with the latest container if no date is provided. - use_cache (bool): - Use cache instead of re-calculating the arrival and departure time of the container. - Defaults to ``True``. 
""" assert (inbound or outbound), "At least one of the two must be checked for" @@ -52,7 +50,7 @@ def get_throughput_over_time( if inbound: mode_of_transport_at_container_arrival: ModeOfTransport = container.delivered_by if mode_of_transport_at_container_arrival == ModeOfTransport.truck: - time_of_entering = container.get_arrival_time(use_cache=use_cache) + time_of_entering = container.get_arrival_time() if ( (start_date is None or time_of_entering >= start_date) and (end_date is None or time_of_entering <= end_date) @@ -62,7 +60,7 @@ def get_throughput_over_time( if outbound: mode_of_transport_at_container_departure: ModeOfTransport = container.picked_up_by if mode_of_transport_at_container_departure == ModeOfTransport.truck: - time_of_leaving = container.get_departure_time(use_cache=use_cache) + time_of_leaving = container.get_departure_time() if ( (start_date is None or time_of_leaving >= start_date) and (end_date is None or time_of_leaving <= end_date) diff --git a/conflowgen/analyses/truck_gate_throughput_analysis_report.py b/conflowgen/analyses/truck_gate_throughput_analysis_report.py index cca40083..6805370c 100644 --- a/conflowgen/analyses/truck_gate_throughput_analysis_report.py +++ b/conflowgen/analyses/truck_gate_throughput_analysis_report.py @@ -16,6 +16,9 @@ class TruckGateThroughputAnalysisReport(AbstractReportWithMatplotlib): """ This analysis report takes the data structure as generated by :class:`.TruckGateThroughputAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example TruckGateThroughputAnalysisReport \ + `_. 
""" report_description = """ @@ -42,8 +45,6 @@ def get_report_as_text(self, **kwargs) -> str: Whether to check for trucks which deliver a container on their inbound journey outbound (bool): Whether to check for trucks which pick up a container on their outbound journey - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: The report in text format. @@ -93,8 +94,6 @@ def get_report_as_graph(self, **kwargs) -> matplotlib.axis.Axis: Whether to check for trucks which deliver a container on their inbound journey outbound (bool): Whether to check for trucks which pick up a container on their outbound journey - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. ax (matplotlib.axis.Axis): Which matplotlib axis to plot on. diff --git a/conflowgen/analyses/yard_capacity_analysis.py b/conflowgen/analyses/yard_capacity_analysis.py index fdb767ca..76cdc336 100644 --- a/conflowgen/analyses/yard_capacity_analysis.py +++ b/conflowgen/analyses/yard_capacity_analysis.py @@ -3,7 +3,9 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.descriptive_datatypes import UsedYardCapacityOverTime from conflowgen.domain_models.container import Container from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_hour_based_time_window, get_hour_based_range @@ -15,12 +17,12 @@ class YardCapacityAnalysis(AbstractAnalysis): as it is the case with :class:`.YardCapacityAnalysisReport`. 
""" + @DataSummariesCache.cache_result def get_used_yard_capacity_over_time( self, storage_requirement: typing.Union[str, typing.Collection, StorageRequirement] = "all", smoothen_peaks: bool = True, - use_cache: bool = True - ) -> typing.Dict[datetime.datetime, float]: + ) -> UsedYardCapacityOverTime: """ For each hour, the containers entering and leaving the yard are checked. Based on this, the required yard capacity in TEU can be deduced - it is simply the maximum of these values. In addition, with the parameter @@ -46,10 +48,10 @@ def get_used_yard_capacity_over_time( a collection of :class:`StorageRequirement` enum values (as a list, set, or similar), or a single :class:`StorageRequirement` enum value. smoothen_peaks: Whether to smoothen the peaks. - use_cache: - Use cache instead of re-calculating the arrival and departure time of the container. Returns: - A series of the used yard capacity in TEU over the time. + UsedYardCapacityOverTime: A namedtuple consisting of two dictionaries. The first dictionary represents the + used yard capacity in TEU over the time. The second dictionary represents the used yard capacity + in terms of the number of boxes over the time. 
""" selected_containers = Container.select() @@ -62,14 +64,14 @@ def get_used_yard_capacity_over_time( for container in selected_containers: container_stays.append( ( - container.get_arrival_time(use_cache=use_cache), - container.get_departure_time(use_cache=use_cache), + container.get_arrival_time(), + container.get_departure_time(), container.occupied_teu ) ) if len(container_stays) == 0: - return {} + return UsedYardCapacityOverTime(teu={}, containers={}) first_arrival, _, _ = min(container_stays, key=lambda x: x[0]) _, last_pickup, _ = max(container_stays, key=lambda x: x[1]) @@ -77,7 +79,14 @@ def get_used_yard_capacity_over_time( first_time_window = get_hour_based_time_window(first_arrival) - datetime.timedelta(hours=1) last_time_window = get_hour_based_time_window(last_pickup) + datetime.timedelta(hours=1) - used_yard_capacity: typing.Dict[datetime.datetime, float] = { + used_yard_capacity_teu: typing.Dict[datetime.datetime, float] = { + time_window: 0 + for time_window in get_hour_based_range( + first_time_window, last_time_window, include_end=(not smoothen_peaks) + ) + } + + used_yard_capacity_boxes: typing.Dict[datetime.datetime, int] = { time_window: 0 for time_window in get_hour_based_range( first_time_window, last_time_window, include_end=(not smoothen_peaks) @@ -90,6 +99,7 @@ def get_used_yard_capacity_over_time( for time_window in get_hour_based_range( time_window_at_entering, time_window_at_leaving, include_end=(not smoothen_peaks) ): - used_yard_capacity[time_window] += teu_factor_of_container + used_yard_capacity_teu[time_window] += teu_factor_of_container + used_yard_capacity_boxes[time_window] += 1 - return used_yard_capacity + return UsedYardCapacityOverTime(teu=used_yard_capacity_teu, containers=used_yard_capacity_boxes) diff --git a/conflowgen/analyses/yard_capacity_analysis_report.py b/conflowgen/analyses/yard_capacity_analysis_report.py index 5ffefa7a..c66cc9e3 100644 --- a/conflowgen/analyses/yard_capacity_analysis_report.py +++ 
b/conflowgen/analyses/yard_capacity_analysis_report.py @@ -16,6 +16,9 @@ class YardCapacityAnalysisReport(AbstractReportWithMatplotlib): """ This analysis report takes the data structure as generated by :class:`.YardCapacityAnalysis` and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example YardCapacityAnalysisReport \ + `_. """ report_description = """ @@ -115,4 +118,4 @@ def _get_used_yard_capacity_based_on_storage_requirement( yard_capacity_over_time = self.analysis.get_used_yard_capacity_over_time( storage_requirement=storage_requirement ) - return storage_requirement, yard_capacity_over_time + return storage_requirement, yard_capacity_over_time.teu diff --git a/conflowgen/api/container_dwell_time_distribution_manager.py b/conflowgen/api/container_dwell_time_distribution_manager.py index 2eceb30a..7fa65bc2 100644 --- a/conflowgen/api/container_dwell_time_distribution_manager.py +++ b/conflowgen/api/container_dwell_time_distribution_manager.py @@ -1,10 +1,14 @@ import typing +import datetime -from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.api import AbstractDistributionManager +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ ContainerDwellTimeDistributionRepository -from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.application.services.average_container_dwell_time_calculator_service import \ + AverageContainerDwellTimeCalculatorService from conflowgen.tools.continuous_distribution import ContinuousDistribution @@ -20,7 +24,7 @@ def 
__init__(self): def get_container_dwell_time_distribution( self ) -> typing.Dict[ModeOfTransport, typing.Dict[ - ModeOfTransport, typing.Dict[StorageRequirement, ContinuousDistribution]]]: + ModeOfTransport, typing.Dict[StorageRequirement, ContinuousDistribution]]]: """ Returns: @@ -56,3 +60,22 @@ def set_container_dwell_time_distribution( self.container_dwell_time_distribution_repository.set_distributions( sanitized_distribution ) + DataSummariesCache.reset_cache() + + @staticmethod + def get_average_container_dwell_time(start_date: datetime.date, end_date: datetime.date) -> float: + """ + Uses :class:`.ModeOfTransportDistributionManager` to calculate the expected average container dwell time + based on the scheduled container flow. + + Args: + start_date: The earliest day to consider for scheduled vehicles + end_date: The latest day to consider for scheduled vehicles + + Returns: + Weighted average of all container dwell times based on inbound and outbound vehicle capacities + """ + return AverageContainerDwellTimeCalculatorService().get_average_container_dwell_time( + start_date=start_date, + end_date=end_date + ) diff --git a/conflowgen/api/container_flow_generation_manager.py b/conflowgen/api/container_flow_generation_manager.py index 8783e0c4..4cb3e0ea 100644 --- a/conflowgen/api/container_flow_generation_manager.py +++ b/conflowgen/api/container_flow_generation_manager.py @@ -2,6 +2,8 @@ import logging import typing +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.application.repositories.container_flow_generation_properties_repository import \ ContainerFlowGenerationPropertiesRepository from conflowgen.flow_generator.container_flow_generation_service import \ @@ -51,13 +53,15 @@ def set_properties( self.container_flow_generation_properties_repository.set_container_flow_generation_properties( 
properties ) + DataSummariesCache.reset_cache() def get_properties(self) -> typing.Dict[str, typing.Union[str, datetime.date, float, int]]: """ Returns: The properties of the container flow. """ - properties = self.container_flow_generation_properties_repository.get_container_flow_generation_properties() + properties: ContainerFlowGenerationProperties = (self.container_flow_generation_properties_repository. + get_container_flow_generation_properties()) return { 'name': properties.name, 'start_date': properties.start_date, diff --git a/conflowgen/api/container_length_distribution_manager.py b/conflowgen/api/container_length_distribution_manager.py index 2293069a..3277ab9a 100644 --- a/conflowgen/api/container_length_distribution_manager.py +++ b/conflowgen/api/container_length_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ ContainerLengthDistributionRepository @@ -41,3 +42,4 @@ def set_container_length_distribution( values_are_frequencies=True ) self.container_length_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/container_weight_distribution_manager.py b/conflowgen/api/container_weight_distribution_manager.py index 9e892827..2fcce369 100644 --- a/conflowgen/api/container_weight_distribution_manager.py +++ b/conflowgen/api/container_weight_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.container_weight_distribution_repository import \ ContainerWeightDistributionRepository @@ -41,3 +42,4 @@ def set_container_weight_distribution( values_are_frequencies=True ) 
self.container_weight_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/database_chooser.py b/conflowgen/api/database_chooser.py index f41774a4..a64a601e 100644 --- a/conflowgen/api/database_chooser.py +++ b/conflowgen/api/database_chooser.py @@ -3,6 +3,7 @@ from peewee import SqliteDatabase +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.database_connection.sqlite_database_connection import SqliteDatabaseConnection @@ -44,6 +45,7 @@ def load_existing_sqlite_database(self, file_name: str) -> None: """ if self.peewee_sqlite_db is not None: self._close_and_reset_db() + DataSummariesCache.reset_cache() self.peewee_sqlite_db = self.sqlite_database_connection.choose_database(file_name, create=False, reset=False) def create_new_sqlite_database( @@ -77,6 +79,7 @@ def create_new_sqlite_database( self.peewee_sqlite_db = self.sqlite_database_connection.choose_database( file_name, create=True, reset=overwrite, **seeder_options ) + DataSummariesCache.reset_cache() def close_current_connection(self) -> None: """ @@ -88,6 +91,8 @@ def close_current_connection(self) -> None: raise NoCurrentConnectionException("You must first create a connection to an SQLite database.") def _close_and_reset_db(self): - self.logger.debug("Closing current database connection.") + path_to_sqlite_database = self.sqlite_database_connection.path_to_sqlite_database + self.logger.debug(f"Closing current database connection {path_to_sqlite_database}.") self.peewee_sqlite_db.close() self.peewee_sqlite_db = None + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/mode_of_transport_distribution_manager.py b/conflowgen/api/mode_of_transport_distribution_manager.py index de9497f8..41763a73 100644 --- a/conflowgen/api/mode_of_transport_distribution_manager.py +++ b/conflowgen/api/mode_of_transport_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from 
conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -45,3 +46,4 @@ def set_mode_of_transport_distribution( self.mode_of_transport_distribution_repository.set_mode_of_transport_distributions( sanitized_distribution ) + DataSummariesCache.reset_cache() diff --git a/conflowgen/api/port_call_manager.py b/conflowgen/api/port_call_manager.py index ae55168b..5da8191d 100644 --- a/conflowgen/api/port_call_manager.py +++ b/conflowgen/api/port_call_manager.py @@ -2,6 +2,7 @@ import datetime import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.factories.schedule_factory import ScheduleFactory from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -101,6 +102,7 @@ def add_vehicle( next_destinations=next_destinations, vehicle_arrives_every_k_days=vehicle_arrives_every_k_days ) + DataSummariesCache.reset_cache() def has_schedule( self, diff --git a/conflowgen/api/storage_requirement_distribution_manager.py b/conflowgen/api/storage_requirement_distribution_manager.py index fa604503..6bf6c92c 100644 --- a/conflowgen/api/storage_requirement_distribution_manager.py +++ b/conflowgen/api/storage_requirement_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.data_types.container_length import ContainerLength @@ -43,3 +44,4 @@ def set_storage_requirement_distribution( values_are_frequencies=True ) self.storage_requirement_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git 
a/conflowgen/api/truck_arrival_distribution_manager.py b/conflowgen/api/truck_arrival_distribution_manager.py index d4a2ebff..8be94772 100644 --- a/conflowgen/api/truck_arrival_distribution_manager.py +++ b/conflowgen/api/truck_arrival_distribution_manager.py @@ -1,5 +1,6 @@ import typing +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.api import AbstractDistributionManager from conflowgen.domain_models.distribution_repositories.truck_arrival_distribution_repository import \ TruckArrivalDistributionRepository @@ -28,7 +29,7 @@ def get_truck_arrival_distribution(self) -> typing.Dict[int, float]: """ return self.truck_arrival_distribution_repository.get_distribution() - def set_truck_arrival_distribution(self, distribution: typing.Dict[int, float]) -> None: + def set_truck_arrival_distribution(self, distribution: typing.Dict[float, float]) -> None: """ Args: @@ -39,7 +40,8 @@ def set_truck_arrival_distribution(self, distribution: typing.Dict[int, float]) """ sanitized_distribution = self._normalize_and_validate_distribution_without_dependent_variables( distribution, - int, + float, values_are_frequencies=True ) self.truck_arrival_distribution_repository.set_distribution(sanitized_distribution) + DataSummariesCache.reset_cache() diff --git a/conflowgen/application/data_types/export_file_format.py b/conflowgen/application/data_types/export_file_format.py index 5acce731..feab09fb 100644 --- a/conflowgen/application/data_types/export_file_format.py +++ b/conflowgen/application/data_types/export_file_format.py @@ -18,7 +18,7 @@ class ExportFileFormat(enum.Enum): xlsx = "xlsx" """ - The xlsx file format can be opened by e.g. Microsoft Excel which might help to quickly analyse the output data. + The xlsx file format can be opened by, e.g., Microsoft Excel which might help to quickly analyse the output data. 
However, this file format comes with known limitations listed at https://support.microsoft.com/en-us/office/excel-specifications-and-limits-1672b34d-7043-467e-8e27-269d656771c3. On January 4th, 2022, the known maximum number of rows is 1,048,576. @@ -31,7 +31,7 @@ class ExportFileFormat(enum.Enum): The xls format is the precursor of the xlsx format. This should only be used if a software demands this file format. Older versions of Excel had more restrictions on the size, e.g. Excel 2003 is known to have only supported 65,536 - rows (see e.g. + rows (see, e.g., http://web.archive.org/web/20140819001409/http://news.office-watch.com/t/n.aspx?articleid=1408&zoneid=9) which is less than what large terminals nowadays handle within a month. Even with a hypothetical TEU factor of 2, this only reaches 1,572,864 TEU throughput per year. diff --git a/conflowgen/application/models/random_seed_store.py b/conflowgen/application/models/random_seed_store.py new file mode 100644 index 00000000..41d022ca --- /dev/null +++ b/conflowgen/application/models/random_seed_store.py @@ -0,0 +1,22 @@ +from peewee import AutoField, CharField, IntegerField, BooleanField + +from conflowgen.domain_models.base_model import BaseModel + + +class RandomSeedStore(BaseModel): + """ + This table contains a random seed for each class or function that contains randomness + """ + id = AutoField() + + name = CharField( + help_text="The name of the class, function, or other type of object." + ) + + is_random = BooleanField( + help_text="Whether the value is meant to change between invocations of the generation process." + ) + + random_seed = IntegerField( + help_text="The last used random seed." 
+ ) diff --git a/conflowgen/application/repositories/random_seed_store_repository.py b/conflowgen/application/repositories/random_seed_store_repository.py new file mode 100644 index 00000000..7f2780ba --- /dev/null +++ b/conflowgen/application/repositories/random_seed_store_repository.py @@ -0,0 +1,81 @@ +import logging +import random +import typing +import time + +from conflowgen.application.models.random_seed_store import RandomSeedStore + + +class RandomSeedStoreRepository: + + random_seed_offset = 0 + + def __init__(self): + self.logger = logging.getLogger("conflowgen") + + def get_random_seed(self, seed_name: str, log_loading_process: bool = False) -> float: + random_seed: float + random_seed_store = RandomSeedStore.get_or_none( + RandomSeedStore.name == seed_name + ) + if random_seed_store is not None: + if random_seed_store.is_random: + # there is a previous seed but we are told to overwrite it + previous_seed = random_seed_store.random_seed + random_seed = self._get_random_seed() + random_seed_store.random_seed = random_seed + random_seed_store.save() + if log_loading_process: + self.logger.debug( + f"Replace seed {previous_seed} with {random_seed} for '{seed_name}' for the new round." 
+ ) + else: + # there is a previous seed and we should re-use it + random_seed = random_seed_store.random_seed + if log_loading_process: + self.logger.debug(f"Re-use seed {random_seed} for '{seed_name}'") + else: + # there is no previous seed available, enter the current seed and return its value + random_seed = self._get_random_seed() + RandomSeedStore.create( + name=seed_name, + random_seed=random_seed, + is_random=True + ) + if log_loading_process: + self.logger.debug(f"Randomly set seed {random_seed} for '{seed_name}'") + return random_seed + + @classmethod + def _get_random_seed(cls) -> int: + cls.random_seed_offset += 1 + return int(time.time_ns() + cls.random_seed_offset) + + def fix_random_seed( + self, seed_name: str, random_seed: typing.Optional[int], log_loading_process: bool = False + ) -> None: + if random_seed is None: + random_seed = self._get_random_seed() + random_seed_store = RandomSeedStore.get_or_none( + RandomSeedStore.name == seed_name + ) + if random_seed_store is None: + random_seed_store = RandomSeedStore.create( + name=seed_name, + is_random=False, + random_seed=random_seed + ) + else: + random_seed_store.random_seed = random_seed + if log_loading_process: + self.logger.debug(f"Set seed {random_seed} for '{seed_name}'") + random_seed_store.save() + + +_random_seed_store_repository = RandomSeedStoreRepository() + + +def get_initialised_random_object(seed_name: str, log_loading_process: bool = True) -> random.Random: + random_seed = RandomSeedStoreRepository().get_random_seed(seed_name, log_loading_process=log_loading_process) + seeded_random = random.Random(x=random_seed) + return seeded_random diff --git a/conflowgen/application/services/average_container_dwell_time_calculator_service.py b/conflowgen/application/services/average_container_dwell_time_calculator_service.py new file mode 100644 index 00000000..49b263c9 --- /dev/null +++ b/conflowgen/application/services/average_container_dwell_time_calculator_service.py @@ -0,0 +1,47 @@ 
+import datetime + +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager +from conflowgen.api.mode_of_transport_distribution_manager import ModeOfTransportDistributionManager +from conflowgen.api.storage_requirement_distribution_manager import StorageRequirementDistributionManager +from conflowgen.application.services.inbound_and_outbound_vehicle_capacity_calculator_service import \ + InboundAndOutboundVehicleCapacityCalculatorService +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ + ContainerDwellTimeDistributionRepository + + +class AverageContainerDwellTimeCalculatorService: + + @staticmethod + def get_average_container_dwell_time(start_date: datetime.date, end_date: datetime.date) -> float: + inbound_vehicle_capacity = InboundAndOutboundVehicleCapacityCalculatorService.get_inbound_capacity_of_vehicles( + start_date=start_date, + end_date=end_date + ) + mode_of_transport_distribution = ModeOfTransportDistributionManager().get_mode_of_transport_distribution() + container_length_distribution = ContainerLengthDistributionManager().get_container_length_distribution() + container_storage_requirement_distribution = \ + StorageRequirementDistributionManager().get_storage_requirement_distribution() + container_dwell_time_distribution = ContainerDwellTimeDistributionRepository(). 
\ + get_distributions() + average_container_dwell_time = 0 + total_containers = 0 + for delivering_vehicle_type in ModeOfTransport: + for picking_up_vehicle_type in ModeOfTransport: + for container_length in ContainerLength: + for storage_requirement in StorageRequirement: + num_containers = inbound_vehicle_capacity.containers[delivering_vehicle_type] * \ + mode_of_transport_distribution[delivering_vehicle_type][ + picking_up_vehicle_type] * \ + container_length_distribution[container_length] * \ + container_storage_requirement_distribution[container_length][ + storage_requirement] + total_containers += num_containers + average_container_dwell_time += \ + container_dwell_time_distribution[delivering_vehicle_type][picking_up_vehicle_type][ + storage_requirement].average * num_containers + + average_container_dwell_time /= total_containers + return average_container_dwell_time diff --git a/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py b/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py new file mode 100644 index 00000000..5b5583fe --- /dev/null +++ b/conflowgen/application/services/inbound_and_outbound_vehicle_capacity_calculator_service.py @@ -0,0 +1,175 @@ +import datetime +from typing import Dict + +import numpy as np + +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.descriptive_datatypes import ContainerVolumeByVehicleType +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.factories.fleet_factory import 
create_arrivals_within_time_range +from conflowgen.domain_models.large_vehicle_schedule import Schedule + + +class InboundAndOutboundVehicleCapacityCalculatorService: + + @staticmethod + @DataSummariesCache.cache_result + def get_truck_capacity_for_export_containers( + inbound_capacity_of_vehicles: Dict[ModeOfTransport, float] + ) -> float: + """ + Get the capacity in TEU which is transported by truck. Currently, during the generation process each import + container is picked up by one truck and for each import container, in the next step one export container is + created. + Thus, this method accounts for both import and export. + """ + truck_capacity = 0 + vehicle_type: ModeOfTransport + for vehicle_type in ModeOfTransport.get_scheduled_vehicles(): + number_of_containers_delivered_to_terminal_by_vehicle_type = inbound_capacity_of_vehicles[vehicle_type] + mode_of_transport_distribution_of_vehicle_type = \ + ModeOfTransportDistributionRepository().get_distribution()[vehicle_type] + vehicle_to_truck_fraction = mode_of_transport_distribution_of_vehicle_type[ModeOfTransport.truck] + number_of_containers_to_pick_up_by_truck_from_vehicle_type = \ + number_of_containers_delivered_to_terminal_by_vehicle_type * vehicle_to_truck_fraction + truck_capacity += number_of_containers_to_pick_up_by_truck_from_vehicle_type + return truck_capacity + + @staticmethod + @DataSummariesCache.cache_result + def get_inbound_capacity_of_vehicles( + start_date: datetime.date, + end_date: datetime.date + ) -> ContainerVolumeByVehicleType: + """ + For the inbound capacity, first vehicles that adhere to a schedule are considered. Trucks, which are created + depending on the outbound distribution, are created based on the assumptions of the further container flow + generation process. 
+ """ + inbound_container_volume_in_containers: Dict[ModeOfTransport, float] = { + vehicle_type: 0 + for vehicle_type in ModeOfTransport + } + inbound_container_volume_in_teu: Dict[ModeOfTransport, float] = { + vehicle_type: 0 + for vehicle_type in ModeOfTransport + } + + at_least_one_schedule_exists: bool = False + + for schedule in Schedule.select(): + at_least_one_schedule_exists = True + arrivals = create_arrivals_within_time_range( + start_date, + schedule.vehicle_arrives_at, + end_date, + schedule.vehicle_arrives_every_k_days, + schedule.vehicle_arrives_at_time + ) + moved_inbound_volumes = (len(arrivals) # number of vehicles that are planned + * schedule.average_moved_capacity) # moved TEU capacity of each vehicle + inbound_container_volume_in_teu[schedule.vehicle_type] += moved_inbound_volumes + inbound_container_volume_in_containers[schedule.vehicle_type] += moved_inbound_volumes / \ + ContainerLengthDistributionRepository.get_teu_factor() + + if at_least_one_schedule_exists: + inbound_container_volume_in_teu[ModeOfTransport.truck] = \ + InboundAndOutboundVehicleCapacityCalculatorService.get_truck_capacity_for_export_containers( + inbound_container_volume_in_teu + ) + inbound_container_volume_in_containers[ModeOfTransport.truck] = \ + inbound_container_volume_in_teu[ModeOfTransport.truck] / \ + ContainerLengthDistributionRepository.get_teu_factor() + + return ContainerVolumeByVehicleType( + containers=inbound_container_volume_in_containers, + teu=inbound_container_volume_in_teu + ) + + @staticmethod + @DataSummariesCache.cache_result + def get_outbound_capacity_of_vehicles(start_date, end_date, transportation_buffer) \ + -> OutboundUsedAndMaximumCapacity: + """ + For the outbound capacity, both the used outbound capacity (estimated) and the maximum outbound capacity is + reported. If a vehicle type reaches the maximum outbound capacity, this means that containers need to be + redistributed to other vehicle types due to a lack of capacity. 
The capacities are only calculated in TEU, not + in containers. + """ + outbound_used_containers: Dict[ModeOfTransport, float] = { + vehicle_type: 0 + for vehicle_type in ModeOfTransport + } + outbound_maximum_containers: Dict[ModeOfTransport, float] = { + vehicle_type: 0 + for vehicle_type in ModeOfTransport + } + outbound_used_capacity_in_teu: Dict[ModeOfTransport, float] = { + vehicle_type: 0 + for vehicle_type in ModeOfTransport + } + outbound_maximum_capacity_in_teu: Dict[ModeOfTransport, float] = { + vehicle_type: 0 + for vehicle_type in ModeOfTransport + } + + schedule: Schedule + for schedule in Schedule.select(): + assert schedule.average_moved_capacity <= schedule.average_vehicle_capacity, \ + "A vehicle cannot move a larger amount of containers (in TEU) than its capacity, " \ + f"the input data is malformed. Schedule '{schedule.service_name}' of vehicle type " \ + f"{schedule.vehicle_type} has an average moved capacity of {schedule.average_moved_capacity} but an " \ + f"averaged vehicle capacity of {schedule.average_vehicle_capacity}." 
+ + arrivals = create_arrivals_within_time_range( + start_date, + schedule.vehicle_arrives_at, + end_date, + schedule.vehicle_arrives_every_k_days, + schedule.vehicle_arrives_at_time + ) + + # If all container flows are balanced, only the average moved capacity is required + container_volume_moved_by_vessels_in_teu = len(arrivals) * schedule.average_moved_capacity + outbound_used_capacity_in_teu[schedule.vehicle_type] += container_volume_moved_by_vessels_in_teu + outbound_used_containers[schedule.vehicle_type] += container_volume_moved_by_vessels_in_teu / \ + ContainerLengthDistributionRepository.get_teu_factor() + + # If there are unbalanced container flows, a vehicle departs with more containers than it delivered + maximum_capacity_of_vehicle_in_teu = min( + schedule.average_moved_capacity * (1 + transportation_buffer), + schedule.average_vehicle_capacity + ) + total_maximum_capacity_moved_by_vessel = len(arrivals) * maximum_capacity_of_vehicle_in_teu + outbound_maximum_capacity_in_teu[schedule.vehicle_type] += total_maximum_capacity_moved_by_vessel + outbound_maximum_containers[schedule.vehicle_type] += total_maximum_capacity_moved_by_vessel / \ + ContainerLengthDistributionRepository.get_teu_factor() + + inbound_capacity = InboundAndOutboundVehicleCapacityCalculatorService.\ + get_inbound_capacity_of_vehicles(start_date, end_date) + outbound_used_capacity_in_teu[ModeOfTransport.truck] = \ + InboundAndOutboundVehicleCapacityCalculatorService.get_truck_capacity_for_export_containers( + inbound_capacity.teu + ) + outbound_used_containers[ModeOfTransport.truck] = \ + outbound_used_capacity_in_teu[ModeOfTransport.truck] / \ + ContainerLengthDistributionRepository.get_teu_factor() + + outbound_maximum_capacity_in_teu[ModeOfTransport.truck] = np.nan # Trucks can always be added as required + outbound_maximum_containers[ModeOfTransport.truck] = np.nan + + return OutboundUsedAndMaximumCapacity( + used=ContainerVolumeByVehicleType( + containers=outbound_used_containers, 
+ teu=outbound_used_capacity_in_teu + ), + maximum=ContainerVolumeByVehicleType( + containers=outbound_maximum_containers, + teu=outbound_maximum_capacity_in_teu + ) + ) diff --git a/conflowgen/tests/application_models/__init__.py b/conflowgen/data_summaries/__init__.py similarity index 100% rename from conflowgen/tests/application_models/__init__.py rename to conflowgen/data_summaries/__init__.py diff --git a/conflowgen/data_summaries/data_summaries_cache.py b/conflowgen/data_summaries/data_summaries_cache.py new file mode 100644 index 00000000..3c97b328 --- /dev/null +++ b/conflowgen/data_summaries/data_summaries_cache.py @@ -0,0 +1,60 @@ +# Decorator class for preview and analysis result caching +from functools import wraps + + +class DataSummariesCache: + """ + This class is used to cache the results of the data summaries (analyses and previews). This is useful when the + same data summary is requested multiple times, e.g., when generating a report. In this case, the data summary + computation is only performed once and the result is cached. The next time the same data summary is requested, the + cached result is returned instead of computing the data summary again. This can significantly speed up the report + generation process. + To use this class, simply decorate the data summary function with the :meth:`.DataSummariesCache.cache_result` + decorator. + The cache is automatically reset when input data changes or a new database is used. This can also be done manually + by calling :meth:`.DataSummariesCache.reset_cache`. + """ + + cached_results = {} + _hit_counter = {} # For internal testing purposes + + # Decorator function to accept function as argument, and return cached result if available or compute and cache + # result + @classmethod + def cache_result(cls, func): + """ + Decorator function to accept function as argument, and return cached result if available or compute and cache + result. 
+ """ + @wraps(func) + def wrapper(*args, **kwargs): + # Create key from function id, name and arguments + key = str(id(func)) + repr(args) + repr(kwargs) + + # Adjust hit counter + function_name = func.__name__ + if function_name not in cls._hit_counter: + cls._hit_counter[function_name] = 0 + cls._hit_counter[function_name] += 1 + + # Check if key exists in cache + if key in cls.cached_results: + return cls.cached_results[key] + + # If not, compute result + result = func(*args, **kwargs) + + # Cache new result + cls.cached_results[key] = result + return result + + return wrapper + + # Reset cache + @classmethod + def reset_cache(cls): + """ + Resets the cache. + """ + cls.cached_results = {} + cls._hit_counter = {} diff --git a/conflowgen/database_connection/create_tables.py b/conflowgen/database_connection/create_tables.py index 3a3d70f2..69d6646d 100644 --- a/conflowgen/database_connection/create_tables.py +++ b/conflowgen/database_connection/create_tables.py @@ -3,6 +3,7 @@ import peewee from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.arrival_information import TruckArrivalInformationForPickup, \ TruckArrivalInformationForDelivery from conflowgen.domain_models.container import Container @@ -40,7 +41,8 @@ def create_tables(sql_db_connection: peewee.Database) -> peewee.Database: TruckArrivalInformationForPickup, TruckArrivalInformationForDelivery, StorageRequirementDistribution, - ContainerDwellTimeDistribution + ContainerDwellTimeDistribution, + RandomSeedStore, ]) for table_with_index in ( Destination, diff --git a/conflowgen/database_connection/sqlite_database_connection.py b/conflowgen/database_connection/sqlite_database_connection.py index f0cd3138..527004dc 100644 --- a/conflowgen/database_connection/sqlite_database_connection.py +++ 
b/conflowgen/database_connection/sqlite_database_connection.py @@ -7,11 +7,13 @@ from peewee import SqliteDatabase from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.application.repositories.random_seed_store_repository import get_initialised_random_object from conflowgen.database_connection.create_tables import create_tables from conflowgen.domain_models.base_model import database_proxy from conflowgen.domain_models.container import Container from conflowgen.domain_models.distribution_seeders import seed_all_distributions from conflowgen.domain_models.vehicle import Truck, DeepSeaVessel, Feeder, Barge, Train +from conflowgen.tools import get_convert_to_random_value class SqliteDatabaseIsMissingException(Exception): @@ -52,11 +54,13 @@ class SqliteDatabaseConnection: ) def __init__(self, sqlite_databases_directory: Optional[str] = None): + self.seeded_random = None if sqlite_databases_directory is None: sqlite_databases_directory = self.SQLITE_DEFAULT_DIR sqlite_databases_directory = os.path.abspath(sqlite_databases_directory) self.sqlite_databases_directory = sqlite_databases_directory + self.path_to_sqlite_database = "" self.logger = logging.getLogger("conflowgen") @@ -82,16 +86,15 @@ def choose_database( **seeder_options ) -> SqliteDatabase: if database_name == ":memory:": - path_to_sqlite_database = ":memory:" + self.path_to_sqlite_database = ":memory:" sqlite_database_existed_before = False else: - path_to_sqlite_database, sqlite_database_existed_before = self._load_or_create_sqlite_file_on_hard_drive( - database_name=database_name, create=create, reset=reset - ) + self.path_to_sqlite_database, sqlite_database_existed_before = ( + self._load_or_create_sqlite_file_on_hard_drive(database_name=database_name, create=create, reset=reset)) - self.logger.debug(f"Opening file {path_to_sqlite_database}") + self.logger.debug(f"Opening file {self.path_to_sqlite_database}") 
self.sqlite_db_connection = SqliteDatabase( - path_to_sqlite_database, + self.path_to_sqlite_database, pragmas=self.SQLITE_DEFAULT_SETTINGS ) database_proxy.initialize(self.sqlite_db_connection) @@ -103,12 +106,12 @@ def choose_database( self.logger.debug(f'foreign_keys: {self.sqlite_db_connection.foreign_keys}') if not sqlite_database_existed_before or reset: - self.logger.debug(f"Creating new database at {path_to_sqlite_database}") + self.logger.debug(f"Creating new database at {self.path_to_sqlite_database}") create_tables(self.sqlite_db_connection) self.logger.debug("Seed with default values...") seed_all_distributions(**seeder_options) else: - self.logger.debug(f"Open existing database at {path_to_sqlite_database}") + self.logger.debug(f"Open existing database at {self.path_to_sqlite_database}") container_flow_properties: ContainerFlowGenerationProperties | None = \ ContainerFlowGenerationProperties.get_or_none() @@ -123,6 +126,11 @@ def choose_database( for vehicle in (DeepSeaVessel, Feeder, Barge, Train, Truck, Container): self.logger.debug(f"Number entries in table '{vehicle.__name__}': {vehicle.select().count()}") + self.seeded_random = get_initialised_random_object(self.__class__.__name__) + random_bits = self.seeded_random.getrandbits(100) + convert_to_random_value = get_convert_to_random_value(random_bits) + self.sqlite_db_connection.func('assign_random_value')(convert_to_random_value) + return self.sqlite_db_connection def delete_database(self, database_name: str) -> None: diff --git a/conflowgen/descriptive_datatypes/__init__.py b/conflowgen/descriptive_datatypes/__init__.py index b6a3eb27..c8d25ad0 100644 --- a/conflowgen/descriptive_datatypes/__init__.py +++ b/conflowgen/descriptive_datatypes/__init__.py @@ -26,17 +26,26 @@ class HinterlandModalSplit(typing.NamedTuple): truck_capacity: float -class OutboundUsedAndMaximumCapacity(typing.NamedTuple): +class ContainerVolume(typing.NamedTuple): """ - This tuple keeps track of how much each vehicle type 
transports on the outbound journey and what the maximum - capacity is. + Several KPIs at container terminals can be both expressed in boxes and TEU. """ + #: The container volume expressed in TEU + teu: float - #: The container volume that is actually transported, summarized by vehicle type. - used: ContainerVolumeByVehicleType + #: The container volume expressed in number of boxes + containers: float - #: The container volume that could be transported if all capacities had been used, summarized by vehicle type. - maximum: ContainerVolumeByVehicleType + +class InboundAndOutboundContainerVolume(typing.NamedTuple): + """ + Note both the inbound and outbound container volume. + """ + #: The container volume transported by vehicles on their inbound journey + inbound: ContainerVolume + + #: The container volume transported by vehicles on their outbound journey + outbound: ContainerVolume class ContainerVolumeByVehicleType(typing.NamedTuple): @@ -52,6 +61,19 @@ class ContainerVolumeByVehicleType(typing.NamedTuple): containers: typing.Optional[typing.Dict[ModeOfTransport, float]] +class OutboundUsedAndMaximumCapacity(typing.NamedTuple): + """ + This tuple keeps track of how much each vehicle type transports on the outbound journey and what the maximum + capacity is. + """ + + #: The container volume that is actually transported, summarized by vehicle type. + used: ContainerVolumeByVehicleType + + #: The container volume that could be transported if all capacities had been used, summarized by vehicle type. + maximum: ContainerVolumeByVehicleType + + class ContainerVolumeFromOriginToDestination(typing.NamedTuple): """ Several KPIs at container terminals can be both expressed in boxes per hour and TEU per hour (or a different time @@ -81,3 +103,27 @@ class VehicleIdentifier(typing.NamedTuple): #: The time of arrival of the vehicle at the terminal. 
vehicle_arrival_time: datetime.datetime + + +class UsedYardCapacityOverTime(typing.NamedTuple): + """ + Represents yard capacity in TEU and number of boxes. + """ + + #: The yard capacity expressed in TEU + teu: typing.Dict[datetime.datetime, float] + + #: The yard capacity expressed in number of boxes + containers: typing.Dict[datetime.datetime, int] + + +class ContainersTransportedByTruck(typing.NamedTuple): + """ + Represents the containers moved by trucks. + """ + + #: The number of containers moved on the inbound journey + inbound: float + + #: The number of containers moved on the outbound journey + outbound: float diff --git a/conflowgen/domain_models/container.py b/conflowgen/domain_models/container.py index 9ce58379..043c9ad4 100644 --- a/conflowgen/domain_models/container.py +++ b/conflowgen/domain_models/container.py @@ -6,91 +6,107 @@ from .arrival_information import TruckArrivalInformationForDelivery, TruckArrivalInformationForPickup from .base_model import BaseModel -from .data_types.container_length import CONTAINER_LENGTH_TO_OCCUPIED_TEU +from .data_types.container_length import CONTAINER_LENGTH_TO_OCCUPIED_TEU, ContainerLength from .field_types.container_length import ContainerLengthField from .field_types.mode_of_transport import ModeOfTransportField from .field_types.storage_requirement import StorageRequirementField from .large_vehicle_schedule import Destination from .vehicle import LargeScheduledVehicle from .vehicle import Truck +from .data_types.storage_requirement import StorageRequirement from ..domain_models.data_types.mode_of_transport import ModeOfTransport +class FaultyDataException(Exception): + def __init__(self, message): + self.message = message + super().__init__(self.message) + + +class NoPickupVehicleException(Exception): + def __init__(self, container, vehicle_type): + self.container = container + self.vehicle_type = vehicle_type + message = f"The container {self.container} is not picked up by any vehicle even though a vehicle 
of type " \ + f"{self.vehicle_type} should be there." + super().__init__(message) + + class Container(BaseModel): """A representation of the physical container that is moved through the yard.""" id = AutoField() - weight = IntegerField( + weight: int = IntegerField( null=False, help_text="The weight of the container (approximated). This value should suit to the container weight " "distribution." ) - length = ContainerLengthField( + length: ContainerLength = ContainerLengthField( null=False, help_text="The length of the container in feet, typically 20' or 40' are used in international trade." ) - storage_requirement = StorageRequirementField( + storage_requirement: StorageRequirement = StorageRequirementField( null=False, help_text="Some containers must be stored separately, e.g. if they are reefers or dangerous goods containers." ) - delivered_by = ModeOfTransportField( + delivered_by: ModeOfTransport = ModeOfTransportField( null=False, help_text="This vehicle type delivers this container to the terminal. This helps to quickly pick the correct " "foreign key in the next step and is thus just additional information." ) - picked_up_by_initial = ModeOfTransportField( + picked_up_by_initial: ModeOfTransport = ModeOfTransportField( null=False, help_text="This vehicle type is first drawn randomly for picking up the container. It might be overwritten " "later because no vehicle satisfies the constraints, e.g. because all vehicles of that type arrive " "too early or too late or they are already full. Large deviations between `picked_up_by_initial` " "and `picked_up_by` might indicate calibration issues with the random distributions or schedules." ) - picked_up_by = ModeOfTransportField( + picked_up_by: ModeOfTransport = ModeOfTransportField( null=False, help_text="This vehicle type is later actually used for picking up the container. This helps to quickly pick " "the correct foreign key in the next step and is thus just additional information." 
) - delivered_by_large_scheduled_vehicle = ForeignKeyField( + delivered_by_large_scheduled_vehicle: LargeScheduledVehicle = ForeignKeyField( LargeScheduledVehicle, null=True, help_text="Points at the large scheduled vehicle it is delivered by (null if truck). " "Any arrival information of the container is attached to that vehicle." ) - delivered_by_truck = ForeignKeyField( + delivered_by_truck: Truck = ForeignKeyField( Truck, null=True, help_text="Points at the truck it is delivered by (null if large scheduled vehicle). " "Any arrival information of the container is attached to that vehicle)." ) - picked_up_by_large_scheduled_vehicle = ForeignKeyField( + picked_up_by_large_scheduled_vehicle: LargeScheduledVehicle = ForeignKeyField( LargeScheduledVehicle, null=True, help_text="Points at the large scheduled vehicle it is picked up by (null if truck). " "Any departure information of the container is attached to that vehicle." ) - picked_up_by_truck = ForeignKeyField( + picked_up_by_truck: Truck = ForeignKeyField( Truck, null=True, help_text="Points at the truck it is picked up by (null if large scheduled vehicle). " "Any departure information of the container is attached to that vehicle." ) - destination = ForeignKeyField( + destination: Destination = ForeignKeyField( Destination, null=True, help_text="Points at the next destination of the container. Only applicable if picked up by a large scheduled " "vehicle. This information is sometimes used for better container stacking in the yard. For vessels, " "this can be regarded as a simplified stowage plan, likewise for trains and barges." ) - emergency_pickup = BooleanField( + emergency_pickup: bool = BooleanField( default=False, help_text="This indicates that no regular means of transport was available so that a vehicle had to be called " "explicitly to pick up the container so that the maximum dwell time is not exceeded." 
) - cached_arrival_time = DateTimeField( + cached_arrival_time: datetime.datetime = DateTimeField( default=None, null=True, help_text="This field is used to cache the arrival time for faster evaluation of analyses." ) - cached_departure_time = DateTimeField( + cached_departure_time: datetime.datetime = DateTimeField( default=None, null=True, help_text="This field is used to cache the departure time for faster evaluation of analyses." @@ -100,12 +116,10 @@ class Container(BaseModel): def occupied_teu(self) -> float: return CONTAINER_LENGTH_TO_OCCUPIED_TEU[self.length] - def get_arrival_time(self, use_cache: bool) -> datetime.datetime: + def get_arrival_time(self) -> datetime.datetime: - if use_cache: - if self.cached_arrival_time is not None: - # noinspection PyTypeChecker - return self.cached_arrival_time + if self.cached_arrival_time is not None: + return self.cached_arrival_time container_arrival_time: datetime.datetime if self.delivered_by == ModeOfTransport.truck: @@ -118,18 +132,16 @@ def get_arrival_time(self, use_cache: bool) -> datetime.datetime: large_scheduled_vehicle: LargeScheduledVehicle = self.delivered_by_large_scheduled_vehicle container_arrival_time = large_scheduled_vehicle.scheduled_arrival else: - raise Exception(f"Faulty data: {self}") + raise FaultyDataException(f"Faulty data: {self}") self.cached_arrival_time = container_arrival_time self.save() return container_arrival_time - def get_departure_time(self, use_cache: bool) -> datetime.datetime: + def get_departure_time(self) -> datetime.datetime: - if use_cache: - if self.cached_departure_time is not None: - # noinspection PyTypeChecker - return self.cached_departure_time + if self.cached_departure_time is not None: + return self.cached_departure_time container_departure_time: datetime.datetime if self.picked_up_by_truck is not None: @@ -143,8 +155,7 @@ def get_departure_time(self, use_cache: bool) -> datetime.datetime: vehicle: LargeScheduledVehicle = self.picked_up_by_large_scheduled_vehicle 
container_departure_time = vehicle.scheduled_arrival else: - raise Exception(f"The container {self} is not picked up by any vehicle even though a vehicle of type " - f"{self.picked_up_by} should be there.") + raise NoPickupVehicleException(self, self.picked_up_by) self.cached_departure_time = container_departure_time self.save() diff --git a/conflowgen/domain_models/data_types/container_length.py b/conflowgen/domain_models/data_types/container_length.py index 54106894..a90bbc94 100644 --- a/conflowgen/domain_models/data_types/container_length.py +++ b/conflowgen/domain_models/data_types/container_length.py @@ -20,7 +20,7 @@ class ContainerLength(enum.Enum): other = -1 # doc: Any other length usually does not fit into the standardized slots and handling processes. @classmethod - def get_factor(cls, container_length: ContainerLength) -> float: + def get_teu_factor(cls, container_length: ContainerLength) -> float: """ Each container occupies a certain amount of space when stored. This required space is measured in TEU. @@ -36,6 +36,10 @@ def get_factor(cls, container_length: ContainerLength) -> float: """ return CONTAINER_LENGTH_TO_OCCUPIED_TEU[container_length] + @classmethod + def get_maximum_teu_factor(cls) -> float: + return MAXIMUM_OCCUPIED_TEU + def __str__(self) -> str: """ The textual representation is, e.g., '20 feet' instead of '' so it is easier to @@ -79,3 +83,5 @@ def cast_element_type(cls, text: str) -> ContainerLength | None: The TEU factor for the value 'other' is chosen to be rather large because it is assumed to be difficult to find a proper storage position. 
""" + +MAXIMUM_OCCUPIED_TEU = max(list(CONTAINER_LENGTH_TO_OCCUPIED_TEU.values())) diff --git a/conflowgen/domain_models/distribution_models/truck_arrival_distribution.py b/conflowgen/domain_models/distribution_models/truck_arrival_distribution.py index 955ba18a..048540fb 100644 --- a/conflowgen/domain_models/distribution_models/truck_arrival_distribution.py +++ b/conflowgen/domain_models/distribution_models/truck_arrival_distribution.py @@ -1,11 +1,11 @@ -from peewee import FloatField, IntegerField +from peewee import FloatField from conflowgen.domain_models.base_model import BaseModel class TruckArrivalDistribution(BaseModel): """The truck arrival distribution (both inbound and outbound journeys)""" - hour_in_the_week = IntegerField(null=False, primary_key=True, unique=True) + hour_in_the_week = FloatField(null=False, primary_key=True, unique=True) fraction = FloatField(null=False) def __repr__(self): diff --git a/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py b/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py index 113b5688..485abfef 100644 --- a/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py +++ b/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py @@ -10,6 +10,10 @@ from conflowgen.tools.continuous_distribution import ContinuousDistribution +class ContainerDwellTimeCouldNotBeCastedException(Exception): + pass + + class ContainerDwellTimeDistributionRepository: @staticmethod @@ -68,7 +72,7 @@ def set_distributions( elif isinstance(container_dwell_time_distribution, dict): distribution_properties = container_dwell_time_distribution else: - raise Exception( + raise ContainerDwellTimeCouldNotBeCastedException( f"The container dwell time distribution representation " f"'{container_dwell_time_distribution}' could not be casted." 
) diff --git a/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py b/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py index 220435b9..d4df7c04 100644 --- a/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py +++ b/conflowgen/domain_models/distribution_repositories/container_length_distribution_repository.py @@ -1,6 +1,7 @@ import math from typing import Dict +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.data_types.container_length import ContainerLength @@ -53,3 +54,17 @@ def set_distribution(cls, container_lengths: Dict[ContainerLength, float]): container_length=container_length, fraction=fraction ).save() + + @classmethod + @DataSummariesCache.cache_result + def get_teu_factor(cls) -> float: + """ + Calculates and returns the TEU factor based on the container length distribution. 
+ """ + # Loop through container lengths and calculate weighted average of all container lengths + container_length_weighted_average = 0.0 + container_length_distribution = cls.get_distribution() + for container_length, fraction in container_length_distribution.items(): + container_length_weighted_average += ContainerLength.get_teu_factor(container_length) * fraction + assert 0 < container_length_weighted_average < ContainerLength.get_maximum_teu_factor() + return container_length_weighted_average diff --git a/conflowgen/domain_models/distribution_validators.py b/conflowgen/domain_models/distribution_validators.py index 4dc50c66..dfe9a527 100644 --- a/conflowgen/domain_models/distribution_validators.py +++ b/conflowgen/domain_models/distribution_validators.py @@ -135,7 +135,7 @@ def _check_all_keys_are_of_intended_primitive_type( return sanitized_distribution -SUPPORTED_PRIMITIVE_KEY_TYPES = (int, ) +SUPPORTED_PRIMITIVE_KEY_TYPES = (int, float) def _check_all_required_keys_are_set_in_distribution( @@ -194,7 +194,7 @@ def _check_value_range_of_frequencies_in_distribution( def validate_distribution_with_no_dependent_variables( distribution: Dict[enum.Enum | int, Any], - key_type: Type[enum.Enum] | Type[int], + key_type: Type[enum.Enum] | Type[float], values_are_frequencies: bool ) -> Dict[enum.Enum | int, float]: sanitized_distribution = _check_all_required_keys_are_set_in_distribution(distribution, key_type) diff --git a/conflowgen/domain_models/factories/container_factory.py b/conflowgen/domain_models/factories/container_factory.py index 8e01e954..bd3aab8a 100644 --- a/conflowgen/domain_models/factories/container_factory.py +++ b/conflowgen/domain_models/factories/container_factory.py @@ -1,7 +1,6 @@ from __future__ import annotations import math -import random from typing import Dict, MutableSequence, Sequence, Type from conflowgen.domain_models.container import Container @@ -19,6 +18,7 @@ from 
conflowgen.domain_models.repositories.large_scheduled_vehicle_repository import LargeScheduledVehicleRepository from conflowgen.domain_models.vehicle import AbstractLargeScheduledVehicle, LargeScheduledVehicle from conflowgen.tools.distribution_approximator import DistributionApproximator +from conflowgen.application.repositories.random_seed_store_repository import get_initialised_random_object class ContainerFactory: @@ -26,12 +26,10 @@ class ContainerFactory: Creates containers according to the distributions which are either hard-coded or stored in the database. """ - ignored_capacity = ContainerLength.get_factor(ContainerLength.other) - - random_seed = 1 + ignored_capacity = ContainerLength.get_maximum_teu_factor() def __init__(self): - self.seeded_random = random.Random(x=self.random_seed) + self.seeded_random = get_initialised_random_object(self.__class__.__name__) self.mode_of_transportation_distribution: dict[ModeOfTransport, dict[ModeOfTransport, float]] | None = None self.container_length_distribution: dict[ContainerLength, float] | None = None self.container_weight_distribution: dict[ContainerLength, dict[int, float]] | None = None diff --git a/conflowgen/domain_models/repositories/large_scheduled_vehicle_repository.py b/conflowgen/domain_models/repositories/large_scheduled_vehicle_repository.py index 933c1fec..05563689 100644 --- a/conflowgen/domain_models/repositories/large_scheduled_vehicle_repository.py +++ b/conflowgen/domain_models/repositories/large_scheduled_vehicle_repository.py @@ -9,7 +9,7 @@ class LargeScheduledVehicleRepository: - ignored_capacity = ContainerLength.get_factor(ContainerLength.other) + ignored_capacity = ContainerLength.get_teu_factor(ContainerLength.other) def __init__(self): self.transportation_buffer = None @@ -44,7 +44,7 @@ def block_capacity_for_inbound_journey( # calculate new free capacity free_capacity_in_teu = self.free_capacity_for_inbound_journey_buffer[vehicle] - used_capacity_in_teu = 
ContainerLength.get_factor(container_length=container.length) + used_capacity_in_teu = ContainerLength.get_teu_factor(container_length=container.length) new_free_capacity_in_teu = free_capacity_in_teu - used_capacity_in_teu assert new_free_capacity_in_teu >= 0, f"vehicle {vehicle} is overloaded, " \ f"free_capacity_in_teu: {free_capacity_in_teu}, " \ @@ -65,7 +65,7 @@ def block_capacity_for_outbound_journey( # calculate new free capacity free_capacity_in_teu = self.free_capacity_for_outbound_journey_buffer[vehicle] - used_capacity_in_teu = ContainerLength.get_factor(container_length=container.length) + used_capacity_in_teu = ContainerLength.get_teu_factor(container_length=container.length) new_free_capacity_in_teu = free_capacity_in_teu - used_capacity_in_teu assert new_free_capacity_in_teu >= 0, f"vehicle {vehicle} is overloaded, " \ f"free_capacity_in_teu: {free_capacity_in_teu}, " \ @@ -133,10 +133,10 @@ def _get_free_capacity_in_teu( loaded_other_containers = container_counter(vehicle, ContainerLength.other) free_capacity_in_teu = ( maximum_capacity - - loaded_20_foot_containers * ContainerLength.get_factor(ContainerLength.twenty_feet) - - loaded_40_foot_containers * ContainerLength.get_factor(ContainerLength.forty_feet) - - loaded_45_foot_containers * ContainerLength.get_factor(ContainerLength.forty_five_feet) - - loaded_other_containers * ContainerLength.get_factor(ContainerLength.other) + - loaded_20_foot_containers * ContainerLength.get_teu_factor(ContainerLength.twenty_feet) + - loaded_40_foot_containers * ContainerLength.get_teu_factor(ContainerLength.forty_feet) + - loaded_45_foot_containers * ContainerLength.get_teu_factor(ContainerLength.forty_five_feet) + - loaded_other_containers * ContainerLength.get_teu_factor(ContainerLength.other) ) vehicle_name = vehicle.large_scheduled_vehicle.vehicle_name assert free_capacity_in_teu >= 0, f"vehicle {vehicle} of type {vehicle.get_mode_of_transport()} with the " \ diff --git 
a/conflowgen/domain_models/repositories/schedule_repository.py b/conflowgen/domain_models/repositories/schedule_repository.py index 53b93b4e..b45382a3 100644 --- a/conflowgen/domain_models/repositories/schedule_repository.py +++ b/conflowgen/domain_models/repositories/schedule_repository.py @@ -41,7 +41,7 @@ def get_departing_vehicles( ) # Check for each of the vehicles how much it has already loaded - required_capacity_in_teu = ContainerLength.get_factor(required_capacity) + required_capacity_in_teu = ContainerLength.get_teu_factor(required_capacity) vehicles_with_sufficient_capacity = [] vehicle: Type[AbstractLargeScheduledVehicle] for vehicle in vehicles: diff --git a/conflowgen/domain_models/vehicle.py b/conflowgen/domain_models/vehicle.py index da998bfe..b0b830ae 100644 --- a/conflowgen/domain_models/vehicle.py +++ b/conflowgen/domain_models/vehicle.py @@ -6,6 +6,7 @@ import datetime import uuid +from abc import abstractmethod from typing import Type from peewee import AutoField, BooleanField, CharField, ForeignKeyField, DateTimeField @@ -119,12 +120,14 @@ def __repr__(self): class AbstractLargeScheduledVehicle(BaseModel): @property + @abstractmethod def large_scheduled_vehicle(self) -> LargeScheduledVehicle: - raise Exception("You must pick one of the concrete subclasses, this is the common parent class.") + pass @staticmethod + @abstractmethod def get_mode_of_transport() -> ModeOfTransport: - raise Exception("You must pick one of the concrete subclasses, this is the common parent class.") + pass @staticmethod def map_mode_of_transport_to_class( diff --git a/conflowgen/flow_generator/abstract_truck_for_containers_manager.py b/conflowgen/flow_generator/abstract_truck_for_containers_manager.py index 84052a97..c52d450d 100644 --- a/conflowgen/flow_generator/abstract_truck_for_containers_manager.py +++ b/conflowgen/flow_generator/abstract_truck_for_containers_manager.py @@ -2,10 +2,10 @@ import abc import logging import math -import random -from typing import 
List, Tuple, Union, Optional, Dict, Sequence +import typing from conflowgen.tools.weekly_distribution import WeeklyDistribution +from ..application.repositories.random_seed_store_repository import get_initialised_random_object from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.container import Container from ..domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ @@ -17,19 +17,30 @@ from ..tools.continuous_distribution import ContinuousDistribution, multiply_discretized_probability_densities +class SumOfProbabilitiesDoesNotEqualOneException(Exception): + pass + + +class UnknownDistributionPropertyException(Exception): + pass + + class AbstractTruckForContainersManager(abc.ABC): def __init__(self): self.logger = logging.getLogger("conflowgen") + self.seeded_random = get_initialised_random_object(self.__class__.__name__) + self.container_dwell_time_distribution_repository = ContainerDwellTimeDistributionRepository() self.container_dwell_time_distributions: \ - Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, ContinuousDistribution]]] | None \ + typing.Dict[ModeOfTransport, typing.Dict[ + ModeOfTransport, typing.Dict[StorageRequirement, ContinuousDistribution]]] | None \ = None self.truck_arrival_distribution_repository = TruckArrivalDistributionRepository() self.truck_arrival_distributions: \ - Dict[ModeOfTransport, Dict[StorageRequirement, WeeklyDistribution | None]] = { + typing.Dict[ModeOfTransport, typing.Dict[StorageRequirement, WeeklyDistribution | None]] = { vehicle: { storage_requirement: None for storage_requirement in StorageRequirement @@ -37,7 +48,7 @@ def __init__(self): } self.vehicle_factory = VehicleFactory() - self.time_window_length_in_hours: Optional[int] = None + self.time_window_length_in_hours: typing.Optional[int] = None @abc.abstractmethod def _get_container_dwell_time_distribution( @@ -56,7 +67,7 @@ def reload_distributions( self ) -> 
None: # noinspection PyTypeChecker - hour_of_the_week_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]] = \ + hour_of_the_week_fraction_pairs: typing.List[typing.Union[typing.Tuple[int, float], typing.Tuple[int, int]]] = \ list(self.truck_arrival_distribution_repository.get_distribution().items()) self.time_window_length_in_hours = hour_of_the_week_fraction_pairs[1][0] - hour_of_the_week_fraction_pairs[0][0] @@ -65,7 +76,7 @@ def reload_distributions( def _update_truck_arrival_and_container_dwell_time_distributions( self, - hour_of_the_week_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]] + hour_of_the_week_fraction_pairs: typing.List[typing.Union[typing.Tuple[int, float], typing.Tuple[int, int]]] ) -> None: for vehicle_type in ModeOfTransport: for storage_requirement in StorageRequirement: @@ -103,14 +114,15 @@ def _get_distributions( return container_dwell_time_distribution, truck_arrival_distribution @abc.abstractmethod - def _get_truck_arrival_distributions(self, container: Container) -> Dict[StorageRequirement, WeeklyDistribution]: + def _get_truck_arrival_distributions(self, container: Container) -> typing.Dict[ + StorageRequirement, WeeklyDistribution]: pass def _get_time_window_of_truck_arrival( self, container_dwell_time_distribution: ContinuousDistribution, - truck_arrival_distribution_slice: Dict[int, float], - _debug_check_distribution_property: Optional[str] = None + truck_arrival_distribution_slice: typing.Dict[int, float], + _debug_check_distribution_property: typing.Optional[str] = None ) -> int: """ Returns: @@ -128,7 +140,7 @@ def _get_time_window_of_truck_arrival( ) if sum(total_probabilities) == 0: # bad circumstances, no slot available - raise Exception( + raise SumOfProbabilitiesDoesNotEqualOneException( f"No truck slots available! {truck_arrival_probabilities} and {total_probabilities} just do not match." 
) @@ -142,9 +154,9 @@ def _get_time_window_of_truck_arrival( elif _debug_check_distribution_property == "average": selected_time_window = int(round(container_dwell_time_distribution.average)) else: - raise Exception(f"Unknown: {_debug_check_distribution_property}") + raise UnknownDistributionPropertyException(_debug_check_distribution_property) else: - selected_time_window = random.choices( + selected_time_window = self.seeded_random.choices( population=time_windows_for_truck_arrival, weights=total_probabilities )[0] @@ -170,7 +182,7 @@ def _get_time_window_of_truck_arrival( return selected_time_window @staticmethod - def _drop_where_zero(sequence: Sequence, filter_sequence: Sequence) -> list: + def _drop_where_zero(sequence: typing.Sequence, filter_sequence: typing.Sequence) -> list: new_sequence = [] for element, filter_element in zip(sequence, filter_sequence): if filter_element: diff --git a/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py b/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py index 8c4b064d..c506dba8 100644 --- a/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py +++ b/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py @@ -1,8 +1,8 @@ from __future__ import annotations import logging -import random from typing import Dict, Type, List +from conflowgen.application.repositories.random_seed_store_repository import get_initialised_random_object from conflowgen.domain_models.container import Container from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -15,9 +15,11 @@ class AllocateSpaceForContainersDeliveredByTruckService: - ignored_capacity = ContainerLength.get_factor(ContainerLength.other) + ignored_capacity = ContainerLength.get_teu_factor(ContainerLength.other) def __init__(self): + self.seeded_random = 
get_initialised_random_object(self.__class__.__name__) + self.logger = logging.getLogger("conflowgen") self.mode_of_transport_distribution_repository = ModeOfTransportDistributionRepository() self.mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] | None = None @@ -40,9 +42,14 @@ def _get_number_containers_to_allocate() -> int: As long as the container length distribution for inbound and outbound containers are the same, using the number of containers should lead to the same amount of containers as if we had taken the TEU capacity which is more complex to calculate. + We do not consider the emergency pick-ups, i.e. the cases when a container was picked up by a truck just because + no truck was available. + These trucks artificially increase the import and export flows in case the container was originally a + transshipment container and without this correction all of a sudden we have two containers in the yard. """ number_containers: int = Container.select().where( - Container.picked_up_by == ModeOfTransport.truck + (Container.picked_up_by == ModeOfTransport.truck) + & ~Container.emergency_pickup ).count() return number_containers @@ -74,8 +81,7 @@ def allocate(self) -> None: successful_assignment = 0 teu_total = 0 - for i in range(number_containers_to_allocate): - i += 1 + for i in range(1, number_containers_to_allocate + 1): if i % 1000 == 0 or i == 1 or i == number_containers_to_allocate: self.logger.info( f"Progress: {i} / {number_containers_to_allocate} ({i / number_containers_to_allocate:.2%}) " @@ -131,7 +137,7 @@ def allocate(self) -> None: continue # try again (possibly new vehicle type, definitely not same vehicle again) container = self.container_factory.create_container_for_delivering_truck(vehicle) - teu_total += ContainerLength.get_factor(container.length) + teu_total += ContainerLength.get_teu_factor(container.length) self.large_scheduled_vehicle_repository.block_capacity_for_outbound_journey(vehicle, container)
successful_assignment += 1 break # success, no further looping to search for a suitable vehicle @@ -155,7 +161,7 @@ def _pick_vehicle_type( return None # pick vehicle type - vehicle_type: ModeOfTransport = random.choices( + vehicle_type: ModeOfTransport = self.seeded_random.choices( population=vehicle_types, weights=frequency_of_vehicle_types )[0] @@ -178,7 +184,7 @@ def _pick_vehicle( "by trucks.") return None - vehicle: Type[AbstractLargeScheduledVehicle] = random.choices( + vehicle: Type[AbstractLargeScheduledVehicle] = self.seeded_random.choices( population=list(vehicle_distribution.keys()), weights=list(vehicle_distribution.values()) )[0] diff --git a/conflowgen/flow_generator/assign_destination_to_container_service.py b/conflowgen/flow_generator/assign_destination_to_container_service.py index ef1f6951..b19dae1c 100644 --- a/conflowgen/flow_generator/assign_destination_to_container_service.py +++ b/conflowgen/flow_generator/assign_destination_to_container_service.py @@ -1,9 +1,11 @@ from __future__ import annotations import logging -import random -from typing import Iterable, Dict +import typing +from peewee import fn + +from conflowgen.application.repositories.random_seed_store_repository import get_initialised_random_object from conflowgen.domain_models.container import Container from conflowgen.domain_models.distribution_repositories.container_destination_distribution_repository import \ ContainerDestinationDistributionRepository @@ -16,8 +18,10 @@ class AssignDestinationToContainerService: logger = logging.getLogger("conflowgen") def __init__(self): + self.seeded_random = get_initialised_random_object(self.__class__.__name__) + self.repository = ContainerDestinationDistributionRepository() - self.distribution: Dict[Schedule, Dict[Destination, float]] | None = None + self.distribution: typing.Dict[Schedule, typing.Dict[Destination, float]] | None = None self.reload_distributions() def reload_distributions(self): @@ -25,10 +29,7 @@ def 
reload_distributions(self): self.logger.debug("Loading destination distribution...") for schedule, distribution_for_schedule in self.distribution.items(): self.logger.debug(f"Load destination distribution for service '{schedule.service_name}' by " - f"{schedule.vehicle_type}") - for destination, fraction in distribution_for_schedule.items(): - self.logger.debug(f"Destination '{destination.destination_name}' is frequented by {100*fraction:.2f}% " - f"of the containers and is number {destination.sequence_id}") + f"{schedule.vehicle_type} with {len(distribution_for_schedule)} destinations") def assign(self) -> None: """ @@ -37,7 +38,7 @@ def assign(self) -> None: in the following. This step can only be done if the next destinations of the vehicle are determined in the schedule (this is an optional user input). The frequency is expressed in boxes. """ - destination_with_distinct_schedules: Iterable[Destination] = Destination.select( + destination_with_distinct_schedules: typing.Iterable[Destination] = Destination.select( Destination.belongs_to_schedule).distinct() schedules = [ destination.belongs_to_schedule @@ -49,10 +50,12 @@ def assign(self) -> None: self.logger.debug(f"Assign destinations to containers that leave the terminal with the service " f"'{schedule.service_name}' of the vehicle type {schedule.vehicle_type}, " f"progress: {i+1} / {number_iterations} ({100*(i + 1)/number_iterations:.2f}%)") - containers_moving_according_to_schedule: Iterable[Container] = Container.select().join( + containers_moving_according_to_schedule: typing.Iterable[Container] = Container.select().join( LargeScheduledVehicle, on=Container.picked_up_by_large_scheduled_vehicle ).where( Container.picked_up_by_large_scheduled_vehicle.schedule == schedule + ).order_by( + fn.assign_random_value(Container.id) ) distribution_for_schedule = self.distribution[schedule] destinations = list(distribution_for_schedule.keys()) @@ -60,7 +63,7 @@ def assign(self) -> None: container: Container for 
container in containers_moving_according_to_schedule: - sampled_destination = random.choices( + sampled_destination = self.seeded_random.choices( population=destinations, weights=frequency_of_destinations )[0] diff --git a/conflowgen/flow_generator/container_flow_generation_service.py b/conflowgen/flow_generator/container_flow_generation_service.py index bdc84680..9a772284 100644 --- a/conflowgen/flow_generator/container_flow_generation_service.py +++ b/conflowgen/flow_generator/container_flow_generation_service.py @@ -18,6 +18,7 @@ TruckForExportContainersManager from conflowgen.flow_generator.truck_for_import_containers_manager import \ TruckForImportContainersManager +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache class ContainerFlowGenerationService: @@ -73,6 +74,8 @@ def container_flow_data_exists() -> bool: return len(Container.select().limit(1)) == 1 def generate(self): + self.logger.info("Resetting preview and analysis cache...") + DataSummariesCache.reset_cache() self.logger.info("Remove previous data...") self.clear_previous_container_flow() self.logger.info("Reloading properties and distributions...") diff --git a/conflowgen/flow_generator/large_scheduled_vehicle_creation_service.py b/conflowgen/flow_generator/large_scheduled_vehicle_creation_service.py index b1091b01..170d6a0b 100644 --- a/conflowgen/flow_generator/large_scheduled_vehicle_creation_service.py +++ b/conflowgen/flow_generator/large_scheduled_vehicle_creation_service.py @@ -2,6 +2,8 @@ from typing import List, Type import logging +from peewee import fn + from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.factories.container_factory import ContainerFactory from conflowgen.domain_models.factories.fleet_factory import FleetFactory @@ -37,7 +39,9 @@ def reload_properties( def create(self) -> None: assert self.container_flow_start_date is not None assert self.container_flow_end_date is not None - 
schedules = Schedule.select() + schedules = Schedule.select().order_by( + fn.assign_random_value(Schedule.id) + ) number_schedules = schedules.count() for i, schedule in enumerate(schedules): i += 1 diff --git a/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py b/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py index 8c2dd5a0..f0819892 100644 --- a/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py +++ b/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py @@ -2,13 +2,14 @@ import datetime import logging import math -import random from typing import Tuple, List, Dict, Type, Sequence import numpy as np # noinspection PyProtectedMember from peewee import fn, JOIN, ModelSelect +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from ..application.repositories.random_seed_store_repository import get_initialised_random_object from ..domain_models.data_types.container_length import ContainerLength from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForDelivery @@ -25,12 +26,9 @@ class LargeScheduledVehicleForOnwardTransportationManager: - random_seed = 1 - - use_cache = True - def __init__(self): - self.seeded_random = random.Random(x=self.random_seed) + self.seeded_random = get_initialised_random_object(self.__class__.__name__) + self.logger = logging.getLogger("conflowgen") self.schedule_repository = ScheduleRepository() self.large_scheduled_vehicle_repository = self.schedule_repository.large_scheduled_vehicle_repository @@ -76,7 +74,7 @@ def choose_departing_vehicle_for_containers(self) -> None: # This way no vehicle has an advantage over another by its earlier arrival (getting better slots etc.) 
selected_containers: ModelSelect = Container.select( ).order_by( - fn.Random() + fn.assign_random_value(Container.id) ).where( (Container.picked_up_by << ModeOfTransport.get_scheduled_vehicles()) & (Container.delivered_by << ModeOfTransport.get_scheduled_vehicles()) @@ -197,7 +195,7 @@ def _draw_vehicle( vehicles_and_their_respective_free_capacity = {} for vehicle in available_vehicles: free_capacity = self.large_scheduled_vehicle_repository.get_free_capacity_for_outbound_journey(vehicle) - if free_capacity >= ContainerLength.get_factor(ContainerLength.other): + if free_capacity >= ContainerLength.get_teu_factor(ContainerLength.other): vehicles_and_their_respective_free_capacity[vehicle] = free_capacity if len(available_vehicles) == 0: @@ -254,10 +252,11 @@ def _get_dwell_times(self, container: Container) -> Tuple[int, int]: return minimum_dwell_time_in_hours, maximum_dwell_time_in_hours + @DataSummariesCache.cache_result def _get_arrival_time_of_container(self, container: Container) -> datetime.datetime: """get container arrival from correct source """ - return container.get_arrival_time(use_cache=self.use_cache) + return container.get_arrival_time() def _find_alternative_mode_of_transportation( self, diff --git a/conflowgen/flow_generator/truck_for_export_containers_manager.py b/conflowgen/flow_generator/truck_for_export_containers_manager.py index 140a3188..53bc0106 100644 --- a/conflowgen/flow_generator/truck_for_export_containers_manager.py +++ b/conflowgen/flow_generator/truck_for_export_containers_manager.py @@ -1,9 +1,11 @@ from __future__ import annotations import datetime -import random from typing import Dict, Optional -from .abstract_truck_for_containers_manager import AbstractTruckForContainersManager +from peewee import fn + +from .abstract_truck_for_containers_manager import AbstractTruckForContainersManager, \ + UnknownDistributionPropertyException from ..domain_models.data_types.container_length import ContainerLength from 
..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForDelivery @@ -75,7 +77,7 @@ def _get_container_delivery_time( # arrival within the last time slot close_to_time_window_length = self.time_window_length_in_hours - (1 / 60) - random_time_component: float = random.uniform(0, close_to_time_window_length) + random_time_component: float = self.seeded_random.uniform(0, close_to_time_window_length) if _debug_check_distribution_property is not None: if _debug_check_distribution_property == "minimum": @@ -85,7 +87,7 @@ def _get_container_delivery_time( elif _debug_check_distribution_property == "average": random_time_component = 0 else: - raise Exception(f"Unknown: {_debug_check_distribution_property}") + raise UnknownDistributionPropertyException(f"Unknown: {_debug_check_distribution_property}") truck_arrival_time = ( # go back to the earliest time window @@ -113,6 +115,8 @@ def generate_trucks_for_delivering(self) -> None: """ containers = Container.select().where( Container.delivered_by == ModeOfTransport.truck + ).order_by( + fn.assign_random_value(Container.id) ) number_containers = containers.count() self.logger.info( @@ -143,6 +147,6 @@ def generate_trucks_for_delivering(self) -> None: ) container.delivered_by_truck = truck container.save() - teu_total += ContainerLength.get_factor(container.length) + teu_total += ContainerLength.get_teu_factor(container.length) self.logger.info(f"All {number_containers} trucks that deliver a container are created now, moving " f"{teu_total} TEU.") diff --git a/conflowgen/flow_generator/truck_for_import_containers_manager.py b/conflowgen/flow_generator/truck_for_import_containers_manager.py index 13bdaa0c..313a2083 100644 --- a/conflowgen/flow_generator/truck_for_import_containers_manager.py +++ b/conflowgen/flow_generator/truck_for_import_containers_manager.py @@ -1,8 +1,10 @@ import datetime -import random from typing import Dict, 
Optional -from .abstract_truck_for_containers_manager import AbstractTruckForContainersManager +from peewee import fn + +from .abstract_truck_for_containers_manager import AbstractTruckForContainersManager, \ + UnknownDistributionPropertyException from ..domain_models.data_types.container_length import ContainerLength from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForPickup @@ -57,7 +59,7 @@ def _get_container_pickup_time( # arrival within the last time slot close_to_time_window_length = self.time_window_length_in_hours - (1 / 60) - random_time_component: float = random.uniform(0, close_to_time_window_length) + random_time_component: float = self.seeded_random.uniform(0, close_to_time_window_length) if _debug_check_distribution_property is not None: if _debug_check_distribution_property == "minimum": @@ -67,7 +69,7 @@ def _get_container_pickup_time( elif _debug_check_distribution_property == "average": random_time_component = 1 else: - raise Exception(f"Unknown: {_debug_check_distribution_property}") + raise UnknownDistributionPropertyException(f"Unknown: {_debug_check_distribution_property}") truck_arrival_time = ( container_arrival_time.replace(minute=0, second=0, microsecond=0) @@ -88,6 +90,8 @@ def _get_container_pickup_time( def generate_trucks_for_picking_up(self): containers = Container.select().where( Container.picked_up_by == ModeOfTransport.truck + ).order_by( + fn.assign_random_value(Container.id) ) number_containers = containers.count() self.logger.info( @@ -121,6 +125,6 @@ def generate_trucks_for_picking_up(self): ) container.picked_up_by_truck = truck container.save() - teu_total += ContainerLength.get_factor(container.length) + teu_total += ContainerLength.get_teu_factor(container.length) self.logger.info(f"All {number_containers} trucks that pick up a container have been generated, moving " f"{teu_total} TEU.") diff --git 
a/conflowgen/logging/logging.py b/conflowgen/logging/logging.py index eb25d9c9..531c084f 100644 --- a/conflowgen/logging/logging.py +++ b/conflowgen/logging/logging.py @@ -59,10 +59,15 @@ def setup_logger( logger = logging.getLogger("conflowgen") logger.setLevel(logging.DEBUG) - flow_handler = logging.StreamHandler(stream=sys.stdout) - flow_handler.setLevel(logging.DEBUG) - flow_handler.setFormatter(formatter) - logger.addHandler(flow_handler) + stream_handlers = [handler for handler in logger.handlers if isinstance(handler, logging.StreamHandler)] + if any(handler.stream == sys.stdout for handler in stream_handlers): + logger.warning("Duplicate StreamHandler streaming to sys.stdout detected. " + "Skipping adding another StreamHandler.") + else: + stream_handler = logging.StreamHandler(stream=sys.stdout) + stream_handler.setLevel(logging.DEBUG) + stream_handler.setFormatter(formatter) + logger.addHandler(stream_handler) if not os.path.isdir(logging_directory): logger.debug(f"Creating log directory at {logging_directory}") diff --git a/conflowgen/previews/__init__.py b/conflowgen/previews/__init__.py index b4119250..46e5dd30 100644 --- a/conflowgen/previews/__init__.py +++ b/conflowgen/previews/__init__.py @@ -3,6 +3,8 @@ from .inbound_and_outbound_vehicle_capacity_preview_report import InboundAndOutboundVehicleCapacityPreviewReport from .container_flow_by_vehicle_type_preview_report import ContainerFlowByVehicleTypePreviewReport from .modal_split_preview_report import ModalSplitPreviewReport +from .quay_side_throughput_preview_report import QuaySideThroughputPreviewReport +from .truck_gate_throughput_preview_report import TruckGateThroughputPreviewReport from .vehicle_capacity_exceeded_preview_report import VehicleCapacityUtilizationOnOutboundJourneyPreviewReport from ..reporting import AbstractReport from ..reporting.auto_reporter import AutoReporter @@ -14,6 +16,8 @@ VehicleCapacityUtilizationOnOutboundJourneyPreviewReport, 
ContainerFlowByVehicleTypePreviewReport, ModalSplitPreviewReport, + QuaySideThroughputPreviewReport, + TruckGateThroughputPreviewReport ] diff --git a/conflowgen/previews/container_flow_by_vehicle_type_preview.py b/conflowgen/previews/container_flow_by_vehicle_type_preview.py index 5779389d..ce2c7b6f 100644 --- a/conflowgen/previews/container_flow_by_vehicle_type_preview.py +++ b/conflowgen/previews/container_flow_by_vehicle_type_preview.py @@ -2,6 +2,7 @@ import datetime from typing import Dict +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ @@ -49,6 +50,7 @@ def __init__( transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -58,6 +60,7 @@ def hypothesize_with_mode_of_transport_distribution( ) self.mode_of_transport_distribution = mode_of_transport_distribution + @DataSummariesCache.cache_result def get_inbound_to_outbound_flow( self ) -> Dict[ModeOfTransport, Dict[ModeOfTransport, float]]: diff --git a/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py b/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py index 2670fc44..f208c102 100644 --- a/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py +++ b/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py @@ -137,21 +137,18 @@ def get_report_as_graph(self, **kwargs) -> plotly.graph_objects.Figure: data=[ plotly.graph_objects.Sankey( arrangement='perpendicular', - node=dict( - pad=15, - thickness=20, - line=dict( - color="black", - width=0.5 - ), - label=inbound_labels + outbound_labels, - 
color="dimgray", - ), - link=dict( - source=source_ids_with_duplication, - target=target_ids_with_duplication, - value=value - ) + node={ + 'pad': 15, + 'thickness': 20, + 'line': {'color': 'black', 'width': 0.5}, + 'label': inbound_labels + outbound_labels, + 'color': 'dimgray' + }, + link={ + 'source': source_ids_with_duplication, + 'target': target_ids_with_duplication, + 'value': value + } ) ] ) diff --git a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py index f8ab382b..2e0a741a 100644 --- a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py +++ b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py @@ -1,16 +1,16 @@ from __future__ import annotations import datetime from typing import Dict -import numpy as np +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity, ContainerVolumeByVehicleType from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository -from conflowgen.domain_models.factories.fleet_factory import create_arrivals_within_time_range -from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.application.services.inbound_and_outbound_vehicle_capacity_calculator_service import \ + InboundAndOutboundVehicleCapacityCalculatorService class InboundAndOutboundVehicleCapacityPreview(AbstractPreview): @@ -49,6 +49,7 @@ def __init__( self.mode_of_transport_distribution = ModeOfTransportDistributionRepository().get_distribution() + @DataSummariesCache.cache_result def 
_get_truck_capacity_for_export_containers( self, inbound_capacity_of_vehicles: Dict[ModeOfTransport, float] @@ -59,16 +60,10 @@ def _get_truck_capacity_for_export_containers( created. Thus, this method accounts for both import and export. """ - truck_capacity = 0 - for vehicle_type in ModeOfTransport.get_scheduled_vehicles(): - number_of_containers_delivered_to_terminal_by_vehicle_type = inbound_capacity_of_vehicles[vehicle_type] - mode_of_transport_distribution_of_vehicle_type = self.mode_of_transport_distribution[vehicle_type] - vehicle_to_truck_fraction = mode_of_transport_distribution_of_vehicle_type[ModeOfTransport.truck] - number_of_containers_to_pick_up_by_truck_from_vehicle_type = \ - number_of_containers_delivered_to_terminal_by_vehicle_type * vehicle_to_truck_fraction - truck_capacity += number_of_containers_to_pick_up_by_truck_from_vehicle_type - return truck_capacity + return InboundAndOutboundVehicleCapacityCalculatorService.\ + get_truck_capacity_for_export_containers(inbound_capacity_of_vehicles) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -78,38 +73,17 @@ def hypothesize_with_mode_of_transport_distribution( ) self.mode_of_transport_distribution = mode_of_transport_distribution + @DataSummariesCache.cache_result def get_inbound_capacity_of_vehicles(self) -> ContainerVolumeByVehicleType: """ For the inbound capacity, first vehicles that adhere to a schedule are considered. Trucks, which are created depending on the outbound distribution, are created based on the assumptions of the further container flow generation process. 
""" - inbound_capacity_in_teu: Dict[ModeOfTransport, float] = { - vehicle_type: 0 - for vehicle_type in ModeOfTransport - } - - for schedule in Schedule.select(): - arrivals = create_arrivals_within_time_range( - self.start_date, - schedule.vehicle_arrives_at, - self.end_date, - schedule.vehicle_arrives_every_k_days, - schedule.vehicle_arrives_at_time - ) - total_capacity_moved_by_vessel = (len(arrivals) # number of vehicles that are planned - * schedule.average_moved_capacity) # TEU capacity of each vehicle - inbound_capacity_in_teu[schedule.vehicle_type] += total_capacity_moved_by_vessel - - inbound_capacity_in_teu[ModeOfTransport.truck] = self._get_truck_capacity_for_export_containers( - inbound_capacity_in_teu - ) - - return ContainerVolumeByVehicleType( - containers=None, - teu=inbound_capacity_in_teu - ) + return InboundAndOutboundVehicleCapacityCalculatorService.\ + get_inbound_capacity_of_vehicles(self.start_date, self.end_date) + @DataSummariesCache.cache_result def get_outbound_capacity_of_vehicles(self) -> OutboundUsedAndMaximumCapacity: """ For the outbound capacity, both the used outbound capacity (estimated) and the maximum outbound capacity is @@ -117,57 +91,5 @@ def get_outbound_capacity_of_vehicles(self) -> OutboundUsedAndMaximumCapacity: redistributed to other vehicle types due to a lack of capacity. The capacities are only calculated in TEU, not in containers. """ - outbound_used_capacity_in_teu: Dict[ModeOfTransport, float] = { - vehicle_type: 0 - for vehicle_type in ModeOfTransport - } - outbound_maximum_capacity_in_teu: Dict[ModeOfTransport, float] = { - vehicle_type: 0 - for vehicle_type in ModeOfTransport - } - - schedule: Schedule - for schedule in Schedule.select(): - - assert schedule.average_moved_capacity <= schedule.average_vehicle_capacity, \ - "A vehicle cannot move a larger amount of containers (in TEU) than its capacity, " \ - f"the input data is malformed. 
Schedule '{schedule.service_name}' of vehicle type " \ - f"{schedule.vehicle_type} has an average moved capacity of {schedule.average_moved_capacity} but an " \ - f"averaged vehicle capacity of {schedule.average_vehicle_capacity}." - - arrivals = create_arrivals_within_time_range( - self.start_date, - schedule.vehicle_arrives_at, - self.end_date, - schedule.vehicle_arrives_every_k_days, - schedule.vehicle_arrives_at_time - ) - - # If all container flows are balanced, only the average moved capacity is required - total_average_capacity_moved_by_vessel_in_teu = len(arrivals) * schedule.average_moved_capacity - outbound_used_capacity_in_teu[schedule.vehicle_type] += total_average_capacity_moved_by_vessel_in_teu - - # If there are unbalanced container flows, a vehicle departs with more containers than it delivered - maximum_capacity_of_vehicle_in_teu = min( - schedule.average_moved_capacity * (1 + self.transportation_buffer), - schedule.average_vehicle_capacity - ) - total_maximum_capacity_moved_by_vessel = len(arrivals) * maximum_capacity_of_vehicle_in_teu - outbound_maximum_capacity_in_teu[schedule.vehicle_type] += total_maximum_capacity_moved_by_vessel - - inbound_capacity = self.get_inbound_capacity_of_vehicles() - outbound_used_capacity_in_teu[ModeOfTransport.truck] = self._get_truck_capacity_for_export_containers( - inbound_capacity.teu - ) - outbound_maximum_capacity_in_teu[ModeOfTransport.truck] = np.nan # Trucks can always be added as required - - return OutboundUsedAndMaximumCapacity( - used=ContainerVolumeByVehicleType( - containers=None, - teu=outbound_used_capacity_in_teu - ), - maximum=ContainerVolumeByVehicleType( - containers=None, - teu=outbound_maximum_capacity_in_teu - ) - ) + return InboundAndOutboundVehicleCapacityCalculatorService.\ + get_outbound_capacity_of_vehicles(self.start_date, self.end_date, self.transportation_buffer) diff --git a/conflowgen/previews/modal_split_preview.py b/conflowgen/previews/modal_split_preview.py index 
7138c4a0..75266672 100644 --- a/conflowgen/previews/modal_split_preview.py +++ b/conflowgen/previews/modal_split_preview.py @@ -2,6 +2,7 @@ import datetime from typing import Dict +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.previews.container_flow_by_vehicle_type_preview import \ ContainerFlowByVehicleTypePreview @@ -58,6 +59,7 @@ def __init__( transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -66,6 +68,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution ) + @DataSummariesCache.cache_result def get_transshipment_and_hinterland_split(self) -> TransshipmentAndHinterlandSplit: """ Returns: @@ -90,6 +93,7 @@ def get_transshipment_and_hinterland_split(self) -> TransshipmentAndHinterlandSp hinterland_capacity=hinterland_capacity ) + @DataSummariesCache.cache_result def get_modal_split_for_hinterland( self, inbound: bool, diff --git a/conflowgen/previews/quay_side_throughput_preview.py b/conflowgen/previews/quay_side_throughput_preview.py new file mode 100644 index 00000000..5dd858c9 --- /dev/null +++ b/conflowgen/previews/quay_side_throughput_preview.py @@ -0,0 +1,82 @@ +import typing +from abc import ABC +from datetime import datetime + +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.previews.container_flow_by_vehicle_type_preview import ContainerFlowByVehicleTypePreview +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_validators import 
validate_distribution_with_one_dependent_variable +from conflowgen.previews.abstract_preview import AbstractPreview +from conflowgen.descriptive_datatypes import InboundAndOutboundContainerVolume, ContainerVolume + + +class QuaySideThroughputPreview(AbstractPreview, ABC): + """ + This preview calculates the quayside throughput based on the schedules. + + The preview returns a data structure that can be used for generating reports (e.g., in text or as a figure). The + preview is intended to provide an estimate of the quayside throughput for the given inputs. + """ + + QUAY_SIDE_VEHICLES = { + ModeOfTransport.deep_sea_vessel, + ModeOfTransport.feeder, + # barges are counted as hinterland here + } + + def __init__(self, start_date: datetime.date, end_date: datetime.date, transportation_buffer: float): + super().__init__(start_date, end_date, transportation_buffer) + self.container_flow_by_vehicle_type = ( + ContainerFlowByVehicleTypePreview( + self.start_date, + self.end_date, + self.transportation_buffer, + ) + ) + + @DataSummariesCache.cache_result + def hypothesize_with_mode_of_transport_distribution( + self, + mode_of_transport_distribution: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]] + ): + validate_distribution_with_one_dependent_variable( + mode_of_transport_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True + ) + self.container_flow_by_vehicle_type.hypothesize_with_mode_of_transport_distribution( + mode_of_transport_distribution + ) + + @DataSummariesCache.cache_result + def get_quay_side_throughput(self) -> InboundAndOutboundContainerVolume: + inbound_to_outbound_flow = self.container_flow_by_vehicle_type.get_inbound_to_outbound_flow() + + quayside_inbound_container_volume_in_teu: int = 0 + quayside_outbound_container_volume_in_teu: int = 0 + + inbound_vehicle_type: ModeOfTransport + outbound_vehicle_type: ModeOfTransport + for inbound_vehicle_type, to_outbound_flow in inbound_to_outbound_flow.items(): + for 
outbound_vehicle_type, container_volume in to_outbound_flow.items(): + if inbound_vehicle_type in self.QUAY_SIDE_VEHICLES: + quayside_inbound_container_volume_in_teu += container_volume + if outbound_vehicle_type in self.QUAY_SIDE_VEHICLES: + quayside_outbound_container_volume_in_teu += container_volume + + teu_factor = ContainerLengthDistributionRepository().get_teu_factor() + + epsilon = 0.1 + + result = InboundAndOutboundContainerVolume( + inbound=ContainerVolume( + teu=quayside_inbound_container_volume_in_teu, + containers=int((quayside_inbound_container_volume_in_teu + epsilon) / teu_factor) + ), + outbound=ContainerVolume( + teu=quayside_outbound_container_volume_in_teu, + containers=int((quayside_outbound_container_volume_in_teu + epsilon) / teu_factor) + ) + ) + + return result diff --git a/conflowgen/previews/quay_side_throughput_preview_report.py b/conflowgen/previews/quay_side_throughput_preview_report.py new file mode 100644 index 00000000..a1822bba --- /dev/null +++ b/conflowgen/previews/quay_side_throughput_preview_report.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from typing import Dict + +import pandas as pd + +from conflowgen.descriptive_datatypes import InboundAndOutboundContainerVolume +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.previews.quay_side_throughput_preview import QuaySideThroughputPreview +from conflowgen.reporting import AbstractReportWithMatplotlib + + +class QuaySideThroughputPreviewReport(AbstractReportWithMatplotlib): + """ + This preview report takes the data structure as generated by + :class:`.QuaySideThroughputPreview` + and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example QuaySideThroughputPreviewReport `_. + """ + + report_description = """ + This report previews the inbound and outbound traffic at the quay side. 
+ This is only an estimate, additional restrictions (such as the dwell time restrictions) might further + reduce the number of containers one vehicle can in fact pick up for its outbound journey. + """ + + def __init__(self): + super().__init__() + self._df = None + self.preview = QuaySideThroughputPreview( + start_date=self.start_date, + end_date=self.end_date, + transportation_buffer=self.transportation_buffer + ) + + def hypothesize_with_mode_of_transport_distribution( + self, + mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] + ): + self.preview.hypothesize_with_mode_of_transport_distribution(mode_of_transport_distribution) + + def get_report_as_text( + self, **kwargs + ) -> str: + assert len(kwargs) == 0, f"No keyword arguments supported for {self.__class__.__name__}" + + quay_side_throughput = self._get_quay_side_throughput() + + # create string representation + report = "\n" + report += "discharged (in containers) " + report += "loaded (in containers)" + report += "\n" + + report += f"{int(round(quay_side_throughput.inbound.containers)):>26} " + report += f"{int(round(quay_side_throughput.outbound.containers)):>22}" + report += "\n" + + report += "(rounding errors might exist)\n" + return report + + def get_report_as_graph(self, **kwargs) -> object: + assert len(kwargs) == 0, f"No keyword arguments supported for {self.__class__.__name__}" + + quay_side_throughput = self._get_quay_side_throughput() + + series = pd.Series({ + "Number discharged containers": quay_side_throughput.inbound.containers, + "Number loaded containers": quay_side_throughput.outbound.containers + }, name="Quayside Throughput") + + ax = series.plot.barh() + + ax.bar_label(ax.containers[0]) + + ax.set_xlabel("Number containers") + + return ax + + def _get_quay_side_throughput(self) -> InboundAndOutboundContainerVolume: + assert self.start_date is not None + assert self.end_date is not None + assert self.transportation_buffer is not None + 
self.preview.update( + start_date=self.start_date, + end_date=self.end_date, + transportation_buffer=self.transportation_buffer + ) + # gather data + quay_side_throughput = self.preview.get_quay_side_throughput() + return quay_side_throughput diff --git a/conflowgen/previews/truck_gate_throughput_preview.py b/conflowgen/previews/truck_gate_throughput_preview.py new file mode 100644 index 00000000..54ee0ee7 --- /dev/null +++ b/conflowgen/previews/truck_gate_throughput_preview.py @@ -0,0 +1,107 @@ +import typing +from abc import ABC +from builtins import bool +from datetime import datetime + +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ + InboundAndOutboundVehicleCapacityPreview +from conflowgen.api.truck_arrival_distribution_manager import TruckArrivalDistributionManager +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable +from conflowgen.previews.abstract_preview import AbstractPreview +from conflowgen.descriptive_datatypes import ContainersTransportedByTruck + + +class NumberTrucksPerWeek(typing.NamedTuple): + #: The number of containers moved on the inbound journey + inbound: float + + #: The number of containers moved on the outbound journey + outbound: float + + +class TruckGateThroughputPreview(AbstractPreview, ABC): + """ + This preview shows the distribution of truck traffic throughout a given week + + The preview returns a data structure that can be used for generating reports (e.g., in text or as a figure). The + preview is intended to provide an estimate of the truck gate throughput for the given inputs. It does not + consider all factors that may impact the actual truck gate throughput. 
+ """ + + def __init__(self, start_date: datetime.date, end_date: datetime.date, transportation_buffer: float): + super().__init__(start_date, end_date, transportation_buffer) + self.inbound_and_outbound_vehicle_capacity_preview = ( + InboundAndOutboundVehicleCapacityPreview( + self.start_date, + self.end_date, + self.transportation_buffer, + ) + ) + + @DataSummariesCache.cache_result + def hypothesize_with_mode_of_transport_distribution( + self, + mode_of_transport_distribution: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]] + ): + validate_distribution_with_one_dependent_variable( + mode_of_transport_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True + ) + self.inbound_and_outbound_vehicle_capacity_preview.hypothesize_with_mode_of_transport_distribution( + mode_of_transport_distribution) + + @DataSummariesCache.cache_result + def _get_total_trucks(self) -> ContainersTransportedByTruck: + # Calculate the truck capacity for export containers using the inbound container capacities + inbound_used_and_maximum_capacity = self.inbound_and_outbound_vehicle_capacity_preview.\ + get_inbound_capacity_of_vehicles() + outbound_used_and_maximum_capacity = self.inbound_and_outbound_vehicle_capacity_preview.\ + get_outbound_capacity_of_vehicles() + + # Get the total truck capacity in TEU + inbound_containers_transported_by_truck = inbound_used_and_maximum_capacity.containers[ModeOfTransport.truck] + outbound_containers_transported_by_truck = outbound_used_and_maximum_capacity.used.containers[ + ModeOfTransport.truck] + + total_containers_transported_by_truck = ContainersTransportedByTruck( + inbound=inbound_containers_transported_by_truck, + outbound=outbound_containers_transported_by_truck + ) + + return total_containers_transported_by_truck + + @DataSummariesCache.cache_result + def _get_number_of_trucks_per_week(self) -> NumberTrucksPerWeek: + # Calculate average number of trucks per week + num_weeks = (self.end_date - 
self.start_date).days / 7 + total_trucks = self._get_total_trucks() + inbound_trucks_per_week = total_trucks.inbound / num_weeks + outbound_trucks_per_week = total_trucks.outbound / num_weeks + + total_weekly_trucks = NumberTrucksPerWeek( + inbound=inbound_trucks_per_week, + outbound=outbound_trucks_per_week + ) + + return total_weekly_trucks + + @DataSummariesCache.cache_result + def get_weekly_truck_arrivals(self, inbound: bool = True, outbound: bool = True) -> typing.Dict[int, int]: + + assert inbound or outbound, "At least one of inbound or outbound must be True" + + # Get truck arrival distribution + truck_arrival_probability_distribution = TruckArrivalDistributionManager().\ + get_truck_arrival_distribution() + + truck_arrival_integer_distribution = {} + weekly_trucks = self._get_number_of_trucks_per_week() + for time, probability in truck_arrival_probability_distribution.items(): + truck_arrival_integer_distribution[time] = 0 + if inbound: + truck_arrival_integer_distribution[time] += int(round(probability * weekly_trucks.inbound)) + if outbound: + truck_arrival_integer_distribution[time] += int(round(probability * weekly_trucks.outbound)) + + return truck_arrival_integer_distribution diff --git a/conflowgen/previews/truck_gate_throughput_preview_report.py b/conflowgen/previews/truck_gate_throughput_preview_report.py new file mode 100644 index 00000000..16b1aa88 --- /dev/null +++ b/conflowgen/previews/truck_gate_throughput_preview_report.py @@ -0,0 +1,134 @@ +from abc import ABC +import typing +import pandas as pd + +import matplotlib +import matplotlib.ticker +import matplotlib.axes +from matplotlib import pyplot as plt + +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.previews.truck_gate_throughput_preview import TruckGateThroughputPreview +from conflowgen.reporting import AbstractReportWithMatplotlib + + +class TruckGateThroughputPreviewReport(AbstractReportWithMatplotlib, ABC): + """ + This preview 
report takes the data structure as generated by + :class:`.TruckGateThroughputPreview` + and creates a comprehensible representation for the user, either as text or as a graph. + The visual and table are expected to approximately look like in the + `example TruckGateThroughputPreviewReport `_. + """ + + report_description = """This report previews the average truck gate throughput throughout the week as defined by + schedules and input distributions.""" + + def __init__(self): + super().__init__() + self.preview = TruckGateThroughputPreview( + start_date=self.start_date, + end_date=self.end_date, + transportation_buffer=self.transportation_buffer + ) + + def hypothesize_with_mode_of_transport_distribution( + self, + mode_of_transport_distribution: typing.Dict[ModeOfTransport, typing.Dict[ModeOfTransport, float]] + ): + self.preview.hypothesize_with_mode_of_transport_distribution(mode_of_transport_distribution) + + def _get_updated_preview(self) -> TruckGateThroughputPreview: + assert self.start_date is not None + assert self.end_date is not None + assert self.transportation_buffer is not None + self.preview.update( + start_date=self.start_date, + end_date=self.end_date, + transportation_buffer=self.transportation_buffer + ) + return self.preview + + def get_report_as_text(self, inbound: bool = True, outbound: bool = True, **kwargs) -> str: + truck_distribution = self.preview.get_weekly_truck_arrivals(inbound, outbound) + data = [ + {'minimum': float('inf'), 'maximum': 0, 'average': 0.0, 'sum': 0} + for _ in range(8) # Monday to Sunday plus week total + ] + + fewest_trucks_in_a_day = float('inf') + fewest_trucks_day = '' + most_trucks_in_a_day = 0 + most_trucks_day = '' + average_trucks_in_a_day = 0.0 + + count = 0 + # Find min, max, and average for each day of the week + for time in sorted(truck_distribution): + day = int(time // 24) + if day == 0: + count += 1 # Count the number of data points in a single day + data[day]['minimum'] = min(data[day]['minimum'], 
truck_distribution[time]) + data[day]['maximum'] = max(data[day]['maximum'], truck_distribution[time]) + data[day]['sum'] += truck_distribution[time] + + # Calculate average + for day in range(7): + data[day]['average'] = data[day]['sum'] / count + data[7]['minimum'] = min(data[7]['minimum'], data[day]['minimum']) + data[7]['maximum'] = max(data[7]['maximum'], data[day]['maximum']) + data[7]['sum'] += data[day]['sum'] + if data[day]['sum'] < fewest_trucks_in_a_day: + fewest_trucks_in_a_day = data[day]['sum'] + fewest_trucks_day = self.days_of_the_week[day] + if data[day]['sum'] > most_trucks_in_a_day: + most_trucks_in_a_day = data[day]['sum'] + most_trucks_day = self.days_of_the_week[day] + most_trucks_in_a_day = max(most_trucks_in_a_day, data[day]['sum']) + average_trucks_in_a_day += data[day]['sum'] + + data[7]['average'] = data[7]['sum'] / (count * 7) + average_trucks_in_a_day /= 7 + + # Create a table with pandas for hourly view + df = pd.DataFrame(data, index=self.days_of_the_week + ['Total']) + df = df.round() + df = df.astype(int) + + df = df.rename_axis('Day of the week') + df = df.rename(columns={ + 'minimum': 'Minimum (trucks/h)', 'maximum': 'Maximum (trucks/h)', 'average': 'Average (trucks/h)', + 'sum': 'Sum (trucks/24h)'}) + + table_string = "Hourly view:\n" + df.to_string() + "\n" + table_string += \ + "Fewest trucks in a day: " + str(int(fewest_trucks_in_a_day)) + " on " + fewest_trucks_day + "\n" + table_string += \ + "Most trucks in a day: " + str(int(most_trucks_in_a_day)) + " on " + most_trucks_day + "\n" + table_string += \ + "Average trucks per day: " + str(int(average_trucks_in_a_day)) + + return table_string + + def get_report_as_graph(self, inbound: bool = True, outbound: bool = True, **kwargs) -> matplotlib.axes.Axes: + # Retrieve the truck distribution + truck_distribution = self.preview.get_weekly_truck_arrivals(inbound, outbound) + + # Plot the truck arrival distribution + hour_in_week, value = 
zip(*list(sorted(truck_distribution.items()))) + weekday_in_week = [x / 24 + 1 for x in hour_in_week] + + fig, ax = plt.subplots(figsize=(15, 3)) + plt.plot(weekday_in_week, value) + plt.xlim([1, 7]) # plot from Monday to Sunday + ax.xaxis.grid(True, which="minor", color="lightgray") # every hour + ax.xaxis.grid(True, which="major", color="k") # every day + ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(1 / 24)) # every hour + + plt.title("Expected truck arrival pattern") + ax.set_xticks(list(range(1, 8))) # every day + ax.set_xticklabels(self.days_of_the_week) + plt.xlabel("Week day") + plt.ylabel("Number of trucks") + + return ax diff --git a/conflowgen/previews/vehicle_capacity_exceeded_preview.py b/conflowgen/previews/vehicle_capacity_exceeded_preview.py index f54f697b..9e003abb 100644 --- a/conflowgen/previews/vehicle_capacity_exceeded_preview.py +++ b/conflowgen/previews/vehicle_capacity_exceeded_preview.py @@ -2,6 +2,7 @@ import datetime from typing import Dict, NamedTuple +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable from conflowgen.previews.abstract_preview import AbstractPreview from conflowgen.previews.container_flow_by_vehicle_type_preview import \ @@ -74,6 +75,7 @@ def __init__( transportation_buffer=transportation_buffer ) + @DataSummariesCache.cache_result def hypothesize_with_mode_of_transport_distribution( self, mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] @@ -88,6 +90,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution ) + @DataSummariesCache.cache_result def compare( self ) -> Dict[ModeOfTransport, RequiredAndMaximumCapacityComparison]: diff --git a/conflowgen/reporting/__init__.py b/conflowgen/reporting/__init__.py index 2f22a1dc..3d2f71b4 100644 --- a/conflowgen/reporting/__init__.py +++ 
b/conflowgen/reporting/__init__.py @@ -36,6 +36,9 @@ class AbstractReport(abc.ABC): ModeOfTransport.truck ] + #: The days of the week + days_of_the_week = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] + @property @abc.abstractmethod def report_description(self) -> str: @@ -141,7 +144,7 @@ def show_report_as_graph(self, **kwargs) -> None: kwargs.pop("static", None) kwargs.pop("display_as_ipython_svg", None) - with plt.style.context('seaborn-colorblind'): + with plt.style.context('seaborn-v0_8-colorblind'): self.get_report_as_graph(**kwargs) plt.show(block=True) diff --git a/conflowgen/reporting/auto_reporter.py b/conflowgen/reporting/auto_reporter.py index 6ecd115a..957a3ab6 100644 --- a/conflowgen/reporting/auto_reporter.py +++ b/conflowgen/reporting/auto_reporter.py @@ -61,6 +61,7 @@ def present_reports(self, reports: typing.Iterable[typing.Type[AbstractReport]]) ) else: report_as_text = report_instance.get_report_as_text() + assert report_as_text, "Report should not be empty" self.output.display_verbatim(report_as_text) if self.as_graph: try: diff --git a/conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py b/conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py index 2d04ba11..daaf4f01 100644 --- a/conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py +++ b/conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py @@ -2,6 +2,7 @@ from conflowgen.analyses.truck_gate_throughput_analysis_report import TruckGateThroughputAnalysisReport from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.arrival_information import TruckArrivalInformationForPickup, \ TruckArrivalInformationForDelivery from conflowgen.domain_models.container import Container @@ -92,7 +93,8 @@ def setUp(self) -> None: 
ModeOfTransportDistribution, Destination, ContainerFlowGenerationProperties, - Train + Train, + RandomSeedStore ]) mode_of_transport_distribution_seeder.seed() ContainerFlowGenerationProperties.create( diff --git a/conflowgen/tests/analyses/test_yard_capacity_analysis.py b/conflowgen/tests/analyses/test_yard_capacity_analysis.py index c908a49c..08f55dc6 100644 --- a/conflowgen/tests/analyses/test_yard_capacity_analysis.py +++ b/conflowgen/tests/analyses/test_yard_capacity_analysis.py @@ -37,7 +37,7 @@ def setUp(self) -> None: def test_with_no_data(self): """If no schedules are provided, no capacity is needed""" - empty_yard = self.analysis.get_used_yard_capacity_over_time() + empty_yard = self.analysis.get_used_yard_capacity_over_time().teu self.assertEqual(empty_yard, {}) def test_with_single_container(self): @@ -80,11 +80,11 @@ def test_with_single_container(self): picked_up_by_truck=truck ) - used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=False) + used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=False).teu self.assertEqual(len(used_yard_over_time), 28) self.assertSetEqual(set(used_yard_over_time.values()), {0, 1}) - used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=True) + used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=True).teu self.assertEqual(len(used_yard_over_time), 27) self.assertSetEqual(set(used_yard_over_time.values()), {0, 1}) @@ -147,11 +147,11 @@ def test_with_two_containers(self): picked_up_by_truck=truck_2 ) - used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=False) + used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=False).teu self.assertEqual(len(used_yard_over_time), 28) self.assertSetEqual(set(used_yard_over_time.values()), {0, 1, 3}) - used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=True) + 
used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=True).teu self.assertEqual(len(used_yard_over_time), 27) self.assertSetEqual(set(used_yard_over_time.values()), {0, 1, 3}) @@ -194,13 +194,13 @@ def test_with_container_group(self): picked_up_by_large_scheduled_vehicle=feeder_lsv_2 ) - used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=False) + used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=False).teu self.assertEqual(len(used_yard_over_time), 75) self.assertSetEqual(set(used_yard_over_time.values()), {0, 20}) self.assertIn(now.replace(minute=0, second=0, microsecond=0), set(used_yard_over_time.keys())) self.assertListEqual(list(used_yard_over_time.values()), [0] + [20] * 73 + [0]) - used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=True) + used_yard_over_time = self.analysis.get_used_yard_capacity_over_time(smoothen_peaks=True).teu self.assertEqual(len(used_yard_over_time), 74) self.assertSetEqual(set(used_yard_over_time.values()), {0, 20}) self.assertIn(now.replace(minute=0, second=0, microsecond=0), set(used_yard_over_time.keys())) diff --git a/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py b/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py index 467bf053..01e66442 100644 --- a/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py +++ b/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py @@ -1,8 +1,22 @@ import unittest import unittest.mock -from conflowgen import ContainerDwellTimeDistributionManager +from conflowgen.api.container_dwell_time_distribution_manager import ContainerDwellTimeDistributionManager +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution +from conflowgen.domain_models.distribution_models.storage_requirement_distribution import 
StorageRequirementDistribution from conflowgen.domain_models.distribution_seeders import container_dwell_time_distribution_seeder +from conflowgen.domain_models.distribution_seeders import storage_requirement_distribution_seeder + +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db class TestContainerDwellTimeDistributionManager(unittest.TestCase): @@ -10,8 +24,70 @@ class TestContainerDwellTimeDistributionManager(unittest.TestCase): SAMPLE_DISTRIBUTION = container_dwell_time_distribution_seeder.DEFAULT_CONTAINER_DWELL_TIME_DISTRIBUTIONS def setUp(self) -> None: + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerDwellTimeDistribution, + ContainerLengthDistribution, + StorageRequirementDistribution + ]) + self.container_dwell_time_distribution_manager = ContainerDwellTimeDistributionManager() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + 
ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + + container_dwell_time_distribution_manager = ContainerDwellTimeDistributionManager() + container_dwell_time_distribution_manager.set_container_dwell_time_distribution(self.SAMPLE_DISTRIBUTION) + + storage_requirement_distribution_seeder.seed() + def test_get_container_dwell_time_distributions(self): with unittest.mock.patch.object( self.container_dwell_time_distribution_manager.container_dwell_time_distribution_repository, diff --git a/conflowgen/tests/api/test_container_flow_generation_manager.py b/conflowgen/tests/api/test_container_flow_generation_manager.py index 592e7403..62c03e3c 100644 --- a/conflowgen/tests/api/test_container_flow_generation_manager.py +++ b/conflowgen/tests/api/test_container_flow_generation_manager.py @@ -4,6 +4,7 @@ from conflowgen.api.container_flow_generation_manager import ContainerFlowGenerationManager from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from 
conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule @@ -18,7 +19,8 @@ def setUp(self) -> None: sqlite_db.create_tables([ ContainerFlowGenerationProperties, ModeOfTransportDistribution, - Schedule + Schedule, + RandomSeedStore ]) mode_of_transport_distribution_seeder.seed() self.container_flow_generation_manager = ContainerFlowGenerationManager() diff --git a/conflowgen/tests/api/test_truck_arrival_distribution_manager.py b/conflowgen/tests/api/test_truck_arrival_distribution_manager.py index 2ed2ae5b..8fd0b868 100644 --- a/conflowgen/tests/api/test_truck_arrival_distribution_manager.py +++ b/conflowgen/tests/api/test_truck_arrival_distribution_manager.py @@ -5,7 +5,6 @@ class TestTruckArrivalDistributionManager(unittest.TestCase): - ARRIVAL_DISTRIBUTION = { 3: .2, 4: .8 @@ -24,10 +23,356 @@ def test_get_truck_arrival_distributions(self): self.assertEqual(distribution, self.ARRIVAL_DISTRIBUTION) def test_set_truck_arrival_distributions(self): - with unittest.mock.patch.object( self.truck_arrival_distribution_manager.truck_arrival_distribution_repository, 'set_distribution', return_value=None) as mock_method: self.truck_arrival_distribution_manager.set_truck_arrival_distribution(self.ARRIVAL_DISTRIBUTION) mock_method.assert_called_once_with(self.ARRIVAL_DISTRIBUTION) + + def test_set_truck_arrival_distribution_with_half_hour_windows(self): + tad = { + 0.0: 0.0, + 0.5: 0.0, + 1.0: 0.0, + 1.5: 0.0, + 2.0: 0.0, + 2.5: 0.0, + 3.0: 0.0, + 3.5: 0.0, + 4.0: 0.0, + 4.5: 0.0, + 5.0: 0.0, + 5.5: 0.0, + 6.0: 0.0, + 6.5: 0.0, + 7.0: 0.0, + 7.5: 0.0, + 8.0: 0.005847356259863457, + 8.5: 0.005847356259863457, + 9.0: 0.006884424813828916, + 9.5: 0.006884424813828916, + 10.0: 
0.008240784915529742, + 10.5: 0.008240784915529742, + 11.0: 0.010754542541876863, + 11.5: 0.010754542541876863, + 12.0: 0.009065614385411223, + 12.5: 0.009065614385411223, + 13.0: 0.00956682661232754, + 13.5: 0.00956682661232754, + 14.0: 0.009676877930280697, + 14.5: 0.009676877930280697, + 15.0: 0.007198538886305298, + 15.5: 0.007198538886305298, + 16.0: 0.008977906910623057, + 16.5: 0.008977906910623057, + 17.0: 0.0069943804588238085, + 17.5: 0.0069943804588238085, + 18.0: 0.005381820143341635, + 18.5: 0.005381820143341635, + 19.0: 0.004515467462119917, + 19.5: 0.004515467462119917, + 20.0: 0.003327233560870352, + 20.5: 0.0, + 21.0: 0.0, + 21.5: 0.0, + 22.0: 0.0, + 22.5: 0.0, + 23.0: 0.0, + 23.5: 0.0, + 24.0: 0.0, + 24.5: 0.0, + 25.0: 0.0, + 25.5: 0.0, + 26.0: 0.0, + 26.5: 0.0, + 27.0: 0.0, + 27.5: 0.0, + 28.0: 0.0, + 28.5: 0.0, + 29.0: 0.0, + 29.5: 0.0, + 30.0: 0.0, + 30.5: 0.0, + 31.0: 0.0, + 31.5: 0.0, + 32.0: 0.006041312488159616, + 32.5: 0.006041312488159616, + 33.0: 0.007328821183642256, + 33.5: 0.007328821183642256, + 34.0: 0.008852314535559925, + 34.5: 0.008852314535559925, + 35.0: 0.00838027636330664, + 35.5: 0.00838027636330664, + 36.0: 0.009907439316224021, + 36.5: 0.009907439316224021, + 37.0: 0.009169313456252479, + 37.5: 0.009169313456252479, + 38.0: 0.009614722815201775, + 38.5: 0.009614722815201775, + 39.0: 0.00787503623358249, + 39.5: 0.00787503623358249, + 40.0: 0.007824251062007128, + 40.5: 0.007824251062007128, + 41.0: 0.00705486596186395, + 41.5: 0.00705486596186395, + 42.0: 0.004398697886964745, + 42.5: 0.004398697886964745, + 43.0: 0.003750005820315642, + 43.5: 0.003750005820315642, + 44.0: 0.0034171233780298876, + 44.5: 0.0, + 45.0: 0.0, + 45.5: 0.0, + 46.0: 0.0, + 46.5: 0.0, + 47.0: 0.0, + 47.5: 0.0, + 48.0: 0.0, + 48.5: 0.0, + 49.0: 0.0, + 49.5: 0.0, + 50.0: 0.0, + 50.5: 0.0, + 51.0: 0.0, + 51.5: 0.0, + 52.0: 0.0, + 52.5: 0.0, + 53.0: 0.0, + 53.5: 0.0, + 54.0: 0.0, + 54.5: 0.0, + 55.0: 0.0, + 55.5: 0.0, + 56.0: 0.006745061258333995, + 
56.5: 0.006745061258333995, + 57.0: 0.007679410701646271, + 57.5: 0.007679410701646271, + 58.0: 0.008482538433133749, + 58.5: 0.008482538433133749, + 59.0: 0.009062186684434759, + 59.5: 0.009062186684434759, + 60.0: 0.00909081220731496, + 60.5: 0.00909081220731496, + 61.0: 0.011583665479640732, + 61.5: 0.011583665479640732, + 62.0: 0.009624665427407022, + 62.5: 0.009624665427407022, + 63.0: 0.008408359880097303, + 63.5: 0.008408359880097303, + 64.0: 0.007806016845642667, + 64.5: 0.007806016845642667, + 65.0: 0.006535274260445081, + 65.5: 0.006535274260445081, + 66.0: 0.0057751620805421774, + 66.5: 0.0057751620805421774, + 67.0: 0.004285593715597633, + 67.5: 0.004285593715597633, + 68.0: 0.002796016187253771, + 68.5: 0.0, + 69.0: 0.0, + 69.5: 0.0, + 70.0: 0.0, + 70.5: 0.0, + 71.0: 0.0, + 71.5: 0.0, + 72.0: 0.0, + 72.5: 0.0, + 73.0: 0.0, + 73.5: 0.0, + 74.0: 0.0, + 74.5: 0.0, + 75.0: 0.0, + 75.5: 0.0, + 76.0: 0.0, + 76.5: 0.0, + 77.0: 0.0, + 77.5: 0.0, + 78.0: 0.0, + 78.5: 0.0, + 79.0: 0.0, + 79.5: 0.0, + 80.0: 0.00668500119579781, + 80.5: 0.00668500119579781, + 81.0: 0.008059864990389558, + 81.5: 0.008059864990389558, + 82.0: 0.009857405383896607, + 82.5: 0.009857405383896607, + 83.0: 0.00989603470422583, + 83.5: 0.00989603470422583, + 84.0: 0.012743096638284358, + 84.5: 0.012743096638284358, + 85.0: 0.010655364933628404, + 85.5: 0.010655364933628404, + 86.0: 0.010917545311219544, + 86.5: 0.010917545311219544, + 87.0: 0.010015960025975905, + 87.5: 0.010015960025975905, + 88.0: 0.00955277551986375, + 88.5: 0.00955277551986375, + 89.0: 0.007851974044966025, + 89.5: 0.007851974044966025, + 90.0: 0.005257200592342844, + 90.5: 0.005257200592342844, + 91.0: 0.004092778923079977, + 91.5: 0.004092778923079977, + 92.0: 0.003939305601388119, + 92.5: 0.0, + 93.0: 0.0, + 93.5: 0.0, + 94.0: 0.0, + 94.5: 0.0, + 95.0: 0.0, + 95.5: 0.0, + 96.0: 0.0, + 96.5: 0.0, + 97.0: 0.0, + 97.5: 0.0, + 98.0: 0.0, + 98.5: 0.0, + 99.0: 0.0, + 99.5: 0.0, + 100.0: 0.0, + 100.5: 0.0, + 101.0: 0.0, + 
101.5: 0.0, + 102.0: 0.0, + 102.5: 0.0, + 103.0: 0.0, + 103.5: 0.0, + 104.0: 0.006851392669705531, + 104.5: 0.006851392669705531, + 105.0: 0.010274798552864527, + 105.5: 0.010274798552864527, + 106.0: 0.011685874595427376, + 106.5: 0.011685874595427376, + 107.0: 0.01058627130541297, + 107.5: 0.01058627130541297, + 108.0: 0.010527093567814597, + 108.5: 0.010527093567814597, + 109.0: 0.012387483771322302, + 109.5: 0.012387483771322302, + 110.0: 0.010640715187610906, + 110.5: 0.010640715187610906, + 111.0: 0.00882546777802846, + 111.5: 0.00882546777802846, + 112.0: 0.008153561979994874, + 112.5: 0.008153561979994874, + 113.0: 0.006259105707922169, + 113.5: 0.006259105707922169, + 114.0: 0.005860855283263588, + 114.5: 0.005860855283263588, + 115.0: 0.0028579368743328936, + 115.5: 0.0028579368743328936, + 116.0: 0.002756732334354128, + 116.5: 0.0, + 117.0: 0.0, + 117.5: 0.0, + 118.0: 0.0, + 118.5: 0.0, + 119.0: 0.0, + 119.5: 0.0, + 120.0: 0.0, + 120.5: 0.0, + 121.0: 0.0, + 121.5: 0.0, + 122.0: 0.0, + 122.5: 0.0, + 123.0: 0.0, + 123.5: 0.0, + 124.0: 0.0, + 124.5: 0.0, + 125.0: 0.0, + 125.5: 0.0, + 126.0: 0.0, + 126.5: 0.0, + 127.0: 0.0, + 127.5: 0.0, + 128.0: 0.0009936554715091123, + 128.5: 0.0009936554715091123, + 129.0: 0.0009210861041335698, + 129.5: 0.0009210861041335698, + 130.0: 0.0004654632814999111, + 130.5: 0.0004654632814999111, + 131.0: 0.0003279024939137455, + 131.5: 0.0003279024939137455, + 132.0: 0.00029778216297555397, + 132.5: 0.00029778216297555397, + 133.0: 0.0, + 133.5: 0.0, + 134.0: 0.0, + 134.5: 0.0, + 135.0: 0.0, + 135.5: 0.0, + 136.0: 0.0, + 136.5: 0.0, + 137.0: 0.0, + 137.5: 0.0, + 138.0: 0.0, + 138.5: 0.0, + 139.0: 0.0, + 139.5: 0.0, + 140.0: 0.0, + 140.5: 0.0, + 141.0: 0.0, + 141.5: 0.0, + 142.0: 0.0, + 142.5: 0.0, + 143.0: 0.0, + 143.5: 0.0, + 144.0: 0.0, + 144.5: 0.0, + 145.0: 0.0, + 145.5: 0.0, + 146.0: 0.0, + 146.5: 0.0, + 147.0: 0.0, + 147.5: 0.0, + 148.0: 0.0, + 148.5: 0.0, + 149.0: 0.0, + 149.5: 0.0, + 150.0: 0.0, + 150.5: 0.0, + 151.0: 
0.0, + 151.5: 0.0, + 152.0: 0.0, + 152.5: 0.0, + 153.0: 0.0, + 153.5: 0.0, + 154.0: 0.0, + 154.5: 0.0, + 155.0: 0.0, + 155.5: 0.0, + 156.0: 0.0, + 156.5: 0.0, + 157.0: 0.0, + 157.5: 0.0, + 158.0: 0.0, + 158.5: 0.0, + 159.0: 0.0, + 159.5: 0.0, + 160.0: 0.0, + 160.5: 0.0, + 161.0: 0.0, + 161.5: 0.0, + 162.0: 0.0, + 162.5: 0.0, + 163.0: 0.0, + 163.5: 0.0, + 164.0: 0.0, + 164.5: 0.0, + 165.0: 0.0, + 165.5: 0.0, + 166.0: 0.0, + 166.5: 0.0, + 167.0: 0.0, + 167.5: 0.0 + } + + with unittest.mock.patch.object( + self.truck_arrival_distribution_manager.truck_arrival_distribution_repository, + 'set_distribution', + return_value=None) as mock_method: + self.truck_arrival_distribution_manager.set_truck_arrival_distribution(tad) + mock_method.assert_called_once_with(tad) diff --git a/conflowgen/tests/application_models/repositories/__init__.py b/conflowgen/tests/application/__init__.py similarity index 100% rename from conflowgen/tests/application_models/repositories/__init__.py rename to conflowgen/tests/application/__init__.py diff --git a/conflowgen/tests/application/reports/__init__.py b/conflowgen/tests/application/reports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/conflowgen/tests/application_models/repositories/test_container_stream_statistics_report.py b/conflowgen/tests/application/reports/test_container_flow_statistics_report.py similarity index 100% rename from conflowgen/tests/application_models/repositories/test_container_stream_statistics_report.py rename to conflowgen/tests/application/reports/test_container_flow_statistics_report.py diff --git a/conflowgen/tests/application/repositories/__init__.py b/conflowgen/tests/application/repositories/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/conflowgen/tests/application_models/repositories/test_container_stream_generation_properties_repository.py b/conflowgen/tests/application/repositories/test_container_flow_generation_properties_repository.py similarity index 100% 
rename from conflowgen/tests/application_models/repositories/test_container_stream_generation_properties_repository.py rename to conflowgen/tests/application/repositories/test_container_flow_generation_properties_repository.py diff --git a/conflowgen/tests/application/repositories/test_random_seed_store_repository.py b/conflowgen/tests/application/repositories/test_random_seed_store_repository.py new file mode 100644 index 00000000..6d8e455f --- /dev/null +++ b/conflowgen/tests/application/repositories/test_random_seed_store_repository.py @@ -0,0 +1,73 @@ +import time +import unittest + +from conflowgen.application.models.random_seed_store import RandomSeedStore +from conflowgen.application.repositories.random_seed_store_repository import RandomSeedStoreRepository +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestRandomSeedStoreRepository(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + RandomSeedStore + ]) + self.repository = RandomSeedStoreRepository() + + def test_get_empty_entry(self): + with self.assertLogs('conflowgen', level='DEBUG') as context: + random_seed = self.repository.get_random_seed("empty_entry", True) + self.assertIsInstance(random_seed, int) + self.assertEqual(len(context.output), 1) + logged_message = context.output[0] + self.assertRegex(logged_message, r"Randomly set seed \d+ for 'empty_entry'") + + def test_fix_existing_entry(self): + seed = int(time.time()) + with self.assertLogs('conflowgen', level='DEBUG') as context: + self.repository.fix_random_seed("fix_seed", seed, True) + self.assertEqual(len(context.output), 1) + logged_message = context.output[0] + self.assertRegex(logged_message, r"Set seed \d+ for 'fix_seed'") + + def test_reuse_existing_entry(self): + seed = int(time.time()) + RandomSeedStore.create( + name="reuse_existing", + random_seed=seed, + 
is_random=False + ) + random_seed = self.repository.get_random_seed("reuse_existing", False) + self.assertEqual(random_seed, seed) + + def test_do_not_reuse_existing_random_entry(self): + seed = int(time.time()) + RandomSeedStore.create( + name="reuse_existing", + random_seed=seed, + is_random=True + ) + random_seed = self.repository.get_random_seed("reuse_existing", False) + self.assertNotEqual(random_seed, seed) + + def test_fix_and_reuse_journey(self): + for _ in range(10): + seed = int(time.time()) + with self.assertLogs('conflowgen', level='DEBUG') as context: + self.repository.fix_random_seed("fix_and_reuse", seed, True) + self.assertEqual(len(context.output), 1) + logged_message = context.output[0] + self.assertRegex(logged_message, rf"Set seed {seed} for 'fix_and_reuse'") + + with self.assertLogs('conflowgen', level='DEBUG') as context: + retrieved_seed = self.repository.get_random_seed("fix_and_reuse", True) + self.assertEqual(len(context.output), 1) + logged_message = context.output[0] + self.assertRegex( + logged_message, + fr"Re-use seed {retrieved_seed} for 'fix_and_reuse'" + ) + + self.assertEqual(seed, retrieved_seed) diff --git a/conflowgen/tests/application/services/__init__.py b/conflowgen/tests/application/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/conflowgen/tests/application/services/test_average_container_dwell_time_calculator_service.py b/conflowgen/tests/application/services/test_average_container_dwell_time_calculator_service.py new file mode 100644 index 00000000..4b5b5228 --- /dev/null +++ b/conflowgen/tests/application/services/test_average_container_dwell_time_calculator_service.py @@ -0,0 +1 @@ +# TODO write a corresponding test! 
diff --git a/conflowgen/tests/application/services/test_export_container_flow_service.py b/conflowgen/tests/application/services/test_export_container_flow_service.py new file mode 100644 index 00000000..9a8a40cb --- /dev/null +++ b/conflowgen/tests/application/services/test_export_container_flow_service.py @@ -0,0 +1,2 @@ +# TODO write a corresponding test! +# In-memory operations are preferred over IO operations to prolong the life of our hard drives diff --git a/conflowgen/tests/application/services/test_inbound_and_outbound_vehicle_capacity_calculator_service.py b/conflowgen/tests/application/services/test_inbound_and_outbound_vehicle_capacity_calculator_service.py new file mode 100644 index 00000000..567cd154 --- /dev/null +++ b/conflowgen/tests/application/services/test_inbound_and_outbound_vehicle_capacity_calculator_service.py @@ -0,0 +1,3 @@ +# TODO write a corresponding test! +# This could be done by using the tests of InboundAndOutboundVehicleCapacityPreview here and then check in the preview +# with unittest mocks whether the arguments are properly passed on. 
diff --git a/conflowgen/tests/data_summaries/__init__.py b/conflowgen/tests/data_summaries/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/conflowgen/tests/data_summaries/test_data_summaries_cache.py b/conflowgen/tests/data_summaries/test_data_summaries_cache.py new file mode 100644 index 00000000..993cc97f --- /dev/null +++ b/conflowgen/tests/data_summaries/test_data_summaries_cache.py @@ -0,0 +1,540 @@ +import unittest +import datetime +from functools import wraps + +from conflowgen import ContainerLength, TruckArrivalDistributionManager, ModeOfTransport, TruckGateThroughputPreview +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestDataSummariesCache(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + self.now = datetime.datetime.now() + 
ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 1, + ContainerLength.forty_feet: 0, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=self.now, + end_date=self.now + datetime.timedelta(weeks=2) + ) # mostly use default values + arrival_distribution = { + 3: .2, + 4: .8 + } + truck_arrival_distribution_manager = TruckArrivalDistributionManager() + truck_arrival_distribution_manager.set_truck_arrival_distribution(arrival_distribution) + self.preview = TruckGateThroughputPreview( + start_date=self.now.date(), + end_date=(self.now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + self.cache = DataSummariesCache() # This is technically incorrect usage of the cache as it should never be + # instantiated, but it's the easiest way to test it + + def test_sanity(self): + # Define a function to be decorated + 
@DataSummariesCache.cache_result + # pylint: disable=invalid-name + def my_function(n): + return n ** 2 + + # Test case 1: Call the decorated function with argument 5 + result = my_function(5) + self.assertEqual(result, 25, "Result of 5^2 should be 25") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "There should be one cached result") + self.assertEqual(list(DataSummariesCache.cached_results.values())[0], 25, "Cached result should be 25") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'my_function': 1}, "Hit counter should be 1") + + # Test case 2: Call the decorated function with argument 5 again + # This should retrieve the cached result from the previous call + result = my_function(5) + self.assertEqual(result, 25, "Result of 5^2 should be 25") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "There should be one cached result") + self.assertEqual(list(DataSummariesCache.cached_results.values())[0], 25, "Cached result should be 25") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'my_function': 2}, "Hit counter should be 2") + + # Test case 3: Call the decorated function with argument 10 + result = my_function(10) + self.assertEqual(result, 100, "Result of 10^2 should be 100") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "There should be two cached results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 100 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 100") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'my_function': 3}, "Hit counter should be 3") + + def test_with_preview(self): + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + 
vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertDictEqual( + DataSummariesCache._hit_counter, + {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + 'get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 3, + 'get_outbound_capacity_of_vehicles': 2, + 'get_weekly_truck_arrivals': 1, + 'get_teu_factor': 5, + }, "Incorrect hit counter" + ) + + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertDictEqual( + DataSummariesCache._hit_counter, + {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + 'get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 3, + 'get_outbound_capacity_of_vehicles': 2, + 'get_weekly_truck_arrivals': 2, + 'get_teu_factor': 5, + }, + "Incorrect hit counter" + ) + # Only get_weekly_truck_arrivals should be called again as the other functions are cached + + def test_with_adjusted_preview(self): + # Create a preview, adjust input distribution, then create another preview + 
two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect") + self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertDictEqual( + DataSummariesCache._hit_counter, + {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + 'get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 3, + 'get_outbound_capacity_of_vehicles': 2, + 'get_weekly_truck_arrivals': 1, + 'get_teu_factor': 5, + }, "Incorrect hit counter") + + arrival_distribution = { + 3: .1, + 4: .4, + 5: .5 + } + truck_arrival_distribution_manager = TruckArrivalDistributionManager() + truck_arrival_distribution_manager.set_truck_arrival_distribution(arrival_distribution) + self.preview = TruckGateThroughputPreview( + start_date=self.now.date(), + end_date=(self.now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + preview = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(preview, {3: 6, 4: 24, 5: 30}, "New result is incorrect") + self.assertEqual( + len(DataSummariesCache.cached_results), 10, + "There should be 10 cached results, because the preview was adjusted") + self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and + {3: 6, 4: 24, 5: 30} in 
list(DataSummariesCache.cached_results.values()), + "Incorrect results cached") + # pylint: disable=protected-access + self.assertDictEqual( + DataSummariesCache._hit_counter, + {'_get_number_of_trucks_per_week': 1, + '_get_total_trucks': 1, + 'get_truck_capacity_for_export_containers': 2, + 'get_inbound_capacity_of_vehicles': 3, + 'get_outbound_capacity_of_vehicles': 2, + 'get_weekly_truck_arrivals': 1, + 'get_teu_factor': 5, + }, + "Incorrect hit counter" + ) + # Hit counter should be the same as before, because the preview was adjusted i.e. the cache was reset, and then + # we re-ran the same functions + + def test_cache_reset(self): + @DataSummariesCache.cache_result + def increment_counter(counter): + return counter + 1 + + # Check initial state + self.assertEqual(len(DataSummariesCache.cached_results), 0, "Initial cache should be empty") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {}, "Initial hit counter should be empty") + + # Call the function and check cache and hit counter + result = increment_counter(5) + self.assertEqual(result, 6, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(6 in list(DataSummariesCache.cached_results.values()), "Incorrect results cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'increment_counter': 1}, + "Hit counter should be 1 for 'increment_counter'") + + # Reset cache and check again + DataSummariesCache.reset_cache() + self.assertEqual(len(DataSummariesCache.cached_results), 0, "Cache should be empty after reset") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {}, "Hit counter should be empty after reset") + + def test_cache_with_different_function_args(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def add_numbers(a, b): + return a + b + + # Call the function with 
different arguments and check if the results are cached correctly + result1 = add_numbers(1, 2) + result2 = add_numbers(3, 4) + self.assertEqual(result1, 3, "Incorrect result returned") + self.assertEqual(result2, 7, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(3 in list(DataSummariesCache.cached_results.values()) and + 7 in list(DataSummariesCache.cached_results.values()), "Cached results should be 3 and 7") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add_numbers': 2}, + "Hit counter should be 2 for 'add_numbers'") + + # Call the function with the same arguments and check if the results are retrieved from the cache + result3 = add_numbers(1, 2) + self.assertEqual(result3, 3, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should still have two results") + self.assertTrue(3 in list(DataSummariesCache.cached_results.values()) and + 7 in list(DataSummariesCache.cached_results.values()), "Cached results should be 3 and 7") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add_numbers': 3}, + "Hit counter should be 3 for 'add_numbers'") + + def test_cache_with_different_functions(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def square(n): + return n ** 2 + + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def cube(n): + return n ** 3 + + # Call the functions and check if the results are cached correctly + result1 = square(5) + result2 = cube(5) + self.assertEqual(result1, 25, "Incorrect result returned") + self.assertEqual(result2, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in 
list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'square': 1, 'cube': 1}, + "Hit counter should be 1 for both 'square' and 'cube'") + + # Call the functions again and check if the results are retrieved from the cache + result3 = square(5) + result4 = cube(5) + self.assertEqual(result3, 25, "Incorrect result returned") + self.assertEqual(result4, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should still have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'square': 2, 'cube': 2}, + "Hit counter should be 2 for both 'square' and 'cube'") + + def test_cache_with_no_args(self): + @DataSummariesCache.cache_result + def get_constant(): + return 42 + + # Call the function and check if the result is cached + constant1 = get_constant() + self.assertEqual(constant1, 42, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(42 in list(DataSummariesCache.cached_results.values()), "Cached result should be 42") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'get_constant': 1}, + "Hit counter should be 1 for 'get_constant'") + + # Call the function again and check if the result is retrieved from the cache + constant2 = get_constant() + self.assertEqual(constant2, 42, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should still have one result") + self.assertTrue(42 in list(DataSummariesCache.cached_results.values()), "Cached result should still be 42") + # pylint: disable=protected-access + 
self.assertEqual(DataSummariesCache._hit_counter, {'get_constant': 2}, + "Hit counter should be 2 for 'get_constant'") + + def test_cache_with_default_args(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def power(n, p=2): + return n ** p + + # Call the function with and without default argument and check if the results are cached correctly + result1 = power(5) + result2 = power(5, 3) + self.assertEqual(result1, 25, "Incorrect result returned") + self.assertEqual(result2, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'power': 2}, "Hit counter should be 2 for 'power'") + + # Call the function with the same arguments and check if the results are retrieved from the cache + result3 = power(5) + result4 = power(5, 3) + self.assertEqual(result3, 25, "Incorrect result returned") + self.assertEqual(result4, 125, "Incorrect result returned") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should still have two results") + self.assertTrue(25 in list(DataSummariesCache.cached_results.values()) and + 125 in list(DataSummariesCache.cached_results.values()), "Cached results should be 25 and 125") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'power': 4}, "Hit counter should be 4 for 'power'") + + def test_docstring_preservation(self): + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def square(n): + """Return the square of a number.""" + return n ** 2 + + self.assertEqual(square.__doc__, "Return the square of a number.", "Docstring should be preserved") + + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + 
def cube(n): + """Return the cube of a number.""" + return n ** 3 + + self.assertEqual(cube.__doc__, "Return the cube of a number.", "Docstring should be preserved") + + def test_cache_none(self): + @DataSummariesCache.cache_result + def return_none(): + return None + + self.assertEqual(return_none(), None, "Function should return None") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(None in list(DataSummariesCache.cached_results.values()), "None should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_none': 1}) + + def test_cache_float(self): + @DataSummariesCache.cache_result + def return_float(): + return 3.14 + + self.assertEqual(return_float(), 3.14, "Function should return float") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(3.14 in list(DataSummariesCache.cached_results.values()), "Float should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_float': 1}) + + def test_cache_string(self): + @DataSummariesCache.cache_result + def return_string(): + return "hello" + + self.assertEqual(return_string(), "hello", "Function should return string") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue("hello" in list(DataSummariesCache.cached_results.values()), "String should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_string': 1}) + + def test_cache_list(self): + @DataSummariesCache.cache_result + def return_list(): + return [1, 2, 3] + + self.assertEqual(return_list(), [1, 2, 3], "Function should return list") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue([1, 2, 3] in list(DataSummariesCache.cached_results.values()), "List should 
be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_list': 1}) + + def test_cache_dictionary(self): + @DataSummariesCache.cache_result + def return_dictionary(): + return {"a": 1, "b": 2} + + self.assertEqual(return_dictionary(), {"a": 1, "b": 2}, "Function should return dictionary") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue({"a": 1, "b": 2} in list(DataSummariesCache.cached_results.values()), "Dictionary should be " + "cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_dictionary': 1}) + + def test_cache_custom_object(self): + class CustomObject: + pass + + @DataSummariesCache.cache_result + def return_custom_object(): + return CustomObject() + + self.assertIsInstance(return_custom_object(), CustomObject, "Function should return custom object") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertIsInstance(list(DataSummariesCache.cached_results.values())[0], CustomObject, + "Function should return an instance of CustomObject") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'return_custom_object': 1}) + + def test_nested_decorator(self): + # pylint: disable=invalid-name + def simple_decorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs) + + return wrapper + + @DataSummariesCache.cache_result + @simple_decorator + # pylint: disable=invalid-name + def add(a, b): + """Adds two numbers.""" + return a + b + + # Initial call + result = add(1, 2) + self.assertEqual(result, 3, "Function should return the sum of the two arguments") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(3 in list(DataSummariesCache.cached_results.values()), "Result should be cached") + # pylint: disable=protected-access + 
self.assertEqual(DataSummariesCache._hit_counter, {'add': 1}, "Cache should have one hit") + + # Repeated call + result = add(1, 2) + self.assertEqual(result, 3, "Function should return the sum of the two arguments (from cache)") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should still have one result") + self.assertTrue(3 in list(DataSummariesCache.cached_results.values()), "Result should still be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter, {'add': 2}, "Cache should have two hits") + + # Check function metadata + self.assertEqual(add.__name__, 'add', "Function name should be preserved") + self.assertEqual(add.__doc__.strip(), 'Adds two numbers.', "Docstring should be preserved") + + def test_class_methods(self): + class TestClass: + def __init__(self): + self.counter = 0 + + @DataSummariesCache.cache_result + # pylint: disable=invalid-name + def method(self, a, b): + """Adds two numbers and the instance counter.""" + self.counter = getattr(self, 'counter', 0) + 1 + return a + b + self.counter + + # Create instance and call method + instance = TestClass() + result = instance.method(1, 2) + self.assertEqual(result, 4, "Method should return the sum of the two arguments and the counter") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result") + self.assertTrue(4 in list(DataSummariesCache.cached_results.values()), "Result should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter['method'], 1) + + # Repeated call + result = instance.method(1, 2) + self.assertEqual(result, 4, "Method should return the cached result") + self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should still have one result") + self.assertTrue(4 in list(DataSummariesCache.cached_results.values()), "Result should still be cached") + # pylint: disable=protected-access + 
self.assertEqual(DataSummariesCache._hit_counter['method'], 2) + + # Call with different instance + another_instance = TestClass() + result = another_instance.method(1, 2) + self.assertEqual(result, 4, + "Method should return the sum of the two arguments and the counter (from new instance)") + self.assertEqual(len(DataSummariesCache.cached_results), 2, "Cache should have two results") + self.assertTrue(4 in list(DataSummariesCache.cached_results.values()), "Both results should be cached") + # pylint: disable=protected-access + self.assertEqual(DataSummariesCache._hit_counter['method'], 3) diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py b/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py index 76f84036..c8174561 100644 --- a/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py +++ b/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py @@ -108,3 +108,53 @@ def test_set_container_lengths_which_do_not_add_up_to_one(self) -> None: ContainerLength.other: 0 } ) + + def test_get_teu_factor_all_twenty_feet(self): + ContainerLengthDistributionRepository.set_distribution({ + ContainerLength.twenty_feet: 1, + ContainerLength.forty_feet: 0, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + teu_factor = ContainerLengthDistributionRepository.get_teu_factor() + self.assertEqual(teu_factor, 1, "TEU factor should be 1 when all containers are 20 feet.") + + def test_get_teu_factor_when_half_of_containers_are_forty_feet(self): + ContainerLengthDistributionRepository.set_distribution( + { + ContainerLength.twenty_feet: 0.5, + ContainerLength.forty_feet: 0.5, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + } + ) + teu_factor = ContainerLengthDistributionRepository.get_teu_factor() + self.assertEqual( + teu_factor, 
1.5, + "TEU factor should be 1.5 when half of the containers are 20 feet and half are 40 feet.") + + def test_get_teu_factor_all_forty_feet(self) -> None: + ContainerLengthDistributionRepository.set_distribution( + { + ContainerLength.twenty_feet: 0, + ContainerLength.forty_feet: 1, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + } + ) + self.assertEqual( + ContainerLengthDistributionRepository.get_teu_factor(), 2, + "TEU factor should be 2 when all containers are 40 feet.") + + def test_get_teu_factor_all_forty_five_feet(self) -> None: + ContainerLengthDistributionRepository.set_distribution( + { + ContainerLength.twenty_feet: 0, + ContainerLength.forty_feet: 0, + ContainerLength.forty_five_feet: 1, + ContainerLength.other: 0 + } + ) + self.assertEqual( + ContainerLengthDistributionRepository.get_teu_factor(), 2.25, + "TEU factor should be 2.25 when all containers are 45 feet.") diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_truck_arrival_distribution_repository.py b/conflowgen/tests/domain_models/distribution_repositories/test_truck_arrival_distribution_repository.py index 363c5932..f1aeb495 100644 --- a/conflowgen/tests/domain_models/distribution_repositories/test_truck_arrival_distribution_repository.py +++ b/conflowgen/tests/domain_models/distribution_repositories/test_truck_arrival_distribution_repository.py @@ -178,6 +178,345 @@ class TestTruckArrivalDistributionRepository(unittest.TestCase): 166: 0.0, 167: 0.0} + half_hourly_data = { + 0.0: 0.0, + 0.5: 0.0, + 1.0: 0.0, + 1.5: 0.0, + 2.0: 0.0, + 2.5: 0.0, + 3.0: 0.0, + 3.5: 0.0, + 4.0: 0.0, + 4.5: 0.0, + 5.0: 0.0, + 5.5: 0.0, + 6.0: 0.0, + 6.5: 0.0, + 7.0: 0.0, + 7.5: 0.0, + 8.0: 0.005847356259863457, + 8.5: 0.005847356259863457, + 9.0: 0.006884424813828916, + 9.5: 0.006884424813828916, + 10.0: 0.008240784915529742, + 10.5: 0.008240784915529742, + 11.0: 0.010754542541876863, + 11.5: 0.010754542541876863, + 12.0: 0.009065614385411223, + 12.5: 
0.009065614385411223, + 13.0: 0.00956682661232754, + 13.5: 0.00956682661232754, + 14.0: 0.009676877930280697, + 14.5: 0.009676877930280697, + 15.0: 0.007198538886305298, + 15.5: 0.007198538886305298, + 16.0: 0.008977906910623057, + 16.5: 0.008977906910623057, + 17.0: 0.0069943804588238085, + 17.5: 0.0069943804588238085, + 18.0: 0.005381820143341635, + 18.5: 0.005381820143341635, + 19.0: 0.004515467462119917, + 19.5: 0.004515467462119917, + 20.0: 0.003327233560870352, + 20.5: 0.0, + 21.0: 0.0, + 21.5: 0.0, + 22.0: 0.0, + 22.5: 0.0, + 23.0: 0.0, + 23.5: 0.0, + 24.0: 0.0, + 24.5: 0.0, + 25.0: 0.0, + 25.5: 0.0, + 26.0: 0.0, + 26.5: 0.0, + 27.0: 0.0, + 27.5: 0.0, + 28.0: 0.0, + 28.5: 0.0, + 29.0: 0.0, + 29.5: 0.0, + 30.0: 0.0, + 30.5: 0.0, + 31.0: 0.0, + 31.5: 0.0, + 32.0: 0.006041312488159616, + 32.5: 0.006041312488159616, + 33.0: 0.007328821183642256, + 33.5: 0.007328821183642256, + 34.0: 0.008852314535559925, + 34.5: 0.008852314535559925, + 35.0: 0.00838027636330664, + 35.5: 0.00838027636330664, + 36.0: 0.009907439316224021, + 36.5: 0.009907439316224021, + 37.0: 0.009169313456252479, + 37.5: 0.009169313456252479, + 38.0: 0.009614722815201775, + 38.5: 0.009614722815201775, + 39.0: 0.00787503623358249, + 39.5: 0.00787503623358249, + 40.0: 0.007824251062007128, + 40.5: 0.007824251062007128, + 41.0: 0.00705486596186395, + 41.5: 0.00705486596186395, + 42.0: 0.004398697886964745, + 42.5: 0.004398697886964745, + 43.0: 0.003750005820315642, + 43.5: 0.003750005820315642, + 44.0: 0.0034171233780298876, + 44.5: 0.0, + 45.0: 0.0, + 45.5: 0.0, + 46.0: 0.0, + 46.5: 0.0, + 47.0: 0.0, + 47.5: 0.0, + 48.0: 0.0, + 48.5: 0.0, + 49.0: 0.0, + 49.5: 0.0, + 50.0: 0.0, + 50.5: 0.0, + 51.0: 0.0, + 51.5: 0.0, + 52.0: 0.0, + 52.5: 0.0, + 53.0: 0.0, + 53.5: 0.0, + 54.0: 0.0, + 54.5: 0.0, + 55.0: 0.0, + 55.5: 0.0, + 56.0: 0.006745061258333995, + 56.5: 0.006745061258333995, + 57.0: 0.007679410701646271, + 57.5: 0.007679410701646271, + 58.0: 0.008482538433133749, + 58.5: 0.008482538433133749, + 
59.0: 0.009062186684434759, + 59.5: 0.009062186684434759, + 60.0: 0.00909081220731496, + 60.5: 0.00909081220731496, + 61.0: 0.011583665479640732, + 61.5: 0.011583665479640732, + 62.0: 0.009624665427407022, + 62.5: 0.009624665427407022, + 63.0: 0.008408359880097303, + 63.5: 0.008408359880097303, + 64.0: 0.007806016845642667, + 64.5: 0.007806016845642667, + 65.0: 0.006535274260445081, + 65.5: 0.006535274260445081, + 66.0: 0.0057751620805421774, + 66.5: 0.0057751620805421774, + 67.0: 0.004285593715597633, + 67.5: 0.004285593715597633, + 68.0: 0.002796016187253771, + 68.5: 0.0, + 69.0: 0.0, + 69.5: 0.0, + 70.0: 0.0, + 70.5: 0.0, + 71.0: 0.0, + 71.5: 0.0, + 72.0: 0.0, + 72.5: 0.0, + 73.0: 0.0, + 73.5: 0.0, + 74.0: 0.0, + 74.5: 0.0, + 75.0: 0.0, + 75.5: 0.0, + 76.0: 0.0, + 76.5: 0.0, + 77.0: 0.0, + 77.5: 0.0, + 78.0: 0.0, + 78.5: 0.0, + 79.0: 0.0, + 79.5: 0.0, + 80.0: 0.00668500119579781, + 80.5: 0.00668500119579781, + 81.0: 0.008059864990389558, + 81.5: 0.008059864990389558, + 82.0: 0.009857405383896607, + 82.5: 0.009857405383896607, + 83.0: 0.00989603470422583, + 83.5: 0.00989603470422583, + 84.0: 0.012743096638284358, + 84.5: 0.012743096638284358, + 85.0: 0.010655364933628404, + 85.5: 0.010655364933628404, + 86.0: 0.010917545311219544, + 86.5: 0.010917545311219544, + 87.0: 0.010015960025975905, + 87.5: 0.010015960025975905, + 88.0: 0.00955277551986375, + 88.5: 0.00955277551986375, + 89.0: 0.007851974044966025, + 89.5: 0.007851974044966025, + 90.0: 0.005257200592342844, + 90.5: 0.005257200592342844, + 91.0: 0.004092778923079977, + 91.5: 0.004092778923079977, + 92.0: 0.003939305601388119, + 92.5: 0.0, + 93.0: 0.0, + 93.5: 0.0, + 94.0: 0.0, + 94.5: 0.0, + 95.0: 0.0, + 95.5: 0.0, + 96.0: 0.0, + 96.5: 0.0, + 97.0: 0.0, + 97.5: 0.0, + 98.0: 0.0, + 98.5: 0.0, + 99.0: 0.0, + 99.5: 0.0, + 100.0: 0.0, + 100.5: 0.0, + 101.0: 0.0, + 101.5: 0.0, + 102.0: 0.0, + 102.5: 0.0, + 103.0: 0.0, + 103.5: 0.0, + 104.0: 0.006851392669705531, + 104.5: 0.006851392669705531, + 105.0: 
0.010274798552864527, + 105.5: 0.010274798552864527, + 106.0: 0.011685874595427376, + 106.5: 0.011685874595427376, + 107.0: 0.01058627130541297, + 107.5: 0.01058627130541297, + 108.0: 0.010527093567814597, + 108.5: 0.010527093567814597, + 109.0: 0.012387483771322302, + 109.5: 0.012387483771322302, + 110.0: 0.010640715187610906, + 110.5: 0.010640715187610906, + 111.0: 0.00882546777802846, + 111.5: 0.00882546777802846, + 112.0: 0.008153561979994874, + 112.5: 0.008153561979994874, + 113.0: 0.006259105707922169, + 113.5: 0.006259105707922169, + 114.0: 0.005860855283263588, + 114.5: 0.005860855283263588, + 115.0: 0.0028579368743328936, + 115.5: 0.0028579368743328936, + 116.0: 0.002756732334354128, + 116.5: 0.0, + 117.0: 0.0, + 117.5: 0.0, + 118.0: 0.0, + 118.5: 0.0, + 119.0: 0.0, + 119.5: 0.0, + 120.0: 0.0, + 120.5: 0.0, + 121.0: 0.0, + 121.5: 0.0, + 122.0: 0.0, + 122.5: 0.0, + 123.0: 0.0, + 123.5: 0.0, + 124.0: 0.0, + 124.5: 0.0, + 125.0: 0.0, + 125.5: 0.0, + 126.0: 0.0, + 126.5: 0.0, + 127.0: 0.0, + 127.5: 0.0, + 128.0: 0.0009936554715091123, + 128.5: 0.0009936554715091123, + 129.0: 0.0009210861041335698, + 129.5: 0.0009210861041335698, + 130.0: 0.0004654632814999111, + 130.5: 0.0004654632814999111, + 131.0: 0.0003279024939137455, + 131.5: 0.0003279024939137455, + 132.0: 0.00029778216297555397, + 132.5: 0.00029778216297555397, + 133.0: 0.0, + 133.5: 0.0, + 134.0: 0.0, + 134.5: 0.0, + 135.0: 0.0, + 135.5: 0.0, + 136.0: 0.0, + 136.5: 0.0, + 137.0: 0.0, + 137.5: 0.0, + 138.0: 0.0, + 138.5: 0.0, + 139.0: 0.0, + 139.5: 0.0, + 140.0: 0.0, + 140.5: 0.0, + 141.0: 0.0, + 141.5: 0.0, + 142.0: 0.0, + 142.5: 0.0, + 143.0: 0.0, + 143.5: 0.0, + 144.0: 0.0, + 144.5: 0.0, + 145.0: 0.0, + 145.5: 0.0, + 146.0: 0.0, + 146.5: 0.0, + 147.0: 0.0, + 147.5: 0.0, + 148.0: 0.0, + 148.5: 0.0, + 149.0: 0.0, + 149.5: 0.0, + 150.0: 0.0, + 150.5: 0.0, + 151.0: 0.0, + 151.5: 0.0, + 152.0: 0.0, + 152.5: 0.0, + 153.0: 0.0, + 153.5: 0.0, + 154.0: 0.0, + 154.5: 0.0, + 155.0: 0.0, + 155.5: 0.0, + 156.0: 
0.0, + 156.5: 0.0, + 157.0: 0.0, + 157.5: 0.0, + 158.0: 0.0, + 158.5: 0.0, + 159.0: 0.0, + 159.5: 0.0, + 160.0: 0.0, + 160.5: 0.0, + 161.0: 0.0, + 161.5: 0.0, + 162.0: 0.0, + 162.5: 0.0, + 163.0: 0.0, + 163.5: 0.0, + 164.0: 0.0, + 164.5: 0.0, + 165.0: 0.0, + 165.5: 0.0, + 166.0: 0.0, + 166.5: 0.0, + 167.0: 0.0, + 167.5: 0.0 + } + def setUp(self) -> None: """Create container database in memory""" sqlite_db = setup_sqlite_in_memory_db() @@ -205,6 +544,15 @@ def test_happy_path(self): self.repository.get_distribution() ) + def test_happy_path_with_half_hourly_data(self): + self.repository.set_distribution( + self.half_hourly_data + ) + self.assertDictEqual( + self.half_hourly_data, + self.repository.get_distribution() + ) + def test_set_twice(self): """e.g., no exception is thrown while refreshing the data in the database. """ diff --git a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py index dfbfaeb0..ccd84951 100644 --- a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py +++ b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py @@ -1,6 +1,7 @@ import datetime import unittest +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.container import Container from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.container_weight_distribution import ContainerWeightDistribution @@ -32,7 +33,8 @@ def setUp(self) -> None: ContainerWeightDistribution, ContainerLengthDistribution, Destination, - StorageRequirementDistribution + StorageRequirementDistribution, + RandomSeedStore ]) mode_of_transport_distribution_seeder.seed() 
container_weight_distribution_seeder.seed() diff --git a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py index 6f1420af..b873d5e0 100644 --- a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py +++ b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py @@ -1,6 +1,7 @@ import datetime import unittest +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery, \ TruckArrivalInformationForPickup from conflowgen.domain_models.container import Container @@ -38,7 +39,8 @@ def setUp(self) -> None: TruckArrivalInformationForPickup, ContainerLengthDistribution, Destination, - StorageRequirementDistribution + StorageRequirementDistribution, + RandomSeedStore ]) mode_of_transport_distribution_seeder.seed() container_weight_distribution_seeder.seed() diff --git a/conflowgen/tests/domain_models/test_container.py b/conflowgen/tests/domain_models/test_container.py index a8a1b61c..369f65cd 100644 --- a/conflowgen/tests/domain_models/test_container.py +++ b/conflowgen/tests/domain_models/test_container.py @@ -3,10 +3,11 @@ """ import unittest +from dataclasses import dataclass from peewee import IntegrityError -from conflowgen.domain_models.container import Container +from conflowgen.domain_models.container import Container, FaultyDataException, NoPickupVehicleException from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement @@ -101,3 +102,51 @@ def test_container_repr(self) -> None: "" ) + + def test_faulty_data_exception(self): + @dataclass + class BogusModeOfTransport: + value: int + name: str 
+ + def __init__(self, value: int, name: str): + self.value = value + self.name = name + + mode_of_transport = BogusModeOfTransport(1, "Bogus") + + container = Container.create( + weight=10, + delivered_by=mode_of_transport, + picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard + ) + + with self.assertRaises(FaultyDataException): + container.get_arrival_time() + + def test_no_pickup_vehicle_exception(self): + @dataclass + class BogusModeOfTransport: + value: int + name: str + + def __init__(self, value: int, name: str): + self.value = value + self.name = name + + mode_of_transport = BogusModeOfTransport(1, "Bogus") + + container = Container.create( + weight=10, + delivered_by=ModeOfTransport.barge, + picked_up_by=mode_of_transport, + picked_up_by_initial=mode_of_transport, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard + ) + + with self.assertRaises(NoPickupVehicleException): + container.get_departure_time() diff --git a/conflowgen/tests/domain_models/test_vehicle.py b/conflowgen/tests/domain_models/test_vehicle.py index 09f15634..ee046b56 100644 --- a/conflowgen/tests/domain_models/test_vehicle.py +++ b/conflowgen/tests/domain_models/test_vehicle.py @@ -9,7 +9,7 @@ TruckArrivalInformationForPickup, TruckArrivalInformationForDelivery from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.large_vehicle_schedule import Schedule -from conflowgen.domain_models.vehicle import Feeder, LargeScheduledVehicle, Truck +from conflowgen.domain_models.vehicle import Feeder, LargeScheduledVehicle, Truck, Barge from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -44,6 +44,16 @@ def test_save_truck_picking_up_a_container_to_database(self) -> None: ) self.assertIsNotNone(truck) + def test_repr(self) -> None: + truck = 
Truck.create( + delivers_container=True, + picks_up_container=False + ) + self.assertEqual( + repr(truck), + "" + ) + class TestFeeder(unittest.TestCase): @@ -76,3 +86,111 @@ def test_save_feeder_to_database(self) -> None: Feeder.create( large_scheduled_vehicle=lsv ) + + def test_repr(self) -> None: + """Check if feeder can be saved""" + now = datetime.datetime.now() + schedule = Schedule.create( + service_name="MyTestFeederLine", + vehicle_type=ModeOfTransport.feeder, + vehicle_arrives_at=now, + average_vehicle_capacity=1100, + average_moved_capacity=200 + ) + lsv = LargeScheduledVehicle.create( + vehicle_name="TestFeeder1", + capacity_in_teu=1000, + moved_capacity=200, + scheduled_arrival=now, + schedule=schedule + ) + feeder = Feeder.create( + large_scheduled_vehicle=lsv + ) + self.assertEqual( + repr(feeder), + "" + ) + + +class TestBarge(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + sqlite_db = setup_sqlite_in_memory_db() + sqlite_db.create_tables([ + Barge, + LargeScheduledVehicle, + Schedule, + ]) + + def test_save_barge_to_database(self) -> None: + """Check if barge can be saved""" + now = datetime.datetime.now() + schedule = Schedule.create( + service_name="MyTestBargeLine", + vehicle_type=ModeOfTransport.barge, + vehicle_arrives_at=now, + average_vehicle_capacity=1100, + average_moved_capacity=200 + ) + lsv = LargeScheduledVehicle.create( + vehicle_name="TestBarge1", + capacity_in_teu=1000, + moved_capacity=200, + scheduled_arrival=now, + schedule=schedule + ) + Barge.create( + large_scheduled_vehicle=lsv + ) + + def test_repr(self) -> None: + """Check if barge can be saved""" + now = datetime.datetime.now() + schedule = Schedule.create( + service_name="MyTestBargeLine", + vehicle_type=ModeOfTransport.barge, + vehicle_arrives_at=now, + average_vehicle_capacity=1100, + average_moved_capacity=200 + ) + lsv = LargeScheduledVehicle.create( + vehicle_name="TestBarge1", + capacity_in_teu=1000, + 
moved_capacity=200, + scheduled_arrival=now, + schedule=schedule + ) + barge = Barge.create( + large_scheduled_vehicle=lsv + ) + self.assertEqual( + repr(barge), + "" + ) + + def test_get_mode_of_transport(self) -> None: + """Check if barge can be saved""" + now = datetime.datetime.now() + schedule = Schedule.create( + service_name="MyTestBargeLine", + vehicle_type=ModeOfTransport.barge, + vehicle_arrives_at=now, + average_vehicle_capacity=1100, + average_moved_capacity=200 + ) + lsv = LargeScheduledVehicle.create( + vehicle_name="TestBarge1", + capacity_in_teu=1000, + moved_capacity=200, + scheduled_arrival=now, + schedule=schedule + ) + barge = Barge.create( + large_scheduled_vehicle=lsv + ) + self.assertEqual( + barge.get_mode_of_transport(), + ModeOfTransport.barge + ) diff --git a/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py b/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py index 2a6df45e..4e749554 100644 --- a/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py +++ b/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py @@ -1,6 +1,7 @@ import datetime import unittest +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery, \ TruckArrivalInformationForPickup from conflowgen.domain_models.container import Container @@ -42,7 +43,8 @@ def setUp(self) -> None: ModeOfTransportDistribution, ContainerLengthDistribution, ContainerWeightDistribution, - StorageRequirementDistribution + StorageRequirementDistribution, + RandomSeedStore ]) mode_of_transport_distribution_seeder.seed() diff --git a/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py b/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py index 
5f7542d2..6818e4c7 100644 --- a/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py +++ b/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py @@ -1,6 +1,7 @@ import datetime import unittest +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.flow_generator.assign_destination_to_container_service import \ AssignDestinationToContainerService from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery, \ @@ -33,7 +34,8 @@ def setUp(self) -> None: Truck, Feeder, DeepSeaVessel, - ModeOfTransportDistribution + ModeOfTransportDistribution, + RandomSeedStore, ]) self.repository = ContainerDestinationDistributionRepository() self.service = AssignDestinationToContainerService() diff --git a/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py b/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py index fa60047f..b6091a1f 100644 --- a/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py +++ b/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py @@ -1,6 +1,7 @@ import unittest from conflowgen import ContainerLength, StorageRequirement +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.container import Container from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder @@ -23,7 +24,8 @@ def setUp(self) -> None: Schedule, Destination, Truck, - LargeScheduledVehicle + LargeScheduledVehicle, + RandomSeedStore, ]) mode_of_transport_distribution_seeder.seed() self.container_flow_generator_service = ContainerFlowGenerationService() diff --git 
a/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py b/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py index 591fa8c3..1bb2ded2 100644 --- a/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py +++ b/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py @@ -3,6 +3,7 @@ from conflowgen import PortCallManager from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.application.repositories.container_flow_generation_properties_repository import \ ContainerFlowGenerationPropertiesRepository from conflowgen.database_connection.create_tables import create_tables @@ -29,7 +30,8 @@ def setUp(self) -> None: ModeOfTransportDistribution, Schedule, StorageRequirementDistribution, - ContainerDwellTimeDistribution + ContainerDwellTimeDistribution, + RandomSeedStore, ]) mode_of_transport_distribution_seeder.seed() container_dwell_time_distribution_seeder.seed() diff --git a/conflowgen/tests/flow_generator/test_distribution_approximator.py b/conflowgen/tests/flow_generator/test_distribution_approximator.py index 65e870c2..01b435ea 100644 --- a/conflowgen/tests/flow_generator/test_distribution_approximator.py +++ b/conflowgen/tests/flow_generator/test_distribution_approximator.py @@ -5,11 +5,20 @@ import collections import unittest +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.tools.distribution_approximator import DistributionApproximator, SamplerExhaustedException +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db class TestDistributionApproximator(unittest.TestCase): + def setUp(self) -> None: + """Create container database in memory""" + sqlite_db = setup_sqlite_in_memory_db() + sqlite_db.create_tables([ + RandomSeedStore + ]) + 
def test_happy_path(self) -> None: """This is the happy path""" distribution_approximator = DistributionApproximator({ diff --git a/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py b/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py index dd02630a..55286a7b 100644 --- a/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py +++ b/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py @@ -3,6 +3,7 @@ import unittest.mock from typing import Iterable +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery, \ TruckArrivalInformationForPickup from conflowgen.domain_models.container import Container @@ -27,7 +28,7 @@ class TestLargeScheduledVehicleForExportContainersManager(unittest.TestCase): def setUp(self) -> None: """Create container database in memory""" sqlite_db = setup_sqlite_in_memory_db() - sqlite_db.create_tables([ + sqlite_db.create_tables({ Schedule, LargeScheduledVehicle, Train, @@ -41,7 +42,8 @@ def setUp(self) -> None: Destination, ModeOfTransportDistribution, ContainerDwellTimeDistribution, - ]) + RandomSeedStore, + }) mode_of_transport_distribution_seeder.seed() container_dwell_time_distribution_seeder.seed() @@ -171,7 +173,7 @@ def test_do_not_overload_feeder_with_truck_traffic(self): feeder.large_scheduled_vehicle.moved_capacity = 10 # in TEU containers = [self._create_container_for_truck(truck) for _ in range(10)] self.assertEqual(Container.select().count(), 10) - teu_generated = sum((ContainerLength.get_factor(container.length) for container in containers)) + teu_generated = sum((ContainerLength.get_teu_factor(container.length) for container in containers)) self.assertGreaterEqual(teu_generated, 10, "Generating 10 containers with each at least 1 TEU must 
result in a " "total TEU of more than 10 TEU") @@ -185,7 +187,7 @@ def test_do_not_overload_feeder_with_truck_traffic(self): teu_loaded = 0 for container in containers_reloaded: # pylint: disable=E1133 self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) - teu_loaded += ContainerLength.get_factor(container.length) + teu_loaded += ContainerLength.get_teu_factor(container.length) self.assertLessEqual(teu_loaded, 10, "Feeder must not be loaded with more than 10 TEU") def test_do_not_overload_feeder_with_train_traffic(self): @@ -200,7 +202,7 @@ def test_do_not_overload_feeder_with_train_traffic(self): feeder.save() self.assertEqual(Container.select().count(), 90) - teu_generated = sum((ContainerLength.get_factor(container.length) for container in containers)) + teu_generated = sum((ContainerLength.get_teu_factor(container.length) for container in containers)) self.assertEqual(teu_generated, 90) self.manager.choose_departing_vehicle_for_containers() @@ -213,7 +215,7 @@ def test_do_not_overload_feeder_with_train_traffic(self): teu_loaded = 0 for container in containers_reloaded: # pylint: disable=not-an-iterable self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) - teu_loaded += ContainerLength.get_factor(container.length) + teu_loaded += ContainerLength.get_teu_factor(container.length) self.assertLessEqual(teu_loaded, 80, "Feeder must not be loaded with more than what it can carry") def test_do_not_load_if_the_time_span_is_too_long(self): @@ -228,7 +230,7 @@ def test_do_not_load_if_the_time_span_is_too_long(self): feeder.save() self.assertEqual(Container.select().count(), 90) - teu_generated = sum((ContainerLength.get_factor(container.length) for container in containers)) + teu_generated = sum((ContainerLength.get_teu_factor(container.length) for container in containers)) self.assertEqual(teu_generated, 90) self.manager.choose_departing_vehicle_for_containers() @@ -256,7 +258,7 @@ 
def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles(self): feeder.save() self.assertEqual(Container.select().count(), 180) - teu_generated = sum((ContainerLength.get_factor(container.length) for container in containers)) + teu_generated = sum((ContainerLength.get_teu_factor(container.length) for container in containers)) self.assertEqual(teu_generated, 180) self.manager.choose_departing_vehicle_for_containers() @@ -269,7 +271,7 @@ def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles(self): teu_loaded = 0 for container in containers_reloaded: # pylint: disable=not-an-iterable self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) - teu_loaded += ContainerLength.get_factor(container.length) + teu_loaded += ContainerLength.get_teu_factor(container.length) self.assertLessEqual(teu_loaded, 80, "Feeder must not be loaded with more than what it can carry") def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles_and_changing_container_lengths(self): @@ -293,7 +295,7 @@ def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles_and_changing_ feeder.save() self.assertEqual(Container.select().count(), 180) - teu_generated = sum((ContainerLength.get_factor(container.length) for container in containers)) + teu_generated = sum((ContainerLength.get_teu_factor(container.length) for container in containers)) self.assertEqual(teu_generated, 270) self.manager.choose_departing_vehicle_for_containers() @@ -306,7 +308,7 @@ def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles_and_changing_ teu_loaded = 0 for container in containers_reloaded: # pylint: disable=not-an-iterable self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) - teu_loaded += ContainerLength.get_factor(container.length) + teu_loaded += ContainerLength.get_teu_factor(container.length) self.assertLessEqual(teu_loaded, 80, "Feeder must not be loaded with more than what it can 
carry") def test_nothing_to_do(self): diff --git a/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py b/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py index fe96b980..4bb684ce 100644 --- a/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py +++ b/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py @@ -11,6 +11,7 @@ import matplotlib.pyplot as plt import numpy as np +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery from conflowgen.flow_generator.truck_for_export_containers_manager import \ TruckForExportContainersManager @@ -46,7 +47,8 @@ def setUp(self) -> None: Truck, LargeScheduledVehicle, Schedule, - TruckArrivalInformationForDelivery + TruckArrivalInformationForDelivery, + RandomSeedStore, ]) truck_arrival_distribution_seeder.seed() container_dwell_time_distribution_seeder.seed() @@ -362,9 +364,9 @@ def test_delivery_time_maximum(self): maximum = datetime.datetime(2021, 8, 8, 12) - datetime.timedelta(hours=467) self.assertEqual(maximum, delivery_time) - containder_dwell_time = (container_departure_time - maximum).total_seconds() / 3600 - self.assertGreater(distribution_1.maximum, containder_dwell_time) - self.assertLess(distribution_1.minimum, containder_dwell_time) + container_dwell_time = (container_departure_time - maximum).total_seconds() / 3600 + self.assertGreater(distribution_1.maximum, container_dwell_time) + self.assertLess(distribution_1.minimum, container_dwell_time) def test_delivery_time_average(self): container_departure_time = datetime.datetime( @@ -389,7 +391,7 @@ def test_delivery_time_average(self): self.assertEqual(156, distribution.average) self.assertEqual(468, distribution.maximum) - # the distribution is inversed and the random_time_component is set to zero. 
Actually, the value can rise close + # the distribution is inverted and the random_time_component is set to zero. Actually, the value can rise close # to one, meaning that the last `-1` would be much smaller. average = datetime.datetime(2021, 8, 8, 12) - datetime.timedelta(hours=(468 - 156 - 1)) self.assertEqual(average, delivery_time) diff --git a/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py b/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py index 79e668d0..515cbf47 100644 --- a/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py +++ b/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py @@ -11,6 +11,7 @@ import matplotlib.pyplot as plt import numpy as np +from conflowgen.application.models.random_seed_store import RandomSeedStore from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement from conflowgen.domain_models.data_types.container_length import ContainerLength @@ -46,7 +47,8 @@ def setUp(self) -> None: LargeScheduledVehicle, Destination, Schedule, - TruckArrivalInformationForPickup + TruckArrivalInformationForPickup, + RandomSeedStore, ]) truck_arrival_distribution_seeder.seed() container_dwell_time_distribution_seeder.seed() diff --git a/conflowgen/tests/notebooks/compare_truck_arrival_distribution_with_results.ipynb b/conflowgen/tests/notebooks/compare_truck_arrival_distribution_with_results.ipynb index 78986f79..17d58359 100644 --- a/conflowgen/tests/notebooks/compare_truck_arrival_distribution_with_results.ipynb +++ b/conflowgen/tests/notebooks/compare_truck_arrival_distribution_with_results.ipynb @@ -22,7 +22,9 @@ "\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", - "import matplotlib" + "import matplotlib\n", + "import pandas as pd\n", + "from scipy.stats import gmean" ] }, { @@ -278,6 +280,147 @@ "plt.show()" ] }, + { + 
"cell_type": "code", + "execution_count": null, + "id": "92cf438e-a43a-4fd0-be86-484c84e013e8", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "truck_gate_throughput_analysis_report.get_report_as_graph(\n", + " start_date=datetime.datetime(2021, 6, 28),\n", + " end_date=datetime.datetime(2021, 7, 31)\n", + ")\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "60a43f06-6e09-4567-a940-be3be90bd5b8", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "conflowgen.run_all_previews(as_graph=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0e7a1561-7c6a-4fb5-8531-f891c07ed68c", + "metadata": {}, + "outputs": [], + "source": [ + "conflowgen.run_all_analyses(as_graph=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f2e8acc-72c5-4827-9c60-a07115b57487", + "metadata": {}, + "outputs": [], + "source": [ + "truck_gate_throughput_preview_report = conflowgen.TruckGateThroughputPreviewReport()\n", + "truck_gate_throughput_preview_report.show_report_as_graph()\n", + "\n", + "truck_gate_throughput_analysis_report = conflowgen.TruckGateThroughputAnalysisReport()\n", + "truck_gate_throughput_analysis_report.show_report_as_graph()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b86b2b3c-8b4b-444b-b5d8-4e72b832479e", + "metadata": {}, + "outputs": [], + "source": [ + "# Get analysis data\n", + "truck_gate_throughput_analysis = conflowgen.TruckGateThroughputAnalysis()\n", + "truck_gate_throughput_analysis_data = truck_gate_throughput_analysis.get_throughput_over_time()\n", + "\n", + "# Convert the dictionary to a pandas Series\n", + "series_data = pd.Series(truck_gate_throughput_analysis_data)\n", + "\n", + "# Set the datetime values as the index\n", + "series_data.index = pd.to_datetime(series_data.index)\n", + "\n", + "# Filter the data for the period in July 2021\n", + "filtered_data = 
series_data.loc['2021-07-5':'2021-07-31']\n", + "\n", + "# Group the data by week starting on Monday\n", + "weekly_grouped_data = filtered_data.groupby(pd.Grouper(freq=\"W-SUN\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4d93c78b-4636-4345-84b8-f89ad99c044e", + "metadata": {}, + "outputs": [], + "source": [ + "# Plotting\n", + "fig, ax = plt.subplots(figsize=(10, 6))\n", + "\n", + "# Iterate over each group and plot the data\n", + "for i, (group_name, group_data) in enumerate(weekly_grouped_data):\n", + " # Extract the start date of the week for labeling\n", + " week_start = group_data.index[0].strftime('%Y-%m-%d')\n", + "\n", + " if i == 0:\n", + " offset = 168 - len(group_data)\n", + " else:\n", + " offset = 0\n", + "\n", + " # Plot the data for the week\n", + " ax.plot(range(offset, offset + len(group_data)), group_data.values, label=week_start, linestyle='--')\n", + "\n", + "\n", + "# Create a custom key function to combine the day of the week and hour of the day\n", + "def key_func(index):\n", + " return (index.dayofweek * 24) + index.hour\n", + "\n", + "\n", + "# Group the data using the custom key function\n", + "hourly_grouped_data = filtered_data.groupby(key_func)\n", + "\n", + "# Take average of grouped_data\n", + "average_grouped_data = hourly_grouped_data.apply(gmean)\n", + "average_week_range = range(0, len(average_grouped_data)) # Adjust the range to start from 0\n", + "ax.plot(average_week_range, average_grouped_data.values, label='Average', color='black', linestyle='solid')\n", + "\n", + "# Plot preview\n", + "start_date = datetime.date(year=2021, month=7, day=5)\n", + "end_date = datetime.date(year=2021, month=7, day=31)\n", + "transportation_buffer = 0.2\n", + "truck_gate_throughput_preview = conflowgen.TruckGateThroughputPreview(start_date, end_date, transportation_buffer)\n", + "preview_data = truck_gate_throughput_preview.get_weekly_truck_arrivals(True, True)\n", + "preview_series = pd.Series(preview_data)\n", + 
"preview_series.index = pd.to_datetime(preview_series.index)\n", + "average_week_range = range(0, len(preview_series))\n", + "ax.plot(average_week_range, preview_series.values, label='Preview', color='yellow', linestyle='solid')\n", + "\n", + "# Set x-axis tick labels\n", + "ax.set_xticks([12, 36, 60, 84, 108, 132, 156]) # Set the ticks at the middle of each day\n", + "ax.set_xticklabels(['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'])\n", + "\n", + "# Add legend and labels\n", + "ax.legend(title='Week Start')\n", + "ax.set_xlabel('Day of the Week')\n", + "ax.set_ylabel('Truck Arrivals')\n", + "\n", + "# Show the plot\n", + "plt.show(block=True)\n", + "\n", + "print(\"Total weekly trucks according to preview: \", preview_series.sum())\n", + "print(\"Total weekly trucks according to average: \", average_grouped_data.sum())" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/conflowgen/tests/notebooks/fast_analyses_for_proof_of_concept.ipynb b/conflowgen/tests/notebooks/fast_analyses_for_proof_of_concept.ipynb index 58ef35f8..738b9dbf 100644 --- a/conflowgen/tests/notebooks/fast_analyses_for_proof_of_concept.ipynb +++ b/conflowgen/tests/notebooks/fast_analyses_for_proof_of_concept.ipynb @@ -5,7 +5,7 @@ "id": "8ae71614-f852-4f1c-8c93-357bf47ad9e7", "metadata": {}, "source": [ - "# Detailed Analyses for CTA example" + "# Fast Analyses for POC example" ] }, { diff --git a/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview.py b/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview.py index e6a35125..77fd5011 100644 --- a/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview.py +++ b/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview.py @@ -1,7 +1,10 @@ import datetime import unittest +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from 
conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -17,7 +20,8 @@ def setUp(self) -> None: self.sqlite_db = setup_sqlite_in_memory_db() self.sqlite_db.create_tables([ Schedule, - ModeOfTransportDistribution + ModeOfTransportDistribution, + ContainerLengthDistribution ]) now = datetime.datetime.now() @@ -59,6 +63,16 @@ def setUp(self) -> None: } }) + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + self.preview = ContainerFlowByVehicleTypePreview( start_date=now.date(), end_date=(now + datetime.timedelta(weeks=2)).date(), diff --git a/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py b/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py index f65fa47d..3734cf90 100644 --- a/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py +++ b/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py @@ -1,9 +1,12 @@ import datetime import unittest +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties from conflowgen.domain_models.data_types.mode_of_transport 
import ModeOfTransport from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository from conflowgen.domain_models.large_vehicle_schedule import Schedule @@ -19,7 +22,8 @@ def setUp(self) -> None: self.sqlite_db.create_tables([ Schedule, ModeOfTransportDistribution, - ContainerFlowGenerationProperties + ContainerFlowGenerationProperties, + ContainerLengthDistribution ]) ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ ModeOfTransport.truck: { @@ -58,6 +62,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + now = datetime.datetime.now() ContainerFlowGenerationProperties.create( start_date=now, diff --git a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py index f378e4cc..7e7e340d 100644 --- a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py +++ b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py @@ -3,7 +3,10 @@ import numpy as np +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from 
conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -19,7 +22,8 @@ def setUp(self) -> None: self.sqlite_db = setup_sqlite_in_memory_db() self.sqlite_db.create_tables([ Schedule, - ModeOfTransportDistribution + ModeOfTransportDistribution, + ContainerLengthDistribution ]) now = datetime.datetime.now() ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ @@ -59,6 +63,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + self.preview = InboundAndOutboundVehicleCapacityPreview( start_date=now.date(), end_date=(now + datetime.timedelta(weeks=2)).date(), diff --git a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py index 8008d18b..0148dbaf 100644 --- a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py +++ b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py @@ -1,7 +1,10 @@ import datetime +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties from 
conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -19,7 +22,8 @@ def setUp(self) -> None: self.sqlite_db.create_tables([ Schedule, ModeOfTransportDistribution, - ContainerFlowGenerationProperties + ContainerFlowGenerationProperties, + ContainerLengthDistribution ]) ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ ModeOfTransport.truck: { @@ -58,6 +62,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + now = datetime.datetime.now() ContainerFlowGenerationProperties.create( start_date=now, diff --git a/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py b/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py index e13eb84c..ea168ae4 100644 --- a/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py +++ b/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py @@ -1,7 +1,10 @@ import datetime import unittest +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.domain_models.data_types.mode_of_transport import 
ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -16,7 +19,8 @@ def setUp(self) -> None: self.sqlite_db = setup_sqlite_in_memory_db() self.sqlite_db.create_tables([ Schedule, - ModeOfTransportDistribution + ModeOfTransportDistribution, + ContainerLengthDistribution ]) now = datetime.datetime.now() ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ @@ -56,6 +60,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + self.preview = ModalSplitPreview( start_date=now.date(), end_date=(now + datetime.timedelta(weeks=2)).date(), diff --git a/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py b/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py index 2190bfa7..f6950b48 100644 --- a/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py +++ b/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py @@ -1,7 +1,10 @@ import datetime import unittest +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution 
import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -16,7 +19,8 @@ def setUp(self) -> None: self.sqlite_db = setup_sqlite_in_memory_db() self.sqlite_db.create_tables([ Schedule, - ModeOfTransportDistribution + ModeOfTransportDistribution, + ContainerLengthDistribution ]) now = datetime.datetime.now() ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ @@ -56,6 +60,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + self.preview = ModalSplitPreview( start_date=now.date(), end_date=(now + datetime.timedelta(weeks=2)).date(), diff --git a/conflowgen/tests/previews/test_modal_split_preview_report.py b/conflowgen/tests/previews/test_modal_split_preview_report.py index 14ad1ae3..0c84cb43 100644 --- a/conflowgen/tests/previews/test_modal_split_preview_report.py +++ b/conflowgen/tests/previews/test_modal_split_preview_report.py @@ -1,7 +1,10 @@ import datetime +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from 
conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository @@ -18,7 +21,8 @@ def setUp(self) -> None: self.sqlite_db.create_tables([ Schedule, ModeOfTransportDistribution, - ContainerFlowGenerationProperties + ContainerFlowGenerationProperties, + ContainerLengthDistribution ]) ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ ModeOfTransport.truck: { @@ -57,6 +61,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + now = datetime.datetime.now() ContainerFlowGenerationProperties.create( start_date=now, diff --git a/conflowgen/tests/previews/test_quay_side_throughput_preview.py b/conflowgen/tests/previews/test_quay_side_throughput_preview.py new file mode 100644 index 00000000..fa070c1c --- /dev/null +++ b/conflowgen/tests/previews/test_quay_side_throughput_preview.py @@ -0,0 +1,119 @@ +import unittest +import datetime + +from conflowgen import ModeOfTransport, ContainerLength +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from 
conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.previews.quay_side_throughput_preview import QuaySideThroughputPreview +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestQuaySideThroughputPreview(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + 
ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 0, + ContainerLength.forty_feet: 1, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=now, + end_date=now + datetime.timedelta(weeks=2) + ) # mostly use default values + + self.preview = QuaySideThroughputPreview( + start_date=now.date(), + end_date=(now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + + def test_empty(self): + volume = self.preview.get_quay_side_throughput() + volume_i = volume.inbound + volume_o = volume.outbound + volume_i_teu = volume_i.teu + volume_i_box = volume_i.containers + volume_o_teu = volume_o.teu + volume_o_box = volume_o.containers + self.assertEqual(volume_i_teu, 0) + self.assertEqual(volume_i_box, 0) + self.assertEqual(volume_o_teu, 0) + self.assertEqual(volume_o_box, 0) + + def test_one_feeder(self): + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=150, + vehicle_arrives_every_k_days=-1 + ) + volume = self.preview.get_quay_side_throughput() + volume_i = volume.inbound + volume_o = volume.outbound + volume_i_teu = volume_i.teu + volume_i_box = volume_i.containers + volume_o_teu = volume_o.teu + volume_o_box = volume_o.containers + self.assertAlmostEqual(volume_i_teu, 150) + self.assertAlmostEqual(volume_i_box, 75) + self.assertAlmostEqual(volume_o_teu, 72) + self.assertAlmostEqual(volume_o_box, 36) diff --git a/conflowgen/tests/previews/test_quay_side_throughput_preview_report.py b/conflowgen/tests/previews/test_quay_side_throughput_preview_report.py new file mode 100644 index 00000000..7e89322d --- /dev/null +++ 
b/conflowgen/tests/previews/test_quay_side_throughput_preview_report.py @@ -0,0 +1,121 @@ +import datetime +import unittest + +from conflowgen import ModeOfTransport, ContainerLength +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.previews.quay_side_throughput_preview_report import QuaySideThroughputPreviewReport +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestQuaySideThroughputPreviewReport(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + 
ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 0, + ContainerLength.forty_feet: 1, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=now, + end_date=now + datetime.timedelta(weeks=2) + ) # mostly use default values + + self.preview_report = QuaySideThroughputPreviewReport() + + def test_report_with_no_schedules_as_graph(self): + """Not throwing an exception is sufficient""" + axes = self.preview_report.get_report_as_graph() + self.assertIsNotNone(axes) + + def test_report_with_schedules_as_graph(self): + """Not throwing an exception is sufficient for now""" + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=400, + average_moved_capacity=300, + vehicle_arrives_every_k_days=-1 + ) + axes = self.preview_report.get_report_as_graph() + self.assertIsNotNone(axes) + + def test_text_report(self): + # pylint: disable=protected-access + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + 
vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=24000, + average_moved_capacity=24000 + ) + report = self.preview_report.get_report_as_text() + # flake8: noqa: W291 (ignore trailing whitespace in text report) + expected_report = \ + ''' +discharged (in containers) loaded (in containers) + 12000 5760 +(rounding errors might exist) +''' + self.assertEqual(report, expected_report) diff --git a/conflowgen/tests/previews/test_run_all_previews.py b/conflowgen/tests/previews/test_run_all_previews.py index 1428bf1d..abcd0a8f 100644 --- a/conflowgen/tests/previews/test_run_all_previews.py +++ b/conflowgen/tests/previews/test_run_all_previews.py @@ -26,7 +26,7 @@ def setUp(self) -> None: def test_with_no_data_as_text(self): with self.assertLogs('conflowgen', level='INFO') as context: run_all_previews(as_text=True) - self.assertEqual(len(context.output), 14) + self.assertEqual(len(context.output), 20) # Test only some entries. The detailed tests should be done in the unit test of the respective report. 
self.assertEqual( @@ -46,4 +46,4 @@ def test_with_no_data_as_graph(self): with unittest.mock.patch('matplotlib.pyplot.show'): with self.assertLogs('conflowgen', level='INFO') as context: run_all_previews(as_text=False, as_graph=True, static_graphs=True) - self.assertEqual(len(context.output), 11) + self.assertEqual(len(context.output), 15) diff --git a/conflowgen/tests/previews/test_truck_gate_throughput_preview.py b/conflowgen/tests/previews/test_truck_gate_throughput_preview.py new file mode 100644 index 00000000..52447103 --- /dev/null +++ b/conflowgen/tests/previews/test_truck_gate_throughput_preview.py @@ -0,0 +1,188 @@ +import unittest +import datetime + +from conflowgen import ModeOfTransport, ContainerLength, TruckArrivalDistributionManager +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.previews.truck_gate_throughput_preview import TruckGateThroughputPreview +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestTruckGateThroughputPreview(unittest.TestCase): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + 
ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 1, + ContainerLength.forty_feet: 0, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=now, + end_date=now + datetime.timedelta(weeks=2) + ) # mostly use default values + arrival_distribution = { + 3: .2, + 4: .8 + } + truck_arrival_distribution_manager = TruckArrivalDistributionManager() + truck_arrival_distribution_manager.set_truck_arrival_distribution(arrival_distribution) + + self.preview = TruckGateThroughputPreview( + start_date=now.date(), + end_date=(now + datetime.timedelta(weeks=2)).date(), + transportation_buffer=0.0 + ) + + def test_get_total_trucks(self): + two_days_later = datetime.datetime.now() + 
datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + # pylint: disable=protected-access + total_trucks = self.preview._get_total_trucks() + # 300 TEU arrive by feeder + # 300 TEU * 0.2 (from mode of transport distribution) = 60 TEU to be exported by truck + # Only twenty-feet containers used, so 60 TEU = 60 trucks needed + self.assertAlmostEqual(total_trucks.inbound, 60) + self.assertAlmostEqual(total_trucks.outbound, 60) + self.assertAlmostEqual(sum(total_trucks), 120) + + def test_get_weekly_trucks(self): + # pylint: disable=protected-access + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + weekly_trucks = self.preview._get_number_of_trucks_per_week() + # 60 trucks total (from test_get_total_trucks above) + # 60 trucks / 2 weeks = 30 trucks per week + self.assertAlmostEqual(weekly_trucks.inbound, 30) + self.assertAlmostEqual(weekly_trucks.outbound, 30) + self.assertAlmostEqual(sum(weekly_trucks), 60) + + def test_get_truck_distribution(self): + # Test case 1 + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=300, + average_moved_capacity=300 + ) + weekly_truck_distribution = self.preview.get_weekly_truck_arrivals(True, False) + 
self.assertEqual(weekly_truck_distribution, {3: 6, 4: 24}) + weekly_truck_distribution = self.preview.get_weekly_truck_arrivals(True, True) + self.assertEqual(weekly_truck_distribution, {3: 12, 4: 48}) + weekly_truck_distribution = self.preview.get_weekly_truck_arrivals(False, True) + self.assertEqual(weekly_truck_distribution, {3: 6, 4: 24}) + + def test_with_no_outbound_truck_traffic(self): + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0.6, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0.6, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + weekly_truck_distribution = self.preview.get_weekly_truck_arrivals(False, True) + self.assertEqual(weekly_truck_distribution, {3: 0, 4: 0}) diff --git a/conflowgen/tests/previews/test_truck_gate_throughput_preview_report.py b/conflowgen/tests/previews/test_truck_gate_throughput_preview_report.py new file mode 100644 index 00000000..f1460e93 --- /dev/null +++ b/conflowgen/tests/previews/test_truck_gate_throughput_preview_report.py @@ -0,0 +1,300 @@ +import datetime + +from conflowgen import ModeOfTransport, ContainerLength, TruckArrivalDistributionManager 
+from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution +from conflowgen.domain_models.distribution_repositories.container_length_distribution_repository import \ + ContainerLengthDistributionRepository +from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ + ModeOfTransportDistributionRepository +from conflowgen.domain_models.large_vehicle_schedule import Schedule +from conflowgen.previews.truck_gate_throughput_preview_report import TruckGateThroughputPreviewReport +from conflowgen.tests.autoclose_matplotlib import UnitTestCaseWithMatplotlib +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestTruckGateThroughputPreviewReport(UnitTestCaseWithMatplotlib): + + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + ModeOfTransportDistribution, + ContainerLengthDistribution, + ContainerFlowGenerationProperties, + TruckArrivalDistribution + ]) + now = datetime.datetime.now() + ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ + ModeOfTransport.truck: { + ModeOfTransport.truck: 0.1, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.4, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.train: { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.barge: { + ModeOfTransport.truck: 0, 
+ ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: 0.5, + ModeOfTransport.deep_sea_vessel: 0.5 + }, + ModeOfTransport.feeder: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + }, + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: 0.2, + ModeOfTransport.train: 0.4, + ModeOfTransport.barge: 0.1, + ModeOfTransport.feeder: 0.15, + ModeOfTransport.deep_sea_vessel: 0.15 + } + }) + ContainerLengthDistributionRepository().set_distribution({ + ContainerLength.twenty_feet: 1, + ContainerLength.forty_feet: 0, + ContainerLength.forty_five_feet: 0, + ContainerLength.other: 0 + }) + ContainerFlowGenerationProperties.create( + start_date=now, + end_date=now + datetime.timedelta(weeks=2) + ) # mostly use default values + arrival_distribution = {0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.0039591265543534575, + 6: 0.008280755354708402, + 7: 0.00787052138708076, + 8: 0.009048448164603814, + 9: 0.010653252222483504, + 10: 0.012752141622641803, + 11: 0.016642037255734387, + 12: 0.014028517880762, + 13: 0.014804115031537253, + 14: 0.014974413128949352, + 15: 0.011139325718994135, + 16: 0.013892795598075644, + 17: 0.01082340227148447, + 18: 0.008328057746798652, + 19: 0.006987426702627708, + 20: 0.005148702946956847, + 21: 0.0030022110241690898, + 22: 0.0022556664886468924, + 23: 0.002490824815783658, + 24: 0.001903829363512033, + 25: 0.0021963463393818504, + 26: 0.001702371138626582, + 27: 0.0021438383478597847, + 28: 0.0024202228363111615, + 29: 0.006458109051981418, + 30: 0.009296920847765565, + 31: 0.008129901930327036, + 32: 0.009348584294496615, + 33: 0.011340930095712323, + 34: 0.013698448606867216, + 35: 0.01296799663104594, + 36: 0.015331193639106963, + 37: 0.014188986240397503, + 38: 0.014878231656167027, + 39: 0.01218616653188358, + 40: 0.012107579394020204, + 41: 0.010917000115475164, + 42: 
0.006806732487834122, + 43: 0.005802918750649381, + 44: 0.005287802279192979, + 45: 0.0028202830127811215, + 46: 0.0019358005313836828, + 47: 0.0024196460473237236, + 48: 0.0016307576755443523, + 49: 0.0019988666796929904, + 50: 0.001446034417884346, + 51: 0.0010097489273788896, + 52: 0.0022229861377374384, + 53: 0.008228976109664983, + 54: 0.00916729394725238, + 55: 0.008981193048564363, + 56: 0.010437595120044508, + 57: 0.011883447250428468, + 58: 0.013126241314098189, + 59: 0.0140232137102564, + 60: 0.014067510063763042, + 61: 0.017925057408950704, + 62: 0.014893617277832918, + 63: 0.01301145426124103, + 64: 0.012079362990869175, + 65: 0.010112961782918234, + 66: 0.00893673181616467, + 67: 0.006631710275002562, + 68: 0.004326674554006004, + 69: 0.004305598082248182, + 70: 0.0022903162137174965, + 71: 0.0024661555911701296, + 72: 0.0011415664927662006, + 73: 0.0012494109397148158, + 74: 0.0009989509275061823, + 75: 0.0009419532259761962, + 76: 0.002040252335905318, + 77: 0.00518431625514197, + 78: 0.009913000508486947, + 79: 0.010654141394182583, + 80: 0.010344655620812727, + 81: 0.012472178423578372, + 82: 0.015253769000857457, + 83: 0.015313545656682602, + 84: 0.01971921057376204, + 85: 0.016488565599922105, + 86: 0.016894274684674377, + 87: 0.015499123208186931, + 88: 0.01478237177250456, + 89: 0.012150479118805851, + 90: 0.008135216144988145, + 91: 0.006333340451456769, + 92: 0.006095849295750999, + 93: 0.004708883365054937, + 94: 0.003413326087863949, + 95: 0.0017118289895981984, + 96: 0.0026912758548089605, + 97: 0.0021584624941145677, + 98: 0.0023228922170533146, + 99: 0.001604168692757123, + 100: 0.0027305554397402476, + 101: 0.0065523938632102915, + 102: 0.009520380832912196, + 103: 0.010997001773196237, + 104: 0.010602136875550094, + 105: 0.015899660970804596, + 106: 0.018083220148664984, + 107: 0.0163816471763427, + 108: 0.01629007302430533, + 109: 0.019168920074881534, + 110: 0.01646589595887871, + 111: 0.013656904790633789, + 112: 
0.012617169136636602, + 113: 0.009685606800402495, + 114: 0.009069337128450136, + 115: 0.004422493262915178, + 116: 0.0042658850465993456, + 117: 0.0030436628208826318, + 118: 0.0016924428501923685, + 119: 0.002152265219068244, + 120: 0.0028091995053135693, + 121: 0.0022128380816916287, + 122: 0.0020158483718963533, + 123: 0.0010395871009478725, + 124: 0.0009474696390102265, + 125: 0.0011628071003245448, + 126: 0.001418797422137764, + 127: 0.0016522620284370162, + 128: 0.0015376248047583672, + 129: 0.0014253278743416424, + 130: 0.0007202777097896012, + 131: 0.0005074102872076632, + 132: 0.0004608008040356385, + 133: 0.0, + 134: 0.0, + 135: 0.0, + 136: 0.0, + 137: 0.0, + 138: 0.0, + 139: 0.0, + 140: 0.0, + 141: 0.0, + 142: 0.0, + 143: 0.0, + 144: 0.0, + 145: 0.0, + 146: 0.0, + 147: 0.0, + 148: 0.0, + 149: 0.0, + 150: 0.0, + 151: 0.0, + 152: 0.0, + 153: 0.0, + 154: 0.0, + 155: 0.0, + 156: 0.0, + 157: 0.0, + 158: 0.0, + 159: 0.0, + 160: 0.0, + 161: 0.0, + 162: 0.0, + 163: 0.0, + 164: 0.0, + 165: 0.0, + 166: 0.0, + 167: 0.0} + truck_arrival_distribution_manager = TruckArrivalDistributionManager() + truck_arrival_distribution_manager.set_truck_arrival_distribution(arrival_distribution) + + self.preview_report = TruckGateThroughputPreviewReport() + + def test_report_without_schedule(self): + report = self.preview_report.get_report_as_graph() + self.assertIsNotNone(report) + + def test_report_with_schedule(self): + two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=24000, + average_moved_capacity=24000 + ) + report = self.preview_report.get_report_as_graph() + self.assertIsNotNone(report) + + def test_text_report(self): + # pylint: disable=protected-access + two_days_later = datetime.datetime.now() + 
datetime.timedelta(days=2) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=two_days_later.date(), + vehicle_arrives_every_k_days=-1, + vehicle_arrives_at_time=two_days_later.time(), + average_vehicle_capacity=24000, + average_moved_capacity=24000 + ) + report = self.preview_report.get_report_as_text() + # flake8: noqa: W291 (ignore trailing whitespace in text report) + expected_report = \ + '''Hourly view: + Minimum (trucks/h) Maximum (trucks/h) Average (trucks/h) Sum (trucks/24h) +Day of the week +Monday 0 80 36 854 +Tuesday 8 74 37 894 +Wednesday 4 86 39 936 +Thursday 4 94 42 1016 +Friday 8 92 42 1016 +Saturday 0 14 4 84 +Sunday 0 0 0 0 +Total 0 94 29 4800 +Fewest trucks in a day: 0 on Sunday +Most trucks in a day: 1016 on Thursday +Average trucks per day: 685''' + self.assertEqual(report, expected_report) + updated_preview = self.preview_report._get_updated_preview() + self.assertIsNotNone(updated_preview) diff --git a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py index 08eaaec1..6223bd39 100644 --- a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py +++ b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py @@ -3,7 +3,10 @@ import numpy as np +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository 
@@ -18,7 +21,8 @@ def setUp(self) -> None: self.sqlite_db = setup_sqlite_in_memory_db() self.sqlite_db.create_tables([ Schedule, - ModeOfTransportDistribution + ModeOfTransportDistribution, + ContainerLengthDistribution ]) now = datetime.datetime.now() ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ @@ -58,6 +62,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + self.preview = VehicleCapacityExceededPreview( start_date=now.date(), end_date=(now + datetime.timedelta(weeks=2)).date(), diff --git a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py index 337d7d05..48e3e5b9 100644 --- a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py +++ b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py @@ -1,7 +1,10 @@ import datetime +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ 
ModeOfTransportDistributionRepository @@ -19,7 +22,8 @@ def setUp(self) -> None: self.sqlite_db.create_tables([ Schedule, ModeOfTransportDistribution, - ContainerFlowGenerationProperties + ContainerFlowGenerationProperties, + ContainerLengthDistribution ]) ModeOfTransportDistributionRepository().set_mode_of_transport_distributions({ ModeOfTransport.truck: { @@ -58,6 +62,17 @@ def setUp(self) -> None: ModeOfTransport.deep_sea_vessel: 0.15 } }) + + container_length_manager = ContainerLengthDistributionManager() + container_length_manager.set_container_length_distribution( # Set default container length distribution + { + ContainerLength.other: 0.001, + ContainerLength.twenty_feet: 0.4, + ContainerLength.forty_feet: 0.57, + ContainerLength.forty_five_feet: 0.029 + } + ) + now = datetime.datetime.now() ContainerFlowGenerationProperties.create( start_date=now, diff --git a/conflowgen/tests/substitute_peewee_database.py b/conflowgen/tests/substitute_peewee_database.py index 863a6c55..d5d68ecd 100644 --- a/conflowgen/tests/substitute_peewee_database.py +++ b/conflowgen/tests/substitute_peewee_database.py @@ -1,6 +1,10 @@ +import random + from peewee import SqliteDatabase +from conflowgen.data_summaries.data_summaries_cache import DataSummariesCache from conflowgen.domain_models.base_model import database_proxy +from conflowgen.tools import get_convert_to_random_value def setup_sqlite_in_memory_db() -> SqliteDatabase: @@ -12,4 +16,7 @@ def setup_sqlite_in_memory_db() -> SqliteDatabase: }) database_proxy.initialize(sqlite_db) sqlite_db.connect() + random_bits = random.getrandbits(100) + sqlite_db.func('assign_random_value')(get_convert_to_random_value(random_bits)) + DataSummariesCache.reset_cache() return sqlite_db diff --git a/conflowgen/tools/__init__.py b/conflowgen/tools/__init__.py index 66325796..a80406dd 100644 --- a/conflowgen/tools/__init__.py +++ b/conflowgen/tools/__init__.py @@ -1,6 +1,7 @@ """ A collection of tools for which no nicer name has been found yet. 
""" +import hashlib from typing import Callable, Any, TypeVar DecoratedType = TypeVar('DecoratedType') # pylint: disable=invalid-name @@ -21,3 +22,11 @@ def hashable(obj: Any) -> bool: except TypeError: return False return True + + +def get_convert_to_random_value(random_bits): + def convert_to_random_value(row_id): + hash_value = hashlib.new('sha256') + hash_value.update((random_bits + row_id).to_bytes(16, 'big')) + return hash_value.hexdigest() + return convert_to_random_value diff --git a/conflowgen/tools/distribution_approximator.py b/conflowgen/tools/distribution_approximator.py index 41f81945..23b99376 100644 --- a/conflowgen/tools/distribution_approximator.py +++ b/conflowgen/tools/distribution_approximator.py @@ -6,6 +6,8 @@ import numpy as np +from conflowgen.application.repositories.random_seed_store_repository import get_initialised_random_object + class SamplerExhaustedException(Exception): """No more samples can be sampled from the sampler""" @@ -14,14 +16,13 @@ class SamplerExhaustedException(Exception): class DistributionApproximator: - random_seed = 1 + class_level_seeded_random: None | random.Random = None - def __init__(self, number_instances_per_category: Dict[any, int]) -> None: - """ - Args: - number_instances_per_category: For each key (category) the number of instances to draw is given - """ - self.seeded_random = random.Random(x=self.random_seed) + def __init__(self, number_instances_per_category: Dict[any, int], context_of_usage: str = "") -> None: + self.seeded_random = get_initialised_random_object( + self.__class__.__name__ + "__" + context_of_usage, + log_loading_process=False + ) self.target_distribution = np.array( list(number_instances_per_category.values()), dtype=np.int64 @@ -34,8 +35,16 @@ def __init__(self, number_instances_per_category: Dict[any, int]) -> None: def from_distribution( cls, distribution: Dict[any, float], - number_items: int + number_items: int, + context_of_usage: str = "" ) -> DistributionApproximator: + + if 
cls.class_level_seeded_random is None: + cls.class_level_seeded_random = get_initialised_random_object( + "DistributionApproximator__class", + log_loading_process=False + ) + assert math.isclose(sum(distribution.values()), 1, abs_tol=.001), \ f"All probabilities must sum to 1, but you only achieved {sum(distribution.values())}" @@ -49,9 +58,8 @@ def from_distribution( # Thus, we need to fill the missing items by randomly drawing some of them. number_items_in_category_estimation = sum(probability_based_instance_estimation.values()) if number_items_in_category_estimation < number_items: - seeded_random = random.Random(x=cls.random_seed) items_lost_to_rounding = number_items - number_items_in_category_estimation - randomly_chosen_categories = seeded_random.choices( + randomly_chosen_categories = cls.class_level_seeded_random.choices( population=list(distribution.keys()), weights=list(distribution.values()), k=items_lost_to_rounding @@ -59,7 +67,8 @@ def from_distribution( for category in randomly_chosen_categories: probability_based_instance_estimation[category] += 1 distribution_approximator = DistributionApproximator( - probability_based_instance_estimation + probability_based_instance_estimation, + context_of_usage=context_of_usage ) return distribution_approximator diff --git a/docs/api.rst b/docs/api.rst index e3f0fd20..68cde3e6 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -9,6 +9,8 @@ Domain datatypes .. autoenum:: conflowgen.ContainerLength :members: +.. autonamedtuple:: conflowgen.ContainersTransportedByTruck + .. autonamedtuple:: conflowgen.ContainerVolumeByVehicleType .. autonamedtuple:: conflowgen.ContainerVolumeFromOriginToDestination @@ -28,6 +30,8 @@ Domain datatypes .. autonamedtuple:: conflowgen.TransshipmentAndHinterlandSplit +.. autonamedtuple:: conflowgen.UsedYardCapacityOverTime + .. autonamedtuple:: conflowgen.VehicleIdentifier @@ -91,6 +95,12 @@ Generating previews .. autoclass:: conflowgen.ModalSplitPreviewReport :members: +.. 
autoclass:: conflowgen.QuaySideThroughputPreview + :members: + +.. autoclass:: conflowgen.QuaySideThroughputPreviewReport + :members: + .. autofunction:: conflowgen.run_all_previews .. autoclass:: conflowgen.VehicleCapacityExceededPreview @@ -99,6 +109,13 @@ Generating previews .. autoclass:: conflowgen.VehicleCapacityUtilizationOnOutboundJourneyPreviewReport :members: +.. autoclass:: conflowgen.TruckGateThroughputPreview + :members: + +.. autoclass:: conflowgen.TruckGateThroughputPreviewReport + :members: + + Running analyses ================ @@ -199,6 +216,9 @@ you can define them here and pass them as parameters to the aforementioned funct .. autoclass:: conflowgen.DisplayAsPlainText :members: +.. autoclass:: conflowgen.DataSummariesCache + :members: + Exporting data ============== diff --git a/docs/background.rst b/docs/background.rst index abf680dd..2c007579 100644 --- a/docs/background.rst +++ b/docs/background.rst @@ -185,8 +185,7 @@ While assessing the currently available alternatives, among others we have come - :cite:t:`briskorn2019generator` developed a test data generator that may be used to simulate yard crane container handling processes. - Their generic approach generates test examples of crane scheduling issues and is available at - :cite:`briskorn2019generator-software`. + Their generic approach generates test examples of crane scheduling issues. - A technique for the evaluation of quay crane scheduling models and solution methods is presented by :cite:t:`meisel2011unified`. @@ -208,7 +207,7 @@ ConFlowGen has been first presented at the International Conference on Dynamics If ConFlowGen served you well in your research, and you would like to acknowledge the project in your publication, we would be glad if you mention our work as defined in our `CITATION.cff `_. -If you just need a BibTeX entry for your citation software, this should do the job: +If you just need a BibTeX entry for your citation software, this one should do the job: .. 
code-block:: bibtex @@ -225,3 +224,14 @@ If you just need a BibTeX entry for your citation software, this should do the j title = {Container Flow Generation for Maritime Container Terminals}, year = {2022} } + +On a second occasion, ConFlowGen has been presented at the Annual General Assembly of the +World Association for Waterborne Transport Infrastructure (PIANC) +in 2023 in Oslo. +The contribution +`Synthetically generating traffic scenarios for simulation-based container terminal planning \ +`_ +has been awarded the +`De Paepe-Willems Award `_. +The paper highlights how ConFlowGen can support terminal planners in designing terminal interfaces and determining +the required yard capacity. diff --git a/docs/conf.py b/docs/conf.py index 9cf174f0..5011db21 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,6 +18,7 @@ # documentation are executed. Because whenever matplotlib is imported in a Jupyter Notebook for the first time, # it leaves the message "Matplotlib is building the font cache; this may take a moment." which is not looking nice. from matplotlib.font_manager import fontManager + fontManager.get_default_size() # just some random action so that the import is not flagged as unnecessary # include conflowgen from source code, avoid being served an outdated installation @@ -38,7 +39,6 @@ current_year = datetime.datetime.now().year project_copyright = f'{current_year}, {author}' - # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings.
They can be @@ -54,7 +54,6 @@ 'sphinx.ext.autosectionlabel', # create reference for each section 'sphinx.ext.viewcode', # create html page for each source file and link between it and the docs - # external dependencies 'sphinxcontrib.cairosvgconverter', # allow PDF creation 'sphinxcontrib.bibtex', # allow bib style citation 'myst_parser', # allow Markdown text, e.g., for documents from the GitHub repository @@ -70,7 +69,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '.tools'] +exclude_patterns = [ + 'Thumbs.db', '.DS_Store', # OS-specific + '_build', # Sphinx-specific + '.tools', # project-specific + '**.ipynb_checkpoints', '**.virtual_documents' # specific for Jupyter Notebooks +] add_module_names = False @@ -210,43 +214,19 @@ """ -# -- Setting up git lfs if Missing --------------------------------------------- - simplepdf_debug = True - - -def _install_git_lfs_on_linux_on_the_fly() -> str: - """ - A dirty hack as there is no clean way how to install git lfs on Read the Docs at the moment. - """ - _git_lfs_cmd = "./git-lfs" - if os.path.isfile(_git_lfs_cmd): - return _git_lfs_cmd - - os.system("echo 'Installing git-lfs on-the-fly'") - version = 'v3.2.0' - file_to_download = f'git-lfs-linux-amd64-{version}.tar.gz' - folder_inside_file = f"git-lfs-{version[1:]}" - if not os.path.isfile(file_to_download): - os.system( - f'wget https://github.com/git-lfs/git-lfs/releases/download/{version}/{file_to_download}' - ) # download git lfs - os.system(f'tar xvfz {file_to_download} -C ./.tools') # extract to ./.tools subdirectory - os.system(f'ls ./.tools/') - os.system(f'ls ./.tools/{folder_inside_file}') - os.system(f'cp ./.tools/{folder_inside_file}/git-lfs ./git-lfs') # take command (don't care about readme etc.) 
- os.system('./git-lfs install') # make lfs available in current repository - os.system("echo 'git-lfs is installed'") - return _git_lfs_cmd - - if os.environ.get("IS_RTD", False): os.system("echo 'We are currently on the Read-the-Docs server (or somebody just set IS_RTD to true)'") - git_lfs_cmd = _install_git_lfs_on_linux_on_the_fly() os.system("echo 'Fetching sqlite databases'") - os.system( - f"yes | {git_lfs_cmd} fetch -p -I '**/notebooks/data/prepared_dbs/*.sqlite'" - ) # download sqlite databases from remote, say yes to trusting certs - os.system("echo 'Start checking out the file'") - os.system(f'{git_lfs_cmd} checkout') # Replace SQLite database LFS references with the actual files - os.system("echo 'Checkout finished'") + database_names = ["demo_continental_gateway", "demo_deham_cta", "demo_poc"] # List of database names to download + sqlite_databases_directory = "notebooks/data/prepared_dbs/" + os.system("echo 'Current directory:'") + os.system("pwd") # Print current directory; we expect to be in the docs folder + os.makedirs(sqlite_databases_directory, exist_ok=True) # Create the destination folder if it doesn't exist + for database_name in database_names: + os.system(f'echo "Fetching {database_name}"') + file_url = f'https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/{database_name}.sqlite' + os.system(f'curl -LJO "{file_url}"') + os.system(f'echo \'mv "{database_name}.sqlite" "{sqlite_databases_directory}"\'') + os.system(f'mv "{database_name}.sqlite" "{sqlite_databases_directory}"') + os.system("echo 'sqlite databases fetched'") diff --git a/docs/notebooks/analyses.ipynb b/docs/notebooks/analyses.ipynb index 1b3d8110..9b3ff56a 100644 --- a/docs/notebooks/analyses.ipynb +++ b/docs/notebooks/analyses.ipynb @@ -58,6 +58,16 @@ "database_chooser.load_existing_sqlite_database(\"demo_poc.sqlite\")" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "1a83ad5a-1afa-468d-a254-d0137ebdf260", + "metadata": {}, + "outputs": [], + 
"source": [ + "preferred_matplotlib_style = \"seaborn-v0_8-colorblind\"" + ] + }, { "cell_type": "markdown", "id": "3a6a56f9-45ad-497d-a243-82d81130a7d7", @@ -232,7 +242,7 @@ "source": [ "yard_capacity_analysis_report = conflowgen.YardCapacityAnalysisReport()\n", "\n", - "with plt.style.context(\"seaborn-colorblind\"):\n", + "with plt.style.context(preferred_matplotlib_style):\n", " yard_capacity_analysis_report.get_report_as_graph(\n", " storage_requirement=conflowgen.StorageRequirement.reefer\n", " )\n", @@ -254,7 +264,7 @@ "metadata": {}, "outputs": [], "source": [ - "with plt.style.context(\"seaborn-colorblind\"):\n", + "with plt.style.context(preferred_matplotlib_style):\n", " yard_capacity_analysis_report.get_report_as_graph(\n", " storage_requirement=[\n", " conflowgen.StorageRequirement.standard,\n", @@ -310,7 +320,7 @@ "source": [ "container_dwell_time_report = conflowgen.ContainerDwellTimeAnalysisReport()\n", "\n", - "with plt.style.context(\"seaborn-colorblind\"):\n", + "with plt.style.context(preferred_matplotlib_style):\n", " container_dwell_time_report.get_report_as_graph(\n", " container_delivered_by_vehicle_type={\n", " conflowgen.ModeOfTransport.deep_sea_vessel,\n", diff --git a/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite b/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite index 56c5b06f..a375e91b 100644 --- a/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite +++ b/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:94d1fba77688d432ca9fe77a6e0241b0965b20cd5e1b78045a425ec14f8c5701 -size 19439616 +oid sha256:afe66aad30052ed3f1883f7147fa0a302fc84f84f55ccbb52f40aa6ddb1d1a44 +size 18980864 diff --git a/docs/notebooks/data/prepared_dbs/demo_poc.sqlite b/docs/notebooks/data/prepared_dbs/demo_poc.sqlite index d62021c2..9622ba2e 100644 --- a/docs/notebooks/data/prepared_dbs/demo_poc.sqlite +++ b/docs/notebooks/data/prepared_dbs/demo_poc.sqlite @@ -1,3 +1,3 @@ 
version https://git-lfs.github.com/spec/v1 -oid sha256:3629b38dd6dd868dfb04aa5e0f5686a2749fbc4de7a56384a218f2a415d8d1c1 -size 335872 +oid sha256:1ea43c0a5ed2e40a7aae26e57ce0ef8d4e63bf1638396baacd173a3afa0f80ae +size 323584 diff --git a/docs/notebooks/in_spotlight.ipynb b/docs/notebooks/in_spotlight.ipynb index 2a3ee409..a19aa8ae 100644 --- a/docs/notebooks/in_spotlight.ipynb +++ b/docs/notebooks/in_spotlight.ipynb @@ -70,7 +70,7 @@ "metadata": {}, "outputs": [], "source": [ - "plt.style.use(\"seaborn-colorblind\")" + "plt.style.use(\"seaborn-v0_8-colorblind\")" ] }, { diff --git a/docs/notebooks/input_distributions.ipynb b/docs/notebooks/input_distributions.ipynb index 6c7219ed..b89e6ce7 100644 --- a/docs/notebooks/input_distributions.ipynb +++ b/docs/notebooks/input_distributions.ipynb @@ -53,7 +53,7 @@ "metadata": {}, "outputs": [], "source": [ - "plt.style.use(\"seaborn-colorblind\")" + "plt.style.use(\"seaborn-v0_8-colorblind\")" ] }, { diff --git a/docs/references.bib b/docs/references.bib index 052a9a7a..e6750acf 100644 --- a/docs/references.bib +++ b/docs/references.bib @@ -94,14 +94,6 @@ @Article{briskorn2019generator doi={10.1007/s00291-018-0529-z}, } -@online{briskorn2019generator-software, - author = {Wiehl, Andreas}, - title = {Instances Generator}, - howpublished = {\url{http://www.instances.de/dfg/}}, - note = {Accessed: 2022-07-20}, - year = {2018} -} - @article{meisel2011unified, title = {A unified approach for the evaluation of quay crane scheduling models and algorithms}, journal = {Computers \& Operations Research}, diff --git a/examples/Python_Script/demo_DEHAM_CTA.py b/examples/Python_Script/demo_DEHAM_CTA.py index 18b55480..65b15e3a 100644 --- a/examples/Python_Script/demo_DEHAM_CTA.py +++ b/examples/Python_Script/demo_DEHAM_CTA.py @@ -87,7 +87,9 @@ logger.info(__doc__) # Pick database -database_chooser = conflowgen.DatabaseChooser() +database_chooser = conflowgen.DatabaseChooser( + sqlite_databases_directory=os.path.join(this_dir, 
"databases") +) demo_file_name = "demo_deham_cta.sqlite" database_chooser.create_new_sqlite_database( demo_file_name, diff --git a/examples/Python_Script/demo_poc.py b/examples/Python_Script/demo_poc.py index d14072a7..fb0a1844 100644 --- a/examples/Python_Script/demo_poc.py +++ b/examples/Python_Script/demo_poc.py @@ -37,7 +37,9 @@ logger.info(__doc__) # Pick database -database_chooser = conflowgen.DatabaseChooser() +database_chooser = conflowgen.DatabaseChooser( + sqlite_databases_directory=os.path.join(this_dir, "databases") +) demo_file_name = "demo_poc.sqlite" database_chooser.create_new_sqlite_database( demo_file_name, diff --git a/run_ci_light.bat b/run_ci_light.bat index bf4a9e6e..5197cf0c 100644 --- a/run_ci_light.bat +++ b/run_ci_light.bat @@ -28,7 +28,7 @@ IF "%CONDA_PREFIX%" NEQ "" ( ) ECHO.It seems like you are not in an isolated development environment. In a later step, the current version of -ECHO.ConFlowgen will be installed as a library. Please abort if you do not want to clutter your Python installation. +ECHO.ConFlowGen will be installed as a library. Please abort if you do not want to clutter your Python installation. ECHO.If you actually are in an isolated development environment, feel free to improve this check. :AGAIN