From 5713338fd58150673dcd4f2653db419a711645d9 Mon Sep 17 00:00:00 2001 From: 1kastner Date: Tue, 2 Aug 2022 16:53:28 +0200 Subject: [PATCH] Consider container dwell times (#130) --- .flake8_nb | 1 + .../workflows/installation-from-remote.yaml | 12 - .github/workflows/linting.yml | 7 +- .pylintrc | 9 +- conflowgen/__init__.py | 57 +- .../__init__.py | 2 + .../abstract_analysis.py} | 4 +- .../analyses/container_dwell_time_analysis.py | 116 +++ .../container_dwell_time_analysis_report.py | 146 +++ ...low_adjustment_by_vehicle_type_analysis.py | 5 +- ...ustment_by_vehicle_type_analysis_report.py | 11 +- ...stment_by_vehicle_type_analysis_summary.py | 2 +- ...by_vehicle_type_analysis_summary_report.py | 2 +- ...container_flow_by_vehicle_type_analysis.py | 4 +- ...er_flow_by_vehicle_type_analysis_report.py | 9 +- ..._and_outbound_vehicle_capacity_analysis.py | 7 +- ...tbound_vehicle_capacity_analysis_report.py | 8 +- ...d_vehicle_capacity_utilization_analysis.py | 16 +- ...le_capacity_utilization_analysis_report.py | 61 +- .../modal_split_analysis.py | 14 +- .../analyses/modal_split_analysis_report.py | 84 ++ .../quay_side_throughput_analysis.py | 4 +- .../quay_side_throughput_analysis_report.py | 2 +- .../truck_gate_throughput_analysis.py | 5 +- .../truck_gate_throughput_analysis_report.py | 2 +- .../yard_capacity_analysis.py | 17 +- .../yard_capacity_analysis_report.py | 14 +- conflowgen/api/__init__.py | 51 +- ...ntainer_dwell_time_distribution_manager.py | 54 ++ .../api/container_flow_generation_manager.py | 59 +- .../container_length_distribution_manager.py | 3 +- ...torage_requirement_distribution_manager.py | 3 +- .../container_weight_distribution_manager.py | 3 +- .../mode_of_transport_distribution_manager.py | 3 +- .../api/truck_arrival_distribution_manager.py | 3 +- .../data_types/export_file_format.py | 2 +- .../container_flow_generation_properties.py | 28 +- ...r_flow_generation_properties_repository.py | 22 +- .../services/export_container_flow_service.py | 
5 +- .../database_connection/create_tables.py | 5 +- conflowgen/descriptive_datatypes/__init__.py | 2 +- .../domain_models/arrival_information.py | 9 +- conflowgen/domain_models/container.py | 10 +- .../data_types/container_length.py | 1 + .../data_types/storage_requirement.py | 17 +- .../container_dwell_time_distribution.py | 26 + .../distribution_repositories/__init__.py | 41 +- ...iner_dwell_time_distribution_repository.py | 75 ++ ...de_of_transport_distribution_repository.py | 4 +- .../distribution_seeders/__init__.py | 3 +- ...ontainer_dwell_time_distribution_seeder.py | 288 ++++++ .../container_length_distribution_seeder.py | 2 +- .../container_weight_distribution_seeder.py | 2 +- .../domain_models/distribution_validators.py | 62 +- conflowgen/domain_models/seeders/__init__.py | 57 -- .../abstract_truck_for_containers_manager.py | 134 +++ ...r_containers_delivered_by_truck_service.py | 1 + ...assign_destination_to_container_service.py | 4 +- .../container_flow_generation_service.py | 45 +- ...hicle_for_onward_transportation_manager.py | 122 +-- .../truck_for_export_containers_manager.py | 90 +- .../truck_for_import_containers_manager.py | 61 +- conflowgen/metadata.py | 2 +- .../modal_split_analysis_report.py | 190 ---- .../container_flow_by_vehicle_type_preview.py | 2 +- ...ner_flow_by_vehicle_type_preview_report.py | 12 +- ...d_and_outbound_vehicle_capacity_preview.py | 6 +- ...utbound_vehicle_capacity_preview_report.py | 10 +- conflowgen/previews/modal_split_preview.py | 6 +- .../previews/modal_split_preview_report.py | 149 +-- .../vehicle_capacity_exceeded_preview.py | 2 +- ...ehicle_capacity_exceeded_preview_report.py | 5 +- conflowgen/reporting/__init__.py | 33 +- conflowgen/reporting/modal_split_report.py | 144 +++ conflowgen/reporting/no_data_plot.py | 11 +- conflowgen/reporting/output_style.py | 6 +- .../__init__.py | 0 .../test_container_dwell_time_analysis.py | 159 ++++ ...st_container_dwell_time_analysis_report.py | 164 ++++ 
...low_adjustment_by_vehicle_type_analysis.py | 2 +- ...ustment_by_vehicle_type_analysis_report.py | 2 +- ...stment_by_vehicle_type_analysis_summary.py | 2 +- ...by_vehicle_type_analysis_summary_report.py | 2 +- ...container_flow_by_vehicle_type_analysis.py | 2 +- ...er_flow_by_vehicle_type_analysis_report.py | 2 +- ..._and_outbound_vehicle_capacity_analysis.py | 16 +- ...tbound_vehicle_capacity_analysis_report.py | 9 +- ..._outbound_capacity_utilization_analysis.py | 2 +- ...nd_capacity_utilization_analysis_report.py | 8 +- .../test_modal_split_analysis.py | 10 +- .../test_modal_split_analysis_report.py | 66 +- .../test_quay_side_throughput_analysis.py | 2 +- ...st_quay_side_throughput_analysis_report.py | 2 +- .../test_run_all_analyses.py} | 15 +- .../test_truck_gate_throughput_analysis.py | 2 +- ...t_truck_gate_throughput_analysis_report.py | 2 +- .../test_yard_capacity_analysis.py | 2 +- .../test_yard_capacity_analysis_report.py | 2 +- ...ntainer_dwell_time_distribution_manager.py | 32 + .../test_container_flow_generation_manager.py | 8 +- ...orage_requirements_distribution_manager.py | 30 + ..._mode_of_transport_distribution_manager.py | 15 +- ...stream_generation_properties_repository.py | 28 +- ...test_container_stream_statistics_report.py | 6 +- ...ontainer_dwell_time_distribution_seeder.py | 39 + ...st_container_length_distribution_seeder.py | 2 +- ...st_container_weight_distribution_seeder.py | 2 +- ...t_mode_of_transport_distribution_seeder.py | 2 +- ...ample_container_dwell_time_distribution.py | 888 ++++++++++++++++++ ...iner_dwell_time_distribution_repository.py | 56 ++ ...ontainer_length_distribution_repository.py | 6 +- ...ontainer_weight_distribution_repository.py | 5 +- .../test_normalize_dependent_distribution.py | 24 +- .../test_normalize_distribution.py | 16 +- ...ory__create_for_large_scheduled_vehicle.py | 4 +- ...est_container_factory__create_for_truck.py | 4 +- ...ctory__create_arrivals_within_timerange.py | 2 +- 
...test_fleet_factory__create_feeder_fleet.py | 6 +- .../test_vehicle_factory__create_barge.py | 12 +- ...vehicle_factory__create_deep_sea_vessel.py | 14 +- .../test_vehicle_factory__create_feeder.py | 12 +- .../test_vehicle_factory__create_train.py | 10 +- .../test_vehicle_factory__create_truck.py | 2 +- .../tests/domain_models/test_container.py | 32 +- .../test_distribution_validators.py | 90 +- .../tests/domain_models/test_vehicle.py | 11 +- ...r_containers_delivered_by_truck_service.py | 2 +- ...assign_destination_to_container_service.py | 4 +- ...tor_service__container_flow_data_exists.py | 8 +- ...tainer_flow_generator_service__generate.py | 15 +- .../test_distribution_approximator.py | 24 +- ...xport_container_flow_service__container.py | 8 +- ...hicle_for_onward_transportation_manager.py | 32 +- ...est_truck_for_export_containers_manager.py | 112 ++- ...est_truck_for_import_containers_manager.py | 176 +++- .../analyses_with_missing_data.ipynb | 147 +++ .../tests/notebooks/data/logger/.gitkeep | 0 conflowgen/tests/notebooks/index.ipynb | 37 + .../previews_with_missing_data.ipynb | 147 +++ ...ner_flow_by_vehicle_type_preview_report.py | 23 +- ...d_and_outbound_vehicle_capacity_preview.py | 11 +- ...utbound_vehicle_capacity_preview_report.py | 23 +- ...preview__get_modal_split_for_hinterland.py | 2 +- ..._modal_split_preview__get_transshipment.py | 9 +- .../test_modal_split_preview_report.py | 85 +- .../tests/previews/test_run_all_previews.py | 22 +- .../test_vehicle_capacity_exceeded_preview.py | 6 +- ...ehicle_capacity_exceeded_preview_report.py | 23 +- ...etical_distribution__clipped_log_normal.py | 35 + ...liply_discretized_probability_densities.py | 13 + conflowgen/tools/continuous_distribution.py | 129 +++ conflowgen/tools/weekly_distribution.py | 51 +- docs/api.rst | 27 +- docs/conf.py | 3 +- docs/index.rst | 1 + docs/notebooks/analyses.ipynb | 32 +- .../data/prepared_dbs/demo_deham_cta.sqlite | 4 +- .../data/prepared_dbs/demo_poc.sqlite | 4 +- 
docs/notebooks/first_steps.ipynb | 2 +- docs/notebooks/in_spotlight.ipynb | 566 +++++++++++ docs/notebooks/input_distributions.ipynb | 131 ++- docs/notebooks/previews.ipynb | 4 +- docs/references.bib | 6 + run_ci_light.bat | 7 +- setup.py | 13 +- 165 files changed, 4938 insertions(+), 1333 deletions(-) rename conflowgen/{posthoc_analyses => analyses}/__init__.py (96%) rename conflowgen/{posthoc_analyses/abstract_posthoc_analysis.py => analyses/abstract_analysis.py} (94%) create mode 100644 conflowgen/analyses/container_dwell_time_analysis.py create mode 100644 conflowgen/analyses/container_dwell_time_analysis_report.py rename conflowgen/{posthoc_analyses => analyses}/container_flow_adjustment_by_vehicle_type_analysis.py (93%) rename conflowgen/{posthoc_analyses => analyses}/container_flow_adjustment_by_vehicle_type_analysis_report.py (94%) rename conflowgen/{posthoc_analyses => analyses}/container_flow_adjustment_by_vehicle_type_analysis_summary.py (96%) rename conflowgen/{posthoc_analyses => analyses}/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py (97%) rename conflowgen/{posthoc_analyses => analyses}/container_flow_by_vehicle_type_analysis.py (91%) rename conflowgen/{posthoc_analyses => analyses}/container_flow_by_vehicle_type_analysis_report.py (95%) rename conflowgen/{posthoc_analyses => analyses}/inbound_and_outbound_vehicle_capacity_analysis.py (92%) rename conflowgen/{posthoc_analyses => analyses}/inbound_and_outbound_vehicle_capacity_analysis_report.py (91%) rename conflowgen/{posthoc_analyses => analyses}/inbound_to_outbound_vehicle_capacity_utilization_analysis.py (87%) rename conflowgen/{posthoc_analyses => analyses}/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py (80%) rename conflowgen/{posthoc_analyses => analyses}/modal_split_analysis.py (88%) create mode 100644 conflowgen/analyses/modal_split_analysis_report.py rename conflowgen/{posthoc_analyses => analyses}/quay_side_throughput_analysis.py (95%) rename 
conflowgen/{posthoc_analyses => analyses}/quay_side_throughput_analysis_report.py (97%) rename conflowgen/{posthoc_analyses => analyses}/truck_gate_throughput_analysis.py (94%) rename conflowgen/{posthoc_analyses => analyses}/truck_gate_throughput_analysis_report.py (96%) rename conflowgen/{posthoc_analyses => analyses}/yard_capacity_analysis.py (92%) rename conflowgen/{posthoc_analyses => analyses}/yard_capacity_analysis_report.py (91%) create mode 100644 conflowgen/api/container_dwell_time_distribution_manager.py create mode 100644 conflowgen/domain_models/distribution_models/container_dwell_time_distribution.py create mode 100644 conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py create mode 100644 conflowgen/domain_models/distribution_seeders/container_dwell_time_distribution_seeder.py create mode 100644 conflowgen/flow_generator/abstract_truck_for_containers_manager.py delete mode 100644 conflowgen/posthoc_analyses/modal_split_analysis_report.py create mode 100644 conflowgen/reporting/modal_split_report.py rename conflowgen/tests/{posthoc_analyses => analyses}/__init__.py (100%) create mode 100644 conflowgen/tests/analyses/test_container_dwell_time_analysis.py create mode 100644 conflowgen/tests/analyses/test_container_dwell_time_analysis_report.py rename conflowgen/tests/{posthoc_analyses => analyses}/test_container_flow_adjustment_by_vehicle_type_analysis.py (99%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_container_flow_adjustment_by_vehicle_type_analysis_report.py (98%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py (96%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py (97%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_container_flow_by_vehicle_type_analysis.py (97%) rename conflowgen/tests/{posthoc_analyses => 
analyses}/test_container_flow_by_vehicle_type_analysis_report.py (98%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_inbound_and_outbound_vehicle_capacity_analysis.py (94%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_inbound_and_outbound_vehicle_capacity_analysis_report.py (95%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_inbound_to_outbound_capacity_utilization_analysis.py (97%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_inbound_to_outbound_capacity_utilization_analysis_report.py (94%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_modal_split_analysis.py (97%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_modal_split_analysis_report.py (71%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_quay_side_throughput_analysis.py (98%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_quay_side_throughput_analysis_report.py (98%) rename conflowgen/tests/{posthoc_analyses/test_run_all_posthoc_analyses.py => analyses/test_run_all_analyses.py} (58%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_truck_gate_throughput_analysis.py (98%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_truck_gate_throughput_analysis_report.py (98%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_yard_capacity_analysis.py (98%) rename conflowgen/tests/{posthoc_analyses => analyses}/test_yard_capacity_analysis_report.py (98%) create mode 100644 conflowgen/tests/api/test_container_dwell_time_distribution_manager.py create mode 100644 conflowgen/tests/api/test_container_storage_requirements_distribution_manager.py create mode 100644 conflowgen/tests/domain_models/distribution_model_seeder/test_container_dwell_time_distribution_seeder.py create mode 100644 conflowgen/tests/domain_models/distribution_repositories/example_container_dwell_time_distribution.py create mode 100644 
conflowgen/tests/domain_models/distribution_repositories/test_container_dwell_time_distribution_repository.py create mode 100644 conflowgen/tests/notebooks/analyses_with_missing_data.ipynb create mode 100644 conflowgen/tests/notebooks/data/logger/.gitkeep create mode 100644 conflowgen/tests/notebooks/index.ipynb create mode 100644 conflowgen/tests/notebooks/previews_with_missing_data.ipynb create mode 100644 conflowgen/tests/tools/test_theoretical_distribution__clipped_log_normal.py create mode 100644 conflowgen/tests/tools/test_theoretical_distribution__mutliply_discretized_probability_densities.py create mode 100644 conflowgen/tools/continuous_distribution.py create mode 100644 docs/notebooks/in_spotlight.ipynb diff --git a/.flake8_nb b/.flake8_nb index 4afe617d..85db564d 100644 --- a/.flake8_nb +++ b/.flake8_nb @@ -12,5 +12,6 @@ ignore = F821 # Variables such as 'In' or 'display' are not detected and variables of imported Jupyter Notebook cannot be detected E501 # Allow long lines W503 # Long lines need to be broken somewhere and otherwise W504 is violated + E402 # Module level imports are decided on by developers show_source = True diff --git a/.github/workflows/installation-from-remote.yaml b/.github/workflows/installation-from-remote.yaml index 7486877d..1316d7f7 100644 --- a/.github/workflows/installation-from-remote.yaml +++ b/.github/workflows/installation-from-remote.yaml @@ -22,10 +22,6 @@ jobs: - name: Skip Duplicate Actions uses: fkirc/skip-duplicate-actions@v3.4.1 - - uses: actions/checkout@v2 - with: - lfs: false - - uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true @@ -50,10 +46,6 @@ jobs: - name: Skip Duplicate Actions uses: fkirc/skip-duplicate-actions@v3.4.1 - - uses: actions/checkout@v2 - with: - lfs: false - - uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true @@ -87,10 +79,6 @@ jobs: - name: Skip Duplicate Actions uses: fkirc/skip-duplicate-actions@v3.4.1 - - uses: actions/checkout@v2 - with: - lfs: 
false - - uses: actions/setup-python@v4 with: python-version: '3.10' diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 79234c9a..0965a8b0 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -39,8 +39,11 @@ jobs: - name: Check code quality with pylint run: | - pylint conflowgen && pylint setup.py + pylint conflowgen + pylint setup.py + pylint conflowgen.tests - name: Check code quality with flake8 and flake8_nb run: | - flake8 && flake8_nb + flake8 + flake8_nb diff --git a/.pylintrc b/.pylintrc index 8f089473..cb92569d 100644 --- a/.pylintrc +++ b/.pylintrc @@ -4,7 +4,8 @@ fail-under=8.0 # Files or directories to be skipped. They should be base names, not paths. -ignore=.git +ignore=.git, + tests # Add files or directories matching the regex patterns to the ignore-list. The # regex matches against paths. @@ -57,6 +58,7 @@ disable= R1710, # inconsistent-return-statements -- leave it up to the programmers to decide this W0511, # fixme - allow todos in the code -- this should be eventually changed but it is not top priority W0122, # exec-used -- as this module only works locally (no networking), executing arbitrary code bears no threats + R0201, # no-self-use -- leave it up to the programmers to decide this [MESSAGES CONTROL] @@ -191,7 +193,10 @@ good-names=i, Run, _, df, - ax + ax, + x, + xs, + mu # Good variable names regexes, separated by a comma. 
If names match any regex, # they will always be accepted diff --git a/conflowgen/__init__.py b/conflowgen/__init__.py index 77b0f6b1..12e2ca1d 100644 --- a/conflowgen/__init__.py +++ b/conflowgen/__init__.py @@ -1,7 +1,8 @@ -# List of classes +# Distribution managers from conflowgen.api.container_length_distribution_manager import ContainerLengthDistributionManager from conflowgen.api.container_weight_distribution_manager import ContainerWeightDistributionManager from conflowgen.api.container_flow_generation_manager import ContainerFlowGenerationManager +from conflowgen.api.container_dwell_time_distribution_manager import ContainerDwellTimeDistributionManager from conflowgen.api.database_chooser import DatabaseChooser from conflowgen.api.export_container_flow_manager import ExportContainerFlowManager from conflowgen.api.mode_of_transport_distribution_manager import ModeOfTransportDistributionManager @@ -10,6 +11,7 @@ from conflowgen.api.container_storage_requirement_distribution_manager import \ ContainerStorageRequirementDistributionManager +# Previews and their reports from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview_report import \ InboundAndOutboundVehicleCapacityPreviewReport from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ @@ -24,36 +26,43 @@ from conflowgen.previews.modal_split_preview import ModalSplitPreview from conflowgen.previews.modal_split_preview_report import ModalSplitPreviewReport -from conflowgen.posthoc_analyses.inbound_and_outbound_vehicle_capacity_analysis import \ +# Analyses and their reports +from conflowgen.analyses.inbound_and_outbound_vehicle_capacity_analysis import \ InboundAndOutboundVehicleCapacityAnalysis -from conflowgen.posthoc_analyses.inbound_and_outbound_vehicle_capacity_analysis_report import \ +from conflowgen.analyses.inbound_and_outbound_vehicle_capacity_analysis_report import \ InboundAndOutboundVehicleCapacityAnalysisReport -from 
conflowgen.posthoc_analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ +from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ InboundToOutboundVehicleCapacityUtilizationAnalysis -from conflowgen.posthoc_analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis_report import \ +from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis_report import \ InboundToOutboundVehicleCapacityUtilizationAnalysisReport -from conflowgen.posthoc_analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis -from conflowgen.posthoc_analyses.container_flow_by_vehicle_type_analysis_report import \ +from conflowgen.analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis +from conflowgen.analyses.container_flow_by_vehicle_type_analysis_report import \ ContainerFlowByVehicleTypeAnalysisReport -from conflowgen.posthoc_analyses.modal_split_analysis import ModalSplitAnalysis -from conflowgen.posthoc_analyses.modal_split_analysis_report import ModalSplitAnalysisReport -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis import \ +from conflowgen.analyses.modal_split_analysis import ModalSplitAnalysis +from conflowgen.analyses.modal_split_analysis_report import ModalSplitAnalysisReport +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis import \ ContainerFlowAdjustmentByVehicleTypeAnalysis -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_report import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_report import \ ContainerFlowAdjustmentByVehicleTypeAnalysisReport -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ ContainerFlowAdjustmentByVehicleTypeAnalysisSummary 
-from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_summary_report import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary_report import \ ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport -from conflowgen.posthoc_analyses.yard_capacity_analysis import YardCapacityAnalysis -from conflowgen.posthoc_analyses.yard_capacity_analysis_report import YardCapacityAnalysisReport -from conflowgen.posthoc_analyses.quay_side_throughput_analysis import QuaySideThroughputAnalysis -from conflowgen.posthoc_analyses.quay_side_throughput_analysis_report import QuaySideThroughputAnalysisReport -from conflowgen.posthoc_analyses.truck_gate_throughput_analysis import TruckGateThroughputAnalysis -from conflowgen.posthoc_analyses.truck_gate_throughput_analysis_report import TruckGateThroughputAnalysisReport +from conflowgen.analyses.yard_capacity_analysis import YardCapacityAnalysis +from conflowgen.analyses.yard_capacity_analysis_report import YardCapacityAnalysisReport +from conflowgen.analyses.quay_side_throughput_analysis import QuaySideThroughputAnalysis +from conflowgen.analyses.quay_side_throughput_analysis_report import QuaySideThroughputAnalysisReport +from conflowgen.analyses.truck_gate_throughput_analysis import TruckGateThroughputAnalysis +from conflowgen.analyses.truck_gate_throughput_analysis_report import TruckGateThroughputAnalysisReport +from conflowgen.analyses.container_dwell_time_analysis import ContainerDwellTimeAnalysis +from conflowgen.analyses.container_dwell_time_analysis_report import ContainerDwellTimeAnalysisReport +# Specific classes for reports from conflowgen.reporting.output_style import DisplayAsMarkupLanguage, DisplayAsPlainText, DisplayAsMarkdown +# Specific classes for distributions +from conflowgen.tools.continuous_distribution import ContinuousDistribution + # List of enums from conflowgen.application.data_types.export_file_format import ExportFileFormat from 
conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -62,18 +71,18 @@ # List of functions from conflowgen.logging.logging import setup_logger -from conflowgen.posthoc_analyses import run_all_analyses +from conflowgen.analyses import run_all_analyses from conflowgen.previews import run_all_previews # List of named tuples from conflowgen.previews.vehicle_capacity_exceeded_preview import RequiredAndMaximumCapacityComparison from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import OutboundUsedAndMaximumCapacity -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import ContainersAndTEUContainerFlowPair -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ +from conflowgen.analyses.abstract_analysis import ContainersAndTEUContainerFlowPair +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ ContainerFlowAdjustedToVehicleType -from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandComparison +from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandSplit from conflowgen.descriptive_datatypes import HinterlandModalSplit -from conflowgen.posthoc_analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ +from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ CompleteVehicleIdentifier # Add metadata constants diff --git a/conflowgen/posthoc_analyses/__init__.py b/conflowgen/analyses/__init__.py similarity index 96% rename from conflowgen/posthoc_analyses/__init__.py rename to conflowgen/analyses/__init__.py index 497768e2..2ab1b2a6 100644 --- a/conflowgen/posthoc_analyses/__init__.py +++ b/conflowgen/analyses/__init__.py @@ -1,6 +1,7 @@ import logging from typing import Optional, Union, Callable, Iterable, Type +from .container_dwell_time_analysis_report import ContainerDwellTimeAnalysisReport from 
.container_flow_adjustment_by_vehicle_type_analysis_report import \ ContainerFlowAdjustmentByVehicleTypeAnalysisReport from .container_flow_adjustment_by_vehicle_type_analysis_summary_report import \ @@ -25,6 +26,7 @@ ContainerFlowAdjustmentByVehicleTypeAnalysisReport, ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport, ModalSplitAnalysisReport, + ContainerDwellTimeAnalysisReport, QuaySideThroughputAnalysisReport, TruckGateThroughputAnalysisReport, YardCapacityAnalysisReport, diff --git a/conflowgen/posthoc_analyses/abstract_posthoc_analysis.py b/conflowgen/analyses/abstract_analysis.py similarity index 94% rename from conflowgen/posthoc_analyses/abstract_posthoc_analysis.py rename to conflowgen/analyses/abstract_analysis.py index 07641b4f..a642ea84 100644 --- a/conflowgen/posthoc_analyses/abstract_posthoc_analysis.py +++ b/conflowgen/analyses/abstract_analysis.py @@ -45,7 +45,7 @@ def get_week_based_range(start: datetime.date, end: datetime.date) -> List[datet ] + [end] -class AbstractPostHocAnalysis(abc.ABC): +class AbstractAnalysis(abc.ABC): def __init__( self, @@ -68,7 +68,7 @@ def update( transportation_buffer: Optional[float] ): """ - As the transportation buffer is not stored in the database, for some analyses it needs to be provided. + For some analyses, the transportation buffer needs to be provided. Args: transportation_buffer: The buffer, e.g. 
0.2 means that 20% more containers (in TEU) can be put on a vessel diff --git a/conflowgen/analyses/container_dwell_time_analysis.py b/conflowgen/analyses/container_dwell_time_analysis.py new file mode 100644 index 00000000..0f52dfba --- /dev/null +++ b/conflowgen/analyses/container_dwell_time_analysis.py @@ -0,0 +1,116 @@ +from __future__ import annotations + +import datetime +from typing import Collection, Union + +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery, \ + TruckArrivalInformationForPickup +from conflowgen.domain_models.container import Container +from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck +from conflowgen.analyses.abstract_analysis import AbstractAnalysis +from conflowgen.tools import hashable + + +class ContainerDwellTimeAnalysis(AbstractAnalysis): + """ + This analysis can be run after the synthetic data has been generated. + The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) + as it is the case with :class:`.ContainerDwellTimeAnalysisReport`. + """ + + @staticmethod + def get_container_dwell_times( + container_delivered_by_vehicle_type: Union[str, Collection[ModeOfTransport], ModeOfTransport] = "all", + container_picked_up_by_vehicle_type: Union[str, Collection[ModeOfTransport], ModeOfTransport] = "all", + storage_requirement: Union[str, Collection[StorageRequirement], StorageRequirement] = "all" + ) -> set[datetime.timedelta]: + """ + The containers are filtered according to the provided vehicle types and storage requirements. + Then, the time between the arrival of the container in the yard and the departure of the container is + calculated. 
+ + Args: + container_delivered_by_vehicle_type: One of + ``"all"``, + a collection of :class:`ModeOfTransport` enum values (as a list, set, or similar), or + a single :class:`ModeOfTransport` enum value. + container_picked_up_by_vehicle_type: One of + ``"all"``, + a collection of :class:`ModeOfTransport` enum values (as a list, set, or similar), or + a single :class:`ModeOfTransport` enum value. + storage_requirement: One of + ``"all"``, + a collection of :class:`StorageRequirement` enum values (as a list, set, or similar), or + a single :class:`StorageRequirement` enum value. + + Returns: + A set of container dwell times. + """ + container_dwell_times: set[datetime.timedelta] = set() + + selected_containers = Container.select() + + if storage_requirement != "all": + if hashable(storage_requirement) and storage_requirement in set(StorageRequirement): + selected_containers = selected_containers.where( + Container.storage_requirement == storage_requirement + ) + else: # assume it is some kind of collection (list, set, ...) + selected_containers = selected_containers.where( + Container.storage_requirement << storage_requirement + ) + + if container_delivered_by_vehicle_type != "all": + if hashable(container_delivered_by_vehicle_type) \ + and container_delivered_by_vehicle_type in set(ModeOfTransport): + selected_containers = selected_containers.where( + Container.delivered_by == container_delivered_by_vehicle_type + ) + else: # assume it is some kind of collection (list, set, ...) + selected_containers = selected_containers.where( + Container.delivered_by << container_delivered_by_vehicle_type + ) + + if container_picked_up_by_vehicle_type != "all": + if hashable(container_picked_up_by_vehicle_type) \ + and container_picked_up_by_vehicle_type in set(ModeOfTransport): + selected_containers = selected_containers.where( + Container.picked_up_by == container_picked_up_by_vehicle_type + ) + else: # assume it is some kind of collection (list, set, ...) 
+ selected_containers = selected_containers.where( + Container.picked_up_by << container_picked_up_by_vehicle_type + ) + + for container in selected_containers: + container_enters_yard: datetime.datetime + container_leaves_yard: datetime.datetime + if container.delivered_by_truck is not None: + truck: Truck = container.delivered_by_truck + arrival_time_information: TruckArrivalInformationForDelivery = \ + truck.truck_arrival_information_for_delivery + container_enters_yard = arrival_time_information.realized_container_delivery_time + elif container.delivered_by_large_scheduled_vehicle is not None: + vehicle: LargeScheduledVehicle = container.delivered_by_large_scheduled_vehicle + container_enters_yard = vehicle.scheduled_arrival + else: + raise Exception(f"Faulty data: {container}") + + if container.picked_up_by_truck is not None: + truck: Truck = container.picked_up_by_truck + arrival_time_information: TruckArrivalInformationForPickup = \ + truck.truck_arrival_information_for_pickup + container_leaves_yard = arrival_time_information.realized_container_pickup_time + elif container.picked_up_by_large_scheduled_vehicle is not None: + vehicle: LargeScheduledVehicle = container.picked_up_by_large_scheduled_vehicle + container_leaves_yard = vehicle.scheduled_arrival + else: + raise Exception(f"Faulty data: {container}") + + container_dwell_time = container_leaves_yard - container_enters_yard + + container_dwell_times.add(container_dwell_time) + + return container_dwell_times diff --git a/conflowgen/analyses/container_dwell_time_analysis_report.py b/conflowgen/analyses/container_dwell_time_analysis_report.py new file mode 100644 index 00000000..f88e9335 --- /dev/null +++ b/conflowgen/analyses/container_dwell_time_analysis_report.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import datetime +import statistics +from typing import Any +import pandas as pd +import seaborn as sns + +from conflowgen .domain_models.data_types.mode_of_transport import 
ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.analyses.container_dwell_time_analysis import ContainerDwellTimeAnalysis +from conflowgen.reporting import AbstractReportWithMatplotlib +from conflowgen.reporting.no_data_plot import no_data_graph + +sns.set_palette(sns.color_palette()) + + +class ContainerDwellTimeAnalysisReport(AbstractReportWithMatplotlib): + """ + This analysis report takes the data structure as generated by :class:`.ContainerDwellTimeAnalysis` + and creates a comprehensible representation for the user, either as text or as a graph. + """ + + report_description = """ + Analyse the container dwell times. + In the text version of the report, only the statistics are reported. + In the visual version of the report, the dwell time distributions are plotted. + """ + + def __init__(self): + super().__init__() + self.analysis = ContainerDwellTimeAnalysis() + + def get_report_as_text(self, **kwargs) -> str: + """ + The report as a text is represented as a table suitable for logging. + It uses a human-readable formatting style. + For the exact interpretation of the parameter, check + :meth:`.ContainerDwellTimeAnalysis.get_container_dwell_times`. + + Keyword Args: + container_delivered_by_vehicle_type: One of + ``"all"``, + a collection of :class:`ModeOfTransport` enum values (as a list, set, or similar), or + a single :class:`ModeOfTransport` enum value. + container_picked_up_by_vehicle_type: One of + ``"all"``, + a collection of :class:`ModeOfTransport` enum values (as a list, set, or similar), or + a single :class:`ModeOfTransport` enum value. + storage_requirement: One of + ``"all"``, + a collection of :class:`StorageRequirement` enum values (as a list, set, or similar), or + a single :class:`StorageRequirement` enum value. + + Returns: + The report in text format (possibly spanning over several lines). 
+ """ + + container_delivered_by_vehicle_type = kwargs.pop("container_delivered_by_vehicle_type", "all") + container_picked_up_by_vehicle_type = kwargs.get("container_picked_up_by_vehicle_type", "all") + storage_requirement = kwargs.get("storage_requirement", "all") + assert len(kwargs) == 0, f"The following keys have not been processed: {list(kwargs.keys())}" + + container_dwell_times: set[datetime.timedelta] = self.analysis.get_container_dwell_times( + container_delivered_by_vehicle_type=container_delivered_by_vehicle_type, + container_picked_up_by_vehicle_type=container_picked_up_by_vehicle_type, + storage_requirement=storage_requirement + ) + + container_dwell_times_in_hours = [ + int(round(dwell_time.total_seconds() / 3600)) for dwell_time in container_dwell_times + ] + + if container_dwell_times_in_hours: + minimum_container_dwell_time = min(container_dwell_times_in_hours) + maximum_container_dwell_timey = max(container_dwell_times_in_hours) + average_container_dwell_time = statistics.mean(container_dwell_times_in_hours) + stddev_container_dwell_time = statistics.stdev(container_dwell_times_in_hours) + else: + minimum_container_dwell_time = maximum_container_dwell_timey = average_container_dwell_time = 0 + stddev_container_dwell_time = -1 + + # create string representation + report = "\n" + report += "container is delivered by vehicle type = " + self._get_vehicle_representation( + container_delivered_by_vehicle_type) + "\n" + report += "container picked up by vehicle type = " + self._get_vehicle_representation( + container_picked_up_by_vehicle_type) + "\n" + report += "storage requirement = " + self._get_storage_requirement_representation(storage_requirement) + "\n" + report += " (reported in h)\n" + report += f"minimum container dwell time: {minimum_container_dwell_time:>10.1f}\n" + report += f"average container dwell time: {average_container_dwell_time:>10.1f}\n" + report += f"maximum container dwell time: {maximum_container_dwell_timey:>10.1f}\n" + report 
+= f"standard deviation: {stddev_container_dwell_time:>10.1f}\n" + report += "(rounding errors might exist)\n" + return report + + def get_report_as_graph(self, **kwargs) -> object: + """ + The report as a graph is represented as a line graph using pandas. + + Keyword Args: + storage_requirement: Either a single storage requirement of type :class:`.StorageRequirement` or a whole + collection of them e.g. passed as a :class:`list` or :class:`set`. + For the exact interpretation of the parameter, check + :meth:`.YardCapacityAnalysis.get_used_yard_capacity_over_time`. + + Returns: + The matplotlib axis of the bar chart. + """ + + container_delivered_by_vehicle_type = kwargs.get("container_delivered_by_vehicle_type", "all") + container_picked_up_by_vehicle_type = kwargs.get("container_picked_up_by_vehicle_type", "all") + storage_requirement = kwargs.get("storage_requirement", "all") + + container_dwell_times: set[datetime.timedelta] = self.analysis.get_container_dwell_times( + container_delivered_by_vehicle_type=container_delivered_by_vehicle_type, + container_picked_up_by_vehicle_type=container_picked_up_by_vehicle_type, + storage_requirement=storage_requirement + ) + + if len(container_dwell_times) == 0: + return no_data_graph() + + container_dwell_times_in_hours = [ + int(round(dwell_time.total_seconds() / 3600)) for dwell_time in container_dwell_times + ] + series = pd.Series(list(container_dwell_times_in_hours)) + ax = series.plot.hist() + + title = "" + title += "container is delivered by vehicle type = " + self._get_vehicle_representation( + container_delivered_by_vehicle_type) + "\n" + title += "container picked up by vehicle type = " + self._get_vehicle_representation( + container_picked_up_by_vehicle_type) + "\n" + title += "storage requirement = " + self._get_storage_requirement_representation(storage_requirement) + "\n" + + ax.set_xlabel("Container Dwell Time (h)") + ax.set_title(title) + return ax + + def _get_vehicle_representation(self, vehicle_type: 
Any) -> str: + return self._get_enum_or_enum_set_representation(vehicle_type, ModeOfTransport) + + def _get_storage_requirement_representation(self, storage_requirement: Any) -> str: + return self._get_enum_or_enum_set_representation(storage_requirement, StorageRequirement) diff --git a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py similarity index 93% rename from conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis.py rename to conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py index 55f31fdc..78d1c8fb 100644 --- a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis.py @@ -4,11 +4,10 @@ from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis, \ - ContainersAndTEUContainerFlowPair +from conflowgen.analyses.abstract_analysis import AbstractAnalysis, ContainersAndTEUContainerFlowPair -class ContainerFlowAdjustmentByVehicleTypeAnalysis(AbstractPostHocAnalysis): +class ContainerFlowAdjustmentByVehicleTypeAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. 
The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) diff --git a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py similarity index 94% rename from conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py rename to conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py index f8a7eb41..b9e84966 100644 --- a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_report.py @@ -1,10 +1,11 @@ from __future__ import annotations import itertools +import logging import plotly.graph_objects as go -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis import \ ContainerFlowAdjustmentByVehicleTypeAnalysis from conflowgen.reporting import AbstractReportWithPlotly @@ -24,6 +25,8 @@ class ContainerFlowAdjustmentByVehicleTypeAnalysisReport(AbstractReportWithPlotl in case no vehicle of the initial outbound vehicle type is left within the maximum dwell time. """ + logger = logging.getLogger("conflowgen") + def __init__(self): super().__init__() self.analysis = ContainerFlowAdjustmentByVehicleTypeAnalysis() @@ -94,6 +97,10 @@ def get_report_as_graph(self) -> object: for vehicle_type_initial in initial_to_adjusted_outbound_flow_in_teu.keys() for vehicle_type_adjusted in initial_to_adjusted_outbound_flow_in_teu[vehicle_type_initial].keys() ] + + if sum(value) == 0: + self.logger.warning("No data available for plotting") + initial_labels = [ str(vehicle_type_initial).replace("_", " ").capitalize() + ":
Initial: " + str( round(sum(initial_to_adjusted_outbound_flow_in_teu[vehicle_type_initial].values()), 2)) @@ -133,7 +140,7 @@ def get_report_as_graph(self) -> object: fig.update_layout( title_text="Container flow from initial vehicle type A to adjusted vehicle type B in TEU as for some " - "containers the initially intended vehicle type was not available due to constraints " + "containers
the initially intended vehicle type was not available due to constraints " "(schedules, dwell times, etc.).", font_size=10, width=900, diff --git a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py similarity index 96% rename from conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py rename to conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py index f18fcd4c..09501469 100644 --- a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary.py @@ -2,7 +2,7 @@ from typing import NamedTuple from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis import \ ContainerFlowAdjustmentByVehicleTypeAnalysis diff --git a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py similarity index 97% rename from conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py rename to conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py index db0c8ec4..9b0aebdd 100644 --- a/conflowgen/posthoc_analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py +++ b/conflowgen/analyses/container_flow_adjustment_by_vehicle_type_analysis_summary_report.py @@ -2,7 +2,7 @@ import pandas as pd -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ 
ContainerFlowAdjustmentByVehicleTypeAnalysisSummary from conflowgen.reporting import AbstractReportWithMatplotlib from conflowgen.reporting.no_data_plot import no_data_graph diff --git a/conflowgen/posthoc_analyses/container_flow_by_vehicle_type_analysis.py b/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py similarity index 91% rename from conflowgen/posthoc_analyses/container_flow_by_vehicle_type_analysis.py rename to conflowgen/analyses/container_flow_by_vehicle_type_analysis.py index d48952b3..3b9045cc 100644 --- a/conflowgen/posthoc_analyses/container_flow_by_vehicle_type_analysis.py +++ b/conflowgen/analyses/container_flow_by_vehicle_type_analysis.py @@ -4,10 +4,10 @@ from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis +from conflowgen.analyses.abstract_analysis import AbstractAnalysis -class ContainerFlowByVehicleTypeAnalysis(AbstractPostHocAnalysis): +class ContainerFlowByVehicleTypeAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. 
The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) diff --git a/conflowgen/posthoc_analyses/container_flow_by_vehicle_type_analysis_report.py b/conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py similarity index 95% rename from conflowgen/posthoc_analyses/container_flow_by_vehicle_type_analysis_report.py rename to conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py index 647ecba4..6360785f 100644 --- a/conflowgen/posthoc_analyses/container_flow_by_vehicle_type_analysis_report.py +++ b/conflowgen/analyses/container_flow_by_vehicle_type_analysis_report.py @@ -1,10 +1,11 @@ from __future__ import annotations import itertools +import logging import plotly.graph_objects as go -from conflowgen.posthoc_analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis +from conflowgen.analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis from conflowgen.reporting import AbstractReportWithPlotly @@ -19,6 +20,8 @@ class ContainerFlowByVehicleTypeAnalysisReport(AbstractReportWithPlotly): the inbound and outbound journey for each container. """ + logger = logging.getLogger("conflowgen") + def __init__(self): super().__init__() self.analysis = ContainerFlowByVehicleTypeAnalysis() @@ -77,6 +80,10 @@ def get_report_as_graph(self) -> object: for inbound_vehicle_type in inbound_to_outbound_flow.keys() for outbound_vehicle_type in inbound_to_outbound_flow[inbound_vehicle_type].keys() ] + + if sum(value) == 0: + self.logger.warning("No data available for plotting") + inbound_labels = [ str(inbound_vehicle_type).replace("_", " ").capitalize() + ":
Inbound: " + str( round(sum(inbound_to_outbound_flow[inbound_vehicle_type].values()), 2)) diff --git a/conflowgen/posthoc_analyses/inbound_and_outbound_vehicle_capacity_analysis.py b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py similarity index 92% rename from conflowgen/posthoc_analyses/inbound_and_outbound_vehicle_capacity_analysis.py rename to conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py index 32493802..edb5e297 100644 --- a/conflowgen/posthoc_analyses/inbound_and_outbound_vehicle_capacity_analysis.py +++ b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis.py @@ -1,16 +1,17 @@ from __future__ import annotations from typing import Dict +import numpy as np from conflowgen.domain_models.container import Container from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.vehicle import LargeScheduledVehicle -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis +from conflowgen.analyses.abstract_analysis import AbstractAnalysis -class InboundAndOutboundVehicleCapacityAnalysis(AbstractPostHocAnalysis): +class InboundAndOutboundVehicleCapacityAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. 
The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) @@ -73,7 +74,7 @@ def get_outbound_capacity_of_vehicles(self) -> OutboundUsedAndMaximumCapacity: vehicle_type: ModeOfTransport = large_scheduled_vehicle.schedule.vehicle_type outbound_maximum_capacity[vehicle_type] += maximum_capacity_of_vehicle - outbound_maximum_capacity[ModeOfTransport.truck] = -1 # Not meaningful, trucks can always be added as required + outbound_maximum_capacity[ModeOfTransport.truck] = np.nan # Trucks can always be added as required return OutboundUsedAndMaximumCapacity( used=outbound_actual_capacity, diff --git a/conflowgen/posthoc_analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py similarity index 91% rename from conflowgen/posthoc_analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py rename to conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py index 8e484a45..125df8f9 100644 --- a/conflowgen/posthoc_analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py +++ b/conflowgen/analyses/inbound_and_outbound_vehicle_capacity_analysis_report.py @@ -1,8 +1,10 @@ from __future__ import annotations + +import numpy as np import pandas as pd import seaborn as sns -from conflowgen.posthoc_analyses.inbound_and_outbound_vehicle_capacity_analysis import \ +from conflowgen.analyses.inbound_and_outbound_vehicle_capacity_analysis import \ InboundAndOutboundVehicleCapacityAnalysis from conflowgen.reporting import AbstractReportWithMatplotlib @@ -41,10 +43,12 @@ def get_report_as_text(self) -> str: report += "\n" for vehicle_type in self.order_of_vehicle_types_in_report: vehicle_type_as_text = str(vehicle_type).replace("_", " ") + max_capacities_repr = -1 if np.isnan(outbound_maximum_capacities[vehicle_type]) \ + else outbound_maximum_capacities[vehicle_type] report += f"{vehicle_type_as_text:<15} " report += 
f"{inbound_capacities[vehicle_type]:>16.1f} " report += f"{outbound_actual_capacities[vehicle_type]:>24.1f} " - report += f"{outbound_maximum_capacities[vehicle_type]:>21.1f}" + report += f"{max_capacities_repr:>21.1f}" report += "\n" report += "(rounding errors might exist)\n" return report diff --git a/conflowgen/posthoc_analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py similarity index 87% rename from conflowgen/posthoc_analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py rename to conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py index 0c8332d8..7b47adb5 100644 --- a/conflowgen/posthoc_analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py +++ b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis.py @@ -7,7 +7,7 @@ from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.large_vehicle_schedule import Schedule from conflowgen.domain_models.vehicle import LargeScheduledVehicle -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis +from conflowgen.analyses.abstract_analysis import AbstractAnalysis from conflowgen.tools import hashable @@ -20,7 +20,7 @@ class CompleteVehicleIdentifier(NamedTuple): vehicle_name: str -class InboundToOutboundVehicleCapacityUtilizationAnalysis(AbstractPostHocAnalysis): +class InboundToOutboundVehicleCapacityUtilizationAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. 
The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) @@ -47,21 +47,19 @@ def get_inbound_and_outbound_capacity_of_each_vehicle( """ capacities: Dict[CompleteVehicleIdentifier, (float, float)] = {} - base_selection = LargeScheduledVehicle.select().join(Schedule) - if vehicle_type == "all": - selected_large_scheduled_vehicles = base_selection - else: + selected_vehicles = LargeScheduledVehicle.select().join(Schedule) + if vehicle_type != "all": if hashable(vehicle_type) and vehicle_type in set(ModeOfTransport): - selected_large_scheduled_vehicles = base_selection.where( + selected_vehicles = selected_vehicles.where( LargeScheduledVehicle.schedule.vehicle_type == vehicle_type ) else: # assume it is some kind of collection (list, set, ...) - selected_large_scheduled_vehicles = base_selection.where( + selected_vehicles = selected_vehicles.where( LargeScheduledVehicle.schedule.vehicle_type << vehicle_type ) vehicle: LargeScheduledVehicle - for vehicle in selected_large_scheduled_vehicles: + for vehicle in selected_vehicles: vehicle_schedule: Schedule = vehicle.schedule mode_of_transport = vehicle_schedule.vehicle_type service_name = vehicle_schedule.service_name diff --git a/conflowgen/posthoc_analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py similarity index 80% rename from conflowgen/posthoc_analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py rename to conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py index 8bda7a20..6dcfe9af 100644 --- a/conflowgen/posthoc_analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py +++ b/conflowgen/analyses/inbound_to_outbound_vehicle_capacity_utilization_analysis_report.py @@ -1,13 +1,13 @@ from __future__ import annotations -from typing import Tuple, Any, Dict, Iterable, Optional 
+from typing import Tuple, Any, Dict, Optional import matplotlib.pyplot as plt import matplotlib.ticker import pandas as pd from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.posthoc_analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ +from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ InboundToOutboundVehicleCapacityUtilizationAnalysis, CompleteVehicleIdentifier from conflowgen.reporting import AbstractReportWithMatplotlib from conflowgen.reporting.no_data_plot import no_data_graph @@ -58,9 +58,14 @@ def get_report_as_text(self, **kwargs) -> str: Returns: The report in text format (possibly spanning over several lines). """ - vehicle_type, capacities = self._get_capacities_depending_on_vehicle_type(kwargs) + vehicle_type_any = kwargs.pop("vehicle_type", "all") + + vehicle_type_description, capacities = self._get_capacities_depending_on_vehicle_type(vehicle_type_any) + + assert len(kwargs) == 0, f"Keyword(s) {kwargs.keys()} have not been processed" + report = "\n" - report += "vehicle type = " + vehicle_type + "\n" + report += "vehicle type = " + vehicle_type_description + "\n" report += "vehicle identifier " report += "inbound capacity (in TEU) " report += "outbound capacity (in TEU)" @@ -91,9 +96,14 @@ def get_report_as_graph(self, **kwargs) -> object: Returns: The matplotlib figure """ - plot_type = kwargs.get("plot_type", "both") + plot_type = kwargs.pop("plot_type", "both") + + vehicle_type_any = kwargs.pop("vehicle_type", "all") + + vehicle_type_description, capacities = self._get_capacities_depending_on_vehicle_type(vehicle_type_any) + + assert len(kwargs) == 0, f"Keyword(s) {kwargs.keys()} have not been processed" - vehicle_type, capacities = self._get_capacities_depending_on_vehicle_type(kwargs) if len(capacities) == 0: return no_data_graph() @@ -101,14 +111,14 @@ def get_report_as_graph(self, **kwargs) -> object: if plot_type == "absolute": 
fig, ax = plt.subplots(1, 1) - self._plot_absolute_values(df, vehicle_type, ax=ax) + self._plot_absolute_values(df, vehicle_type_description, ax=ax) elif plot_type == "relative": fig, ax = plt.subplots(1, 1) - self._plot_relative_values(df, vehicle_type, ax=ax) + self._plot_relative_values(df, vehicle_type_description, ax=ax) elif plot_type == "both": fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 5)) - self._plot_absolute_values(df, vehicle_type, ax=ax1) - self._plot_relative_values(df, vehicle_type, ax=ax2) + self._plot_absolute_values(df, vehicle_type_description, ax=ax1) + self._plot_relative_values(df, vehicle_type_description, ax=ax2) plt.subplots_adjust(wspace=0.4) else: raise Exception(f"Plot type '{plot_type}' is not supported.") @@ -120,16 +130,6 @@ def get_report_as_graph(self, **kwargs) -> object: ) return fig - @staticmethod - def _get_vehicle_type_representation(vehicle_type: Any) -> str: - if vehicle_type is None: - return "all" - if isinstance(vehicle_type, ModeOfTransport): - return str(vehicle_type) - if isinstance(vehicle_type, Iterable): - return " & ".join([str(element) for element in vehicle_type]) - return str(vehicle_type) - def _plot_absolute_values( self, df: pd.DataFrame, vehicle_type: str, ax: Optional[matplotlib.pyplot.axis] = None ) -> matplotlib.pyplot.axis: @@ -147,7 +147,10 @@ def _plot_absolute_values( return ax def _plot_relative_values( - self, df: pd.DataFrame, vehicle_type: str, ax: Optional[matplotlib.pyplot.axis] = None + self, + df: pd.DataFrame, + vehicle_type: str, + ax: Optional[matplotlib.pyplot.axis] = None ) -> matplotlib.pyplot.axis: ax = df.plot.scatter(x="inbound capacity (fixed)", y="ratio", ax=ax) ax.axline((0, (1 + self.transportation_buffer)), slope=0, color='black', label='Maximum outbound capacity') @@ -170,14 +173,10 @@ def _convert_analysis_to_df(self, capacities: Dict[CompleteVehicleIdentifier, Tu return df def _get_capacities_depending_on_vehicle_type( - self, kwargs + self, + vehicle_type_any: Any ) -> 
Tuple[str, Dict[CompleteVehicleIdentifier, Tuple[float, float]]]: - if "vehicle_type" in kwargs: - vehicle_type = kwargs["vehicle_type"] - capacities = self.analysis.get_inbound_and_outbound_capacity_of_each_vehicle( - vehicle_type=vehicle_type - ) - else: - vehicle_type = None - capacities = self.analysis.get_inbound_and_outbound_capacity_of_each_vehicle() - return self._get_vehicle_type_representation(vehicle_type), capacities + capacities = self.analysis.get_inbound_and_outbound_capacity_of_each_vehicle( + vehicle_type=vehicle_type_any + ) + return self._get_enum_or_enum_set_representation(vehicle_type_any, ModeOfTransport), capacities diff --git a/conflowgen/posthoc_analyses/modal_split_analysis.py b/conflowgen/analyses/modal_split_analysis.py similarity index 88% rename from conflowgen/posthoc_analyses/modal_split_analysis.py rename to conflowgen/analyses/modal_split_analysis.py index 4d28d402..1b19e243 100644 --- a/conflowgen/posthoc_analyses/modal_split_analysis.py +++ b/conflowgen/analyses/modal_split_analysis.py @@ -2,13 +2,13 @@ from typing import Dict from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis -from conflowgen.posthoc_analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis -from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandComparison +from conflowgen.analyses.abstract_analysis import AbstractAnalysis +from conflowgen.analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis +from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandSplit from conflowgen.descriptive_datatypes import HinterlandModalSplit -class ModalSplitAnalysis(AbstractPostHocAnalysis): +class ModalSplitAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. 
The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) @@ -30,7 +30,7 @@ def __init__(self): super().__init__() self.container_flow_by_vehicle_type_analysis = ContainerFlowByVehicleTypeAnalysis() - def get_transshipment_and_hinterland_fraction(self) -> TransshipmentAndHinterlandComparison: + def get_transshipment_and_hinterland_split(self) -> TransshipmentAndHinterlandSplit: """ Returns: The amount of containers in TEU dedicated for or coming from the hinterland versus the amount of containers @@ -49,12 +49,12 @@ def get_transshipment_and_hinterland_fraction(self) -> TransshipmentAndHinterlan else: hinterland_capacity += capacity - return TransshipmentAndHinterlandComparison( + return TransshipmentAndHinterlandSplit( transshipment_capacity=transshipment_capacity, hinterland_capacity=hinterland_capacity ) - def get_modal_split_for_hinterland( + def get_modal_split_for_hinterland_traffic( self, inbound: bool, outbound: bool diff --git a/conflowgen/analyses/modal_split_analysis_report.py b/conflowgen/analyses/modal_split_analysis_report.py new file mode 100644 index 00000000..1d4e6064 --- /dev/null +++ b/conflowgen/analyses/modal_split_analysis_report.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import seaborn as sns + +from conflowgen.analyses.modal_split_analysis import ModalSplitAnalysis +from conflowgen.reporting import AbstractReportWithMatplotlib, modal_split_report +from conflowgen.reporting.modal_split_report import plot_modal_splits + +sns.set_palette(sns.color_palette()) + + +class ModalSplitAnalysisReport(AbstractReportWithMatplotlib): + """ + This analysis report takes the data structure as generated by :class:`.ModalSplitAnalysis` + and creates a comprehensible representation for the user, either as text or as a graph. 
+ """ + + report_description = """ + Analyze the amount of containers dedicated for or coming from the hinterland compared to the amount of containers + that are transshipment. + """ + + def __init__(self): + super().__init__() + self.analysis = ModalSplitAnalysis() + + def get_report_as_text( + self + ) -> str: + """ + The report as a text is represented as a table suitable for logging. It uses a human-readable formatting style. + """ + # gather data + transshipment_and_hinterland_split = self.analysis.get_transshipment_and_hinterland_split() + + modal_split_in_hinterland_inbound_traffic = self.analysis.get_modal_split_for_hinterland_traffic( + inbound=True, outbound=False + ) + + modal_split_in_hinterland_outbound_traffic = self.analysis.get_modal_split_for_hinterland_traffic( + inbound=False, outbound=True + ) + + modal_split_in_hinterland_traffic_both_directions = self.analysis.get_modal_split_for_hinterland_traffic( + inbound=True, outbound=True + ) + + report = modal_split_report.insert_values_in_template( + transshipment_and_hinterland_split=transshipment_and_hinterland_split, + modal_split_in_hinterland_inbound_traffic=modal_split_in_hinterland_inbound_traffic, + modal_split_in_hinterland_outbound_traffic=modal_split_in_hinterland_outbound_traffic, + modal_split_in_hinterland_traffic_both_directions=modal_split_in_hinterland_traffic_both_directions + ) + + return report + + def get_report_as_graph(self) -> object: + """ + The report as a graph is represented as a set of pie charts using pandas. + + Returns: + The matplotlib axis of the last bar chart. 
+ """ + + # gather data + transshipment_and_hinterland_split = self.analysis.get_transshipment_and_hinterland_split() + modal_split_for_hinterland_inbound = self.analysis.get_modal_split_for_hinterland_traffic( + inbound=True, outbound=False + ) + modal_split_for_hinterland_outbound = self.analysis.get_modal_split_for_hinterland_traffic( + inbound=False, outbound=True + ) + modal_split_for_hinterland_both = self.analysis.get_modal_split_for_hinterland_traffic( + inbound=True, outbound=True + ) + + axes = plot_modal_splits( + transshipment_and_hinterland_split=transshipment_and_hinterland_split, + modal_split_in_hinterland_both_directions=modal_split_for_hinterland_both, + modal_split_in_hinterland_inbound_traffic=modal_split_for_hinterland_inbound, + modal_split_in_hinterland_outbound_traffic=modal_split_for_hinterland_outbound, + ) + + return axes diff --git a/conflowgen/posthoc_analyses/quay_side_throughput_analysis.py b/conflowgen/analyses/quay_side_throughput_analysis.py similarity index 95% rename from conflowgen/posthoc_analyses/quay_side_throughput_analysis.py rename to conflowgen/analyses/quay_side_throughput_analysis.py index 9c4ecc75..280778d8 100644 --- a/conflowgen/posthoc_analyses/quay_side_throughput_analysis.py +++ b/conflowgen/analyses/quay_side_throughput_analysis.py @@ -5,12 +5,12 @@ from conflowgen.domain_models.container import Container from conflowgen.domain_models.vehicle import LargeScheduledVehicle -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis, get_week_based_time_window, \ +from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_week_based_time_window, \ get_week_based_range from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -class QuaySideThroughputAnalysis(AbstractPostHocAnalysis): +class QuaySideThroughputAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. 
The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) diff --git a/conflowgen/posthoc_analyses/quay_side_throughput_analysis_report.py b/conflowgen/analyses/quay_side_throughput_analysis_report.py similarity index 97% rename from conflowgen/posthoc_analyses/quay_side_throughput_analysis_report.py rename to conflowgen/analyses/quay_side_throughput_analysis_report.py index 8a3a251b..de4d7439 100644 --- a/conflowgen/posthoc_analyses/quay_side_throughput_analysis_report.py +++ b/conflowgen/analyses/quay_side_throughput_analysis_report.py @@ -5,7 +5,7 @@ import seaborn as sns import matplotlib.pyplot as plt -from conflowgen.posthoc_analyses.quay_side_throughput_analysis import QuaySideThroughputAnalysis +from conflowgen.analyses.quay_side_throughput_analysis import QuaySideThroughputAnalysis from conflowgen.reporting import AbstractReportWithMatplotlib from conflowgen.reporting.no_data_plot import no_data_graph sns.set_palette(sns.color_palette()) diff --git a/conflowgen/posthoc_analyses/truck_gate_throughput_analysis.py b/conflowgen/analyses/truck_gate_throughput_analysis.py similarity index 94% rename from conflowgen/posthoc_analyses/truck_gate_throughput_analysis.py rename to conflowgen/analyses/truck_gate_throughput_analysis.py index bf034a4f..26ca4e06 100644 --- a/conflowgen/posthoc_analyses/truck_gate_throughput_analysis.py +++ b/conflowgen/analyses/truck_gate_throughput_analysis.py @@ -7,12 +7,11 @@ TruckArrivalInformationForPickup from conflowgen.domain_models.container import Container from conflowgen.domain_models.vehicle import Truck -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis, get_hour_based_time_window,\ - get_hour_based_range +from conflowgen.analyses.abstract_analysis import AbstractAnalysis, get_hour_based_time_window, get_hour_based_range from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -class 
TruckGateThroughputAnalysis(AbstractPostHocAnalysis): +class TruckGateThroughputAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) diff --git a/conflowgen/posthoc_analyses/truck_gate_throughput_analysis_report.py b/conflowgen/analyses/truck_gate_throughput_analysis_report.py similarity index 96% rename from conflowgen/posthoc_analyses/truck_gate_throughput_analysis_report.py rename to conflowgen/analyses/truck_gate_throughput_analysis_report.py index daa10f3f..76746754 100644 --- a/conflowgen/posthoc_analyses/truck_gate_throughput_analysis_report.py +++ b/conflowgen/analyses/truck_gate_throughput_analysis_report.py @@ -5,7 +5,7 @@ import seaborn as sns import matplotlib.pyplot as plt -from conflowgen.posthoc_analyses.truck_gate_throughput_analysis import TruckGateThroughputAnalysis +from conflowgen.analyses.truck_gate_throughput_analysis import TruckGateThroughputAnalysis from conflowgen.reporting import AbstractReportWithMatplotlib from conflowgen.reporting.no_data_plot import no_data_graph diff --git a/conflowgen/posthoc_analyses/yard_capacity_analysis.py b/conflowgen/analyses/yard_capacity_analysis.py similarity index 92% rename from conflowgen/posthoc_analyses/yard_capacity_analysis.py rename to conflowgen/analyses/yard_capacity_analysis.py index 0b3b0020..fa34f69c 100644 --- a/conflowgen/posthoc_analyses/yard_capacity_analysis.py +++ b/conflowgen/analyses/yard_capacity_analysis.py @@ -9,12 +9,11 @@ from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck -from conflowgen.posthoc_analyses.abstract_posthoc_analysis import AbstractPostHocAnalysis, get_hour_based_time_window, \ - get_hour_based_range +from conflowgen.analyses.abstract_analysis import 
AbstractAnalysis, get_hour_based_time_window, get_hour_based_range from conflowgen.tools import hashable -class YardCapacityAnalysis(AbstractPostHocAnalysis): +class YardCapacityAnalysis(AbstractAnalysis): """ This analysis can be run after the synthetic data has been generated. The analysis returns a data structure that can be used for generating reports (e.g., in text or as a figure) @@ -49,18 +48,18 @@ def get_used_yard_capacity_over_time( """ container_stays: List[Tuple[datetime.datetime, datetime.datetime, float]] = [] - container: Container - if storage_requirement == "all": - selected_containers = Container.select() - else: + selected_containers = Container.select() + if storage_requirement != "all": if hashable(storage_requirement) and storage_requirement in set(StorageRequirement): - selected_containers = Container.select().where( + selected_containers = selected_containers.where( Container.storage_requirement == storage_requirement ) else: # assume it is some kind of collection (list, set, ...) 
- selected_containers = Container.select().where( + selected_containers = selected_containers.where( Container.storage_requirement << storage_requirement ) + + container: Container for container in selected_containers: container_enters_yard: datetime.datetime container_leaves_yard: datetime.datetime diff --git a/conflowgen/posthoc_analyses/yard_capacity_analysis_report.py b/conflowgen/analyses/yard_capacity_analysis_report.py similarity index 91% rename from conflowgen/posthoc_analyses/yard_capacity_analysis_report.py rename to conflowgen/analyses/yard_capacity_analysis_report.py index 9bc98b20..7fd3ac83 100644 --- a/conflowgen/posthoc_analyses/yard_capacity_analysis_report.py +++ b/conflowgen/analyses/yard_capacity_analysis_report.py @@ -3,12 +3,11 @@ import datetime import statistics from typing import Tuple, Any, Dict -from collections.abc import Iterable import pandas as pd import seaborn as sns from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement -from conflowgen.posthoc_analyses.yard_capacity_analysis import YardCapacityAnalysis +from conflowgen.analyses.yard_capacity_analysis import YardCapacityAnalysis from conflowgen.reporting import AbstractReportWithMatplotlib from conflowgen.reporting.no_data_plot import no_data_graph @@ -117,15 +116,8 @@ def get_report_as_graph(self, **kwargs) -> object: ax.set_title("Used yard capacity analysis") return ax - @staticmethod - def _get_storage_requirement_representation(storage_requirement: Any) -> str: - if storage_requirement is None: - return "all" - if isinstance(storage_requirement, StorageRequirement): - return str(storage_requirement) - if isinstance(storage_requirement, Iterable): - return " & ".join([str(element) for element in storage_requirement]) - return str(storage_requirement) + def _get_storage_requirement_representation(self, storage_requirement: Any) -> str: + return self._get_enum_or_enum_set_representation(storage_requirement, StorageRequirement) def 
_get_used_yard_capacity_based_on_storage_requirement( self, kwargs diff --git a/conflowgen/api/__init__.py b/conflowgen/api/__init__.py index c3da7b64..4a2fe57d 100644 --- a/conflowgen/api/__init__.py +++ b/conflowgen/api/__init__.py @@ -2,9 +2,9 @@ from typing import Dict, Any, Type, TypeVar from conflowgen.domain_models.distribution_repositories import normalize_distribution_with_no_dependent_variable, \ - normalize_distribution_with_one_dependent_variable + normalize_distribution_with_one_dependent_variable, normalize_distribution_with_two_dependent_variables from conflowgen.domain_models.distribution_validators import validate_distribution_with_no_dependent_variables, \ - validate_distribution_with_one_dependent_variable + validate_distribution_with_one_dependent_variable, validate_distribution_with_two_dependent_variables class AbstractDistributionManager(abc.ABC): @@ -13,28 +13,61 @@ class AbstractDistributionManager(abc.ABC): KeyEnumSecondLevel = TypeVar('KeyEnumSecondLevel') + KeyEnumThirdLevel = TypeVar('KeyEnumThirdLevel') + @staticmethod def _normalize_and_validate_distribution_without_dependent_variables( distribution: Dict[Any, float], - key_type: Type[KeyEnumFirstLevel] + key_type: Type[KeyEnumFirstLevel], + values_are_frequencies: bool ) -> Dict[KeyEnumFirstLevel, float]: - normalized_distribution = normalize_distribution_with_no_dependent_variable(distribution) + normalized_distribution = normalize_distribution_with_no_dependent_variable( + distribution, + values_are_frequencies=values_are_frequencies + ) validated_distribution = validate_distribution_with_no_dependent_variables( normalized_distribution, - key_type + key_type, + values_are_frequencies=True ) return validated_distribution @staticmethod def _normalize_and_validate_distribution_with_one_dependent_variable( - distribution: Dict[Any, Dict[Any, float]], + distribution: Dict[Any, Dict[Any, Any]], key_type_first_level: Type[KeyEnumFirstLevel], - key_type_second_level: 
Type[KeyEnumSecondLevel] + key_type_second_level: Type[KeyEnumSecondLevel], + values_are_frequencies: bool ) -> Dict[KeyEnumFirstLevel, Dict[KeyEnumSecondLevel, float]]: - normalized_distribution = normalize_distribution_with_one_dependent_variable(distribution) + normalized_distribution = normalize_distribution_with_one_dependent_variable( + distribution, + values_are_frequencies=values_are_frequencies + ) validated_distribution = validate_distribution_with_one_dependent_variable( normalized_distribution, key_type_first_level, - key_type_second_level + key_type_second_level, + values_are_frequencies + ) + return validated_distribution + + @staticmethod + def _normalize_and_validate_distribution_with_two_dependent_variables( + distribution: Dict[Any, Dict[Any, Dict[Any, Any]]], + key_type_first_level: Type[KeyEnumFirstLevel], + key_type_second_level: Type[KeyEnumSecondLevel], + key_type_third_level: Type[KeyEnumThirdLevel], + values_are_frequencies: bool + ) -> Dict[KeyEnumFirstLevel, Dict[KeyEnumSecondLevel, Dict[KeyEnumThirdLevel, Any]]]: + normalized_distribution = normalize_distribution_with_two_dependent_variables( + distribution, + values_are_frequencies=False + ) + validated_distribution = validate_distribution_with_two_dependent_variables( + normalized_distribution, + key_type_first_level, + key_type_second_level, + key_type_third_level, + values_are_frequencies=values_are_frequencies ) return validated_distribution diff --git a/conflowgen/api/container_dwell_time_distribution_manager.py b/conflowgen/api/container_dwell_time_distribution_manager.py new file mode 100644 index 00000000..68740d9d --- /dev/null +++ b/conflowgen/api/container_dwell_time_distribution_manager.py @@ -0,0 +1,54 @@ +from typing import Dict, Any + +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.api import AbstractDistributionManager +from 
class ContainerDwellTimeDistributionManager(AbstractDistributionManager):
    """
    This is the interface to set and get the distribution that controls how long the container
    remains in the yard before it is loaded onto a vehicle and leaves again.
    """

    def __init__(self):
        # All persistence is delegated to the repository.
        self.container_dwell_time_distribution_repository = ContainerDwellTimeDistributionRepository()

    def get_container_dwell_time_distribution(
            self
    ) -> Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, ContinuousDistribution]]]:
        """

        Returns:
            The container dwell time distribution depends on the vehicle the container is delivered by, picked up by,
            and the storage requirement.
        """
        return self.container_dwell_time_distribution_repository.get_distributions()

    def set_container_dwell_time_distribution(
            self,
            distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, Dict[str, Any]]]]
    ) -> None:
        """
        The container dwell time distribution depends on the vehicle the container is delivered by, picked up by,
        and the storage requirement.

        A distribution is described by the following parameters:

        * distribution_name (str) - The name of the distribution. Currently, only 'lognormal' is supported.
        * average (float) - The expected mean
        * minimum (float) - The lower bound
        * maximum (float) - The upper bound
        """
        # The values describe distribution parameters, not frequencies, so they
        # must not be rescaled to sum up to one during normalization.
        cleaned_distribution = self._normalize_and_validate_distribution_with_two_dependent_variables(
            distribution,
            ModeOfTransport,
            ModeOfTransport,
            StorageRequirement,
            values_are_frequencies=False
        )
        self.container_dwell_time_distribution_repository.set_distributions(cleaned_distribution)
- minimum_dwell_time_of_import_containers_in_hours: No vehicle arrives earlier than this amount of hours - to pick up an import container that has previously been dropped off. - maximum_dwell_time_of_import_containers_in_hours: No vehicles arrives later than this amount of hours after - the previous vehicle which has dropped off the import container has arrived. - minimum_dwell_time_of_export_containers_in_hours: No vehicle arrives earlier than this amount of hours - to pick up an export container that has previously been dropped off. - maximum_dwell_time_of_export_containers_in_hours: No vehicles arrives later than this amount of hours after - the previous vehicle which has dropped off the export container has arrived. - minimum_dwell_time_of_transshipment_containers_in_hours: No vehicle arrives earlier than this amount of - hours to pick up a transshipment container that has previously been dropped off. - maximum_dwell_time_of_transshipment_containers_in_hours: No vehicles arrives later than this amount of hours - after the previous vehicle which has dropped off the transshipment container has arrived. transportation_buffer: Determines how many percent more of the inbound journey capacity is used at most to transport containers on the outbound journey. 
""" @@ -64,30 +45,6 @@ def set_properties( properties.start_date = start_date properties.end_date = end_date - if minimum_dwell_time_of_import_containers_in_hours is not None: - properties.minimum_dwell_time_of_import_containers_in_hours = \ - minimum_dwell_time_of_import_containers_in_hours - - if maximum_dwell_time_of_import_containers_in_hours is not None: - properties.maximum_dwell_time_of_import_containers_in_hours = \ - maximum_dwell_time_of_import_containers_in_hours - - if minimum_dwell_time_of_export_containers_in_hours is not None: - properties.minimum_dwell_time_of_export_containers_in_hours = \ - minimum_dwell_time_of_export_containers_in_hours - - if maximum_dwell_time_of_export_containers_in_hours is not None: - properties.maximum_dwell_time_of_export_containers_in_hours = \ - maximum_dwell_time_of_export_containers_in_hours - - if minimum_dwell_time_of_transshipment_containers_in_hours is not None: - properties.minimum_dwell_time_of_transshipment_containers_in_hours = \ - minimum_dwell_time_of_transshipment_containers_in_hours - - if maximum_dwell_time_of_transshipment_containers_in_hours is not None: - properties.maximum_dwell_time_of_transshipment_containers_in_hours = \ - maximum_dwell_time_of_transshipment_containers_in_hours - if transportation_buffer is not None: properties.transportation_buffer = transportation_buffer @@ -106,18 +63,6 @@ def get_properties(self) -> Dict[str, Union[str, datetime.date, float, int]]: 'start_date': properties.start_date, 'end_date': properties.end_date, 'transportation_buffer': properties.transportation_buffer, - 'minimum_dwell_time_of_import_containers_in_hours': - properties.minimum_dwell_time_of_import_containers_in_hours, - 'minimum_dwell_time_of_export_containers_in_hours': - properties.minimum_dwell_time_of_export_containers_in_hours, - 'minimum_dwell_time_of_transshipment_containers_in_hours': - properties.minimum_dwell_time_of_transshipment_containers_in_hours, - 
'maximum_dwell_time_of_import_containers_in_hours': - properties.maximum_dwell_time_of_import_containers_in_hours, - 'maximum_dwell_time_of_export_containers_in_hours': - properties.maximum_dwell_time_of_export_containers_in_hours, - 'maximum_dwell_time_of_transshipment_containers_in_hours': - properties.maximum_dwell_time_of_transshipment_containers_in_hours } def container_flow_data_exists(self) -> bool: @@ -136,7 +81,7 @@ def generate(self, overwrite: bool = True) -> None: Generate the synthetic container flow according to all the information stored in the database so far. This triggers a multistep procedure of generating vehicles and the containers which are delivered or picked up by the vehicles. - More is described in the Section + This process is described in the Section `Data Generation Process `_. The invocation of this method overwrites any already existent data in the database. Consider checking for diff --git a/conflowgen/api/container_length_distribution_manager.py b/conflowgen/api/container_length_distribution_manager.py index 8941a281..9027916c 100644 --- a/conflowgen/api/container_length_distribution_manager.py +++ b/conflowgen/api/container_length_distribution_manager.py @@ -37,6 +37,7 @@ def set_container_length_distribution( """ sanitized_distribution = self._normalize_and_validate_distribution_without_dependent_variables( container_lengths, - ContainerLength + ContainerLength, + values_are_frequencies=True ) self.container_length_repository.set_distribution(sanitized_distribution) diff --git a/conflowgen/api/container_storage_requirement_distribution_manager.py b/conflowgen/api/container_storage_requirement_distribution_manager.py index 72905aaa..8643bfd6 100644 --- a/conflowgen/api/container_storage_requirement_distribution_manager.py +++ b/conflowgen/api/container_storage_requirement_distribution_manager.py @@ -37,6 +37,7 @@ def set_storage_requirement_distribution( sanitized_distribution = 
self._normalize_and_validate_distribution_with_one_dependent_variable( storage_requirements, ContainerLength, - StorageRequirement + StorageRequirement, + values_are_frequencies=True ) self.storage_requirement_repository.set_distribution(sanitized_distribution) diff --git a/conflowgen/api/container_weight_distribution_manager.py b/conflowgen/api/container_weight_distribution_manager.py index b71428f8..3c2501ab 100644 --- a/conflowgen/api/container_weight_distribution_manager.py +++ b/conflowgen/api/container_weight_distribution_manager.py @@ -37,6 +37,7 @@ def set_container_weight_distribution( sanitized_distribution = self._normalize_and_validate_distribution_with_one_dependent_variable( container_weights, ContainerLength, - int + int, + values_are_frequencies=True ) self.container_weight_repository.set_distribution(sanitized_distribution) diff --git a/conflowgen/api/mode_of_transport_distribution_manager.py b/conflowgen/api/mode_of_transport_distribution_manager.py index 4bc8d62c..890c36a0 100644 --- a/conflowgen/api/mode_of_transport_distribution_manager.py +++ b/conflowgen/api/mode_of_transport_distribution_manager.py @@ -39,7 +39,8 @@ def set_mode_of_transport_distribution( sanitized_distribution = self._normalize_and_validate_distribution_with_one_dependent_variable( distribution, ModeOfTransport, - ModeOfTransport + ModeOfTransport, + values_are_frequencies=True ) self.mode_of_transport_distribution_repository.set_mode_of_transport_distributions( sanitized_distribution diff --git a/conflowgen/api/truck_arrival_distribution_manager.py b/conflowgen/api/truck_arrival_distribution_manager.py index 31d746d6..0d14f455 100644 --- a/conflowgen/api/truck_arrival_distribution_manager.py +++ b/conflowgen/api/truck_arrival_distribution_manager.py @@ -39,6 +39,7 @@ def set_truck_arrival_distribution(self, distribution: Dict[int, float]) -> None """ sanitized_distribution = self._normalize_and_validate_distribution_without_dependent_variables( distribution, - int + int, + 
values_are_frequencies=True ) self.truck_arrival_distribution_repository.set_distribution(sanitized_distribution) diff --git a/conflowgen/application/data_types/export_file_format.py b/conflowgen/application/data_types/export_file_format.py index 86af9621..5acce731 100644 --- a/conflowgen/application/data_types/export_file_format.py +++ b/conflowgen/application/data_types/export_file_format.py @@ -22,7 +22,7 @@ class ExportFileFormat(enum.Enum): However, this file format comes with known limitations listed at https://support.microsoft.com/en-us/office/excel-specifications-and-limits-1672b34d-7043-467e-8e27-269d656771c3. On January 4th, 2022, the known maximum number of rows is 1,048,576. - Thus, if e.g. 1.1 million containers are generated, opening this xlsx file in Excel is not supported by the + Thus, if, e.g., 1.1 million containers are generated, opening this xlsx file in Excel is not supported by the specifications. """ diff --git a/conflowgen/application/models/container_flow_generation_properties.py b/conflowgen/application/models/container_flow_generation_properties.py index ef7b069d..6fb182b4 100644 --- a/conflowgen/application/models/container_flow_generation_properties.py +++ b/conflowgen/application/models/container_flow_generation_properties.py @@ -1,13 +1,8 @@ import datetime -from peewee import AutoField, CharField, DateField, TimestampField, DateTimeField, IntegerField, FloatField +from peewee import AutoField, CharField, DateField, TimestampField, DateTimeField, FloatField -from conflowgen.domain_models.seeders import DEFAULT_MAXIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS, \ - DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS, \ - DEFAULT_MAXIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS, \ - DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS, \ - DEFAULT_MAXIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS, \ - DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS, DEFAULT_TRANSPORTATION_BUFFER +from 
conflowgen.domain_models.seeders import DEFAULT_TRANSPORTATION_BUFFER from conflowgen.domain_models.base_model import BaseModel @@ -41,25 +36,6 @@ class ContainerFlowGenerationProperties(BaseModel): help_text="The date these properties has been last updated" ) - maximum_dwell_time_of_import_containers_in_hours = IntegerField( - default=DEFAULT_MAXIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS - ) - - minimum_dwell_time_of_import_containers_in_hours = IntegerField( - default=DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS - ) - maximum_dwell_time_of_export_containers_in_hours = IntegerField( - default=DEFAULT_MAXIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS, - ) - minimum_dwell_time_of_export_containers_in_hours = IntegerField( - default=DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS, - ) - maximum_dwell_time_of_transshipment_containers_in_hours = IntegerField( - default=DEFAULT_MAXIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS, - ) - minimum_dwell_time_of_transshipment_containers_in_hours = IntegerField( - default=DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS, - ) transportation_buffer = FloatField( default=DEFAULT_TRANSPORTATION_BUFFER, ) diff --git a/conflowgen/application/repositories/container_flow_generation_properties_repository.py b/conflowgen/application/repositories/container_flow_generation_properties_repository.py index 93109737..57aa5b18 100644 --- a/conflowgen/application/repositories/container_flow_generation_properties_repository.py +++ b/conflowgen/application/repositories/container_flow_generation_properties_repository.py @@ -15,25 +15,6 @@ class MinimumNotStrictlySmallerThanMaximumException(Exception): class ContainerFlowGenerationPropertiesRepository: - @staticmethod - def _verify(properties) -> None: - if properties.end_date < properties.start_date: - raise InvalidTimeRangeException( - f"start date '{properties.start_date}' is later than end date '{properties.end_date}'" - ) - if 
(properties.minimum_dwell_time_of_import_containers_in_hours - >= properties.maximum_dwell_time_of_import_containers_in_hours): - raise MinimumNotStrictlySmallerThanMaximumException( - f"{properties.minimum_dwell_time_of_import_containers_in_hours} " - f">= {properties.maximum_dwell_time_of_import_containers_in_hours}" - ) - if (properties.minimum_dwell_time_of_export_containers_in_hours - >= properties.maximum_dwell_time_of_export_containers_in_hours): - raise MinimumNotStrictlySmallerThanMaximumException( - f"{properties.minimum_dwell_time_of_export_containers_in_hours} " - f">= {properties.maximum_dwell_time_of_export_containers_in_hours}" - ) - @staticmethod def get_container_flow_generation_properties() -> ContainerFlowGenerationProperties: all_properties = ContainerFlowGenerationProperties.select().execute() @@ -49,7 +30,8 @@ def get_container_flow_generation_properties() -> ContainerFlowGenerationPropert @classmethod def set_container_flow_generation_properties(cls, properties: ContainerFlowGenerationProperties) -> None: - cls._verify(properties) + if properties.start_date >= properties.end_date: + raise InvalidTimeRangeException() properties.save() number_properties_entries: int = ContainerFlowGenerationProperties().select().count() if number_properties_entries > 1: diff --git a/conflowgen/application/services/export_container_flow_service.py b/conflowgen/application/services/export_container_flow_service.py index 8cab9662..c751aa77 100644 --- a/conflowgen/application/services/export_container_flow_service.py +++ b/conflowgen/application/services/export_container_flow_service.py @@ -280,10 +280,11 @@ def export( self.logger.info(f"Creating folder at {path_to_target_folder}") os.mkdir(path_to_target_folder) - self.logger.info(f"Converting SQL database into file format '.{file_format.value}'") + file_format_str_repr = str(file_format.value) + self.logger.info(f"Converting SQL database into file format '.{file_format_str_repr}'") dfs = 
self._convert_sql_database_to_pandas_dataframe() for file_name, df in dfs.items(): - full_file_name = file_name + "." + file_format.value + full_file_name = file_name + "." + file_format_str_repr path_to_file = os.path.join( path_to_target_folder, full_file_name diff --git a/conflowgen/database_connection/create_tables.py b/conflowgen/database_connection/create_tables.py index c322a1d4..3a3d70f2 100644 --- a/conflowgen/database_connection/create_tables.py +++ b/conflowgen/database_connection/create_tables.py @@ -6,6 +6,8 @@ from conflowgen.domain_models.arrival_information import TruckArrivalInformationForPickup, \ TruckArrivalInformationForDelivery from conflowgen.domain_models.container import Container +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution from conflowgen.domain_models.distribution_models.container_length_distribution import ContainerLengthDistribution from conflowgen.domain_models.distribution_models.container_weight_distribution import ContainerWeightDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution @@ -37,7 +39,8 @@ def create_tables(sql_db_connection: peewee.Database) -> peewee.Database: TruckArrivalDistribution, TruckArrivalInformationForPickup, TruckArrivalInformationForDelivery, - StorageRequirementDistribution + StorageRequirementDistribution, + ContainerDwellTimeDistribution ]) for table_with_index in ( Destination, diff --git a/conflowgen/descriptive_datatypes/__init__.py b/conflowgen/descriptive_datatypes/__init__.py index 732c64c9..634e250a 100644 --- a/conflowgen/descriptive_datatypes/__init__.py +++ b/conflowgen/descriptive_datatypes/__init__.py @@ -3,7 +3,7 @@ from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -class TransshipmentAndHinterlandComparison(NamedTuple): +class TransshipmentAndHinterlandSplit(NamedTuple): """ This tuple keeps track of how 
much of the capacity is transshipment (i.e., dropped off and picked up by a vessel) and how much is hinterland (i.e., either dropped off or picked up by a vehicle that is not a vessel, e.g. a train diff --git a/conflowgen/domain_models/arrival_information.py b/conflowgen/domain_models/arrival_information.py index eb850d12..7a7d8c9a 100644 --- a/conflowgen/domain_models/arrival_information.py +++ b/conflowgen/domain_models/arrival_information.py @@ -12,7 +12,8 @@ class TruckArrivalInformationForPickup(BaseModel): null=True, help_text="At the time of berthing, do we know when the container will be picked up? " "null means that is not the case, otherwise the time is provided. " - "This information could be used for allocating a good container slot.") + "This information could be used for allocating a good container slot." + ) planned_container_pickup_time_after_initial_storage = DateTimeField( null=True, help_text="Some time after the container is stored in the yard, " @@ -34,7 +35,9 @@ class TruckArrivalInformationForDelivery(BaseModel): null=True, help_text="Terminals often allow trucks only to deliver a container a fixed timerange before " "its scheduled departure from the terminal by deep sea vessel or feeder. " - "This information could be used for yard template planning, i.e. reserving container slots.") + "This information could be used for yard template planning, i.e. reserving container slots." + ) realized_container_delivery_time = DateTimeField( null=False, - help_text="At this time, the container is finally delivered.") + help_text="At this time, the container is finally delivered." 
+ ) diff --git a/conflowgen/domain_models/container.py b/conflowgen/domain_models/container.py index 7b405564..6eb62f44 100644 --- a/conflowgen/domain_models/container.py +++ b/conflowgen/domain_models/container.py @@ -84,9 +84,9 @@ class Container(BaseModel): def __repr__(self): return "" diff --git a/conflowgen/domain_models/data_types/container_length.py b/conflowgen/domain_models/data_types/container_length.py index cbebc20f..d6530cf5 100644 --- a/conflowgen/domain_models/data_types/container_length.py +++ b/conflowgen/domain_models/data_types/container_length.py @@ -41,6 +41,7 @@ def __str__(self) -> str: The textual representation is e.g. '20 feet' instead of '' so it is easier to read in the logs. """ + # noinspection PyTypeChecker if self.value > 0: return f"{self.value} feet" return "other" diff --git a/conflowgen/domain_models/data_types/storage_requirement.py b/conflowgen/domain_models/data_types/storage_requirement.py index 6af87798..113e052e 100644 --- a/conflowgen/domain_models/data_types/storage_requirement.py +++ b/conflowgen/domain_models/data_types/storage_requirement.py @@ -9,11 +9,14 @@ class StorageRequirement(enum.Enum): A container is stored in different areas of the yard depending on its requirements. """ - empty = "empty" # doc: An empty container is stored in an empty container yard. + empty = "empty" + """An empty container is stored in an empty container yard. + """ standard = "standard" """A standard container is stored in the full container yard and makes up most of the containers passing through a - container terminal.""" + container terminal. + """ reefer = "reefer" """A reefer container requires electricity (i.e., a reefer plug) to keep the inner temperature on a low level. @@ -21,7 +24,15 @@ class StorageRequirement(enum.Enum): dangerous_goods = "dangerous_goods" """A dangerous goods container needs a specially prepared storage area so they do not constitute a major hazard to - health and environment. 
These are also sometimes referred to as IMO containers.""" + health and environment. + These are also sometimes referred to as IMO containers. + """ + + @classmethod + def get_full_containers(cls): + """All containers that are not empty containers are also referred to as full containers. + """ + return {cls.standard, cls.reefer, cls.dangerous_goods} def __str__(self): """ diff --git a/conflowgen/domain_models/distribution_models/container_dwell_time_distribution.py b/conflowgen/domain_models/distribution_models/container_dwell_time_distribution.py new file mode 100644 index 00000000..a2511ea1 --- /dev/null +++ b/conflowgen/domain_models/distribution_models/container_dwell_time_distribution.py @@ -0,0 +1,26 @@ +from peewee import FloatField, CompositeKey, TextField + +from conflowgen.domain_models.base_model import BaseModel +from conflowgen.domain_models.field_types.mode_of_transport import ModeOfTransportField +from conflowgen.domain_models.field_types.storage_requirement import StorageRequirementField + + +class ContainerDwellTimeDistribution(BaseModel): + """The distribution of how long the container remains in the yard.""" + + # The key: The distribution depends on which vehicle delivers the container, which picks it up, and the type + delivered_by = ModeOfTransportField(null=False) + picked_up_by = ModeOfTransportField(null=False) + storage_requirement = StorageRequirementField(null=False) + + # Clipping: Allows to avoid extreme and unreasonable values + minimum_number_of_hours = FloatField(default=0) + maximum_number_of_hours = FloatField(default=-1) + + # Describing the actual distribution + distribution_name = TextField(null=False) + average_number_of_hours = FloatField(null=False) + variance = FloatField(null=True) + + class Meta: + primary_key = CompositeKey('delivered_by', 'picked_up_by', 'storage_requirement') diff --git a/conflowgen/domain_models/distribution_repositories/__init__.py b/conflowgen/domain_models/distribution_repositories/__init__.py 
index e37811b3..afff7305 100644 --- a/conflowgen/domain_models/distribution_repositories/__init__.py +++ b/conflowgen/domain_models/distribution_repositories/__init__.py @@ -5,7 +5,7 @@ logger = logging.getLogger("conflowgen") -def normalize_distribution_with_no_dependent_variable( +def ensure_all_frequencies_sum_up_to_one( distribution: Dict[Any, float], context: Optional[Any] = None ) -> Dict[Any, float]: @@ -21,13 +21,48 @@ def normalize_distribution_with_no_dependent_variable( return normalized_distribution +def normalize_distribution_with_no_dependent_variable( + distribution: Dict[Any, float], + values_are_frequencies: bool, + context: Optional[Any] = None +) -> Dict[Any, Any]: + if values_are_frequencies: + return ensure_all_frequencies_sum_up_to_one( + distribution, + context + ) + return distribution + + def normalize_distribution_with_one_dependent_variable( - distributions: Dict[Any, Dict[Any, float]] + distributions: Dict[Any, Dict[Any, float]], + values_are_frequencies: bool, + context: Optional[Any] = None ) -> Dict[Any, Dict[Any, float]]: normalized_distributions = {} for first_level_key, second_level_distribution in distributions.items(): + adapted_context = str(first_level_key) + if context: + adapted_context = f"{context} : " + adapted_context normalized_second_level_distribution = normalize_distribution_with_no_dependent_variable( - second_level_distribution, context=first_level_key + second_level_distribution, + values_are_frequencies=values_are_frequencies, + context=adapted_context + ) + normalized_distributions[first_level_key] = normalized_second_level_distribution + return normalized_distributions + + +def normalize_distribution_with_two_dependent_variables( + distributions: Dict[Any, Dict[Any, Dict[Any, float]]], + values_are_frequencies: bool +) -> Dict[Any, Dict[Any, Dict[Any, float]]]: + normalized_distributions = {} + for first_level_key, second_level_distribution in distributions.items(): + normalized_second_level_distribution = 
normalize_distribution_with_one_dependent_variable( + second_level_distribution, + context=first_level_key, + values_are_frequencies=values_are_frequencies ) normalized_distributions[first_level_key] = normalized_second_level_distribution return normalized_distributions diff --git a/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py b/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py new file mode 100644 index 00000000..839489a1 --- /dev/null +++ b/conflowgen/domain_models/distribution_repositories/container_dwell_time_distribution_repository.py @@ -0,0 +1,75 @@ +from typing import Dict, Any + +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_validators import validate_distribution_with_two_dependent_variables +from conflowgen.tools.continuous_distribution import ContinuousDistribution, ClippedLogNormal + + +class ContainerDwellTimeDistributionRepository: + + @staticmethod + def _get_distribution_entry( + delivered_by: ModeOfTransport, + picked_up_by: ModeOfTransport, + storage_requirement: StorageRequirement + ) -> ContinuousDistribution: + """Loads the distribution for the given transport direction and container type.""" + + entry: ContainerDwellTimeDistribution = ContainerDwellTimeDistribution.get( + (ContainerDwellTimeDistribution.delivered_by == delivered_by) + & (ContainerDwellTimeDistribution.picked_up_by == picked_up_by) + & (ContainerDwellTimeDistribution.storage_requirement == storage_requirement) + ) + if entry.distribution_name == "lognormal": + return ClippedLogNormal( + average=entry.average_number_of_hours, + variance=entry.variance, + 
minimum=entry.minimum_number_of_hours, + maximum=entry.maximum_number_of_hours, + unit="h" + ) + if entry.distribution_name: + raise RuntimeError(f"Distribution '{entry.distribution_name}' currently not supported") + raise RuntimeError(f"Distribution is not valid: {repr(entry.distribution_name)}") + + @classmethod + def get_distributions( + cls + ) -> Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, ContinuousDistribution]]]: + """Loads a distribution for which all fractions are normalized to sum up to 1 for each mode of transportation. + """ + distributions = { + mode_of_transport_i: { + mode_of_transport_j: { + storage_requirement: cls._get_distribution_entry( + mode_of_transport_i, mode_of_transport_j, storage_requirement + ) + for storage_requirement in StorageRequirement + } + for mode_of_transport_j in ModeOfTransport + } + for mode_of_transport_i in ModeOfTransport + } + return distributions + + @staticmethod + def set_distributions( + distributions: + Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, Dict[str, Any]]]] + ) -> None: + validate_distribution_with_two_dependent_variables( + distributions, ModeOfTransport, ModeOfTransport, StorageRequirement, values_are_frequencies=False + ) + ContainerDwellTimeDistribution.delete().execute() + for delivered_by, picked_up_by_distribution in distributions.items(): + for picked_up_by, storage_requirement_distribution in picked_up_by_distribution.items(): + for storage_requirement, distribution_properties in storage_requirement_distribution.items(): + ContainerDwellTimeDistribution.create( + delivered_by=delivered_by, + picked_up_by=picked_up_by, + storage_requirement=storage_requirement, + **distribution_properties + ) diff --git a/conflowgen/domain_models/distribution_repositories/mode_of_transport_distribution_repository.py b/conflowgen/domain_models/distribution_repositories/mode_of_transport_distribution_repository.py index b2b1e5de..d4e92696 100644 --- 
a/conflowgen/domain_models/distribution_repositories/mode_of_transport_distribution_repository.py +++ b/conflowgen/domain_models/distribution_repositories/mode_of_transport_distribution_repository.py @@ -47,7 +47,9 @@ def get_distribution(cls) -> Dict[ModeOfTransport, Dict[ModeOfTransport, float]] def set_mode_of_transport_distributions( distributions: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] ) -> None: - validate_distribution_with_one_dependent_variable(distributions, ModeOfTransport, ModeOfTransport) + validate_distribution_with_one_dependent_variable( + distributions, ModeOfTransport, ModeOfTransport, values_are_frequencies=True + ) ModeOfTransportDistribution.delete().execute() for delivered_by, picked_up_by_distribution in distributions.items(): for picked_up_by, fraction in picked_up_by_distribution.items(): diff --git a/conflowgen/domain_models/distribution_seeders/__init__.py b/conflowgen/domain_models/distribution_seeders/__init__.py index 9904be64..210c1416 100644 --- a/conflowgen/domain_models/distribution_seeders/__init__.py +++ b/conflowgen/domain_models/distribution_seeders/__init__.py @@ -1,6 +1,6 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder, \ container_weight_distribution_seeder, container_length_distribution_seeder, truck_arrival_distribution_seeder, \ - container_storage_requirement_distribution_seeder + container_storage_requirement_distribution_seeder, container_dwell_time_distribution_seeder def seed_all_distributions(**options) -> None: @@ -11,6 +11,7 @@ def seed_all_distributions(**options) -> None: **options: This allows to select different default values that are passed through to the seeder functions. 
""" mode_of_transport_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() container_weight_distribution_seeder.seed() container_length_distribution_seeder.seed() if "assume_tas" in options: diff --git a/conflowgen/domain_models/distribution_seeders/container_dwell_time_distribution_seeder.py b/conflowgen/domain_models/distribution_seeders/container_dwell_time_distribution_seeder.py new file mode 100644 index 00000000..337cbed4 --- /dev/null +++ b/conflowgen/domain_models/distribution_seeders/container_dwell_time_distribution_seeder.py @@ -0,0 +1,288 @@ +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ + ContainerDwellTimeDistributionRepository + + +DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS = 3 +""" +The minimum dwell time of import containers is the earliest time after the discharging and loading process has started +that a vehicle arrives from the hinterland and tries to pick up the container. +In practice, this is often determined by the IT system of the terminal operators which releases a container for being +picked up once the container is on the terminal (it has been successfully discharged). +The actual earliest feasible point is determined in the subsequent model which consumes the generated data because here +no sequence of discharge is determined, i.e., the container might be still on the vessel when the truck arrives. +Thus, this value must be checked for when using the synthetic data in, e.g., a simulation model or mathematical model. +""" + +DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS = 12 +""" +The minimum dwell time of export containers is the minimum time a container must reside on the terminal before the +vessel discharging and loading process starts. 
+This time is needed for, e.g., finalizing the stowage planning and avoiding that a container which is designated for a +vessel arrives shortly before vessel departure. +If the vehicle that delivers this container is waiting in a queue, actually the container might miss the vessel. +This cut-off is typically defined by the shipping company. +Here, as a simplification one cut-off period is used for all cases. +Both the time interval and the logic are inspired by expert interviews. +""" + +DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS = 3 +""" +The minimum dwell time for transshipment is the minimum time difference of arrival between two vessels. +This means that one vessel can request a container from another vessel if and only if the previous vessel has arrived +these *k* hours before the first one. +For short transshipment dwell times, it might result in a direct transfer from one vessel to the other without any +storage if the user decides to support such activities in their model (such as a simulation model or optimization +model). +""" + + +#: The container dwell time distribution is based on +#: :cite:p:`cto2021interview`. +#: The average container dwell times are taken from a report and reflect the reality at a given time for a specific +#: container terminal operator. 
+DEFAULT_AVERAGE_CONTAINER_DWELL_TIMES = { + + ModeOfTransport.truck: { + ModeOfTransport.truck: { + StorageRequirement.empty: 18.8, + **{ + container_type: 7.1 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.train: { + StorageRequirement.empty: 11.5, + **{ + container_type: 1.7 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.barge: { + StorageRequirement.empty: 7.4, + **{ + container_type: 10 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.feeder: { + StorageRequirement.empty: 13.0, + **{ + container_type: 3.58 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.deep_sea_vessel: { + StorageRequirement.empty: 13.4, + **{ + container_type: 6.5 + for container_type in StorageRequirement.get_full_containers() + } + }, + }, + + ModeOfTransport.train: { + ModeOfTransport.truck: { + StorageRequirement.empty: 9.5, + **{ + container_type: 2.9 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.train: { + StorageRequirement.empty: 8.3, + **{ + container_type: 12 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.barge: { + StorageRequirement.empty: 8.1, + **{ + container_type: 14.6 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.feeder: { + StorageRequirement.empty: 16, + **{ + container_type: 4.1 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.deep_sea_vessel: { + StorageRequirement.empty: 12.8, + **{ + container_type: 6.7 + for container_type in StorageRequirement.get_full_containers() + } + }, + }, + + ModeOfTransport.barge: { + ModeOfTransport.truck: { + StorageRequirement.empty: 9.6, + **{ + container_type: 8 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.train: { + 
StorageRequirement.empty: 8.2, + **{ + container_type: 10 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.barge: { + StorageRequirement.empty: 11.6, + **{ + container_type: 4.5 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.feeder: { + StorageRequirement.empty: 14.4, + **{ + container_type: 4.2 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.deep_sea_vessel: { + StorageRequirement.empty: 17.6, + **{ + container_type: 6.8 + for container_type in StorageRequirement.get_full_containers() + } + }, + }, + + ModeOfTransport.feeder: { + ModeOfTransport.truck: { + StorageRequirement.empty: 13.6, + **{ + container_type: 3.1 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.train: { + StorageRequirement.empty: 13.6, + **{ + container_type: 4 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.barge: { + StorageRequirement.empty: 8.2, + **{ + container_type: 2.4 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.feeder: { + StorageRequirement.empty: 10.6, + **{ + container_type: 3.8 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.deep_sea_vessel: { + StorageRequirement.empty: 14.6, + **{ + container_type: 8.3 + for container_type in StorageRequirement.get_full_containers() + } + }, + }, + + ModeOfTransport.deep_sea_vessel: { + ModeOfTransport.truck: { + StorageRequirement.empty: 12.2, + **{ + container_type: 3 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.train: { + StorageRequirement.empty: 11, + **{ + container_type: 3 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.barge: { + StorageRequirement.empty: 11.2, + **{ + container_type: 2.5 + for container_type in 
StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.feeder: { + StorageRequirement.empty: 14, + **{ + container_type: 4.3 + for container_type in StorageRequirement.get_full_containers() + } + }, + ModeOfTransport.deep_sea_vessel: { + StorageRequirement.empty: 27.7, + **{ + container_type: 9.3 + for container_type in StorageRequirement.get_full_containers() + } + }, + } +} + + +_export = { + ModeOfTransport.truck: 0, + ModeOfTransport.train: 0, + ModeOfTransport.barge: 0, + ModeOfTransport.feeder: DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS, + ModeOfTransport.deep_sea_vessel: DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS +} + +_import_or_transshipment = { + ModeOfTransport.truck: DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS, + ModeOfTransport.train: DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS, + ModeOfTransport.barge: DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS, + ModeOfTransport.feeder: DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS, + ModeOfTransport.deep_sea_vessel: DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS +} + +#: The minimum container dwell times are an absolute value in hours. +#: These are composed into the origin-destination matrix. 
+DEFAULT_MINIMUM_DWELL_TIMES_IN_HOURS = { + ModeOfTransport.truck: _export, + ModeOfTransport.train: _export, + ModeOfTransport.barge: _export, + ModeOfTransport.feeder: _import_or_transshipment, + ModeOfTransport.deep_sea_vessel: _import_or_transshipment +} + + +DEFAULT_CONTAINER_DWELL_TIME_DISTRIBUTIONS = { + from_vehicle: { + to_vehicle: { + requirement: { + "distribution_name": + "lognormal", + "average_number_of_hours": + DEFAULT_AVERAGE_CONTAINER_DWELL_TIMES[from_vehicle][to_vehicle][requirement] * 24, + "variance": + DEFAULT_AVERAGE_CONTAINER_DWELL_TIMES[from_vehicle][to_vehicle][requirement] * 24 * 25, + "maximum_number_of_hours": + DEFAULT_AVERAGE_CONTAINER_DWELL_TIMES[from_vehicle][to_vehicle][requirement] * 24 * 3, + "minimum_number_of_hours": + DEFAULT_MINIMUM_DWELL_TIMES_IN_HOURS[from_vehicle][to_vehicle] + } for requirement in StorageRequirement + } for to_vehicle in ModeOfTransport + } for from_vehicle in ModeOfTransport +} + + +def seed(): + repository = ContainerDwellTimeDistributionRepository() + repository.set_distributions(DEFAULT_CONTAINER_DWELL_TIME_DISTRIBUTIONS) diff --git a/conflowgen/domain_models/distribution_seeders/container_length_distribution_seeder.py b/conflowgen/domain_models/distribution_seeders/container_length_distribution_seeder.py index a4088926..e005052b 100644 --- a/conflowgen/domain_models/distribution_seeders/container_length_distribution_seeder.py +++ b/conflowgen/domain_models/distribution_seeders/container_length_distribution_seeder.py @@ -22,7 +22,7 @@ } -def seed(): +def seed() -> None: ContainerLengthDistributionRepository().set_distribution( DEFAULT_CONTAINER_LENGTH_FREQUENCIES ) diff --git a/conflowgen/domain_models/distribution_seeders/container_weight_distribution_seeder.py b/conflowgen/domain_models/distribution_seeders/container_weight_distribution_seeder.py index e6afb247..6d6f59d9 100644 --- a/conflowgen/domain_models/distribution_seeders/container_weight_distribution_seeder.py +++ 
b/conflowgen/domain_models/distribution_seeders/container_weight_distribution_seeder.py @@ -50,7 +50,7 @@ } -def seed(): +def seed() -> None: for container_length, distribution in DEFAULT_CONTAINER_WEIGHT_DISTRIBUTION.items(): for container_weight_category, fraction in distribution.items(): ContainerWeightDistribution.create( diff --git a/conflowgen/domain_models/distribution_validators.py b/conflowgen/domain_models/distribution_validators.py index d1ea6a05..5219d4b6 100644 --- a/conflowgen/domain_models/distribution_validators.py +++ b/conflowgen/domain_models/distribution_validators.py @@ -167,7 +167,7 @@ def _check_all_required_keys_are_set_in_distribution( def _check_value_range_of_frequencies_in_distribution( - distribution: Dict[enum.Enum, float], + distribution: Dict[enum.Enum, Any], context: Optional[str] = None ) -> None: sum_of_probabilities = 0 @@ -193,19 +193,22 @@ def _check_value_range_of_frequencies_in_distribution( def validate_distribution_with_no_dependent_variables( - distribution: Dict[enum.Enum | int, float], - key_type: Type[enum.Enum] | Type[int] + distribution: Dict[enum.Enum | int, Any], + key_type: Type[enum.Enum] | Type[int], + values_are_frequencies: bool ) -> Dict[enum.Enum | int, float]: sanitized_distribution = _check_all_required_keys_are_set_in_distribution(distribution, key_type) - _check_value_range_of_frequencies_in_distribution(sanitized_distribution) + if values_are_frequencies: + _check_value_range_of_frequencies_in_distribution(sanitized_distribution) return sanitized_distribution def validate_distribution_with_one_dependent_variable( - distribution: Dict[enum.Enum, Dict[enum.Enum | int, float]], + distribution: Dict[enum.Enum, Dict[enum.Enum | int, Any]], key_type_of_independent_variable: Type[enum.Enum], - key_type_of_dependent_variable: Type[enum.Enum] | Type[int] -) -> Dict[enum.Enum, Dict[enum.Enum | int, float]]: + key_type_of_dependent_variable: Type[enum.Enum] | Type[int], + values_are_frequencies: bool +) -> 
Dict[enum.Enum, Dict[enum.Enum | int, Any]]: sanitized_distribution = _check_all_required_keys_are_set_in_distribution( distribution, key_type_of_independent_variable ) @@ -215,9 +218,46 @@ def validate_distribution_with_one_dependent_variable( key_type_of_dependent_variable, context=_format_dependent_variable(dependent_variable) ) - _check_value_range_of_frequencies_in_distribution( - sanitized_distribution_of_dependent_variable, - context=_format_dependent_variable(dependent_variable) - ) + if values_are_frequencies: + _check_value_range_of_frequencies_in_distribution( + sanitized_distribution_of_dependent_variable, + context=_format_dependent_variable(dependent_variable) + ) sanitized_distribution[dependent_variable] = sanitized_distribution_of_dependent_variable return sanitized_distribution + + +def validate_distribution_with_two_dependent_variables( + distribution: Dict[enum.Enum, Dict[enum.Enum, Dict[enum.Enum | int, Any]]], + key_type_of_independent_variable: Type[enum.Enum], + key_type_of_first_dependent_variable: Type[enum.Enum] | Type[int], + key_type_of_second_dependent_variable: Type[enum.Enum] | Type[int], + values_are_frequencies: bool +) -> Dict[enum.Enum, Dict[enum.Enum, Dict[enum.Enum | int, Any]]]: + sanitized_top_level_distribution = _check_all_required_keys_are_set_in_distribution( + distribution, key_type_of_independent_variable + ) + for first_dependent_variable, distribution_of_first_dependent_variable in sanitized_top_level_distribution.items(): + sanitized_distribution_of_first_dependent_variable = _check_all_required_keys_are_set_in_distribution( + distribution_of_first_dependent_variable, + key_type_of_first_dependent_variable, + context=_format_dependent_variable(first_dependent_variable) + ) + sanitized_top_level_distribution[first_dependent_variable] = sanitized_distribution_of_first_dependent_variable + + for second_dependent_variable, distribution_of_second_dependent_variable in \ + 
sanitized_distribution_of_first_dependent_variable.items(): + sanitized_distribution_of_second_dependent_variable = _check_all_required_keys_are_set_in_distribution( + distribution_of_second_dependent_variable, + key_type_of_second_dependent_variable, + context=_format_dependent_variable(second_dependent_variable) + ) + if values_are_frequencies: + _check_value_range_of_frequencies_in_distribution( + sanitized_distribution_of_second_dependent_variable, + context=_format_dependent_variable(second_dependent_variable) + ) + sanitized_top_level_distribution[first_dependent_variable][second_dependent_variable] = \ + sanitized_distribution_of_second_dependent_variable + + return sanitized_top_level_distribution diff --git a/conflowgen/domain_models/seeders/__init__.py b/conflowgen/domain_models/seeders/__init__.py index 310ad2bc..6edc2035 100644 --- a/conflowgen/domain_models/seeders/__init__.py +++ b/conflowgen/domain_models/seeders/__init__.py @@ -1,61 +1,4 @@ -DEFAULT_MAXIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS = (3 * 24) -""" -The maximum dwell time for import containers is set by the container terminal operators. -In practice, a later pickup would typically result in additional storage charges and is thus avoided by supply chain -partners. -The default value of 3 days is inspired by the pricing policy of HHLA as described in -:cite:p:`hhla2021quaytariff`. -""" - -DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS = 3 -""" -The minimum dwell time is the earliest time after the discharging and loading process has started that a truck tries to -pick up the container. -In practice, this is often determined by the IT system of the terminal operators which releases a container for being -picked up once the container is on the terminal (it has been successfully been discharged). -The actual earliest feasible point is determined in the subsequent model which consumes the generated data because here -no sequence of discharge is determined, i.e. 
the container might be still on the vessel when the truck arrives. -Thus, this value must be checked for when using the synthetic data in e.g. a simulation model or mathematical model. -""" - -DEFAULT_MAXIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS = (5 * 24) -""" -The maximum dwell time for export containers is set by the container terminal. -In practice, typically trucks are simply not allowed to deliver the container earlier than this. -The default value of 5 days is inspired by the pricing policy of HHLA as described in -:cite:p:`hhla2021quaytariff`. -""" - -DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS = 12 -""" -The minimum dwell time is the minimum time a container must reside on the terminal before the vessel discharging and -loading process starts. -This time is needed for e.g. finalizing the stowage planning and avoiding that a container which is designated for a -vessel arrives shortly before vessel departure. -If the vehicle that delivers this container is waiting in a queue, actually the container might miss the vessel. -This cut-off is typically defined by the shipping company. -Here, as a simplification one cut-off period is used for all cases. -Both the time intervall and the logic are inspired by expert interviews. -""" - -DEFAULT_MAXIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS = (7 * 24) -""" -The maximum dwell time for transshipment is the maximum time difference of arrival between two vessels. -The value of 7 days is inspired by -:cite:p:`hhla2021quaytariff`. -""" - -DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS = 3 -""" -The minimum dwell time for transshipment is the minimum time difference of arrival between two vessels. -This means that one vessel can request a container from another vessel if and only if the previous vessel has arrived -these k hours before the first one. 
-For short transshipment dwell times, it might result in a direct transfer from one vessel to the other without any -storage if the user decides to support such activities in their model (such as a simulation model or optimization -model). -""" - DEFAULT_TRANSPORTATION_BUFFER = 0.2 """ The export buffer describes how much more a vehicle typically takes from the terminal compared to the amount of diff --git a/conflowgen/flow_generator/abstract_truck_for_containers_manager.py b/conflowgen/flow_generator/abstract_truck_for_containers_manager.py new file mode 100644 index 00000000..c0f27099 --- /dev/null +++ b/conflowgen/flow_generator/abstract_truck_for_containers_manager.py @@ -0,0 +1,134 @@ +from __future__ import annotations +import abc +import logging +import math +import random +from typing import List, Tuple, Union, Optional, Dict + +from conflowgen.tools.weekly_distribution import WeeklyDistribution +from ..domain_models.data_types.storage_requirement import StorageRequirement +from ..domain_models.container import Container +from ..domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ + ContainerDwellTimeDistributionRepository +from ..domain_models.distribution_repositories.truck_arrival_distribution_repository import \ + TruckArrivalDistributionRepository +from ..domain_models.factories.vehicle_factory import VehicleFactory +from ..domain_models.data_types.mode_of_transport import ModeOfTransport +from ..tools.continuous_distribution import ContinuousDistribution, multiply_discretized_probability_densities + + +class AbstractTruckForContainersManager(abc.ABC): + def __init__(self): + self.logger = logging.getLogger("conflowgen") + + self.container_dwell_time_distribution_repository = ContainerDwellTimeDistributionRepository() + self.container_dwell_time_distributions: \ + Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, ContinuousDistribution]]] | None \ + = None + + 
self.truck_arrival_distribution_repository = TruckArrivalDistributionRepository() + + self.truck_arrival_distributions: \ + Dict[ModeOfTransport, Dict[StorageRequirement, WeeklyDistribution | None]] = { + vehicle: { + storage_requirement: None + for storage_requirement in StorageRequirement + } for vehicle in ModeOfTransport + } + + self.vehicle_factory = VehicleFactory() + self.time_window_length_in_hours: Optional[int] = None + + @abc.abstractmethod + def _get_container_dwell_time_distribution( + self, + vehicle: ModeOfTransport, + storage_requirement: StorageRequirement + ) -> ContinuousDistribution: + pass + + @abc.abstractmethod + def is_reversed(self) -> bool: + pass + + def reload_distributions( + self + ) -> None: + # noinspection PyTypeChecker + hour_of_the_week_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]] = \ + list(self.truck_arrival_distribution_repository.get_distribution().items()) + self.time_window_length_in_hours = hour_of_the_week_fraction_pairs[1][0] - hour_of_the_week_fraction_pairs[0][0] + + self.container_dwell_time_distributions = self.container_dwell_time_distribution_repository.get_distributions() + self._update_truck_arrival_distributions(hour_of_the_week_fraction_pairs) + + def _update_truck_arrival_distributions( + self, + hour_of_the_week_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]] + ) -> None: + for vehicle in ModeOfTransport: + for storage_requirement in StorageRequirement: + container_dwell_time_distribution = self._get_container_dwell_time_distribution( + vehicle, storage_requirement + ) + + # only work with full hours + container_dwell_time_distribution.minimum = int(math.ceil(container_dwell_time_distribution.minimum)) + container_dwell_time_distribution.maximum = int(math.floor(container_dwell_time_distribution.maximum)) + + earliest_possible_truck_slot_in_hours = container_dwell_time_distribution.minimum + last_possible_truck_slot_in_hours_after_arrival = \ + 
container_dwell_time_distribution.maximum - 1 # because the latest slot is reset + number_of_feasible_truck_slots = ( + last_possible_truck_slot_in_hours_after_arrival + - earliest_possible_truck_slot_in_hours + ) + + self.truck_arrival_distributions[vehicle][storage_requirement] = WeeklyDistribution( + hour_fraction_pairs=hour_of_the_week_fraction_pairs, + considered_time_window_in_hours=number_of_feasible_truck_slots, + minimum_dwell_time_in_hours=earliest_possible_truck_slot_in_hours, + is_reversed=self.is_reversed(), + context=f"{self.__class__.__name__} : {vehicle} : {storage_requirement}" + ) + + def _get_distributions( + self, + container: Container + ) -> tuple[ContinuousDistribution, WeeklyDistribution | None]: + + container_dwell_time_distribution = self.container_dwell_time_distributions[ + container.delivered_by][container.picked_up_by][container.storage_requirement] + + truck_arrival_distributions = self._get_truck_arrival_distributions(container) + truck_arrival_distribution = truck_arrival_distributions[container.storage_requirement] + + return container_dwell_time_distribution, truck_arrival_distribution + + @abc.abstractmethod + def _get_truck_arrival_distributions(self, container: Container) -> Dict[StorageRequirement, WeeklyDistribution]: + pass + + @staticmethod + def _get_time_window_of_truck_arrival( + container_dwell_time_distribution: ContinuousDistribution, + truck_arrival_distribution_slice: Dict[int, float] + ) -> int: + """ + Returns: + Number of hours after the earliest possible slot + """ + time_windows_for_truck_arrival = list(truck_arrival_distribution_slice.keys()) + truck_arrival_probabilities = list(truck_arrival_distribution_slice.values()) + container_dwell_time_probabilities = container_dwell_time_distribution.get_probabilities( + time_windows_for_truck_arrival + ) + total_probabilities = multiply_discretized_probability_densities( + truck_arrival_probabilities, + container_dwell_time_probabilities + ) + selected_time_window = 
random.choices( + population=time_windows_for_truck_arrival, + weights=total_probabilities + )[0] + return selected_time_window diff --git a/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py b/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py index 8386bdde..60b41987 100644 --- a/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py +++ b/conflowgen/flow_generator/allocate_space_for_containers_delivered_by_truck_service.py @@ -119,6 +119,7 @@ def allocate(self) -> None: if sum(all_free_capacities) == 0: # if there is no free vehicles left of a certain type... del truck_to_other_vehicle_distribution[vehicle_type] # drop this type and... continue # try again + vehicle: AbstractLargeScheduledVehicle = random.choices( population=list(vehicle_distribution.keys()), weights=list(vehicle_distribution.values()) diff --git a/conflowgen/flow_generator/assign_destination_to_container_service.py b/conflowgen/flow_generator/assign_destination_to_container_service.py index f5fb077b..ef1f6951 100644 --- a/conflowgen/flow_generator/assign_destination_to_container_service.py +++ b/conflowgen/flow_generator/assign_destination_to_container_service.py @@ -18,9 +18,9 @@ class AssignDestinationToContainerService: def __init__(self): self.repository = ContainerDestinationDistributionRepository() self.distribution: Dict[Schedule, Dict[Destination, float]] | None = None - self.reload_distribution() + self.reload_distributions() - def reload_distribution(self): + def reload_distributions(self): self.distribution = self.repository.get_distribution() self.logger.debug("Loading destination distribution...") for schedule, distribution_for_schedule in self.distribution.items(): diff --git a/conflowgen/flow_generator/container_flow_generation_service.py b/conflowgen/flow_generator/container_flow_generation_service.py index 6403dee6..a652c19b 100644 --- 
a/conflowgen/flow_generator/container_flow_generation_service.py +++ b/conflowgen/flow_generator/container_flow_generation_service.py @@ -42,61 +42,22 @@ def _update_generation_properties_and_distributions(self): self.container_flow_end_date: datetime.date = container_flow_generation_properties.end_date assert self.container_flow_start_date < self.container_flow_end_date - self.minimum_dwell_time_of_import_containers_in_hours: int = container_flow_generation_properties \ - .minimum_dwell_time_of_import_containers_in_hours - self.maximum_dwell_time_of_import_containers_in_hours: int = container_flow_generation_properties \ - .maximum_dwell_time_of_import_containers_in_hours - assert (self.minimum_dwell_time_of_import_containers_in_hours - < self.maximum_dwell_time_of_import_containers_in_hours) - - self.minimum_dwell_time_of_export_containers_in_hours: int = container_flow_generation_properties \ - .minimum_dwell_time_of_export_containers_in_hours - self.maximum_dwell_time_of_export_containers_in_hours: int = container_flow_generation_properties \ - .maximum_dwell_time_of_export_containers_in_hours - assert (self.minimum_dwell_time_of_export_containers_in_hours - < self.maximum_dwell_time_of_export_containers_in_hours) - - self.minimum_dwell_time_of_transshipment_containers_in_hours: int = container_flow_generation_properties \ - .minimum_dwell_time_of_transshipment_containers_in_hours - self.maximum_dwell_time_of_transshipment_containers_in_hours: int = container_flow_generation_properties \ - .maximum_dwell_time_of_transshipment_containers_in_hours - assert (self.minimum_dwell_time_of_transshipment_containers_in_hours - < self.maximum_dwell_time_of_transshipment_containers_in_hours) - self.transportation_buffer: float = container_flow_generation_properties.transportation_buffer assert -1 < self.transportation_buffer self.large_scheduled_vehicle_for_onward_transportation_manager.reload_properties( - minimum_dwell_time_of_import_containers_in_hours= - 
self.minimum_dwell_time_of_import_containers_in_hours, - minimum_dwell_time_of_export_containers_in_hours= - self.minimum_dwell_time_of_export_containers_in_hours, - minimum_dwell_time_of_transshipment_containers_in_hours= - self.minimum_dwell_time_of_transshipment_containers_in_hours, - maximum_dwell_time_of_import_containers_in_hours= - self.maximum_dwell_time_of_import_containers_in_hours, - maximum_dwell_time_of_export_containers_in_hours= - self.maximum_dwell_time_of_export_containers_in_hours, - maximum_dwell_time_of_transshipment_containers_in_hours= - self.maximum_dwell_time_of_transshipment_containers_in_hours, transportation_buffer=self.transportation_buffer ) self.allocate_space_for_containers_delivered_by_truck_service.reload_distribution( transportation_buffer=self.transportation_buffer ) - self.truck_for_import_containers_manager.reload_distribution( - minimum_dwell_time_in_hours=self.minimum_dwell_time_of_import_containers_in_hours, - maximum_dwell_time_in_hours=self.maximum_dwell_time_of_import_containers_in_hours - ) - self.truck_for_export_containers_manager.reload_distribution( - minimum_dwell_time_in_hours=self.minimum_dwell_time_of_export_containers_in_hours, - maximum_dwell_time_in_hours=self.maximum_dwell_time_of_export_containers_in_hours - ) + self.truck_for_import_containers_manager.reload_distributions() + self.truck_for_export_containers_manager.reload_distributions() self.large_scheduled_vehicle_creation_service.reload_properties( container_flow_start_date=self.container_flow_start_date, container_flow_end_date=self.container_flow_end_date ) - self.assign_destination_to_container_service.reload_distribution() + self.assign_destination_to_container_service.reload_distributions() @staticmethod def clear_previous_container_flow(): diff --git a/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py b/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py index 
1b077df5..ba96fb4b 100644 --- a/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py +++ b/conflowgen/flow_generator/large_scheduled_vehicle_for_onward_transportation_manager.py @@ -1,18 +1,23 @@ from __future__ import annotations import datetime import logging +import math import random -from typing import Collection, Tuple, List +from typing import Collection, Tuple, List, Dict, Any from peewee import fn +from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForDelivery from ..domain_models.container import Container +from ..domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ + ContainerDwellTimeDistributionRepository from ..domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository from ..domain_models.data_types.mode_of_transport import ModeOfTransport from ..domain_models.repositories.schedule_repository import ScheduleRepository from ..domain_models.vehicle import AbstractLargeScheduledVehicle, LargeScheduledVehicle, Truck +from ..tools.continuous_distribution import ContinuousDistribution, multiply_discretized_probability_densities class LargeScheduledVehicleForOnwardTransportationManager: @@ -26,49 +31,22 @@ def __init__(self): self.number_assigned_containers = 0 self.number_not_assignable_containers = 0 - self.minimum_dwell_time_of_import_containers_in_hours = None - self.minimum_dwell_time_of_export_containers_in_hours = None - self.minimum_dwell_time_of_transshipment_containers_in_hours = None - self.maximum_dwell_time_of_import_containers_in_hours = None - self.maximum_dwell_time_of_export_containers_in_hours = None - self.maximum_dwell_time_of_transshipment_containers_in_hours = None + self.container_dwell_time_distribution_repository = ContainerDwellTimeDistributionRepository() + 
self.container_dwell_time_distributions: \ + Dict[ModeOfTransport, Dict[ModeOfTransport, Dict[StorageRequirement, ContinuousDistribution]]] | None \ + = None def reload_properties( self, - minimum_dwell_time_of_import_containers_in_hours: int, - minimum_dwell_time_of_transshipment_containers_in_hours: int, - minimum_dwell_time_of_export_containers_in_hours: int, - maximum_dwell_time_of_import_containers_in_hours: int, - maximum_dwell_time_of_transshipment_containers_in_hours: int, - maximum_dwell_time_of_export_containers_in_hours: int, transportation_buffer: float ): - # Minimum for import, export, and transshipment - self.minimum_dwell_time_of_import_containers_in_hours = minimum_dwell_time_of_import_containers_in_hours - self.minimum_dwell_time_of_export_containers_in_hours = minimum_dwell_time_of_export_containers_in_hours - self.minimum_dwell_time_of_transshipment_containers_in_hours = \ - minimum_dwell_time_of_transshipment_containers_in_hours - - # Maximum for import, export, and transshipment - self.maximum_dwell_time_of_import_containers_in_hours = maximum_dwell_time_of_import_containers_in_hours - self.maximum_dwell_time_of_export_containers_in_hours = maximum_dwell_time_of_export_containers_in_hours - self.maximum_dwell_time_of_transshipment_containers_in_hours = \ - maximum_dwell_time_of_transshipment_containers_in_hours - - assert (self.minimum_dwell_time_of_import_containers_in_hours - < self.maximum_dwell_time_of_import_containers_in_hours) - assert (self.minimum_dwell_time_of_export_containers_in_hours - < self.maximum_dwell_time_of_export_containers_in_hours) - assert (self.minimum_dwell_time_of_transshipment_containers_in_hours - < self.maximum_dwell_time_of_transshipment_containers_in_hours) - assert -1 < transportation_buffer self.schedule_repository.set_transportation_buffer(transportation_buffer) self.logger.debug(f"Using transportation buffer of {transportation_buffer} when choosing the departing " f"vehicles that adhere a schedule.") + 
self.container_dwell_time_distributions = self.container_dwell_time_distribution_repository.get_distributions() self.large_scheduled_vehicle_repository = self.schedule_repository.large_scheduled_vehicle_repository - self.mode_of_transport_distribution = self.mode_of_transport_distribution_repository.get_distribution() def choose_departing_vehicle_for_containers(self) -> None: @@ -107,9 +85,9 @@ def choose_departing_vehicle_for_containers(self) -> None: minimum_dwell_time_in_hours, maximum_dwell_time_in_hours = self._get_dwell_times(container) - # this value has been randomly drawn during container generation for the inbound traffic - # we try to adhere to that value as good as possible - initial_departing_vehicle_type = container.picked_up_by + # This value has been randomly drawn during container generation for the inbound traffic. + # We try to adhere to that value as well as possible. + initial_departing_vehicle_type = container.picked_up_by_initial # Get all vehicles which could be used for the onward transportation of the container available_vehicles = self.schedule_repository.get_departing_vehicles( @@ -140,15 +118,34 @@ def _pick_vehicle_for_container( available_vehicles: List[AbstractLargeScheduledVehicle], container: Container ) -> AbstractLargeScheduledVehicle: - """pick vehicle with the probability of its free capacity + """Pick vehicle with the probability of its free capacity """ - vehicle_distribution = { + vehicle_availability = { vehicle: self.large_scheduled_vehicle_repository.get_free_capacity_for_outbound_journey(vehicle) for vehicle in available_vehicles } + available_vehicles = list(vehicle_availability.keys()) + + container_arrival = self._get_arrival_time_of_container(container) + associated_dwell_times = [] + for vehicle in available_vehicles: + vehicle_departure_date: datetime.datetime = vehicle.large_scheduled_vehicle.scheduled_arrival + dwell_time_if_vehicle_is_chosen = vehicle_departure_date - container_arrival + 
dwell_time_if_vehicle_is_chosen_in_hours = dwell_time_if_vehicle_is_chosen.total_seconds() / 3600 + associated_dwell_times.append(dwell_time_if_vehicle_is_chosen_in_hours) + + container_dwell_time_distribution = self._get_container_dwell_time_distribution( + container.delivered_by, container.picked_up_by, container.storage_requirement + ) + container_dwell_time_probabilities = container_dwell_time_distribution.get_probabilities(associated_dwell_times) + total_probabilities = multiply_discretized_probability_densities( + associated_dwell_times, + container_dwell_time_probabilities + ) + vehicle: AbstractLargeScheduledVehicle = random.choices( - population=list(vehicle_distribution.keys()), - weights=list(vehicle_distribution.values()) + population=available_vehicles, + weights=total_probabilities )[0] large_scheduled_vehicle: LargeScheduledVehicle = vehicle.large_scheduled_vehicle vehicle_type = vehicle.get_mode_of_transport() @@ -165,23 +162,13 @@ def _pick_vehicle_for_container( def _get_dwell_times(self, container: Container) -> Tuple[int, int]: """get correct dwell time depending on transportation mode. 
""" - if (container.picked_up_by in (ModeOfTransport.deep_sea_vessel, ModeOfTransport.feeder) - and container.delivered_by in (ModeOfTransport.deep_sea_vessel, ModeOfTransport.feeder)): - minimum_dwell_time_in_hours = self.minimum_dwell_time_of_transshipment_containers_in_hours - maximum_dwell_time_in_hours = self.maximum_dwell_time_of_transshipment_containers_in_hours - elif (container.picked_up_by in (ModeOfTransport.train, ModeOfTransport.barge) - and container.delivered_by in (ModeOfTransport.deep_sea_vessel, ModeOfTransport.feeder)): - minimum_dwell_time_in_hours = self.minimum_dwell_time_of_import_containers_in_hours - maximum_dwell_time_in_hours = self.maximum_dwell_time_of_import_containers_in_hours - elif (container.picked_up_by in (ModeOfTransport.deep_sea_vessel, ModeOfTransport.feeder) - and container.delivered_by in (ModeOfTransport.train, ModeOfTransport.barge, ModeOfTransport.truck)): - minimum_dwell_time_in_hours = self.minimum_dwell_time_of_export_containers_in_hours - maximum_dwell_time_in_hours = self.maximum_dwell_time_of_export_containers_in_hours - else: - raise Exception(f"ModeOfTransport " - f"picked_up_by: {container.picked_up_by} " - f"delivered_by: {container.delivered_by} " - f"is not considered at this point.") + + distribution = self.container_dwell_time_distributions[container.delivered_by][container.picked_up_by][ + container.storage_requirement] + + minimum_dwell_time_in_hours = int(math.ceil(distribution.minimum)) + maximum_dwell_time_in_hours = int(math.floor(distribution.maximum)) + return minimum_dwell_time_in_hours, maximum_dwell_time_in_hours @staticmethod @@ -198,6 +185,19 @@ def _get_arrival_time_of_container(container: Container) -> datetime.datetime: container_arrival = large_scheduled_vehicle.scheduled_arrival return container_arrival + @staticmethod + def _get_departure_time_of_vehicle(vehicle: Any) -> datetime.datetime: + """get container arrival from correct source + """ + vehicle_departure: datetime.datetime + if 
isinstance(vehicle, Truck): + truck_arrival_information: TruckArrivalInformationForDelivery = \ + vehicle.truck_arrival_information_for_delivery + vehicle_departure = truck_arrival_information.planned_container_delivery_time_at_window_start + else: + vehicle_departure = vehicle.scheduled_arrival + return vehicle_departure + def _find_alternative_mode_of_transportation( self, container: Container, @@ -253,3 +253,11 @@ def _find_alternative_mode_of_transportation( # obviously no vehicles of this type are left either, so it should also be excluded from the random # selection procedure in the beginning del vehicle_types_and_frequencies[vehicle_type] + + def _get_container_dwell_time_distribution( + self, + inbound_vehicle: ModeOfTransport, + outbound_vehicle: ModeOfTransport, + storage_requirement: StorageRequirement + ) -> ContinuousDistribution: + return self.container_dwell_time_distributions[inbound_vehicle][outbound_vehicle][storage_requirement] diff --git a/conflowgen/flow_generator/truck_for_export_containers_manager.py b/conflowgen/flow_generator/truck_for_export_containers_manager.py index df5664c8..ea834714 100644 --- a/conflowgen/flow_generator/truck_for_export_containers_manager.py +++ b/conflowgen/flow_generator/truck_for_export_containers_manager.py @@ -1,77 +1,75 @@ from __future__ import annotations import datetime -import logging import random -from typing import List, Tuple, Union, Optional +from typing import Dict -from conflowgen.tools.weekly_distribution import WeeklyDistribution +from .abstract_truck_for_containers_manager import AbstractTruckForContainersManager +from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForDelivery from ..domain_models.container import Container -from ..domain_models.distribution_repositories.truck_arrival_distribution_repository import \ - TruckArrivalDistributionRepository -from ..domain_models.factories.vehicle_factory 
import VehicleFactory from ..domain_models.data_types.mode_of_transport import ModeOfTransport from ..domain_models.vehicle import LargeScheduledVehicle +from ..tools.continuous_distribution import ContinuousDistribution +from ..tools.weekly_distribution import WeeklyDistribution -class TruckForExportContainersManager: +class TruckForExportContainersManager(AbstractTruckForContainersManager): """ - Manages all trucks. + This determines when the trucks deliver the container which is later picked up from the terminal by vessel, either + feeder or deep sea vessel. """ - def __init__(self, ): - self.logger = logging.getLogger("conflowgen") - self.truck_arrival_distribution_repository = TruckArrivalDistributionRepository() - self.distribution: WeeklyDistribution | None = None - self.vehicle_factory = VehicleFactory() - self.minimum_dwell_time_in_hours: Optional[float] = None - self.maximum_dwell_time_in_hours: Optional[float] = None - self.time_window_length_in_hours: Optional[float] = None + def is_reversed(self) -> bool: + return True - def reload_distribution( + def _get_container_dwell_time_distribution( self, - minimum_dwell_time_in_hours: float, - maximum_dwell_time_in_hours: float - ): - # noinspection PyTypeChecker - hour_of_the_week_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]] = \ - list(self.truck_arrival_distribution_repository.get_distribution().items()) - self.minimum_dwell_time_in_hours = minimum_dwell_time_in_hours - self.maximum_dwell_time_in_hours = maximum_dwell_time_in_hours - self.distribution = WeeklyDistribution( - hour_fraction_pairs=hour_of_the_week_fraction_pairs, - considered_time_window_in_hours=self.maximum_dwell_time_in_hours - 1, # because the latest slot is reset - minimum_dwell_time_in_hours=self.minimum_dwell_time_in_hours - ) - self.time_window_length_in_hours = self.distribution.time_window_length_in_hours + vehicle: ModeOfTransport, + storage_requirement: StorageRequirement + ) -> ContinuousDistribution: + 
distribution = self.container_dwell_time_distributions[ModeOfTransport.truck][vehicle][storage_requirement] + return distribution.reversed() + + def _get_truck_arrival_distributions(self, container: Container) -> Dict[StorageRequirement, WeeklyDistribution]: + return self.truck_arrival_distributions[container.picked_up_by] def _get_container_delivery_time( self, + container: Container, container_departure_time: datetime.datetime ) -> datetime.datetime: - latest_slot = container_departure_time.replace(minute=0, second=0, microsecond=0) - datetime.timedelta(hours=1) + + container_dwell_time_distribution, truck_arrival_distribution = self._get_distributions(container) + minimum_dwell_time_in_hours = container_dwell_time_distribution.minimum + maximum_dwell_time_in_hours = container_dwell_time_distribution.maximum + + # as we add up to 59 minutes later, we need to subtract one hour from any time at this stage + latest_slot = ( + container_departure_time.replace(minute=0, second=0, microsecond=0) + - datetime.timedelta(hours=1) + - datetime.timedelta(hours=minimum_dwell_time_in_hours) + ) earliest_slot = ( - latest_slot - - datetime.timedelta(hours=self.maximum_dwell_time_in_hours - 1) # because the latest slot is reset + latest_slot + - datetime.timedelta(hours=maximum_dwell_time_in_hours) ) - distribution_slice = self.distribution.get_distribution_slice(earliest_slot) - time_windows_for_truck_arrival = list(distribution_slice.keys()) - delivery_time_window_start = random.choices( - population=time_windows_for_truck_arrival, - weights=list(distribution_slice.values()) - )[0] + truck_arrival_distribution_slice = truck_arrival_distribution.get_distribution_slice(earliest_slot) + + delivery_time_window_start = self._get_time_window_of_truck_arrival( + container_dwell_time_distribution, truck_arrival_distribution_slice + ) # arrival within the last time slot random_time_component = random.uniform(0, self.time_window_length_in_hours - (1 / 60)) assert 0 <= 
random_time_component < self.time_window_length_in_hours, \ - "The random time component be less than the time slot" + "The random time component must be shorter than the length of the time slot" # go back to the earliest possible day truck_arrival_time = ( - earliest_slot - + datetime.timedelta(hours=delivery_time_window_start) - + datetime.timedelta(hours=random_time_component) + earliest_slot + + datetime.timedelta(hours=delivery_time_window_start) + + datetime.timedelta(hours=random_time_component) ) return truck_arrival_time @@ -94,19 +92,17 @@ def generate_trucks_for_delivering(self) -> None: # a container for that vessel drop off the container too early container_pickup_time: datetime.datetime = picked_up_with.scheduled_arrival - truck_arrival_time = self._get_container_delivery_time(container_pickup_time) + truck_arrival_time = self._get_container_delivery_time(container, container_pickup_time) truck_arrival_information_for_delivery = TruckArrivalInformationForDelivery.create( planned_container_delivery_time_at_window_start=truck_arrival_time, realized_container_delivery_time=truck_arrival_time ) - truck_arrival_information_for_delivery.save() truck = self.vehicle_factory.create_truck( delivers_container=True, picks_up_container=False, truck_arrival_information_for_delivery=truck_arrival_information_for_delivery, truck_arrival_information_for_pickup=None ) - truck.save() container.delivered_by_truck = truck container.save() self.logger.info("All trucks that deliver a container are created now.") diff --git a/conflowgen/flow_generator/truck_for_import_containers_manager.py b/conflowgen/flow_generator/truck_for_import_containers_manager.py index 8d5d65b0..6fe81686 100644 --- a/conflowgen/flow_generator/truck_for_import_containers_manager.py +++ b/conflowgen/flow_generator/truck_for_import_containers_manager.py @@ -1,47 +1,48 @@ import datetime -import logging import random -from typing import List, Tuple, Union +from typing import Dict -from 
conflowgen.tools.weekly_distribution import WeeklyDistribution +from .abstract_truck_for_containers_manager import AbstractTruckForContainersManager +from ..domain_models.data_types.storage_requirement import StorageRequirement from ..domain_models.arrival_information import TruckArrivalInformationForPickup from ..domain_models.container import Container -from ..domain_models.distribution_repositories.truck_arrival_distribution_repository import \ - TruckArrivalDistributionRepository -from ..domain_models.factories.vehicle_factory import VehicleFactory from ..domain_models.data_types.mode_of_transport import ModeOfTransport from ..domain_models.vehicle import LargeScheduledVehicle +from ..tools.continuous_distribution import ContinuousDistribution +from ..tools.weekly_distribution import WeeklyDistribution -class TruckForImportContainersManager: - def __init__(self): - self.logger = logging.getLogger("conflowgen") - self.truck_arrival_distribution_repository = TruckArrivalDistributionRepository() - self.distribution: Union[WeeklyDistribution, None] = None - self.vehicle_factory = VehicleFactory() +class TruckForImportContainersManager(AbstractTruckForContainersManager): - def reload_distribution(self, minimum_dwell_time_in_hours: float, maximum_dwell_time_in_hours: float): - # noinspection PyTypeChecker - hour_of_the_week_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]] = \ - list(self.truck_arrival_distribution_repository.get_distribution().items()) - self.distribution = WeeklyDistribution( - hour_fraction_pairs=hour_of_the_week_fraction_pairs, - considered_time_window_in_hours=maximum_dwell_time_in_hours - 1, # because the earliest slot is reset - minimum_dwell_time_in_hours=minimum_dwell_time_in_hours - ) + def is_reversed(self) -> bool: + return False + + def _get_container_dwell_time_distribution( + self, + vehicle: ModeOfTransport, + storage_requirement: StorageRequirement + ) -> ContinuousDistribution: + return 
self.container_dwell_time_distributions[vehicle][ModeOfTransport.truck][storage_requirement] + + def _get_truck_arrival_distributions(self, container: Container) -> Dict[StorageRequirement, WeeklyDistribution]: + return self.truck_arrival_distributions[container.delivered_by] def _get_container_pickup_time( self, + container: Container, container_arrival_time: datetime.datetime ) -> datetime.datetime: + + container_dwell_time_distribution, truck_arrival_distribution = self._get_distributions(container) + earliest_slot = container_arrival_time.replace(minute=0, second=0, microsecond=0) + datetime.timedelta(hours=1) - distribution_slice = self.distribution.get_distribution_slice(earliest_slot) - time_windows_for_truck_arrival = list(distribution_slice.keys()) - pickup_time_window_start = random.choices( - population=time_windows_for_truck_arrival, - weights=list(distribution_slice.values()))[0] - time_window_length_in_hours = (time_windows_for_truck_arrival[1] - time_windows_for_truck_arrival[0]) - random_time_component = random.uniform(0, time_window_length_in_hours) + truck_arrival_distribution_slice = truck_arrival_distribution.get_distribution_slice(earliest_slot) + + pickup_time_window_start = self._get_time_window_of_truck_arrival( + container_dwell_time_distribution, truck_arrival_distribution_slice + ) + + random_time_component = random.uniform(0, self.time_window_length_in_hours) truck_arrival_time = ( earliest_slot + datetime.timedelta(hours=pickup_time_window_start) # these are several days, comparable to time slot @@ -66,20 +67,18 @@ def generate_trucks_for_picking_up(self): container_arrival_time: datetime.datetime = \ delivered_by.realized_arrival or delivered_by.scheduled_arrival - truck_arrival_time = self._get_container_pickup_time(container_arrival_time) + truck_arrival_time = self._get_container_pickup_time(container, container_arrival_time) truck_arrival_information_for_pickup = TruckArrivalInformationForPickup.create( 
planned_container_pickup_time_prior_berthing=None, # TODO: set value if required planned_container_pickup_time_after_initial_storage=None, # TODO: set value if required realized_container_pickup_time=truck_arrival_time ) - truck_arrival_information_for_pickup.save() truck = self.vehicle_factory.create_truck( delivers_container=False, picks_up_container=True, truck_arrival_information_for_delivery=None, truck_arrival_information_for_pickup=truck_arrival_information_for_pickup ) - truck.save() container.picked_up_by_truck = truck container.save() self.logger.info("All trucks that pick up a container have been generated.") diff --git a/conflowgen/metadata.py b/conflowgen/metadata.py index eaa7230b..7cbec112 100644 --- a/conflowgen/metadata.py +++ b/conflowgen/metadata.py @@ -1,4 +1,4 @@ -__version__ = "1.0.3" +__version__ = "2.0.0" __license__ = "MIT" __description__ = """ A generator for synthetic container flows at maritime container terminals with a focus on yard operations diff --git a/conflowgen/posthoc_analyses/modal_split_analysis_report.py b/conflowgen/posthoc_analyses/modal_split_analysis_report.py deleted file mode 100644 index 93da1708..00000000 --- a/conflowgen/posthoc_analyses/modal_split_analysis_report.py +++ /dev/null @@ -1,190 +0,0 @@ -from __future__ import annotations - -import numpy as np -import pandas as pd -import matplotlib.pyplot as plt -import seaborn as sns - -from conflowgen.posthoc_analyses.modal_split_analysis import ModalSplitAnalysis -from conflowgen.reporting import AbstractReportWithMatplotlib -from conflowgen.reporting.no_data_plot import no_data_text - -sns.set_palette(sns.color_palette()) - - -class ModalSplitAnalysisReport(AbstractReportWithMatplotlib): - """ - This analysis report takes the data structure as generated by :class:`.ModalSplitAnalysis` - and creates a comprehensible representation for the user, either as text or as a graph. 
- """ - - report_description = """ - Analyze the amount of containers dedicated for or coming from the hinterland compared to the amount of containers - that are transshipment. - """ - - def __init__(self): - super().__init__() - self.analysis = ModalSplitAnalysis() - - def get_report_as_text( - self - ) -> str: - """ - The report as a text is represented as a table suitable for logging. It uses a human-readable formatting style. - """ - - # gather data - transshipment = self.analysis.get_transshipment_and_hinterland_fraction() - transshipment_as_fraction = np.nan - if sum(transshipment) > 0: - transshipment_as_fraction = ( - transshipment.transshipment_capacity / - (transshipment.transshipment_capacity + transshipment.hinterland_capacity) - ) - modal_split_for_hinterland_inbound = self.analysis.get_modal_split_for_hinterland( - inbound=True, outbound=False - ) - inbound_total = sum(modal_split_for_hinterland_inbound) - if inbound_total == 0: - inbound_total = np.nan - modal_split_for_hinterland_outbound = self.analysis.get_modal_split_for_hinterland( - inbound=False, outbound=True - ) - outbound_total = sum(modal_split_for_hinterland_outbound) - if outbound_total == 0: - outbound_total = np.nan - modal_split_for_hinterland_both = self.analysis.get_modal_split_for_hinterland( - inbound=True, outbound=True - ) - inbound_and_outbound_total = sum(modal_split_for_hinterland_both) - if inbound_and_outbound_total == 0: - inbound_and_outbound_total = np.nan - - # create string representation - report = "\nTransshipment share\n" - report += f"transshipment proportion (in TEU): {transshipment.transshipment_capacity:>10.2f} " - report += f"({transshipment_as_fraction * 100:.2f}%)\n" - report += f"hinterland proportion (in TEU): {transshipment.hinterland_capacity:>10.2f} " - report += f"({(1 - transshipment_as_fraction) * 100:.2f}%)\n" - report += "\n" - - report += "Inbound modal split\n" - report += f"truck proportion (in TEU): 
{modal_split_for_hinterland_inbound.truck_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_inbound.truck_capacity / inbound_total * 100:.2f}%)\n" - report += f"barge proportion (in TEU): {modal_split_for_hinterland_inbound.barge_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_inbound.barge_capacity / inbound_total * 100:.2f}%)\n" - report += f"train proportion (in TEU): {modal_split_for_hinterland_inbound.train_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_inbound.train_capacity / inbound_total * 100:.2f}%)\n\n" - - report += "Outbound modal split\n" - report += f"truck proportion (in TEU): {modal_split_for_hinterland_outbound.truck_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_outbound.truck_capacity / outbound_total * 100:.2f}%)\n" - report += f"barge proportion (in TEU): {modal_split_for_hinterland_outbound.barge_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_outbound.barge_capacity / outbound_total * 100:.2f}%)\n" - report += f"train proportion (in TEU): {modal_split_for_hinterland_outbound.train_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_outbound.train_capacity / outbound_total * 100:.2f}%)\n\n" - - report += "Absolute modal split (both inbound and outbound)\n" - report += f"truck proportion (in TEU): {modal_split_for_hinterland_both.truck_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_both.truck_capacity / inbound_and_outbound_total * 100:.2f}%)\n" - report += f"barge proportion (in TEU): {modal_split_for_hinterland_both.barge_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_both.barge_capacity / inbound_and_outbound_total * 100:.2f}%)\n" - report += f"train proportion (in TEU): {modal_split_for_hinterland_both.train_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_both.train_capacity / inbound_and_outbound_total * 100:.2f}%)\n" - - report = report.replace("(nan%)", "(-%)") - - report += "(rounding errors might exist)\n" - 
return report - - def get_report_as_graph(self) -> object: - """ - The report as a graph is represented as a set of pie charts using pandas. - - Returns: - The matplotlib axis of the last bar chart. - """ - - # gather data - transshipment = self.analysis.get_transshipment_and_hinterland_fraction() - modal_split_for_hinterland_inbound = self.analysis.get_modal_split_for_hinterland( - inbound=True, outbound=False - ) - modal_split_for_hinterland_outbound = self.analysis.get_modal_split_for_hinterland( - inbound=False, outbound=True - ) - modal_split_for_hinterland_both = self.analysis.get_modal_split_for_hinterland( - inbound=True, outbound=True - ) - - # Start plotting - fig, axes = plt.subplots(2, 2) - series_hinterland_and_transshipment = pd.Series({ - "hinterland capacity": transshipment.hinterland_capacity, - "transshipment capacity": transshipment.transshipment_capacity - }, name="Transshipment share") - - if sum(series_hinterland_and_transshipment) == 0: - axes[0, 0] = no_data_text() - else: - series_hinterland_and_transshipment.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Transshipment share", - ax=axes[0, 0] - ) - - series_modal_split_inbound = pd.Series({ - "train": modal_split_for_hinterland_inbound.train_capacity, - "truck": modal_split_for_hinterland_inbound.truck_capacity, - "barge": modal_split_for_hinterland_inbound.barge_capacity - }, name="Modal split for hinterland (inbound)") - - if sum(series_modal_split_inbound) == 0: - axes[0, 1] = no_data_text() - else: - series_modal_split_inbound.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Modal split for hinterland\n(inbound)", - ax=axes[0, 1] - ) - - series_modal_split_outbound = pd.Series({ - "train": modal_split_for_hinterland_outbound.train_capacity, - "truck": modal_split_for_hinterland_outbound.truck_capacity, - "barge": modal_split_for_hinterland_outbound.barge_capacity - }, name="Modal split for hinterland (outbound)") - - if 
sum(series_modal_split_outbound) == 0: - axes[1, 0] = no_data_text() - else: - series_modal_split_outbound.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Modal split for hinterland\n(outbound)", - ax=axes[1, 0] - ) - - series_modal_split_both = pd.Series({ - "train": modal_split_for_hinterland_both.train_capacity, - "truck": modal_split_for_hinterland_both.truck_capacity, - "barge": modal_split_for_hinterland_both.barge_capacity - }, name="Modal split for hinterland (inbound and outbound)") - - if sum(series_modal_split_both) == 0: - axes[1, 1] = no_data_text() - else: - series_modal_split_both.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Modal split for hinterland\n(inbound and outbound)", - ax=axes[1, 1] - ) - - return axes diff --git a/conflowgen/previews/container_flow_by_vehicle_type_preview.py b/conflowgen/previews/container_flow_by_vehicle_type_preview.py index 2d733821..e106445f 100644 --- a/conflowgen/previews/container_flow_by_vehicle_type_preview.py +++ b/conflowgen/previews/container_flow_by_vehicle_type_preview.py @@ -54,7 +54,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] ): validate_distribution_with_one_dependent_variable( - mode_of_transport_distribution, ModeOfTransport, ModeOfTransport + mode_of_transport_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True ) self.mode_of_transport_distribution = mode_of_transport_distribution diff --git a/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py b/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py index 590e50ed..39eea3f8 100644 --- a/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py +++ b/conflowgen/previews/container_flow_by_vehicle_type_preview_report.py @@ -1,5 +1,6 @@ from __future__ import annotations import itertools +import logging from typing import Dict import 
plotly.graph_objects as go @@ -24,6 +25,8 @@ class ContainerFlowByVehicleTypePreviewReport(AbstractReportWithPlotly): This report previews the container flow between vehicle types as defined by schedules and input distributions. """ + logger = logging.getLogger("conflowgen") + def __init__(self): super().__init__() self.preview = ContainerFlowByVehicleTypePreview( @@ -52,15 +55,16 @@ def get_report_as_text( for vehicle_type_from, vehicle_type_to in itertools.product(self.order_of_vehicle_types_in_report, repeat=2): vehicle_type_from_as_text = str(vehicle_type_from).replace("_", " ") vehicle_type_to_as_text = str(vehicle_type_to).replace("_", " ") + required_capacity = inbound_to_outbound_flow[vehicle_type_from][vehicle_type_to] report += f"{vehicle_type_from_as_text:<19} " report += f"{vehicle_type_to_as_text:<18} " - report += f"{inbound_to_outbound_flow[vehicle_type_from][vehicle_type_to]:>25.1f}" + report += f"{required_capacity:>25.1f}" report += "\n" report += "(rounding errors might exist)\n" return report - def _get_inbound_to_outbound_flow(self): + def _get_inbound_to_outbound_flow(self) -> Dict[ModeOfTransport, Dict[ModeOfTransport, float]]: assert self.start_date is not None assert self.end_date is not None assert self.transportation_buffer is not None @@ -104,6 +108,10 @@ def get_report_as_graph(self) -> object: for inbound_vehicle_type in inbound_to_outbound_flow.keys() for outbound_vehicle_type in inbound_to_outbound_flow[inbound_vehicle_type].keys() ] + + if sum(value) == 0: + self.logger.warning("No data available for plotting") + inbound_labels = [ str(inbound_vehicle_type).replace("_", " ").capitalize() + ":
Inbound: " + str( round(sum(inbound_to_outbound_flow[inbound_vehicle_type].values()), 2)) diff --git a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py index 1ce1db95..cf712981 100644 --- a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py +++ b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview.py @@ -1,6 +1,7 @@ from __future__ import annotations import datetime from typing import Dict +import numpy as np from conflowgen.descriptive_datatypes import OutboundUsedAndMaximumCapacity from conflowgen.domain_models.distribution_validators import validate_distribution_with_one_dependent_variable @@ -73,7 +74,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] ): validate_distribution_with_one_dependent_variable( - mode_of_transport_distribution, ModeOfTransport, ModeOfTransport + mode_of_transport_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True ) self.mode_of_transport_distribution = mode_of_transport_distribution @@ -119,6 +120,7 @@ def get_outbound_capacity_of_vehicles(self) -> OutboundUsedAndMaximumCapacity: for vehicle_type in ModeOfTransport } + schedule: Schedule for schedule in Schedule.select(): assert schedule.average_moved_capacity <= schedule.average_vehicle_capacity, \ @@ -151,7 +153,7 @@ def get_outbound_capacity_of_vehicles(self) -> OutboundUsedAndMaximumCapacity: outbound_used_capacity[ModeOfTransport.truck] = self._get_truck_capacity_for_export_containers( inbound_capacity ) - outbound_maximum_capacity[ModeOfTransport.truck] = -1 # Not meaningful, trucks can always be added as required + outbound_maximum_capacity[ModeOfTransport.truck] = np.nan # Trucks can always be added as required return OutboundUsedAndMaximumCapacity( used=outbound_used_capacity, diff --git 
a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview_report.py b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview_report.py index 28292946..e0dc56da 100644 --- a/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview_report.py +++ b/conflowgen/previews/inbound_and_outbound_vehicle_capacity_preview_report.py @@ -2,8 +2,8 @@ from typing import Dict +import numpy as np import pandas as pd -import seaborn as sns from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ InboundAndOutboundVehicleCapacityPreview @@ -53,10 +53,13 @@ def get_report_as_text(self) -> str: report += "\n" for vehicle_type in self.order_of_vehicle_types_in_report: vehicle_type_as_text = str(vehicle_type).replace("_", " ") + max_capacities_repr = -1 if np.isnan(outbound_maximum_capacities[vehicle_type]) \ + else outbound_maximum_capacities[vehicle_type] + report += f"{vehicle_type_as_text:<15} " report += f"{inbound_capacities[vehicle_type]:>25.1f} " report += f"{outbound_average_capacities[vehicle_type]:>30.1f} " - report += f"{outbound_maximum_capacities[vehicle_type]:>30.1f}" + report += f"{max_capacities_repr:>30.1f}" report += "\n" report += "(rounding errors might exist)\n" return report @@ -68,9 +71,8 @@ def get_report_as_graph(self) -> object: Returns: The matplotlib axis of the bar chart. 
""" - sns.set_palette(sns.color_palette()) - inbound_capacities, outbound_average_capacities, outbound_maximum_capacities = self._get_capacities() + df = pd.DataFrame({ "inbound capacities": inbound_capacities, "outbound average capacities": outbound_average_capacities, diff --git a/conflowgen/previews/modal_split_preview.py b/conflowgen/previews/modal_split_preview.py index e5177fb3..7138c4a0 100644 --- a/conflowgen/previews/modal_split_preview.py +++ b/conflowgen/previews/modal_split_preview.py @@ -6,7 +6,7 @@ from conflowgen.previews.container_flow_by_vehicle_type_preview import \ ContainerFlowByVehicleTypePreview from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandComparison +from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandSplit from conflowgen.descriptive_datatypes import HinterlandModalSplit @@ -66,7 +66,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution ) - def get_transshipment_and_hinterland_share(self) -> TransshipmentAndHinterlandComparison: + def get_transshipment_and_hinterland_split(self) -> TransshipmentAndHinterlandSplit: """ Returns: The amount of containers in TEU dedicated for or coming from the hinterland versus the amount of @@ -85,7 +85,7 @@ def get_transshipment_and_hinterland_share(self) -> TransshipmentAndHinterlandCo else: hinterland_capacity += capacity - return TransshipmentAndHinterlandComparison( + return TransshipmentAndHinterlandSplit( transshipment_capacity=transshipment_capacity, hinterland_capacity=hinterland_capacity ) diff --git a/conflowgen/previews/modal_split_preview_report.py b/conflowgen/previews/modal_split_preview_report.py index c99a28c4..e138cb5e 100644 --- a/conflowgen/previews/modal_split_preview_report.py +++ b/conflowgen/previews/modal_split_preview_report.py @@ -2,14 +2,10 @@ from typing import Dict -import numpy as np -import pandas as pd -import 
matplotlib.pyplot as plt -import seaborn as sns - from conflowgen.previews.modal_split_preview import ModalSplitPreview from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport -from conflowgen.reporting import AbstractReportWithMatplotlib +from conflowgen.reporting import AbstractReportWithMatplotlib, modal_split_report +from conflowgen.reporting.modal_split_report import plot_modal_splits class ModalSplitPreviewReport(AbstractReportWithMatplotlib): @@ -49,67 +45,27 @@ def get_report_as_text( preview = self._get_updated_preview() # gather data - transshipment = preview.get_transshipment_and_hinterland_share() - transshipment_as_fraction = np.nan - if sum(transshipment) > 0: - transshipment_as_fraction = ( - transshipment.transshipment_capacity / - (transshipment.transshipment_capacity + transshipment.hinterland_capacity) - ) - modal_split_for_hinterland_inbound = preview.get_modal_split_for_hinterland( + transshipment_and_hinterland_split = preview.get_transshipment_and_hinterland_split() + + modal_split_in_hinterland_inbound_traffic = preview.get_modal_split_for_hinterland( inbound=True, outbound=False ) - inbound_total = sum(modal_split_for_hinterland_inbound) - if inbound_total == 0: - inbound_total = np.nan - modal_split_for_hinterland_outbound = preview.get_modal_split_for_hinterland( + + modal_split_in_hinterland_outbound_traffic = preview.get_modal_split_for_hinterland( inbound=False, outbound=True ) - outbound_total = sum(modal_split_for_hinterland_outbound) - if outbound_total == 0: - outbound_total = np.nan - modal_split_for_hinterland_both = preview.get_modal_split_for_hinterland( + + modal_split_in_hinterland_traffic_both_directions = preview.get_modal_split_for_hinterland( inbound=True, outbound=True ) - inbound_and_outbound_total = sum(modal_split_for_hinterland_both) - if inbound_and_outbound_total == 0: - inbound_and_outbound_total = np.nan - - # create string representation - report = "\nTransshipment share\n" - report += 
f"transshipment proportion (in TEU): {transshipment.transshipment_capacity:>10.2f} " - report += f"({transshipment_as_fraction * 100:.2f}%)\n" - report += f"hinterland proportion (in TEU): {transshipment.hinterland_capacity:>10.2f} " - report += f"({(1 - transshipment_as_fraction) * 100:.2f}%)\n" - report += "\n" - - report += "Inbound modal split\n" - report += f"truck proportion (in TEU): {modal_split_for_hinterland_inbound.truck_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_inbound.truck_capacity / inbound_total * 100:.2f}%)\n" - report += f"barge proportion (in TEU): {modal_split_for_hinterland_inbound.barge_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_inbound.barge_capacity / inbound_total * 100:.2f}%)\n" - report += f"train proportion (in TEU): {modal_split_for_hinterland_inbound.train_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_inbound.train_capacity / inbound_total * 100:.2f}%)\n\n" - - report += "Outbound modal split\n" - report += f"truck proportion (in TEU): {modal_split_for_hinterland_outbound.truck_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_outbound.truck_capacity / outbound_total * 100:.2f}%)\n" - report += f"barge proportion (in TEU): {modal_split_for_hinterland_outbound.barge_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_outbound.barge_capacity / outbound_total * 100:.2f}%)\n" - report += f"train proportion (in TEU): {modal_split_for_hinterland_outbound.train_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_outbound.train_capacity / outbound_total * 100:.2f}%)\n\n" - - report += "Absolute modal split (both inbound and outbound)\n" - report += f"truck proportion (in TEU): {modal_split_for_hinterland_both.truck_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_both.truck_capacity / inbound_and_outbound_total * 100:.2f}%)\n" - report += f"barge proportion (in TEU): {modal_split_for_hinterland_both.barge_capacity:>10.1f} " - report += 
f"({modal_split_for_hinterland_both.barge_capacity / inbound_and_outbound_total * 100:.2f}%)\n" - report += f"train proportion (in TEU): {modal_split_for_hinterland_both.train_capacity:>10.1f} " - report += f"({modal_split_for_hinterland_both.train_capacity / inbound_and_outbound_total * 100:.2f}%)\n" - - report = report.replace("(nan%)", "(-%)") - - report += "(rounding errors might exist)\n" + + report = modal_split_report.insert_values_in_template( + transshipment_and_hinterland_split=transshipment_and_hinterland_split, + modal_split_in_hinterland_inbound_traffic=modal_split_in_hinterland_inbound_traffic, + modal_split_in_hinterland_outbound_traffic=modal_split_in_hinterland_outbound_traffic, + modal_split_in_hinterland_traffic_both_directions=modal_split_in_hinterland_traffic_both_directions + ) + return report def _get_updated_preview(self) -> ModalSplitPreview: @@ -128,76 +84,27 @@ def get_report_as_graph(self) -> object: The report as a graph is represented as a set of pie charts using pandas. Returns: - The matplotlib axis of the last bar chart. - - .. todo:: All pie charts should be plotted in a single plot using subplots. 
+ The matplotlib axes """ preview = self._get_updated_preview() - sns.set_palette(sns.color_palette()) - # gather data - transshipment = preview.get_transshipment_and_hinterland_share() - modal_split_for_hinterland_inbound = preview.get_modal_split_for_hinterland( + transshipment_and_hinterland_split = preview.get_transshipment_and_hinterland_split() + modal_split_in_hinterland_inbound_traffic = preview.get_modal_split_for_hinterland( inbound=True, outbound=False ) - modal_split_for_hinterland_outbound = preview.get_modal_split_for_hinterland( + modal_split_in_hinterland_outbound_traffic = preview.get_modal_split_for_hinterland( inbound=False, outbound=True ) - modal_split_for_hinterland_both = preview.get_modal_split_for_hinterland( + modal_split_in_hinterland_traffic_both_directions = preview.get_modal_split_for_hinterland( inbound=True, outbound=True ) - # Start plotting - series_hinterland_and_transshipment = pd.Series({ - "hinterland capacity": transshipment.hinterland_capacity, - "transshipment capacity": transshipment.transshipment_capacity - }, name="Transshipment share") - series_hinterland_and_transshipment.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Transshipment share" - ) - plt.show() - - series_modal_split_inbound = pd.Series({ - "train": modal_split_for_hinterland_inbound.train_capacity, - "truck": modal_split_for_hinterland_inbound.truck_capacity, - "barge": modal_split_for_hinterland_inbound.barge_capacity - }, name="Modal split for hinterland (inbound)") - series_modal_split_inbound.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Modal split for hinterland (inbound)" - ) - plt.show() - - series_modal_split_outbound = pd.Series({ - "train": modal_split_for_hinterland_outbound.train_capacity, - "truck": modal_split_for_hinterland_outbound.truck_capacity, - "barge": modal_split_for_hinterland_outbound.barge_capacity - }, name="Modal split for hinterland (outbound)") - series_modal_split_outbound.plot.pie( - 
legend=False, - autopct='%1.1f%%', - label="", - title="Modal split for hinterland (outbound)" - ) - plt.show() - - series_modal_split_both = pd.Series({ - "train": modal_split_for_hinterland_both.train_capacity, - "truck": modal_split_for_hinterland_both.truck_capacity, - "barge": modal_split_for_hinterland_both.barge_capacity - }, name="Modal split for hinterland (inbound and outbound)") - ax = series_modal_split_both.plot.pie( - legend=False, - autopct='%1.1f%%', - label="", - title="Modal split for hinterland (inbound and outbound)" + axes = plot_modal_splits( + transshipment_and_hinterland_split=transshipment_and_hinterland_split, + modal_split_in_hinterland_both_directions=modal_split_in_hinterland_traffic_both_directions, + modal_split_in_hinterland_inbound_traffic=modal_split_in_hinterland_inbound_traffic, + modal_split_in_hinterland_outbound_traffic=modal_split_in_hinterland_outbound_traffic, ) - plt.show() - return ax + return axes diff --git a/conflowgen/previews/vehicle_capacity_exceeded_preview.py b/conflowgen/previews/vehicle_capacity_exceeded_preview.py index 182f6d09..89ed42e5 100644 --- a/conflowgen/previews/vehicle_capacity_exceeded_preview.py +++ b/conflowgen/previews/vehicle_capacity_exceeded_preview.py @@ -71,7 +71,7 @@ def hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution: Dict[ModeOfTransport, Dict[ModeOfTransport, float]] ): validate_distribution_with_one_dependent_variable( - mode_of_transport_distribution, ModeOfTransport, ModeOfTransport + mode_of_transport_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True ) self.inbound_and_outbound_vehicle_capacity_preview.hypothesize_with_mode_of_transport_distribution( mode_of_transport_distribution diff --git a/conflowgen/previews/vehicle_capacity_exceeded_preview_report.py b/conflowgen/previews/vehicle_capacity_exceeded_preview_report.py index 9154ce1e..c05bfc94 100644 --- a/conflowgen/previews/vehicle_capacity_exceeded_preview_report.py +++ 
b/conflowgen/previews/vehicle_capacity_exceeded_preview_report.py @@ -2,6 +2,7 @@ from typing import Dict +import numpy as np import pandas as pd import seaborn as sns @@ -70,8 +71,10 @@ def get_report_as_text( else: difference = container_capacity_to_pick_up - maximum_capacity + max_capacity_repr = -1 if np.isnan(maximum_capacity) else maximum_capacity + vehicle_type_capacity_is_exceeded_as_text = "yes" if vehicle_type_capacity_is_exceeded else "no" - report += f"{maximum_capacity:>24.1f} " + report += f"{max_capacity_repr:>24.1f} " report += f"{container_capacity_to_pick_up:>25.1f} " report += f"{vehicle_type_capacity_is_exceeded_as_text:>9}" report += f"{difference:>20.1f}" diff --git a/conflowgen/reporting/__init__.py b/conflowgen/reporting/__init__.py index 8460e76a..a4760141 100644 --- a/conflowgen/reporting/__init__.py +++ b/conflowgen/reporting/__init__.py @@ -2,8 +2,10 @@ import abc import datetime +import enum import tempfile -from typing import cast +from typing import cast, Any, Type +from collections.abc import Iterable import matplotlib.pyplot as plt from matplotlib import image as mpimg @@ -43,11 +45,11 @@ def reload(self): properties = self.container_flow_generation_properties_repository.get_container_flow_generation_properties() self.start_date = properties.start_date self.end_date = properties.end_date - assert self.start_date is not None - assert self.end_date is not None - assert self.start_date < self.end_date + assert self.start_date is not None, "A start date needs to be set." + assert self.end_date is not None, "An end date needs to be set." + assert self.start_date < self.end_date, "The start date needs to be before the end date." self.transportation_buffer = properties.transportation_buffer - assert -1 < self.transportation_buffer + assert -1 < self.transportation_buffer, "The transportation buffer needs to be larger than -100%." 
@abc.abstractmethod def get_report_as_text(self) -> str: @@ -59,28 +61,41 @@ def get_report_as_text(self) -> str: """ return "" + @abc.abstractmethod def get_report_as_graph(self) -> object: raise NotImplementedError("No graph representation of this report has yet been defined.") + @abc.abstractmethod def show_report_as_graph(self, **kwargs) -> None: """ - This method first invokes ``.get_report_as_graph()`` and then it displays the graph object, e.g. by invoking - ``plt.show()`` or ``fig.show``. This depends on the visualisation library. + This method first invokes ``.get_report_as_graph()`` and then it displays the graph object, e.g., by invoking + ``plt.show()`` or ``fig.show()``. + This depends on the visualisation library. Args: **kwargs: The additional keyword arguments are passed to the analysis instance. """ raise NotImplementedError("No show method has yet been defined.") + @staticmethod + def _get_enum_or_enum_set_representation(enum_or_enum_set: Any, enum_type: Type[enum.Enum]) -> str: + if enum_or_enum_set is None or enum_or_enum_set == "all": + return "all" + if isinstance(enum_or_enum_set, enum_type): # a + return str(enum_or_enum_set) + if isinstance(enum_or_enum_set, Iterable): # a & b & c + return " & ".join([str(element) for element in enum_or_enum_set]) + return str(enum_or_enum_set) # just give it a try + class AbstractReportWithMatplotlib(AbstractReport, metaclass=abc.ABCMeta): - def show_report_as_graph(self, **kwargs) -> None: # pragma: no cover + def show_report_as_graph(self, **kwargs) -> None: self.get_report_as_graph() plt.show() class AbstractReportWithPlotly(AbstractReport, metaclass=abc.ABCMeta): - def show_report_as_graph(self, **kwargs) -> None: # pragma: no cover + def show_report_as_graph(self, **kwargs) -> None: fig: go.Figure = cast(go.Figure, self.get_report_as_graph()) if "static" in kwargs and kwargs["static"]: png_format_image = fig.to_image(format="png", width=800) diff --git a/conflowgen/reporting/modal_split_report.py 
b/conflowgen/reporting/modal_split_report.py new file mode 100644 index 00000000..0e7ca97a --- /dev/null +++ b/conflowgen/reporting/modal_split_report.py @@ -0,0 +1,144 @@ +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd + +from conflowgen.descriptive_datatypes import TransshipmentAndHinterlandSplit, HinterlandModalSplit +from conflowgen.reporting.no_data_plot import no_data_text + + +def _plt_modal_split_instance( + modal_split: HinterlandModalSplit, + name: str, + ax: plt.axis +) -> None: + series_modal_split_inbound = pd.Series({ + "train": modal_split.train_capacity, + "truck": modal_split.truck_capacity, + "barge": modal_split.barge_capacity + }, name=name) + if sum(series_modal_split_inbound) == 0: + no_data_text(ax) + else: + series_modal_split_inbound.plot.pie( + legend=False, + autopct='%1.1f%%', + label="", + title=name, + ax=ax + ) + + +def plot_modal_splits( + transshipment_and_hinterland_split: TransshipmentAndHinterlandSplit, + modal_split_in_hinterland_both_directions: HinterlandModalSplit, + modal_split_in_hinterland_inbound_traffic: HinterlandModalSplit, + modal_split_in_hinterland_outbound_traffic: HinterlandModalSplit +) -> plt.Axes: + fig, axes = plt.subplots(2, 2) + + series_hinterland_and_transshipment = pd.Series({ + "Inland gateway traffic": transshipment_and_hinterland_split.hinterland_capacity, + "Transshipment traffic": transshipment_and_hinterland_split.transshipment_capacity + }, name="Role in network") + + if series_hinterland_and_transshipment.sum() == 0: + no_data_text(axes[0, 0]) + else: + series_hinterland_and_transshipment.plot.pie( + legend=False, + autopct='%1.1f%%', + label="", + title=series_hinterland_and_transshipment.name, + ax=axes[0, 0] + ) + + modal_splits = [ + { + "modal_split": modal_split_in_hinterland_both_directions, + "name": "Modal split in hinterland traffic\n(both inbound and outbound traffic)", + "ax": axes[1, 0] + }, + { + "modal_split": modal_split_in_hinterland_inbound_traffic, + 
"name": "Modal split in hinterland traffic\n(only inbound traffic)", + "ax": axes[0, 1] + }, + { + "modal_split": modal_split_in_hinterland_outbound_traffic, + "name": "Modal split in hinterland traffic\n(only outbound traffic)", + "ax": axes[1, 1] + }, + ] + + for modal_split in modal_splits: + _plt_modal_split_instance(**modal_split) + + plt.tight_layout() + + return axes + + +def insert_values_in_template( + transshipment_and_hinterland_split: TransshipmentAndHinterlandSplit, + modal_split_in_hinterland_inbound_traffic: HinterlandModalSplit, + modal_split_in_hinterland_outbound_traffic: HinterlandModalSplit, + modal_split_in_hinterland_traffic_both_directions: HinterlandModalSplit, +) -> str: + + transshipment_as_fraction = np.nan + if sum(transshipment_and_hinterland_split) > 0: + transshipment_as_fraction = ( + transshipment_and_hinterland_split.transshipment_capacity / + (transshipment_and_hinterland_split.transshipment_capacity + + transshipment_and_hinterland_split.hinterland_capacity) + ) + + inbound_total = sum(modal_split_in_hinterland_inbound_traffic) + if inbound_total == 0: + inbound_total = np.nan + + outbound_total = sum(modal_split_in_hinterland_outbound_traffic) + if outbound_total == 0: + outbound_total = np.nan + + inbound_and_outbound_total = sum(modal_split_in_hinterland_traffic_both_directions) + if inbound_and_outbound_total == 0: + inbound_and_outbound_total = np.nan + + # create string representation + report = "\nRole in network\n" + report += f"transshipment traffic (in TEU): {transshipment_and_hinterland_split.transshipment_capacity:>10.2f} " + report += f"({transshipment_as_fraction * 100:.2f}%)\n" + report += f"inland gateway traffic (in TEU): {transshipment_and_hinterland_split.hinterland_capacity:>10.2f} " + report += f"({(1 - transshipment_as_fraction) * 100:.2f}%)\n" + report += "\n" + report += "Modal split in hinterland traffic (only inbound traffic)\n" + report += f"trucks (in TEU): 
{modal_split_in_hinterland_inbound_traffic.truck_capacity:>10.1f} " + report += f"({modal_split_in_hinterland_inbound_traffic.truck_capacity / inbound_total * 100:.2f}%)\n" + report += f"barges (in TEU): {modal_split_in_hinterland_inbound_traffic.barge_capacity:>10.1f} " + report += f"({modal_split_in_hinterland_inbound_traffic.barge_capacity / inbound_total * 100:.2f}%)\n" + report += f"trains (in TEU): {modal_split_in_hinterland_inbound_traffic.train_capacity:>10.1f} " + report += f"({modal_split_in_hinterland_inbound_traffic.train_capacity / inbound_total * 100:.2f}%)\n\n" + + report += "Modal split in hinterland traffic (only outbound traffic)\n" + report += f"trucks (in TEU): {modal_split_in_hinterland_outbound_traffic.truck_capacity:>10.1f} " + report += f"({modal_split_in_hinterland_outbound_traffic.truck_capacity / outbound_total * 100:.2f}%)\n" + report += f"barges (in TEU): {modal_split_in_hinterland_outbound_traffic.barge_capacity:>10.1f} " + report += f"({modal_split_in_hinterland_outbound_traffic.barge_capacity / outbound_total * 100:.2f}%)\n" + report += f"trains (in TEU): {modal_split_in_hinterland_outbound_traffic.train_capacity:>10.1f} " + report += f"({modal_split_in_hinterland_outbound_traffic.train_capacity / outbound_total * 100:.2f}%)\n\n" + + modal_split_both = modal_split_in_hinterland_traffic_both_directions # introduce shorthand for template + + report += "Modal split in hinterland traffic (both inbound and outbound traffic)\n" + report += f"trucks (in TEU): {modal_split_both.truck_capacity:>10.1f} " + report += f"({modal_split_both.truck_capacity / inbound_and_outbound_total * 100:.2f}%)\n" + report += f"barges (in TEU): {modal_split_both.barge_capacity:>10.1f} " + report += f"({modal_split_both.barge_capacity / inbound_and_outbound_total * 100:.2f}%)\n" + report += f"trains (in TEU): {modal_split_both.train_capacity:>10.1f} " + report += f"({modal_split_both.train_capacity / inbound_and_outbound_total * 100:.2f}%)\n" + + report = 
report.replace("(nan%)", "(-%)") + report += "(rounding errors might exist)\n" + + return report diff --git a/conflowgen/reporting/no_data_plot.py b/conflowgen/reporting/no_data_plot.py index 930a2305..00bdefea 100644 --- a/conflowgen/reporting/no_data_plot.py +++ b/conflowgen/reporting/no_data_plot.py @@ -2,11 +2,10 @@ def no_data_graph() -> plt.Axes: - axs = plt.subplots() - no_data_text() - return axs + fig, ax = plt.subplots() + no_data_text(ax) + return ax -def no_data_text() -> plt.Axes: - ax = plt.text(0.35, 0.5, 'No data available for plotting', dict(size=30)) - return ax +def no_data_text(ax: plt.Axes) -> None: + ax.text(0.1, 0.1, 'No data') diff --git a/conflowgen/reporting/output_style.py b/conflowgen/reporting/output_style.py index c41c0d2c..b5000748 100644 --- a/conflowgen/reporting/output_style.py +++ b/conflowgen/reporting/output_style.py @@ -35,7 +35,7 @@ def display_explanation(self, text: str) -> None: """ Args: text: The text of an explanatory text (shown in normal font, wrapped if required). - Different paragraphs are separated by repeated invocations of this method. + Different paragraphs are separated by repeated invocations of this method. """ ... @@ -70,12 +70,12 @@ def display_explanation(self, text: str): class DisplayAsMarkdown(DisplayAsMarkupLanguage): """ With this style, the output is set in Markdown. - This is e.g. helpful when showing the output in Jupyter Notebooks. + This is, e.g., helpful when showing the output in Jupyter Notebooks. 
""" def __init__(self, display_markdown_func: Callable): self.display_markdown_func = display_markdown_func - def display_headline(self, text: str, level: int = 3): + def display_headline(self, text: str, level: int = 4): self.display_markdown_func("#" * level + " " + text + "\n") def display_verbatim(self, text: str): diff --git a/conflowgen/tests/posthoc_analyses/__init__.py b/conflowgen/tests/analyses/__init__.py similarity index 100% rename from conflowgen/tests/posthoc_analyses/__init__.py rename to conflowgen/tests/analyses/__init__.py diff --git a/conflowgen/tests/analyses/test_container_dwell_time_analysis.py b/conflowgen/tests/analyses/test_container_dwell_time_analysis.py new file mode 100644 index 00000000..1d101215 --- /dev/null +++ b/conflowgen/tests/analyses/test_container_dwell_time_analysis.py @@ -0,0 +1,159 @@ +import datetime +import unittest + +from conflowgen.domain_models.arrival_information import TruckArrivalInformationForPickup, \ + TruckArrivalInformationForDelivery +from conflowgen.domain_models.container import Container +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder, \ + container_dwell_time_distribution_seeder +from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination +from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder +from conflowgen.analyses.container_dwell_time_analysis import ContainerDwellTimeAnalysis +from 
conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestContainerDwellTimeAnalysis(unittest.TestCase): + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + self.sqlite_db.create_tables([ + Schedule, + Container, + LargeScheduledVehicle, + Truck, + TruckArrivalInformationForDelivery, + TruckArrivalInformationForPickup, + Feeder, + ModeOfTransportDistribution, + Destination, + ContainerDwellTimeDistribution + ]) + mode_of_transport_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() + self.analysis = ContainerDwellTimeAnalysis( + transportation_buffer=0.2 + ) + + def test_with_no_data(self): + """If no schedules are provided, no capacity is needed""" + empty_yard = self.analysis.get_container_dwell_times() + self.assertEqual(len(empty_yard), 0) + + def test_with_single_container(self): + now = datetime.datetime.now() + schedule = Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=now.date(), + vehicle_arrives_at_time=now.time(), + average_vehicle_capacity=300, + average_moved_capacity=300, + ) + feeder_lsv = LargeScheduledVehicle.create( + vehicle_name="TestFeeder1", + capacity_in_teu=300, + moved_capacity=schedule.average_moved_capacity, + scheduled_arrival=now, + schedule=schedule + ) + Feeder.create( + large_scheduled_vehicle=feeder_lsv + ) + aip = TruckArrivalInformationForPickup.create( + realized_container_pickup_time=now + datetime.timedelta(hours=25) + ) + truck = Truck.create( + delivers_container=False, + picks_up_container=True, + truck_arrival_information_for_delivery=None, + truck_arrival_information_for_pickup=aip + ) + Container.create( + weight=20, + length=ContainerLength.twenty_feet, + storage_requirement=StorageRequirement.standard, + delivered_by=ModeOfTransport.feeder, + delivered_by_large_scheduled_vehicle=feeder_lsv, + picked_up_by=ModeOfTransport.truck, + 
picked_up_by_initial=ModeOfTransport.truck, + picked_up_by_truck=truck + ) + + dwell_times = self.analysis.get_container_dwell_times() + self.assertEqual(len(dwell_times), 1) + dwell_time = dwell_times.pop() + self.assertEqual(dwell_time, datetime.timedelta(hours=25)) + + def test_with_two_containers(self): + now = datetime.datetime.now() + schedule = Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=now.date(), + vehicle_arrives_at_time=now.time(), + average_vehicle_capacity=300, + average_moved_capacity=300, + ) + feeder_lsv = LargeScheduledVehicle.create( + vehicle_name="TestFeeder1", + capacity_in_teu=300, + moved_capacity=schedule.average_moved_capacity, + scheduled_arrival=now, + schedule=schedule + ) + Feeder.create( + large_scheduled_vehicle=feeder_lsv + ) + aip = TruckArrivalInformationForPickup.create( + realized_container_pickup_time=now + datetime.timedelta(hours=25) + ) + truck = Truck.create( + delivers_container=False, + picks_up_container=True, + truck_arrival_information_for_delivery=None, + truck_arrival_information_for_pickup=aip + ) + Container.create( + weight=20, + length=ContainerLength.twenty_feet, + storage_requirement=StorageRequirement.standard, + delivered_by=ModeOfTransport.feeder, + delivered_by_large_scheduled_vehicle=feeder_lsv, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + picked_up_by_truck=truck + ) + aip_2 = TruckArrivalInformationForPickup.create( + realized_container_pickup_time=now + datetime.timedelta(hours=12) + ) + truck_2 = Truck.create( + delivers_container=False, + picks_up_container=True, + truck_arrival_information_for_delivery=None, + truck_arrival_information_for_pickup=aip_2 + ) + Container.create( + weight=20, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard, + delivered_by=ModeOfTransport.feeder, + delivered_by_large_scheduled_vehicle=feeder_lsv, + 
picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + picked_up_by_truck=truck_2 + ) + + container_dwell_times = self.analysis.get_container_dwell_times() + self.assertEqual(len(container_dwell_times), 2) + self.assertSetEqual( + container_dwell_times, + { + datetime.timedelta(hours=25), + datetime.timedelta(hours=12) + }) diff --git a/conflowgen/tests/analyses/test_container_dwell_time_analysis_report.py b/conflowgen/tests/analyses/test_container_dwell_time_analysis_report.py new file mode 100644 index 00000000..e18e2d78 --- /dev/null +++ b/conflowgen/tests/analyses/test_container_dwell_time_analysis_report.py @@ -0,0 +1,164 @@ +import datetime +import unittest + +from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties +from conflowgen.domain_models.arrival_information import TruckArrivalInformationForPickup, \ + TruckArrivalInformationForDelivery +from conflowgen.domain_models.container import Container +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution +from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution +from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder, \ + container_dwell_time_distribution_seeder +from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination +from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder +from conflowgen.analyses import ContainerDwellTimeAnalysisReport +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +def setup_feeder_data(): + now = 
datetime.datetime.now() + schedule = Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=now.date(), + vehicle_arrives_at_time=now.time(), + average_vehicle_capacity=300, + average_moved_capacity=300, + ) + feeder_lsv = LargeScheduledVehicle.create( + vehicle_name="TestFeeder1", + capacity_in_teu=300, + moved_capacity=schedule.average_moved_capacity, + scheduled_arrival=now, + schedule=schedule + ) + Feeder.create( + large_scheduled_vehicle=feeder_lsv + ) + aip = TruckArrivalInformationForPickup.create( + realized_container_pickup_time=datetime.datetime.now() + datetime.timedelta(hours=25) + ) + truck = Truck.create( + delivers_container=False, + picks_up_container=True, + truck_arrival_information_for_delivery=None, + truck_arrival_information_for_pickup=aip + ) + Container.create( + weight=20, + length=ContainerLength.twenty_feet, + storage_requirement=StorageRequirement.standard, + delivered_by=ModeOfTransport.feeder, + delivered_by_large_scheduled_vehicle=feeder_lsv, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + picked_up_by_truck=truck + ) + aip_2 = TruckArrivalInformationForPickup.create( + realized_container_pickup_time=datetime.datetime.now() + datetime.timedelta(hours=12) + ) + truck_2 = Truck.create( + delivers_container=False, + picks_up_container=True, + truck_arrival_information_for_delivery=None, + truck_arrival_information_for_pickup=aip_2 + ) + Container.create( + weight=20, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard, + delivered_by=ModeOfTransport.feeder, + delivered_by_large_scheduled_vehicle=feeder_lsv, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + picked_up_by_truck=truck_2 + ) + + +class TestContainerDwellTimeAnalysisReport(unittest.TestCase): + def setUp(self) -> None: + """Create container database in memory""" + self.sqlite_db = setup_sqlite_in_memory_db() + 
self.sqlite_db.create_tables([ + Schedule, + Container, + LargeScheduledVehicle, + Truck, + TruckArrivalInformationForPickup, + TruckArrivalInformationForDelivery, + Feeder, + ModeOfTransportDistribution, + Destination, + ContainerFlowGenerationProperties, + ContainerDwellTimeDistribution + ]) + mode_of_transport_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() + ContainerFlowGenerationProperties.create( + start_date=datetime.datetime(2021, 12, 1), + end_date=datetime.datetime(2021, 12, 6) + ) + self.analysis_report = ContainerDwellTimeAnalysisReport() + + def test_with_no_data(self): + """If no schedules are provided, no capacity is needed""" + actual_report = self.analysis_report.get_report_as_text() + expected_report = """ +container is delivered by vehicle type = all +container picked up by vehicle type = all +storage requirement = all + (reported in h) +minimum container dwell time: 0.0 +average container dwell time: 0.0 +maximum container dwell time: 0.0 +standard deviation: -1.0 +(rounding errors might exist) +""" + self.assertEqual(actual_report, expected_report) + + def test_inbound_with_single_feeder(self): + setup_feeder_data() + actual_report = self.analysis_report.get_report_as_text() + expected_report = """ +container is delivered by vehicle type = all +container picked up by vehicle type = all +storage requirement = all + (reported in h) +minimum container dwell time: 12.0 +average container dwell time: 18.5 +maximum container dwell time: 25.0 +standard deviation: 9.2 +(rounding errors might exist) +""" + self.assertEqual(actual_report, expected_report) + + def test_inbound_with_single_feeder_but_filter(self): + setup_feeder_data() + actual_report = self.analysis_report.get_report_as_text( + container_delivered_by_vehicle_type=ModeOfTransport.barge + ) + expected_report = """ +container is delivered by vehicle type = barge +container picked up by vehicle type = all +storage requirement = all + (reported in h) +minimum 
container dwell time: 0.0 +average container dwell time: 0.0 +maximum container dwell time: 0.0 +standard deviation: -1.0 +(rounding errors might exist) +""" + self.assertEqual(actual_report, expected_report) + + def test_graph_no_data(self): + empty_graph = self.analysis_report.get_report_as_graph() + self.assertIsNotNone(empty_graph) + + def test_graph_with_single_feeder(self): + setup_feeder_data() + graph = self.analysis_report.get_report_as_graph() + self.assertIsNotNone(graph) diff --git a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis.py b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis.py similarity index 99% rename from conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis.py rename to conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis.py index 9d246433..8828e0d9 100644 --- a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis.py +++ b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis.py @@ -8,7 +8,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis import \ ContainerFlowAdjustmentByVehicleTypeAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_report.py b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_report.py similarity index 98% rename from 
conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_report.py rename to conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_report.py index 7099d6b8..b6d36f20 100644 --- a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_report.py +++ b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_report.py @@ -10,7 +10,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_report import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_report import \ ContainerFlowAdjustmentByVehicleTypeAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py similarity index 96% rename from conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py rename to conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py index 2e007775..08e236c5 100644 --- a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py +++ b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary.py @@ -8,7 +8,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, 
Feeder -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary import \ ContainerFlowAdjustmentByVehicleTypeAnalysisSummary from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py similarity index 97% rename from conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py rename to conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py index 506a6b7e..3d776cc8 100644 --- a/conflowgen/tests/posthoc_analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py +++ b/conflowgen/tests/analyses/test_container_flow_adjustment_by_vehicle_type_analysis_summary_report.py @@ -10,7 +10,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.container_flow_adjustment_by_vehicle_type_analysis_summary_report import \ +from conflowgen.analyses.container_flow_adjustment_by_vehicle_type_analysis_summary_report import \ ContainerFlowAdjustmentByVehicleTypeAnalysisSummaryReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_container_flow_by_vehicle_type_analysis.py b/conflowgen/tests/analyses/test_container_flow_by_vehicle_type_analysis.py similarity index 97% rename from conflowgen/tests/posthoc_analyses/test_container_flow_by_vehicle_type_analysis.py rename to 
conflowgen/tests/analyses/test_container_flow_by_vehicle_type_analysis.py index 7e07c275..421f1629 100644 --- a/conflowgen/tests/posthoc_analyses/test_container_flow_by_vehicle_type_analysis.py +++ b/conflowgen/tests/analyses/test_container_flow_by_vehicle_type_analysis.py @@ -9,7 +9,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis +from conflowgen.analyses.container_flow_by_vehicle_type_analysis import ContainerFlowByVehicleTypeAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_container_flow_by_vehicle_type_analysis_report.py b/conflowgen/tests/analyses/test_container_flow_by_vehicle_type_analysis_report.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_container_flow_by_vehicle_type_analysis_report.py rename to conflowgen/tests/analyses/test_container_flow_by_vehicle_type_analysis_report.py index 009c8b98..15843965 100644 --- a/conflowgen/tests/posthoc_analyses/test_container_flow_by_vehicle_type_analysis_report.py +++ b/conflowgen/tests/analyses/test_container_flow_by_vehicle_type_analysis_report.py @@ -10,7 +10,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.container_flow_by_vehicle_type_analysis_report import \ +from conflowgen.analyses.container_flow_by_vehicle_type_analysis_report import \ ContainerFlowByVehicleTypeAnalysisReport from 
conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_inbound_and_outbound_vehicle_capacity_analysis.py b/conflowgen/tests/analyses/test_inbound_and_outbound_vehicle_capacity_analysis.py similarity index 94% rename from conflowgen/tests/posthoc_analyses/test_inbound_and_outbound_vehicle_capacity_analysis.py rename to conflowgen/tests/analyses/test_inbound_and_outbound_vehicle_capacity_analysis.py index a7486ea4..588b2878 100644 --- a/conflowgen/tests/posthoc_analyses/test_inbound_and_outbound_vehicle_capacity_analysis.py +++ b/conflowgen/tests/analyses/test_inbound_and_outbound_vehicle_capacity_analysis.py @@ -1,6 +1,8 @@ import datetime import unittest +import numpy as np + from conflowgen.domain_models.container import Container from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport @@ -9,7 +11,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.inbound_and_outbound_vehicle_capacity_analysis import \ +from conflowgen.analyses.inbound_and_outbound_vehicle_capacity_analysis import \ InboundAndOutboundVehicleCapacityAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -50,7 +52,7 @@ def test_outbound_with_no_data(self): self.assertEqual(capacity_in_teu, 0, f"capacity of {mode_of_transport} is unequal 0") self.assertEqual(empty_capacity[ModeOfTransport.truck], 0) - self.assertEqual(empty_max_capacity[ModeOfTransport.truck], -1) + self.assertTrue(np.isnan(empty_max_capacity[ModeOfTransport.truck])) def test_inbound_with_single_feeder(self): one_week_later = datetime.datetime.now() + 
datetime.timedelta(weeks=1) @@ -110,7 +112,6 @@ def test_outbound_with_single_feeder(self): average_moved_capacity=300, vehicle_arrives_every_k_days=-1 ) - schedule.save() feeder_lsv = LargeScheduledVehicle.create( vehicle_name="TestFeeder1", capacity_in_teu=300, @@ -118,12 +119,10 @@ def test_outbound_with_single_feeder(self): scheduled_arrival=datetime.datetime.now(), schedule=schedule ) - feeder_lsv.save() - feeder = Feeder.create( + Feeder.create( large_scheduled_vehicle=feeder_lsv ) - feeder.save() - container = Container.create( + Container.create( weight=20, length=ContainerLength.twenty_feet, storage_requirement=StorageRequirement.standard, @@ -132,7 +131,6 @@ def test_outbound_with_single_feeder(self): picked_up_by=ModeOfTransport.truck, picked_up_by_initial=ModeOfTransport.truck ) - container.save() capacity_actual, capacity_maximum = self.analysis.get_outbound_capacity_of_vehicles() self.assertSetEqual(set(ModeOfTransport), set(capacity_actual.keys())) @@ -150,4 +148,4 @@ def test_outbound_with_single_feeder(self): outbound_max_capacity_of_feeder_in_teu = capacity_maximum[ModeOfTransport.feeder] self.assertEqual(outbound_max_capacity_of_feeder_in_teu, 300) outbound_max_capacity_of_trucks_in_teu = capacity_maximum[ModeOfTransport.truck] - self.assertEqual(outbound_max_capacity_of_trucks_in_teu, -1) + self.assertTrue(np.isnan(outbound_max_capacity_of_trucks_in_teu)) diff --git a/conflowgen/tests/posthoc_analyses/test_inbound_and_outbound_vehicle_capacity_analysis_report.py b/conflowgen/tests/analyses/test_inbound_and_outbound_vehicle_capacity_analysis_report.py similarity index 95% rename from conflowgen/tests/posthoc_analyses/test_inbound_and_outbound_vehicle_capacity_analysis_report.py rename to conflowgen/tests/analyses/test_inbound_and_outbound_vehicle_capacity_analysis_report.py index bee7bf11..a833698b 100644 --- a/conflowgen/tests/posthoc_analyses/test_inbound_and_outbound_vehicle_capacity_analysis_report.py +++ 
b/conflowgen/tests/analyses/test_inbound_and_outbound_vehicle_capacity_analysis_report.py @@ -10,7 +10,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.inbound_and_outbound_vehicle_capacity_analysis_report import \ +from conflowgen.analyses.inbound_and_outbound_vehicle_capacity_analysis_report import \ InboundAndOutboundVehicleCapacityAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -34,12 +34,10 @@ def setup_feeder_data(): scheduled_arrival=datetime.datetime.now(), schedule=schedule ) - feeder_lsv.save() - feeder = Feeder.create( + Feeder.create( large_scheduled_vehicle=feeder_lsv ) - feeder.save() - container = Container.create( + Container.create( weight=20, length=ContainerLength.twenty_feet, storage_requirement=StorageRequirement.standard, @@ -48,7 +46,6 @@ def setup_feeder_data(): picked_up_by=ModeOfTransport.truck, picked_up_by_initial=ModeOfTransport.truck ) - container.save() class TestInboundAndOutboundVehicleCapacityAnalysis(unittest.TestCase): diff --git a/conflowgen/tests/posthoc_analyses/test_inbound_to_outbound_capacity_utilization_analysis.py b/conflowgen/tests/analyses/test_inbound_to_outbound_capacity_utilization_analysis.py similarity index 97% rename from conflowgen/tests/posthoc_analyses/test_inbound_to_outbound_capacity_utilization_analysis.py rename to conflowgen/tests/analyses/test_inbound_to_outbound_capacity_utilization_analysis.py index f7369e6d..a4aa37b9 100644 --- a/conflowgen/tests/posthoc_analyses/test_inbound_to_outbound_capacity_utilization_analysis.py +++ b/conflowgen/tests/analyses/test_inbound_to_outbound_capacity_utilization_analysis.py @@ -9,7 +9,7 @@ from conflowgen.domain_models.distribution_seeders import 
mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ +from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis import \ InboundToOutboundVehicleCapacityUtilizationAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_inbound_to_outbound_capacity_utilization_analysis_report.py b/conflowgen/tests/analyses/test_inbound_to_outbound_capacity_utilization_analysis_report.py similarity index 94% rename from conflowgen/tests/posthoc_analyses/test_inbound_to_outbound_capacity_utilization_analysis_report.py rename to conflowgen/tests/analyses/test_inbound_to_outbound_capacity_utilization_analysis_report.py index 43ec35e4..f1047faf 100644 --- a/conflowgen/tests/posthoc_analyses/test_inbound_to_outbound_capacity_utilization_analysis_report.py +++ b/conflowgen/tests/analyses/test_inbound_to_outbound_capacity_utilization_analysis_report.py @@ -10,7 +10,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis_report import \ +from conflowgen.analyses.inbound_to_outbound_vehicle_capacity_utilization_analysis_report import \ InboundToOutboundVehicleCapacityUtilizationAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -26,7 +26,6 @@ def setup_feeder_data(): average_moved_capacity=250, vehicle_arrives_every_k_days=-1 ) - schedule.save() feeder_lsv = LargeScheduledVehicle.create( 
vehicle_name="TestFeeder1", capacity_in_teu=schedule.average_vehicle_capacity, @@ -35,10 +34,9 @@ def setup_feeder_data(): schedule=schedule ) feeder_lsv.save() - feeder = Feeder.create( + Feeder.create( large_scheduled_vehicle=feeder_lsv ) - feeder.save() Container.create( weight=20, length=ContainerLength.twenty_feet, @@ -73,7 +71,6 @@ def setUp(self) -> None: def test_with_no_data(self): actual_report = self.analysis.get_report_as_text() - print(actual_report) expected_report = """ vehicle type = all vehicle identifier inbound capacity (in TEU) outbound capacity (in TEU) @@ -84,7 +81,6 @@ def test_with_no_data(self): def test_inbound_with_single_feeder(self): setup_feeder_data() actual_report = self.analysis.get_report_as_text() - print(actual_report) expected_report = """ vehicle type = all vehicle identifier inbound capacity (in TEU) outbound capacity (in TEU) diff --git a/conflowgen/tests/posthoc_analyses/test_modal_split_analysis.py b/conflowgen/tests/analyses/test_modal_split_analysis.py similarity index 97% rename from conflowgen/tests/posthoc_analyses/test_modal_split_analysis.py rename to conflowgen/tests/analyses/test_modal_split_analysis.py index a1c38420..cf575fbb 100644 --- a/conflowgen/tests/posthoc_analyses/test_modal_split_analysis.py +++ b/conflowgen/tests/analyses/test_modal_split_analysis.py @@ -9,7 +9,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.modal_split_analysis import ModalSplitAnalysis +from conflowgen.analyses.modal_split_analysis import ModalSplitAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -30,11 +30,11 @@ def setUp(self) -> None: self.analysis = ModalSplitAnalysis() def test_transshipment_share_with_no_data(self): - both_zero = 
self.analysis.get_transshipment_and_hinterland_fraction() + both_zero = self.analysis.get_transshipment_and_hinterland_split() self.assertSetEqual(set(both_zero), {0}) def test_hinterland_split_with_no_data(self): - all_three_zero = self.analysis.get_modal_split_for_hinterland(True, True) + all_three_zero = self.analysis.get_modal_split_for_hinterland_traffic(True, True) self.assertSetEqual(set(all_three_zero), {0}) def test_transshipment_share_with_single_feeder(self): @@ -72,7 +72,7 @@ def test_transshipment_share_with_single_feeder(self): ) container.save() - fractions = self.analysis.get_transshipment_and_hinterland_fraction() + fractions = self.analysis.get_transshipment_and_hinterland_split() self.assertEqual(fractions.transshipment_capacity, 0) self.assertEqual(fractions.hinterland_capacity, 1) @@ -111,7 +111,7 @@ def test_outbound_with_single_feeder(self): ) container.save() - fractions = self.analysis.get_modal_split_for_hinterland(True, True) + fractions = self.analysis.get_modal_split_for_hinterland_traffic(True, True) self.assertEqual(fractions.truck_capacity, 1) self.assertEqual(fractions.barge_capacity, 0) self.assertEqual(fractions.train_capacity, 0) diff --git a/conflowgen/tests/posthoc_analyses/test_modal_split_analysis_report.py b/conflowgen/tests/analyses/test_modal_split_analysis_report.py similarity index 71% rename from conflowgen/tests/posthoc_analyses/test_modal_split_analysis_report.py rename to conflowgen/tests/analyses/test_modal_split_analysis_report.py index 80144723..d4a45b5e 100644 --- a/conflowgen/tests/posthoc_analyses/test_modal_split_analysis_report.py +++ b/conflowgen/tests/analyses/test_modal_split_analysis_report.py @@ -10,7 +10,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from 
conflowgen.posthoc_analyses.modal_split_analysis_report import ModalSplitAnalysisReport +from conflowgen.analyses.modal_split_analysis_report import ModalSplitAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -74,53 +74,53 @@ def setUp(self) -> None: def test_with_no_data(self): actual_report = self.analysis.get_report_as_text() expected_report = """ -Transshipment share -transshipment proportion (in TEU): 0.00 (-%) -hinterland proportion (in TEU): 0.00 (-%) +Role in network +transshipment traffic (in TEU): 0.00 (-%) +inland gateway traffic (in TEU): 0.00 (-%) -Inbound modal split -truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (only inbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) -Outbound modal split -truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (only outbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) -Absolute modal split (both inbound and outbound) -truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (both inbound and outbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) (rounding errors might exist) """ - self.assertEqual(actual_report, expected_report) + self.assertEqual(expected_report, actual_report) def test_inbound_with_single_feeder(self): setup_feeder_data() actual_report = self.analysis.get_report_as_text() expected_report = """ -Transshipment share -transshipment proportion (in TEU): 0.00 (0.00%) -hinterland proportion (in TEU): 1.00 (100.00%) +Role in network +transshipment traffic (in TEU): 0.00 (0.00%) +inland gateway traffic (in TEU): 1.00 (100.00%) -Inbound modal split 
-truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (only inbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) -Outbound modal split -truck proportion (in TEU): 1.0 (100.00%) -barge proportion (in TEU): 0.0 (0.00%) -train proportion (in TEU): 0.0 (0.00%) +Modal split in hinterland traffic (only outbound traffic) +trucks (in TEU): 1.0 (100.00%) +barges (in TEU): 0.0 (0.00%) +trains (in TEU): 0.0 (0.00%) -Absolute modal split (both inbound and outbound) -truck proportion (in TEU): 1.0 (100.00%) -barge proportion (in TEU): 0.0 (0.00%) -train proportion (in TEU): 0.0 (0.00%) +Modal split in hinterland traffic (both inbound and outbound traffic) +trucks (in TEU): 1.0 (100.00%) +barges (in TEU): 0.0 (0.00%) +trains (in TEU): 0.0 (0.00%) (rounding errors might exist) """ - self.assertEqual(actual_report, expected_report) + self.assertEqual(expected_report, actual_report) def test_graph_with_no_data(self): empty_graph = self.analysis.get_report_as_graph() diff --git a/conflowgen/tests/posthoc_analyses/test_quay_side_throughput_analysis.py b/conflowgen/tests/analyses/test_quay_side_throughput_analysis.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_quay_side_throughput_analysis.py rename to conflowgen/tests/analyses/test_quay_side_throughput_analysis.py index 1d41a5d3..f207e7cd 100644 --- a/conflowgen/tests/posthoc_analyses/test_quay_side_throughput_analysis.py +++ b/conflowgen/tests/analyses/test_quay_side_throughput_analysis.py @@ -11,7 +11,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.quay_side_throughput_analysis import QuaySideThroughputAnalysis +from 
conflowgen.analyses.quay_side_throughput_analysis import QuaySideThroughputAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_quay_side_throughput_analysis_report.py b/conflowgen/tests/analyses/test_quay_side_throughput_analysis_report.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_quay_side_throughput_analysis_report.py rename to conflowgen/tests/analyses/test_quay_side_throughput_analysis_report.py index a3a15195..430bc1a8 100644 --- a/conflowgen/tests/posthoc_analyses/test_quay_side_throughput_analysis_report.py +++ b/conflowgen/tests/analyses/test_quay_side_throughput_analysis_report.py @@ -12,7 +12,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder, Train -from conflowgen.posthoc_analyses.quay_side_throughput_analysis_report import QuaySideThroughputAnalysisReport +from conflowgen.analyses.quay_side_throughput_analysis_report import QuaySideThroughputAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_run_all_posthoc_analyses.py b/conflowgen/tests/analyses/test_run_all_analyses.py similarity index 58% rename from conflowgen/tests/posthoc_analyses/test_run_all_posthoc_analyses.py rename to conflowgen/tests/analyses/test_run_all_analyses.py index d69aebd5..88c45f41 100644 --- a/conflowgen/tests/posthoc_analyses/test_run_all_posthoc_analyses.py +++ b/conflowgen/tests/analyses/test_run_all_analyses.py @@ -1,14 +1,15 @@ import datetime import unittest +import unittest.mock from conflowgen.application.models.container_flow_generation_properties import ContainerFlowGenerationProperties from conflowgen.database_connection.create_tables import 
create_tables from conflowgen.domain_models.distribution_seeders import seed_all_distributions -from conflowgen.posthoc_analyses import run_all_analyses +from conflowgen.analyses import run_all_analyses from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestRunAllPosthocAnalyses(unittest.TestCase): +class TestRunAllAnalyses(unittest.TestCase): def setUp(self) -> None: self.sqlite_db = setup_sqlite_in_memory_db() create_tables(self.sqlite_db) @@ -19,6 +20,12 @@ def setUp(self) -> None: ) def test_with_no_data(self): - with self.assertLogs('conflowgen', level='INFO') as cm: + with self.assertLogs('conflowgen', level='INFO') as context: run_all_analyses() - self.assertEqual(len(cm.output), 29) + self.assertEqual(len(context.output), 32) + + def test_with_no_data_as_graph(self): + with unittest.mock.patch('matplotlib.pyplot.show'): + with self.assertLogs('conflowgen', level='INFO') as context: + run_all_analyses(as_text=False, as_graph=True, static_graphs=True) + self.assertEqual(len(context.output), 24) diff --git a/conflowgen/tests/posthoc_analyses/test_truck_gate_throughput_analysis.py b/conflowgen/tests/analyses/test_truck_gate_throughput_analysis.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_truck_gate_throughput_analysis.py rename to conflowgen/tests/analyses/test_truck_gate_throughput_analysis.py index 61710ae6..6c4fabeb 100644 --- a/conflowgen/tests/posthoc_analyses/test_truck_gate_throughput_analysis.py +++ b/conflowgen/tests/analyses/test_truck_gate_throughput_analysis.py @@ -11,7 +11,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.truck_gate_throughput_analysis import TruckGateThroughputAnalysis +from 
conflowgen.analyses.truck_gate_throughput_analysis import TruckGateThroughputAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_truck_gate_throughput_analysis_report.py b/conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_truck_gate_throughput_analysis_report.py rename to conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py index 4963f1b8..87fa6780 100644 --- a/conflowgen/tests/posthoc_analyses/test_truck_gate_throughput_analysis_report.py +++ b/conflowgen/tests/analyses/test_truck_gate_throughput_analysis_report.py @@ -12,7 +12,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder, Train -from conflowgen.posthoc_analyses.truck_gate_throughput_analysis_report import TruckGateThroughputAnalysisReport +from conflowgen.analyses.truck_gate_throughput_analysis_report import TruckGateThroughputAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_yard_capacity_analysis.py b/conflowgen/tests/analyses/test_yard_capacity_analysis.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_yard_capacity_analysis.py rename to conflowgen/tests/analyses/test_yard_capacity_analysis.py index 99d398b1..8d261e5a 100644 --- a/conflowgen/tests/posthoc_analyses/test_yard_capacity_analysis.py +++ b/conflowgen/tests/analyses/test_yard_capacity_analysis.py @@ -11,7 +11,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from 
conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.yard_capacity_analysis import YardCapacityAnalysis +from conflowgen.analyses.yard_capacity_analysis import YardCapacityAnalysis from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/posthoc_analyses/test_yard_capacity_analysis_report.py b/conflowgen/tests/analyses/test_yard_capacity_analysis_report.py similarity index 98% rename from conflowgen/tests/posthoc_analyses/test_yard_capacity_analysis_report.py rename to conflowgen/tests/analyses/test_yard_capacity_analysis_report.py index e0b77845..bba0b98e 100644 --- a/conflowgen/tests/posthoc_analyses/test_yard_capacity_analysis_report.py +++ b/conflowgen/tests/analyses/test_yard_capacity_analysis_report.py @@ -12,7 +12,7 @@ from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder from conflowgen.domain_models.large_vehicle_schedule import Schedule, Destination from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck, Feeder -from conflowgen.posthoc_analyses.yard_capacity_analysis_report import YardCapacityAnalysisReport +from conflowgen.analyses.yard_capacity_analysis_report import YardCapacityAnalysisReport from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db diff --git a/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py b/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py new file mode 100644 index 00000000..467bf053 --- /dev/null +++ b/conflowgen/tests/api/test_container_dwell_time_distribution_manager.py @@ -0,0 +1,32 @@ +import unittest +import unittest.mock + +from conflowgen import ContainerDwellTimeDistributionManager +from conflowgen.domain_models.distribution_seeders import container_dwell_time_distribution_seeder + + +class TestContainerDwellTimeDistributionManager(unittest.TestCase): + + SAMPLE_DISTRIBUTION = 
container_dwell_time_distribution_seeder.DEFAULT_CONTAINER_DWELL_TIME_DISTRIBUTIONS + + def setUp(self) -> None: + self.container_dwell_time_distribution_manager = ContainerDwellTimeDistributionManager() + + def test_get_container_dwell_time_distributions(self): + with unittest.mock.patch.object( + self.container_dwell_time_distribution_manager.container_dwell_time_distribution_repository, + 'get_distributions', + return_value=self.SAMPLE_DISTRIBUTION) as mock_method: + distribution = self.container_dwell_time_distribution_manager.get_container_dwell_time_distribution() + mock_method.assert_called_once() + self.assertEqual(distribution, self.SAMPLE_DISTRIBUTION) + + def test_set_container_dwell_time_distributions(self): + with unittest.mock.patch.object( + self.container_dwell_time_distribution_manager.container_dwell_time_distribution_repository, + 'set_distributions', + return_value=None) as mock_method: + self.container_dwell_time_distribution_manager.set_container_dwell_time_distribution( + self.SAMPLE_DISTRIBUTION + ) + mock_method.assert_called_once_with(self.SAMPLE_DISTRIBUTION) diff --git a/conflowgen/tests/api/test_container_flow_generation_manager.py b/conflowgen/tests/api/test_container_flow_generation_manager.py index 504955dc..592e7403 100644 --- a/conflowgen/tests/api/test_container_flow_generation_manager.py +++ b/conflowgen/tests/api/test_container_flow_generation_manager.py @@ -75,13 +75,7 @@ class MockedProperties: 'name': "my test data", 'start_date': datetime.date(2030, 1, 1), 'end_date': datetime.date(2030, 12, 31), - 'transportation_buffer': 0.2, - 'minimum_dwell_time_of_import_containers_in_hours': 3, - 'minimum_dwell_time_of_export_containers_in_hours': 4, - 'minimum_dwell_time_of_transshipment_containers_in_hours': 5, - 'maximum_dwell_time_of_import_containers_in_hours': 40, - 'maximum_dwell_time_of_export_containers_in_hours': 50, - 'maximum_dwell_time_of_transshipment_containers_in_hours': 60 + 'transportation_buffer': 0.2 } with 
unittest.mock.patch.object( diff --git a/conflowgen/tests/api/test_container_storage_requirements_distribution_manager.py b/conflowgen/tests/api/test_container_storage_requirements_distribution_manager.py new file mode 100644 index 00000000..947ea3f4 --- /dev/null +++ b/conflowgen/tests/api/test_container_storage_requirements_distribution_manager.py @@ -0,0 +1,30 @@ +import unittest +import unittest.mock + +from conflowgen import ContainerStorageRequirementDistributionManager +from conflowgen.domain_models.distribution_seeders import container_storage_requirement_distribution_seeder + + +class TestContainerStorageRequirementsDistributionManager(unittest.TestCase): + + SAMPLE_DISTRIBUTION = container_storage_requirement_distribution_seeder.DEFAULT_STORAGE_REQUIREMENT_DISTRIBUTION + + def setUp(self) -> None: + self.manager = ContainerStorageRequirementDistributionManager() + + def test_get_container_lengths(self): + with unittest.mock.patch.object( + self.manager.storage_requirement_repository, + 'get_distribution', + return_value=self.SAMPLE_DISTRIBUTION) as mock_method: + distribution = self.manager.get_storage_requirement_distribution() + mock_method.assert_called_once() + self.assertEqual(distribution, self.SAMPLE_DISTRIBUTION) + + def test_set_container_lengths(self): + with unittest.mock.patch.object( + self.manager.storage_requirement_repository, + 'set_distribution', + return_value=None) as mock_method: + self.manager.set_storage_requirement_distribution(self.SAMPLE_DISTRIBUTION) + mock_method.assert_called_once_with(self.SAMPLE_DISTRIBUTION) diff --git a/conflowgen/tests/api/test_mode_of_transport_distribution_manager.py b/conflowgen/tests/api/test_mode_of_transport_distribution_manager.py index c0bdbb88..23f296be 100644 --- a/conflowgen/tests/api/test_mode_of_transport_distribution_manager.py +++ b/conflowgen/tests/api/test_mode_of_transport_distribution_manager.py @@ -33,7 +33,7 @@ def test_get(self): self.assertAlmostEqual(sum_of_all_proportions, 1) def 
test_set_with_missing_keys_first_level(self) -> None: - with self.assertRaises(DistributionElementIsMissingException) as cm: + with self.assertRaises(DistributionElementIsMissingException) as context: self.mode_of_transport_distribution_manager.set_mode_of_transport_distribution( { ModeOfTransport.feeder: { @@ -51,10 +51,10 @@ def test_set_with_missing_keys_first_level(self) -> None: "The distribution {'feeder': {...}} was expected to have the following " "elements: ['truck', 'train', 'feeder', 'deep_sea_vessel', 'barge'] but it " "provided the following elements: ['feeder'].") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_set_with_missing_keys_second_level(self) -> None: - with self.assertRaises(DistributionElementIsMissingException) as cm: + with self.assertRaises(DistributionElementIsMissingException) as context: self.mode_of_transport_distribution_manager.set_mode_of_transport_distribution( { ModeOfTransport.feeder: { @@ -85,7 +85,7 @@ def test_set_with_missing_keys_second_level(self) -> None: "provided the following elements: ['train']. This is error occurred while " "examining the dependent variable 'feeder'." 
) - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_happy_path(self) -> None: self.mode_of_transport_distribution_manager.set_mode_of_transport_distribution( @@ -129,7 +129,7 @@ def test_happy_path(self) -> None: ) def test_set_distribution_with_wrongly_typed_distribution(self) -> None: - with self.assertRaises(DistributionElementIsInvalidException) as cm: + with self.assertRaises(DistributionElementIsInvalidException) as context: self.mode_of_transport_distribution_manager.set_mode_of_transport_distribution( { ContainerLength.twenty_feet: { # the culprit @@ -169,7 +169,7 @@ def test_set_distribution_with_wrongly_typed_distribution(self) -> None: } }) expected_message = "Element '20 feet' could not be casted to type ''" - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_set_distribution_with_dirty_distribution(self) -> None: clean_distribution = { @@ -248,5 +248,6 @@ def test_set_distribution_with_dirty_distribution(self) -> None: } } sanitized_distribution = validate_distribution_with_one_dependent_variable( - dirty_distribution, ModeOfTransport, ModeOfTransport) + dirty_distribution, ModeOfTransport, ModeOfTransport, values_are_frequencies=True + ) self.assertDictEqual(sanitized_distribution, clean_distribution) diff --git a/conflowgen/tests/application_models/repositories/test_container_stream_generation_properties_repository.py b/conflowgen/tests/application_models/repositories/test_container_stream_generation_properties_repository.py index 718e3a4e..f6d6f765 100644 --- a/conflowgen/tests/application_models/repositories/test_container_stream_generation_properties_repository.py +++ b/conflowgen/tests/application_models/repositories/test_container_stream_generation_properties_repository.py @@ -7,7 +7,7 @@ from conflowgen.application.models.container_flow_generation_properties import 
ContainerFlowGenerationProperties from conflowgen.application.repositories.container_flow_generation_properties_repository import \ ContainerFlowGenerationPropertiesRepository, InvalidTimeRangeException, \ - DuplicatedContainerFlowGenerationPropertiesEntryException, MinimumNotStrictlySmallerThanMaximumException + DuplicatedContainerFlowGenerationPropertiesEntryException from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db @@ -138,29 +138,3 @@ def test_set_values_twice(self): self.assertEqual(loaded_properties.name, name_new) self.assertEqual(loaded_properties.start_date, start_date_new) self.assertEqual(loaded_properties.end_date, end_date_new) - - def test_set_invalid_minimum_maximum_pair_for_import_containers(self): - properties = self.repository.get_container_flow_generation_properties() - name = "Test" - properties.name = name - start_date = datetime.datetime.now() - properties.start_date = start_date - end_date = datetime.datetime.now() + datetime.timedelta(days=5) - properties.end_date = end_date - properties.minimum_dwell_time_of_import_containers_in_hours = 10 - properties.maximum_dwell_time_of_import_containers_in_hours = 9 - with self.assertRaises(MinimumNotStrictlySmallerThanMaximumException): - self.repository.set_container_flow_generation_properties(properties) - - def test_set_invalid_minimum_maximum_pair_for_export_containers(self): - properties = self.repository.get_container_flow_generation_properties() - name = "Test" - properties.name = name - start_date = datetime.datetime.now() - properties.start_date = start_date - end_date = datetime.datetime.now() + datetime.timedelta(days=5) - properties.end_date = end_date - properties.minimum_dwell_time_of_export_containers_in_hours = 10 - properties.maximum_dwell_time_of_export_containers_in_hours = 9 - with self.assertRaises(MinimumNotStrictlySmallerThanMaximumException): - self.repository.set_container_flow_generation_properties(properties) diff --git 
a/conflowgen/tests/application_models/repositories/test_container_stream_statistics_report.py b/conflowgen/tests/application_models/repositories/test_container_stream_statistics_report.py index 82a2a584..3bf643c7 100644 --- a/conflowgen/tests/application_models/repositories/test_container_stream_statistics_report.py +++ b/conflowgen/tests/application_models/repositories/test_container_stream_statistics_report.py @@ -188,7 +188,7 @@ def test_outbound_loaded_ship_using_buffer_as_maximum(self): feeder.large_scheduled_vehicle.save() truck = self._create_truck(arrival=now) container = self._create_container_delivered_by_truck(truck) - container.picked_up_by_large_scheduled_vehicle = feeder.id + container.picked_up_by_large_scheduled_vehicle = feeder.id # pylint: disable=no-member container.picked_up_by = feeder.get_mode_of_transport() container.save() @@ -218,7 +218,7 @@ def test_outbound_loaded_ship_using_capacity_as_maximum(self): feeder.large_scheduled_vehicle.save() truck = self._create_truck(arrival=now) container = self._create_container_delivered_by_truck(truck) - container.picked_up_by_large_scheduled_vehicle = feeder.id + container.picked_up_by_large_scheduled_vehicle = feeder.id # pylint: disable=no-member container.picked_up_by = feeder.get_mode_of_transport() container.save() @@ -285,7 +285,7 @@ def test_two_loaded_ships_one_with_outbound_traffic(self): truck = self._create_truck(arrival=now) container = self._create_container_delivered_by_truck(truck) - container.picked_up_by_large_scheduled_vehicle = feeder_1.id + container.picked_up_by_large_scheduled_vehicle = feeder_1.id # pylint: disable=no-member container.picked_up_by = feeder_1.get_mode_of_transport() container.save() diff --git a/conflowgen/tests/domain_models/distribution_model_seeder/test_container_dwell_time_distribution_seeder.py b/conflowgen/tests/domain_models/distribution_model_seeder/test_container_dwell_time_distribution_seeder.py new file mode 100644 index 00000000..6c9271df --- /dev/null 
+++ b/conflowgen/tests/domain_models/distribution_model_seeder/test_container_dwell_time_distribution_seeder.py @@ -0,0 +1,39 @@ +""" +Check if mode of transportation is properly translated between application and database. +""" + +import unittest + +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution +from conflowgen.domain_models.distribution_seeders import container_dwell_time_distribution_seeder +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db + + +class TestContainerDwellTimeDistributionSeeder(unittest.TestCase): + """ + The actual ModeOfTransportField behavior is implemented in peewee. + """ + + def setUp(self) -> None: + """Create container database in memory""" + sqlite_db = setup_sqlite_in_memory_db() + sqlite_db.create_tables([ + ContainerDwellTimeDistribution + ]) + + def test_seeding(self): + """This should just not throw any exception""" + container_dwell_time_distribution_seeder.seed() + + for mode_of_transport_i in ModeOfTransport: + for mode_of_transport_j in ModeOfTransport: + for storage_requirement in StorageRequirement: + entry = ContainerDwellTimeDistribution.select().where( + (ContainerDwellTimeDistribution.picked_up_by == mode_of_transport_i) + & (ContainerDwellTimeDistribution.delivered_by == mode_of_transport_j) + & (ContainerDwellTimeDistribution.storage_requirement == storage_requirement) + ) + self.assertEqual(len(list(entry)), 1) diff --git a/conflowgen/tests/domain_models/distribution_model_seeder/test_container_length_distribution_seeder.py b/conflowgen/tests/domain_models/distribution_model_seeder/test_container_length_distribution_seeder.py index 5b2f040b..562d021c 100644 --- 
a/conflowgen/tests/domain_models/distribution_model_seeder/test_container_length_distribution_seeder.py +++ b/conflowgen/tests/domain_models/distribution_model_seeder/test_container_length_distribution_seeder.py @@ -23,4 +23,4 @@ def setUp(self) -> None: def test_seeding(self): """This should just not throw any exception""" - container_length_distribution_seeder.seed() + self.assertIsNone(container_length_distribution_seeder.seed()) diff --git a/conflowgen/tests/domain_models/distribution_model_seeder/test_container_weight_distribution_seeder.py b/conflowgen/tests/domain_models/distribution_model_seeder/test_container_weight_distribution_seeder.py index f7314fe0..f8a6c469 100644 --- a/conflowgen/tests/domain_models/distribution_model_seeder/test_container_weight_distribution_seeder.py +++ b/conflowgen/tests/domain_models/distribution_model_seeder/test_container_weight_distribution_seeder.py @@ -23,4 +23,4 @@ def setUp(self) -> None: def test_seeding(self): """This should just not throw any exception""" - container_weight_distribution_seeder.seed() + self.assertIsNone(container_weight_distribution_seeder.seed()) diff --git a/conflowgen/tests/domain_models/distribution_model_seeder/test_mode_of_transport_distribution_seeder.py b/conflowgen/tests/domain_models/distribution_model_seeder/test_mode_of_transport_distribution_seeder.py index 9369bb52..a4451e1c 100644 --- a/conflowgen/tests/domain_models/distribution_model_seeder/test_mode_of_transport_distribution_seeder.py +++ b/conflowgen/tests/domain_models/distribution_model_seeder/test_mode_of_transport_distribution_seeder.py @@ -23,4 +23,4 @@ def setUp(self) -> None: def test_seeding(self): """The seed method includes a verification at the end and throws an error in case of a problem.""" - mode_of_transport_distribution_seeder.seed() + self.assertIsNone(mode_of_transport_distribution_seeder.seed()) diff --git a/conflowgen/tests/domain_models/distribution_repositories/example_container_dwell_time_distribution.py 
b/conflowgen/tests/domain_models/distribution_repositories/example_container_dwell_time_distribution.py new file mode 100644 index 00000000..298ea969 --- /dev/null +++ b/conflowgen/tests/domain_models/distribution_repositories/example_container_dwell_time_distribution.py @@ -0,0 +1,888 @@ +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport + +example_container_dwell_time_distribution = { + ModeOfTransport.truck: + { + ModeOfTransport.truck: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 451.2, + 'variance': 1353.6, + 'maximum_number_of_hours': 2256.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 170.4, + 'variance': 511.2, + 'maximum_number_of_hours': 852, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 170.4, + 'variance': 511.2, + 'maximum_number_of_hours': 852, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 170.4, + 'variance': 511.2, + 'maximum_number_of_hours': 852, + 'minimum_number_of_hours': 0 + } + }, + ModeOfTransport.train: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 276.0, + 'variance': 828.0, + 'maximum_number_of_hours': 1380.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 40.8, + 'variance': 122.4, + 'maximum_number_of_hours': 204.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 40.8, + 'variance': 122.4, + 'maximum_number_of_hours': 204.0, + 'minimum_number_of_hours': 0 + }, + 
StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 40.8, + 'variance': 122.4, + 'maximum_number_of_hours': 204.0, + 'minimum_number_of_hours': 0 + } + }, + ModeOfTransport.feeder: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 312.0, + 'variance': 936.0, + 'maximum_number_of_hours': 1560.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 85.92, + 'variance': 257.76, + 'maximum_number_of_hours': 429.6, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 85.92, + 'variance': 257.76, + 'maximum_number_of_hours': 429.6, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 85.92, + 'variance': 257.76, + 'maximum_number_of_hours': 429.6, + 'minimum_number_of_hours': 12 + } + }, + ModeOfTransport.deep_sea_vessel: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 321.6, + 'variance': 964.8, + 'maximum_number_of_hours': 1608.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 156.0, + 'variance': 468.0, + 'maximum_number_of_hours': 780.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 156.0, + 'variance': 468.0, + 'maximum_number_of_hours': 780.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 156.0, + 'variance': 468.0, + 'maximum_number_of_hours': 780.0, + 'minimum_number_of_hours': 12 + } + }, + ModeOfTransport.barge: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', 
+ 'average_number_of_hours': 177.6, + 'variance': 532.8, + 'maximum_number_of_hours': 888, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 240, + 'variance': 720, + 'maximum_number_of_hours': 1200, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 240, + 'variance': 720, + 'maximum_number_of_hours': 1200, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 240, + 'variance': 720, + 'maximum_number_of_hours': 1200, + 'minimum_number_of_hours': 0 + } + } + }, ModeOfTransport.train: + { + ModeOfTransport.truck: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 228.0, + 'variance': 684.0, + 'maximum_number_of_hours': 1140.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 69.6, + 'variance': 208.8, + 'maximum_number_of_hours': 348.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 69.6, + 'variance': 208.8, + 'maximum_number_of_hours': 348.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 69.6, + 'variance': 208.8, + 'maximum_number_of_hours': 348.0, + 'minimum_number_of_hours': 0 + } + }, + ModeOfTransport.train: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 199.2, + 'variance': 597.6, + 'maximum_number_of_hours': 996.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 288, + 'variance': 864, 'maximum_number_of_hours': 1440, + 'minimum_number_of_hours': 0 
+ }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 288, + 'variance': 864, + 'maximum_number_of_hours': 1440, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 288, + 'variance': 864, + 'maximum_number_of_hours': 1440, + 'minimum_number_of_hours': 0 + } + }, + ModeOfTransport.feeder: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 384, + 'variance': 1152, + 'maximum_number_of_hours': 1920, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 98.4, + 'variance': 295.2, + 'maximum_number_of_hours': 492, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 98.4, + 'variance': 295.2, + 'maximum_number_of_hours': 492, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 98.4, + 'variance': 295.2, + 'maximum_number_of_hours': 492, + 'minimum_number_of_hours': 12 + } + }, + ModeOfTransport.deep_sea_vessel: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 307.2, + 'variance': 921.6, + 'maximum_number_of_hours': 1536.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 160.8, + 'variance': 482.4, + 'maximum_number_of_hours': 804.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 160.8, + 'variance': 482.4, + 'maximum_number_of_hours': 804.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 160.8, + 'variance': 
482.4, + 'maximum_number_of_hours': 804.0, + 'minimum_number_of_hours': 12 + } + }, + ModeOfTransport.barge: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 194.4, + 'variance': 583.2, + 'maximum_number_of_hours': 972, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 350.4, + 'variance': 1051.2, + 'maximum_number_of_hours': 1752.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 350.4, + 'variance': 1051.2, + 'maximum_number_of_hours': 1752.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 350.4, + 'variance': 1051.2, + 'maximum_number_of_hours': 1752.0, + 'minimum_number_of_hours': 0 + } + } + }, + ModeOfTransport.feeder: + { + ModeOfTransport.truck: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 326.4, + 'variance': 979.2, + 'maximum_number_of_hours': 1632.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 74.4, + 'variance': 223.2, + 'maximum_number_of_hours': 372.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 74.4, + 'variance': 223.2, + 'maximum_number_of_hours': 372.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 74.4, + 'variance': 223.2, + 'maximum_number_of_hours': 372.0, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.train: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 326.4, + 'variance': 979.2, + 'maximum_number_of_hours': 1632.0, + 
'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 96, + 'variance': 288, + 'maximum_number_of_hours': 480, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 96, + 'variance': 288, + 'maximum_number_of_hours': 480, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 96, + 'variance': 288, + 'maximum_number_of_hours': 480, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.feeder: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 254.4, + 'variance': 763.1999999999999, + 'maximum_number_of_hours': 1272.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 91.2, + 'variance': 273.6, + 'maximum_number_of_hours': 456, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 91.2, + 'variance': 273.6, + 'maximum_number_of_hours': 456, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 91.2, + 'variance': 273.6, + 'maximum_number_of_hours': 456, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.deep_sea_vessel: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 350.4, + 'variance': 1051.2, + 'maximum_number_of_hours': 1752.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 199.2, + 'variance': 597.6, + 'maximum_number_of_hours': 996.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 
199.2, + 'variance': 597.6, + 'maximum_number_of_hours': 996.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 199.2, + 'variance': 597.6, + 'maximum_number_of_hours': 996.0, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.barge: + { + StorageRequirement.empty: { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 196.8, + 'variance': 590.4, + 'maximum_number_of_hours': 984, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 57.6, + 'variance': 172.8, + 'maximum_number_of_hours': 288.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 57.6, + 'variance': 172.8, + 'maximum_number_of_hours': 288.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 57.6, + 'variance': 172.8, + 'maximum_number_of_hours': 288.0, + 'minimum_number_of_hours': 3 + } + } + }, + ModeOfTransport.deep_sea_vessel: + { + ModeOfTransport.truck: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 292.8, + 'variance': 878.4, + 'maximum_number_of_hours': 1464, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 72, + 'variance': 216, + 'maximum_number_of_hours': 360, + 'minimum_number_of_hours': 3}, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 72, + 'variance': 216, + 'maximum_number_of_hours': 360, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 72, + 'variance': 216, + 'maximum_number_of_hours': 360, + 'minimum_number_of_hours': 3 + } + }, + 
ModeOfTransport.train: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 264, + 'variance': 792, + 'maximum_number_of_hours': 1320, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + + 'distribution_name': 'lognormal', + 'average_number_of_hours': 72, + 'variance': 216, + 'maximum_number_of_hours': 360, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 72, + 'variance': 216, + 'maximum_number_of_hours': 360, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 72, + 'variance': 216, + 'maximum_number_of_hours': 360, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.feeder: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 336, + 'variance': 1008, + 'maximum_number_of_hours': 1680, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 103.2, + 'variance': 309.6, + 'maximum_number_of_hours': 516.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 103.2, + 'variance': 309.6, + 'maximum_number_of_hours': 516.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 103.2, + 'variance': 309.6, + 'maximum_number_of_hours': 516.0, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.deep_sea_vessel: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 664.8, + 'variance': 1994.4, + 'maximum_number_of_hours': 3324.0, + 'minimum_number_of_hours': 3}, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 223.2, + 'variance': 
669.6, + 'maximum_number_of_hours': 1116.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 223.2, + 'variance': 669.6, + 'maximum_number_of_hours': 1116.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 223.2, + 'variance': 669.6, + 'maximum_number_of_hours': 1116.0, + 'minimum_number_of_hours': 3 + } + }, + ModeOfTransport.barge: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 268.8, + 'variance': 806.4, + 'maximum_number_of_hours': 1344, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 60.0, + 'variance': 180.0, + 'maximum_number_of_hours': 300.0, + 'minimum_number_of_hours': 3 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 60.0, + 'variance': 180.0, + 'maximum_number_of_hours': 300.0, + 'minimum_number_of_hours': 3}, + StorageRequirement.dangerous_goods: + { + + 'distribution_name': 'lognormal', + 'average_number_of_hours': 60.0, + 'variance': 180.0, + 'maximum_number_of_hours': 300.0, + 'minimum_number_of_hours': 3 + } + } + }, + ModeOfTransport.barge: + { + ModeOfTransport.truck: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 230.4, + 'variance': 691.2, + 'maximum_number_of_hours': 1152.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 192, + 'variance': 576, + 'maximum_number_of_hours': 960, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 192, + 'variance': 576, + 'maximum_number_of_hours': 960, + 'minimum_number_of_hours': 0 + }, + 
StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 192, + 'variance': 576, + 'maximum_number_of_hours': 960, + 'minimum_number_of_hours': 0 + } + }, + ModeOfTransport.train: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 196.8, + 'variance': 590.4, + 'maximum_number_of_hours': 984, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 240, + 'variance': 720, + 'maximum_number_of_hours': 1200, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 240, + 'variance': 720, + 'maximum_number_of_hours': 1200, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 240, + 'variance': 720, + 'maximum_number_of_hours': 1200, + 'minimum_number_of_hours': 0 + } + }, + ModeOfTransport.feeder: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 345.6, + 'variance': 1036.8, + 'maximum_number_of_hours': 1728.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 100.8, + 'variance': 302.4, + 'maximum_number_of_hours': 504.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 100.8, + 'variance': 302.40000000000003, + 'maximum_number_of_hours': 504.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 100.8, + 'variance': 302.4, + 'maximum_number_of_hours': 504.0, + 'minimum_number_of_hours': 12 + } + }, + ModeOfTransport.deep_sea_vessel: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 
'average_number_of_hours': 422.40000000000003, + 'variance': 1267.2, + 'maximum_number_of_hours': 2112.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 163.2, + 'variance': 489.6, + 'maximum_number_of_hours': 816.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 163.2, + 'variance': 489.6, + 'maximum_number_of_hours': 816.0, + 'minimum_number_of_hours': 12 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 163.2, + 'variance': 489.6, + 'maximum_number_of_hours': 816.0, + 'minimum_number_of_hours': 12 + } + }, + ModeOfTransport.barge: + { + StorageRequirement.empty: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 278.4, + 'variance': 835.1999999999999, + 'maximum_number_of_hours': 1392.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.standard: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 108.0, + 'variance': 324.0, + 'maximum_number_of_hours': 540.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.reefer: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 108.0, + 'variance': 324.0, + 'maximum_number_of_hours': 540.0, + 'minimum_number_of_hours': 0 + }, + StorageRequirement.dangerous_goods: + { + 'distribution_name': 'lognormal', + 'average_number_of_hours': 108.0, + 'variance': 324.0, + 'maximum_number_of_hours': 540.0, + 'minimum_number_of_hours': 0 + } + } + } +} diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_container_dwell_time_distribution_repository.py b/conflowgen/tests/domain_models/distribution_repositories/test_container_dwell_time_distribution_repository.py new file mode 100644 index 00000000..de391430 --- /dev/null +++ 
b/conflowgen/tests/domain_models/distribution_repositories/test_container_dwell_time_distribution_repository.py @@ -0,0 +1,56 @@ +import unittest + +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution +from conflowgen.domain_models.distribution_repositories.container_dwell_time_distribution_repository import \ + ContainerDwellTimeDistributionRepository +from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db +from conflowgen.tools.continuous_distribution import ContinuousDistribution, ClippedLogNormal +from .example_container_dwell_time_distribution import example_container_dwell_time_distribution + + +class TestContainerDwellTimeDistributionRepository(unittest.TestCase): + + default_data = example_container_dwell_time_distribution + + def setUp(self) -> None: + """Create container database in memory""" + sqlite_db = setup_sqlite_in_memory_db() + sqlite_db.create_tables([ + ContainerDwellTimeDistribution + ]) + + def test_get(self): + repo = ContainerDwellTimeDistributionRepository() + repo.set_distributions( + self.default_data + ) + distributions = ContainerDwellTimeDistributionRepository().get_distributions() + for mode_of_transport_i in ModeOfTransport: + self.assertIn(mode_of_transport_i, distributions.keys()) + for mode_of_transport_j in ModeOfTransport: + self.assertIn(mode_of_transport_j, distributions[mode_of_transport_i].keys()) + for storage_requirement in StorageRequirement: + self.assertIn(storage_requirement, distributions[mode_of_transport_i][mode_of_transport_j].keys()) + distribution = distributions[mode_of_transport_i][mode_of_transport_j][storage_requirement] + + distribution_data = self.default_data[mode_of_transport_i][mode_of_transport_j][storage_requirement] + + 
self.assertIsInstance(distribution, ContinuousDistribution) + self.assertEqual(distribution.average, distribution_data["average_number_of_hours"]) + self.assertEqual(distribution.minimum, distribution_data["minimum_number_of_hours"]) + self.assertEqual(distribution.maximum, distribution_data["maximum_number_of_hours"]) + + self.assertIsInstance(distribution, ClippedLogNormal) + self.assertEqual(distribution.variance, distribution_data["variance"]) + + def test_set_twice(self): + repo = ContainerDwellTimeDistributionRepository() + repo.set_distributions( + self.default_data + ) + repo.set_distributions( + self.default_data + ) diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py b/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py index 25fc91c2..76f84036 100644 --- a/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py +++ b/conflowgen/tests/domain_models/distribution_repositories/test_container_length_distribution_repository.py @@ -50,7 +50,7 @@ def test_distribution_values_range_between_zero_to_one(self): self.assertLessEqual(proportion, 1) def test_happy_path(self) -> None: - ContainerLengthDistributionRepository.set_distribution( + no_return = ContainerLengthDistributionRepository.set_distribution( { ContainerLength.twenty_feet: 0.5, ContainerLength.forty_feet: 0.5, @@ -58,6 +58,7 @@ def test_happy_path(self) -> None: ContainerLength.other: 0 } ) + self.assertIsNone(no_return) def test_set_twice(self) -> None: ContainerLengthDistributionRepository.set_distribution( @@ -68,7 +69,7 @@ def test_set_twice(self) -> None: ContainerLength.other: 0 } ) - ContainerLengthDistributionRepository.set_distribution( + no_return = ContainerLengthDistributionRepository.set_distribution( { ContainerLength.twenty_feet: 0.5, ContainerLength.forty_feet: 0.5, @@ -76,6 +77,7 @@ def test_set_twice(self) -> None: 
ContainerLength.other: 0 } ) + self.assertIsNone(no_return) def test_set_container_lengths_with_missing_lengths(self) -> None: with self.assertRaises(ContainerLengthMissing): diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_container_weight_distribution_repository.py b/conflowgen/tests/domain_models/distribution_repositories/test_container_weight_distribution_repository.py index 6dbe880d..9c280eb5 100644 --- a/conflowgen/tests/domain_models/distribution_repositories/test_container_weight_distribution_repository.py +++ b/conflowgen/tests/domain_models/distribution_repositories/test_container_weight_distribution_repository.py @@ -60,7 +60,10 @@ def test_distribution_weight_setter(self): 20: 8 } } - normalized_default_distribution = normalize_distribution_with_one_dependent_variable(default_distribution) + normalized_default_distribution = normalize_distribution_with_one_dependent_variable( + default_distribution, + values_are_frequencies=True + ) self.repository.set_distribution(normalized_default_distribution) distribution = self.repository.get_distribution() self.assertDictEqual(normalized_default_distribution, distribution) diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_normalize_dependent_distribution.py b/conflowgen/tests/domain_models/distribution_repositories/test_normalize_dependent_distribution.py index 4897d06f..d9826f41 100644 --- a/conflowgen/tests/domain_models/distribution_repositories/test_normalize_dependent_distribution.py +++ b/conflowgen/tests/domain_models/distribution_repositories/test_normalize_dependent_distribution.py @@ -16,8 +16,11 @@ def test_simple_case(self): "f": 16 } } - with self.assertLogs('conflowgen', level='DEBUG') as cm: - normalized_distribution = normalize_distribution_with_one_dependent_variable(distributions) + with self.assertLogs('conflowgen', level='DEBUG') as context: + normalized_distribution = normalize_distribution_with_one_dependent_variable( + distributions, + 
values_are_frequencies=True + ) self.assertDictEqual( normalized_distribution, { "a": { @@ -30,13 +33,13 @@ def test_simple_case(self): } } ) - self.assertEqual(len(cm.output), 2) + self.assertEqual(len(context.output), 2) self.assertEqual( - cm.output[0], + context.output[0], "DEBUG:conflowgen:Sum of fractions was not 1 for 'a' and was automatically normalized." ) self.assertEqual( - cm.output[1], + context.output[1], "DEBUG:conflowgen:Sum of fractions was not 1 for 'd' and was automatically normalized." ) @@ -51,8 +54,11 @@ def test_mixed_case(self): "f": 16 } } - with self.assertLogs('conflowgen', level='DEBUG') as cm: - normalized_distribution = normalize_distribution_with_one_dependent_variable(distributions) + with self.assertLogs('conflowgen', level='DEBUG') as context: + normalized_distribution = normalize_distribution_with_one_dependent_variable( + distributions, + values_are_frequencies=True + ) self.assertDictEqual( normalized_distribution, { "a": { @@ -65,8 +71,8 @@ def test_mixed_case(self): } } ) - self.assertEqual(len(cm.output), 1) + self.assertEqual(len(context.output), 1) self.assertEqual( - cm.output[0], + context.output[0], "DEBUG:conflowgen:Sum of fractions was not 1 for 'd' and was automatically normalized." 
) diff --git a/conflowgen/tests/domain_models/distribution_repositories/test_normalize_distribution.py b/conflowgen/tests/domain_models/distribution_repositories/test_normalize_distribution.py index fe3dea05..bea65733 100644 --- a/conflowgen/tests/domain_models/distribution_repositories/test_normalize_distribution.py +++ b/conflowgen/tests/domain_models/distribution_repositories/test_normalize_distribution.py @@ -10,17 +10,20 @@ def test_with_normalization(self): "b": 3, "c": 2 } - with self.assertLogs('conflowgen', level='DEBUG') as cm: - normalized_distribution = normalize_distribution_with_no_dependent_variable(distribution) + with self.assertLogs('conflowgen', level='DEBUG') as context: + normalized_distribution = normalize_distribution_with_no_dependent_variable( + distribution, + values_are_frequencies=True + ) self.assertDictEqual( normalized_distribution, { "b": 0.6, "c": 0.4 } ) - self.assertEqual(len(cm.output), 1, "Excatly one log message") + self.assertEqual(len(context.output), 1, "Exactly one log message") self.assertEqual( - cm.output[0], + context.output[0], "DEBUG:conflowgen:Sum of fractions was not 1 and was automatically normalized." 
) @@ -29,5 +32,8 @@ def test_without_normalization(self): "b": 0.6, "c": 0.4 } - normalized_distribution = normalize_distribution_with_no_dependent_variable(distribution) + normalized_distribution = normalize_distribution_with_no_dependent_variable( + distribution, + values_are_frequencies=True + ) self.assertDictEqual(distribution, normalized_distribution) diff --git a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py index 41f8e91f..2b2ef757 100644 --- a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py +++ b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_large_scheduled_vehicle.py @@ -43,7 +43,7 @@ def setUp(self) -> None: container_length_distribution_seeder.seed() container_storage_requirement_distribution_seeder.seed() - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.feeder, vehicle_arrives_at=datetime.date(2021, 7, 9), @@ -52,7 +52,7 @@ def setUp(self) -> None: average_moved_capacity=3 ) self.feeders = FleetFactory().create_feeder_fleet( - schedule=s, + schedule=schedule, first_at=datetime.date(2021, 7, 7), latest_at=datetime.date(2021, 7, 18) ) diff --git a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py index f98d389c..d98dce7e 100644 --- a/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py +++ b/conflowgen/tests/domain_models/factories/test_container_factory__create_for_truck.py @@ -49,7 +49,7 @@ def setUp(self) -> None: container_length_distribution_seeder.seed() container_storage_requirement_distribution_seeder.seed() - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", 
vehicle_type=ModeOfTransport.feeder, vehicle_arrives_at=datetime.date(2021, 7, 9), @@ -58,7 +58,7 @@ def setUp(self) -> None: average_moved_capacity=1 ) feeders = FleetFactory().create_feeder_fleet( - schedule=s, + schedule=schedule, first_at=datetime.date(2021, 7, 7), latest_at=datetime.date(2021, 7, 10) ) diff --git a/conflowgen/tests/domain_models/factories/test_fleet_factory__create_arrivals_within_timerange.py b/conflowgen/tests/domain_models/factories/test_fleet_factory__create_arrivals_within_timerange.py index d5683831..62259537 100644 --- a/conflowgen/tests/domain_models/factories/test_fleet_factory__create_arrivals_within_timerange.py +++ b/conflowgen/tests/domain_models/factories/test_fleet_factory__create_arrivals_within_timerange.py @@ -8,7 +8,7 @@ from conflowgen.domain_models.factories.fleet_factory import create_arrivals_within_time_range -class TestVehicleFactory__create_arrivals_within_time_range(unittest.TestCase): +class TestVehicleFactory__create_arrivals_within_time_range(unittest.TestCase): # pylint: disable=invalid-name def test_create_time_range_happy_path(self) -> None: """This is the happy path""" diff --git a/conflowgen/tests/domain_models/factories/test_fleet_factory__create_feeder_fleet.py b/conflowgen/tests/domain_models/factories/test_fleet_factory__create_feeder_fleet.py index 4d64db4f..0e5796df 100644 --- a/conflowgen/tests/domain_models/factories/test_fleet_factory__create_feeder_fleet.py +++ b/conflowgen/tests/domain_models/factories/test_fleet_factory__create_feeder_fleet.py @@ -11,7 +11,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestVehicleFactory__create_feeder(unittest.TestCase): +class TestVehicleFactory__create_feeder(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -24,7 +24,7 @@ def setUp(self) -> None: self.fleet_factory = FleetFactory() def test_create_feeder_fleet(self) -> None: - s = 
Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.feeder, vehicle_arrives_at=datetime.date(2021, 7, 9), @@ -33,7 +33,7 @@ def test_create_feeder_fleet(self) -> None: average_moved_capacity=50 ) feeders = self.fleet_factory.create_feeder_fleet( - schedule=s, + schedule=schedule, first_at=datetime.date(2021, 7, 7), latest_at=datetime.date(2021, 7, 18) ) diff --git a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_barge.py b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_barge.py index f8543de9..30d7613a 100644 --- a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_barge.py +++ b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_barge.py @@ -11,7 +11,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestVehicleFactory__create_barge(unittest.TestCase): +class TestVehicleFactory__create_barge(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -24,7 +24,7 @@ def setUp(self) -> None: self.vehicle_factory = VehicleFactory() def test_create_normal_barge(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.barge, vehicle_arrives_at=datetime.datetime.now(), @@ -35,11 +35,11 @@ def test_create_normal_barge(self) -> None: capacity_in_teu=60, moved_capacity=30, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) def test_create_unrealistic_barge(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.barge, vehicle_arrives_at=datetime.datetime.now(), @@ -51,12 +51,12 @@ def test_create_unrealistic_barge(self) -> None: capacity_in_teu=-1, moved_capacity=1, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) with self.assertRaises(UnrealisticValuesException): 
self.vehicle_factory.create_barge( capacity_in_teu=1, moved_capacity=-1, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) diff --git a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_deep_sea_vessel.py b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_deep_sea_vessel.py index f3994b08..7587f5a6 100644 --- a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_deep_sea_vessel.py +++ b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_deep_sea_vessel.py @@ -11,7 +11,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestVehicleFactory__create_deep_sea_vessel(unittest.TestCase): +class TestVehicleFactory__create_deep_sea_vessel(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -24,7 +24,7 @@ def setUp(self) -> None: self.vehicle_factory = VehicleFactory() def test_create_normal_deep_sea_vessel(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.deep_sea_vessel, vehicle_arrives_at=datetime.datetime.now(), @@ -35,11 +35,11 @@ def test_create_normal_deep_sea_vessel(self) -> None: capacity_in_teu=800, moved_capacity=50, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) def test_create_unrealistic_deep_sea_vessel(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.deep_sea_vessel, vehicle_arrives_at=datetime.datetime.now(), @@ -51,19 +51,19 @@ def test_create_unrealistic_deep_sea_vessel(self) -> None: capacity_in_teu=-1, moved_capacity=1, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) with self.assertRaises(UnrealisticValuesException): self.vehicle_factory.create_deep_sea_vessel( capacity_in_teu=1, moved_capacity=-1, scheduled_arrival=datetime.datetime.now(), - 
schedule=s + schedule=schedule ) with self.assertRaises(UnrealisticValuesException): self.vehicle_factory.create_deep_sea_vessel( capacity_in_teu=50, moved_capacity=100, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) diff --git a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_feeder.py b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_feeder.py index f75fc1af..e188692e 100644 --- a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_feeder.py +++ b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_feeder.py @@ -11,7 +11,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestVehicleFactory__create_feeder(unittest.TestCase): +class TestVehicleFactory__create_feeder(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -24,7 +24,7 @@ def setUp(self) -> None: self.vehicle_factory = VehicleFactory() def test_create_normal_feeder(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.feeder, vehicle_arrives_at=datetime.datetime.now(), @@ -35,11 +35,11 @@ def test_create_normal_feeder(self) -> None: capacity_in_teu=800, moved_capacity=50, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) def test_create_unrealistic_feeder(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.feeder, vehicle_arrives_at=datetime.datetime.now(), @@ -51,7 +51,7 @@ def test_create_unrealistic_feeder(self) -> None: capacity_in_teu=-1, moved_capacity=1, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) with self.assertRaises(UnrealisticValuesException): @@ -59,5 +59,5 @@ def test_create_unrealistic_feeder(self) -> None: capacity_in_teu=1, moved_capacity=-1, 
scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) diff --git a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_train.py b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_train.py index 4977bb8b..1f185464 100644 --- a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_train.py +++ b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_train.py @@ -11,7 +11,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestVehicleFactory__create_train(unittest.TestCase): +class TestVehicleFactory__create_train(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -24,7 +24,7 @@ def setUp(self) -> None: self.vehicle_factory = VehicleFactory() def test_create_normal_train(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.train, vehicle_arrives_at=datetime.datetime.now(), @@ -35,11 +35,11 @@ def test_create_normal_train(self) -> None: capacity_in_teu=90, moved_capacity=90, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) def test_create_unrealistic_train(self) -> None: - s = Schedule.create( + schedule = Schedule.create( service_name="LX050", vehicle_type=ModeOfTransport.train, vehicle_arrives_at=datetime.datetime.now(), @@ -51,5 +51,5 @@ def test_create_unrealistic_train(self) -> None: capacity_in_teu=-1, moved_capacity=1, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) diff --git a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_truck.py b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_truck.py index dff27619..af114b37 100644 --- a/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_truck.py +++ b/conflowgen/tests/domain_models/factories/test_vehicle_factory__create_truck.py @@ 
-13,7 +13,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestVehicleFactory__create_truck(unittest.TestCase): +class TestVehicleFactory__create_truck(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" diff --git a/conflowgen/tests/domain_models/test_container.py b/conflowgen/tests/domain_models/test_container.py index 6d99e799..027819bb 100644 --- a/conflowgen/tests/domain_models/test_container.py +++ b/conflowgen/tests/domain_models/test_container.py @@ -32,7 +32,7 @@ def setUp(self) -> None: def test_save_to_database(self) -> None: """Check if container can be saved""" - container_1 = Container.create( + container = Container.create( weight=20, delivered_by=ModeOfTransport.truck, picked_up_by=ModeOfTransport.deep_sea_vessel, @@ -40,7 +40,7 @@ def test_save_to_database(self) -> None: length=ContainerLength.twenty_feet, storage_requirement=StorageRequirement.standard ) - container_1.save() + self.assertIsNotNone(container) def test_missing_delivered(self) -> None: with self.assertRaises(IntegrityError): @@ -48,19 +48,21 @@ def test_missing_delivered(self) -> None: weight=20, delivered_by=None, picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, length=ContainerLength.twenty_feet, storage_requirement=StorageRequirement.standard - ).save() + ) def test_missing_picked_up(self) -> None: with self.assertRaises(IntegrityError): Container.create( weight=20, delivered_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, picked_up_by=None, length=ContainerLength.twenty_feet, storage_requirement=StorageRequirement.standard - ).save() + ) def test_missing_length(self) -> None: with self.assertRaises(IntegrityError): @@ -68,9 +70,10 @@ def test_missing_length(self) -> None: weight=10, delivered_by=ModeOfTransport.barge, picked_up_by=ModeOfTransport.deep_sea_vessel, + 
picked_up_by_initial=ModeOfTransport.deep_sea_vessel, length=None, storage_requirement=StorageRequirement.dangerous_goods - ).save() + ) def test_missing_storage_requirement(self) -> None: with self.assertRaises(IntegrityError): @@ -78,6 +81,23 @@ def test_missing_storage_requirement(self) -> None: weight=10, delivered_by=ModeOfTransport.barge, picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, length=ContainerLength.forty_feet, storage_requirement=None - ).save() + ) + + def test_container_repr(self) -> None: + container = Container.create( + weight=10, + delivered_by=ModeOfTransport.barge, + picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, + length=ContainerLength.forty_feet, + storage_requirement=StorageRequirement.standard + ) + representation = repr(container) + self.assertEqual( + representation, + "" + ) diff --git a/conflowgen/tests/domain_models/test_distribution_validators.py b/conflowgen/tests/domain_models/test_distribution_validators.py index 7fa2bb8b..eaf45636 100644 --- a/conflowgen/tests/domain_models/test_distribution_validators.py +++ b/conflowgen/tests/domain_models/test_distribution_validators.py @@ -10,10 +10,12 @@ class TestDistributionValidatorWithOneDependentVariable(unittest.TestCase): def test_validating_completely_empty_distribution_raises_exception(self): - with self.assertRaises(DistributionHasNoElementsException) as cm: - validate_distribution_with_one_dependent_variable({}, ContainerLength, StorageRequirement) + with self.assertRaises(DistributionHasNoElementsException) as context: + validate_distribution_with_one_dependent_variable( + {}, ContainerLength, StorageRequirement, values_are_frequencies=True + ) expected_message = "The distribution does not have any elements to draw from." 
- self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_validating_empty_distribution_for_one_dependent_variable_raises_exception(self): default_distribution_without_dependent_variable = { @@ -22,18 +24,18 @@ def test_validating_empty_distribution_for_one_dependent_variable_raises_excepti StorageRequirement.empty: 0.25, StorageRequirement.dangerous_goods: 0.25 } - with self.assertRaises(DistributionHasNoElementsException) as cm: + with self.assertRaises(DistributionHasNoElementsException) as context: validate_distribution_with_one_dependent_variable({ ContainerLength.twenty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_five_feet: default_distribution_without_dependent_variable, ContainerLength.other: {} # here is the culprit - }, ContainerLength, StorageRequirement) + }, ContainerLength, StorageRequirement, values_are_frequencies=True) expected_message = ( 'The distribution does not have any elements to draw from. 
This is error ' "occurred while examining the dependent variable 'other'.") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_missing_element_on_second_level(self): default_distribution_without_dependent_variable = { @@ -48,13 +50,13 @@ def test_missing_element_on_second_level(self): StorageRequirement.empty: 0.25, StorageRequirement.dangerous_goods: 0.25 } - with self.assertRaises(DistributionElementIsMissingException) as cm: + with self.assertRaises(DistributionElementIsMissingException) as context: validate_distribution_with_one_dependent_variable({ ContainerLength.twenty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_five_feet: default_distribution_without_dependent_variable, ContainerLength.other: variation_of_distribution - }, ContainerLength, StorageRequirement) + }, ContainerLength, StorageRequirement, values_are_frequencies=True) expected_message = ( "The distribution {'reefer': 0.25000, 'empty': 0.25000, 'dangerous_goods': " @@ -62,7 +64,7 @@ def test_missing_element_on_second_level(self): "'reefer', 'dangerous_goods'] but it provided the following elements: " "['reefer', 'empty', 'dangerous_goods']. 
This is error occurred while " "examining the dependent variable 'other'.") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_one_frequency_is_out_of_range_because_it_is_negative(self): default_distribution_without_dependent_variable = { @@ -77,13 +79,13 @@ def test_one_frequency_is_out_of_range_because_it_is_negative(self): StorageRequirement.empty: 0.25, StorageRequirement.dangerous_goods: 0.25 } - with self.assertRaises(DistributionProbabilityOutOfRange) as cm: + with self.assertRaises(DistributionProbabilityOutOfRange) as context: validate_distribution_with_one_dependent_variable({ ContainerLength.twenty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_five_feet: default_distribution_without_dependent_variable, ContainerLength.other: variation_of_distribution - }, ContainerLength, StorageRequirement) + }, ContainerLength, StorageRequirement, values_are_frequencies=True) expected_message = ( 'The probability of an element to be drawn must range between 0 and 1 but for ' @@ -91,7 +93,7 @@ def test_one_frequency_is_out_of_range_because_it_is_negative(self): "{'reefer': 0.25000, 'standard': -0.25000, 'empty': 0.25000, " "'dangerous_goods': 0.25000}. 
This is error occurred while examining the " "dependent variable 'other'.") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_element_is_invalid(self): default_distribution_without_dependent_variable = { @@ -106,16 +108,16 @@ def test_element_is_invalid(self): StorageRequirement.empty: 0.25, StorageRequirement.dangerous_goods: 0.25 } - with self.assertRaises(DistributionElementIsInvalidException) as cm: + with self.assertRaises(DistributionElementIsInvalidException) as context: validate_distribution_with_one_dependent_variable({ ContainerLength.twenty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_five_feet: default_distribution_without_dependent_variable, ContainerLength.other: variation_of_distribution - }, ContainerLength, StorageRequirement) + }, ContainerLength, StorageRequirement, values_are_frequencies=True) expected_message = "Element '40 feet' could not be casted to type ''" - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_one_frequency_is_out_of_range_because_it_is_larger_than_one(self): default_distribution_without_dependent_variable = { @@ -130,13 +132,13 @@ def test_one_frequency_is_out_of_range_because_it_is_larger_than_one(self): StorageRequirement.empty: 0.25, StorageRequirement.dangerous_goods: 0.25 } - with self.assertRaises(DistributionProbabilityOutOfRange) as cm: + with self.assertRaises(DistributionProbabilityOutOfRange) as context: validate_distribution_with_one_dependent_variable({ ContainerLength.twenty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_five_feet: default_distribution_without_dependent_variable, ContainerLength.other: variation_of_distribution - }, ContainerLength, 
StorageRequirement) + }, ContainerLength, StorageRequirement, values_are_frequencies=True) expected_message = ( 'The probability of an element to be drawn must range between 0 and 1 but for ' @@ -144,7 +146,7 @@ def test_one_frequency_is_out_of_range_because_it_is_larger_than_one(self): "{'reefer': 0.25000, 'standard': 1.10000, 'empty': 0.25000, " "'dangerous_goods': 0.25000}. This is error occurred while examining the " "dependent variable 'other'.") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_sum_of_frequencies_is_unequal_one(self): default_distribution_without_dependent_variable = { @@ -159,20 +161,20 @@ def test_sum_of_frequencies_is_unequal_one(self): ContainerLength.forty_five_feet: 0.25, ContainerLength.other: 0.25 } - with self.assertRaises(DistributionProbabilitiesUnequalOne) as cm: + with self.assertRaises(DistributionProbabilitiesUnequalOne) as context: validate_distribution_with_one_dependent_variable({ ContainerLength.twenty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_feet: default_distribution_without_dependent_variable, ContainerLength.forty_five_feet: default_distribution_without_dependent_variable, ContainerLength.other: variation_of_distribution - }, ContainerLength, ContainerLength) + }, ContainerLength, ContainerLength, values_are_frequencies=True) expected_message = ( 'The sum of all probabilities should sum to 1 but for the ' "distribution {'20 feet': 0.25000, '40 feet': 1.00000, '45 feet': " "0.25000, 'other': 0.25000} the sum was 1.75000. 
This is error occurred " "while examining the dependent variable 'other'.") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_distribution_with_ints_on_second_level(self): distribution_with_int_keys = { @@ -200,7 +202,8 @@ def test_distribution_with_ints_on_second_level(self): sanitized_distribution = validate_distribution_with_one_dependent_variable( distribution_with_int_keys, ContainerLength, - int + int, + True ) self.assertDictEqual(distribution_with_int_keys, sanitized_distribution) @@ -209,10 +212,10 @@ def test_distribution_with_ints_on_second_level(self): class TestDistributionValidatorWithNoDependentVariables(unittest.TestCase): def test_validating_completely_empty_distribution_raises_exception(self): - with self.assertRaises(DistributionHasNoElementsException) as cm: - validate_distribution_with_no_dependent_variables({}, ContainerLength) + with self.assertRaises(DistributionHasNoElementsException) as context: + validate_distribution_with_no_dependent_variables({}, ContainerLength, values_are_frequencies=True) expected_message = 'The distribution does not have any elements to draw from.' 
- self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_missing_element(self): variation_of_distribution = { @@ -221,8 +224,10 @@ def test_missing_element(self): ContainerLength.forty_five_feet: 0.25, ContainerLength.other: 0.25 } - with self.assertRaises(DistributionElementIsMissingException) as cm: - validate_distribution_with_no_dependent_variables(variation_of_distribution, ContainerLength) + with self.assertRaises(DistributionElementIsMissingException) as context: + validate_distribution_with_no_dependent_variables( + variation_of_distribution, ContainerLength, values_are_frequencies=True + ) expected_message = ( "The distribution {'20 feet': 0.25000, '45 feet': 0.25000, 'other': " @@ -230,7 +235,7 @@ def test_missing_element(self): "feet', '45 feet', 'other'] but it provided the following elements: ['20 " "feet', '45 feet', 'other']." ) - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_one_frequency_is_out_of_range_because_it_is_negative(self): variation_of_distribution = { @@ -239,8 +244,10 @@ def test_one_frequency_is_out_of_range_because_it_is_negative(self): ContainerLength.forty_five_feet: 0.25, ContainerLength.other: 0.25 } - with self.assertRaises(DistributionProbabilityOutOfRange) as cm: - validate_distribution_with_no_dependent_variables(variation_of_distribution, ContainerLength) + with self.assertRaises(DistributionProbabilityOutOfRange) as context: + validate_distribution_with_no_dependent_variables( + variation_of_distribution, ContainerLength, values_are_frequencies=True + ) expected_message = ( 'The probability of an element to be drawn must range between 0 and 1 but for ' @@ -248,7 +255,7 @@ def test_one_frequency_is_out_of_range_because_it_is_negative(self): "feet': 0.25000, '40 feet': -0.25000, '45 feet': 0.25000, 'other': " "0.25000}." 
) - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_one_frequency_is_out_of_range_because_it_is_larger_than_one(self): variation_of_distribution = { @@ -257,14 +264,16 @@ def test_one_frequency_is_out_of_range_because_it_is_larger_than_one(self): ContainerLength.forty_five_feet: 0.25, ContainerLength.other: 0.25 } - with self.assertRaises(DistributionProbabilityOutOfRange) as cm: - validate_distribution_with_no_dependent_variables(variation_of_distribution, ContainerLength) + with self.assertRaises(DistributionProbabilityOutOfRange) as context: + validate_distribution_with_no_dependent_variables( + variation_of_distribution, ContainerLength, values_are_frequencies=True + ) expected_message = ( 'The probability of an element to be drawn must range between 0 and 1 but for ' "the element '40 feet' the probability was 1.1 in the distribution {'20 feet': " "0.25000, '40 feet': 1.10000, '45 feet': 0.25000, 'other': 0.25000}.") - self.assertEqual(expected_message, str(cm.exception)) + self.assertEqual(expected_message, str(context.exception)) def test_sum_of_frequencies_is_unequal_one(self): variation_of_distribution = { @@ -273,14 +282,16 @@ def test_sum_of_frequencies_is_unequal_one(self): ContainerLength.forty_five_feet: 0.25, ContainerLength.other: 0.25 } - with self.assertRaises(DistributionProbabilitiesUnequalOne) as cm: - validate_distribution_with_no_dependent_variables(variation_of_distribution, ContainerLength) + with self.assertRaises(DistributionProbabilitiesUnequalOne) as context: + validate_distribution_with_no_dependent_variables( + variation_of_distribution, ContainerLength, values_are_frequencies=True + ) expected_message = ( 'The sum of all probabilities should sum to 1 but for the ' "distribution {'20 feet': 0.25000, '40 feet': 1.00000, '45 feet': " "0.25000, 'other': 0.25000} the sum was 1.75000.") - self.assertEqual(expected_message, str(cm.exception)) + 
self.assertEqual(expected_message, str(context.exception)) def test_auto_cast(self): dirty_distribution = { @@ -296,7 +307,7 @@ def test_auto_cast(self): ContainerLength.other: 0.25 } sanitized_distribution = validate_distribution_with_no_dependent_variables( - dirty_distribution, ContainerLength + dirty_distribution, ContainerLength, values_are_frequencies=True ) self.assertDictEqual(clean_distribution, sanitized_distribution) @@ -308,7 +319,8 @@ def test_distribution_with_ints(self): } sanitized_distribution = validate_distribution_with_no_dependent_variables( distribution_with_int_keys, - int + int, + values_are_frequencies=True ) self.assertDictEqual(distribution_with_int_keys, sanitized_distribution) diff --git a/conflowgen/tests/domain_models/test_vehicle.py b/conflowgen/tests/domain_models/test_vehicle.py index 9737e9dc..c76fb5cd 100644 --- a/conflowgen/tests/domain_models/test_vehicle.py +++ b/conflowgen/tests/domain_models/test_vehicle.py @@ -29,7 +29,7 @@ def test_save_truck_delivering_a_container_to_database(self) -> None: delivers_container=True, picks_up_container=False ) - truck.save() + self.assertIsNotNone(truck) def test_save_truck_picking_up_a_container_to_database(self) -> None: ati = TruckArrivalInformationForPickup.create( @@ -42,7 +42,7 @@ def test_save_truck_picking_up_a_container_to_database(self) -> None: picks_up_container=True, truck_arrival_information_for_pickup=ati ) - truck.save() + self.assertIsNotNone(truck) class TestFeeder(unittest.TestCase): @@ -58,7 +58,7 @@ def setUp(self) -> None: def test_save_feeder_to_database(self) -> None: """Check if feeder can be saved""" - s = Schedule.create( + schedule = Schedule.create( service_name="MyTestFeederLine", vehicle_type=ModeOfTransport.feeder, vehicle_arrives_at=datetime.datetime.now(), @@ -70,9 +70,8 @@ def test_save_feeder_to_database(self) -> None: capacity_in_teu=1000, moved_capacity=200, scheduled_arrival=datetime.datetime.now(), - schedule=s + schedule=schedule ) - feeder = 
Feeder.create( + Feeder.create( large_scheduled_vehicle=lsv ) - feeder.save() diff --git a/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py b/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py index c1c967d0..3f787dda 100644 --- a/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py +++ b/conflowgen/tests/flow_generator/test_allocate_space_for_containers_delivered_by_truck_service.py @@ -177,7 +177,7 @@ def test_happy_path(self): ) # due to the existing import container, one export container must be generated - self.assertEqual(self.service._get_number_containers_to_allocate(), 1) + self.assertEqual(self.service._get_number_containers_to_allocate(), 1) # pylint: disable=protected-access # that export container is generated now self.assertIsNone(self.service.allocate()) diff --git a/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py b/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py index ca358792..206e263b 100644 --- a/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py +++ b/conflowgen/tests/flow_generator/test_assign_destination_to_container_service.py @@ -160,7 +160,7 @@ def test_load_container_from_truck_onto_feeder(self): } } self.repository.set_distribution(distribution) - self.service.reload_distribution() + self.service.reload_distributions() self.service.assign() @@ -196,7 +196,7 @@ def test_wrong_direction(self): } } self.repository.set_distribution(distribution) - self.service.reload_distribution() + self.service.reload_distributions() self.service.assign() diff --git a/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py b/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py index 2501c641..fa60047f 100644 --- 
a/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py +++ b/conflowgen/tests/flow_generator/test_container_flow_generator_service__container_flow_data_exists.py @@ -12,7 +12,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestContainerFlowGeneratorService__generate(unittest.TestCase): +class TestContainerFlowGeneratorService__generate(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -26,10 +26,10 @@ def setUp(self) -> None: LargeScheduledVehicle ]) mode_of_transport_distribution_seeder.seed() - self.container_Flow_generator_service = ContainerFlowGenerationService() + self.container_flow_generator_service = ContainerFlowGenerationService() def test_container_flow_data_exists_with_no_data(self): - container_flow_data_exists = self.container_Flow_generator_service.container_flow_data_exists() + container_flow_data_exists = self.container_flow_generator_service.container_flow_data_exists() self.assertFalse(container_flow_data_exists) def test_container_flow_data_exists_with_some_data(self): @@ -41,5 +41,5 @@ def test_container_flow_data_exists_with_some_data(self): picked_up_by=ModeOfTransport.truck, picked_up_by_initial=ModeOfTransport.truck ) - container_flow_data_exists = self.container_Flow_generator_service.container_flow_data_exists() + container_flow_data_exists = self.container_flow_generator_service.container_flow_data_exists() self.assertTrue(container_flow_data_exists) diff --git a/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py b/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py index 4c93382f..3f7a1f11 100644 --- a/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py +++ b/conflowgen/tests/flow_generator/test_container_flow_generator_service__generate.py @@ -6,9 +6,12 @@ from 
conflowgen.application.repositories.container_flow_generation_properties_repository import \ ContainerFlowGenerationPropertiesRepository from conflowgen.database_connection.create_tables import create_tables +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution from conflowgen.domain_models.distribution_models.storage_requirement_distribution import StorageRequirementDistribution -from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder, seed_all_distributions +from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder, \ + seed_all_distributions, container_dwell_time_distribution_seeder from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.flow_generator.container_flow_generation_service import \ ContainerFlowGenerationService @@ -16,7 +19,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestContainerFlowGeneratorService__generate(unittest.TestCase): +class TestContainerFlowGeneratorService__generate(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" @@ -25,9 +28,11 @@ def setUp(self) -> None: ContainerFlowGenerationProperties, ModeOfTransportDistribution, Schedule, - StorageRequirementDistribution + StorageRequirementDistribution, + ContainerDwellTimeDistribution ]) mode_of_transport_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() container_flow_generation_properties_manager = ContainerFlowGenerationPropertiesRepository() properties = container_flow_generation_properties_manager.get_container_flow_generation_properties() @@ -36,7 +41,7 @@ def setUp(self) -> None: properties.end_date = datetime.datetime.now().date() + 
datetime.timedelta(days=21) container_flow_generation_properties_manager.set_container_flow_generation_properties(properties) - self.container_Flow_generator_service = ContainerFlowGenerationService() + self.container_flow_generator_service = ContainerFlowGenerationService() def test_happy_path_no_mocking(self): create_tables(self.sqlite_db) @@ -60,4 +65,4 @@ def test_happy_path_no_mocking(self): average_moved_capacity=100, next_destinations=None ) - self.container_Flow_generator_service.generate() + self.container_flow_generator_service.generate() diff --git a/conflowgen/tests/flow_generator/test_distribution_approximator.py b/conflowgen/tests/flow_generator/test_distribution_approximator.py index 2571cccc..65e870c2 100644 --- a/conflowgen/tests/flow_generator/test_distribution_approximator.py +++ b/conflowgen/tests/flow_generator/test_distribution_approximator.py @@ -12,13 +12,13 @@ class TestDistributionApproximator(unittest.TestCase): def test_happy_path(self) -> None: """This is the happy path""" - da = DistributionApproximator({ + distribution_approximator = DistributionApproximator({ "type_1": 1, "type_2": 1 }) all_samples = [ - da.sample(), - da.sample() + distribution_approximator.sample(), + distribution_approximator.sample() ] all_samples.sort() self.assertEqual(all_samples[0], "type_1") @@ -26,23 +26,23 @@ def test_happy_path(self) -> None: def test_exception(self) -> None: """Check if sampler stops once the target destination is reached.""" - da = DistributionApproximator({ + distribution_approximator = DistributionApproximator({ "type_1": 1 }) - da.sample() + distribution_approximator.sample() with self.assertRaises(SamplerExhaustedException): - da.sample() + distribution_approximator.sample() def test_slightly_more_complex(self) -> None: """Check if finally from each category sufficient elements are drawn.""" - da = DistributionApproximator({ + distribution_approximator = DistributionApproximator({ "a": 4, "b": 2, "c": 10 }) all_samples = [] for _ in 
range(16): # 4 + 2 + 10 - all_samples.append(da.sample()) + all_samples.append(distribution_approximator.sample()) counted_samples = collections.Counter(all_samples) self.assertDictEqual(counted_samples, { @@ -53,14 +53,14 @@ def test_slightly_more_complex(self) -> None: def test_from_random_distribution_simple_case(self) -> None: """Check if finally from each category sufficient elements are drawn.""" - da = DistributionApproximator.from_distribution({ + distribution_approximator = DistributionApproximator.from_distribution({ "a": 4 / 16, "b": 2 / 16, "c": 10 / 16 }, 16) all_samples = [] for _ in range(16): # 4 + 2 + 10 - all_samples.append(da.sample()) + all_samples.append(distribution_approximator.sample()) counted_samples = collections.Counter(all_samples) self.assertDictEqual(counted_samples, { @@ -72,13 +72,13 @@ def test_from_random_distribution_simple_case(self) -> None: def test_from_random_distribution_complex_case(self) -> None: """Check if finally from each category sufficient elements are drawn. 
One of the elements is randomly assigned""" - da = DistributionApproximator.from_distribution({ + distribution_approximator = DistributionApproximator.from_distribution({ "a": 0.5, "b": 0.5 }, 3) all_samples = [] for _ in range(3): - all_samples.append(da.sample()) + all_samples.append(distribution_approximator.sample()) counted_samples = collections.Counter(all_samples) self.assertGreaterEqual(counted_samples["a"], 1) diff --git a/conflowgen/tests/flow_generator/test_export_container_flow_service__container.py b/conflowgen/tests/flow_generator/test_export_container_flow_service__container.py index b3492561..3963a612 100644 --- a/conflowgen/tests/flow_generator/test_export_container_flow_service__container.py +++ b/conflowgen/tests/flow_generator/test_export_container_flow_service__container.py @@ -12,7 +12,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestExportContainerFlowService__Container(unittest.TestCase): +class TestExportContainerFlowService__Container(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: self.service = ExportContainerFlowService() @@ -24,7 +24,7 @@ def test_convert_empty_table_to_pandas_dataframe(self): Schedule, Destination ]) - df_container = self.service._convert_table_to_pandas_dataframe(Container) + df_container = self.service._convert_table_to_pandas_dataframe(Container) # pylint: disable=protected-access self.assertEqual(len(df_container), 0) def test_convert_table_to_pandas_dataframe_with_container_without_destination(self): @@ -45,7 +45,7 @@ def test_convert_table_to_pandas_dataframe_with_container_without_destination(se storage_requirement=StorageRequirement.standard ) container.save() - df_container = self.service._convert_table_to_pandas_dataframe(Container) + df_container = self.service._convert_table_to_pandas_dataframe(Container) # pylint: disable=protected-access self.assertEqual(len(df_container), 1) self.assertSetEqual( set(df_container.columns), @@ 
-131,7 +131,7 @@ def test_convert_table_to_pandas_dataframe_with_container_with_destination(self) storage_requirement=StorageRequirement.standard, destination=destination ) - df_container = self.service._convert_table_to_pandas_dataframe(Container) + df_container = self.service._convert_table_to_pandas_dataframe(Container) # pylint: disable=protected-access self.assertEqual(len(df_container), 1) self.assertSetEqual( set(df_container.columns), diff --git a/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py b/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py index b9265355..32deaad5 100644 --- a/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py +++ b/conflowgen/tests/flow_generator/test_large_scheduled_vehicle_for_onward_transportation_manager.py @@ -1,11 +1,15 @@ import datetime import unittest +from typing import Iterable from conflowgen.domain_models.arrival_information import TruckArrivalInformationForDelivery, \ TruckArrivalInformationForPickup from conflowgen.domain_models.container import Container +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution from conflowgen.domain_models.distribution_models.mode_of_transport_distribution import ModeOfTransportDistribution -from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder +from conflowgen.domain_models.distribution_seeders import mode_of_transport_distribution_seeder, \ + container_dwell_time_distribution_seeder from conflowgen.domain_models.data_types.container_length import ContainerLength from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement @@ -34,19 +38,15 @@ def setUp(self) -> None: TruckArrivalInformationForDelivery, 
TruckArrivalInformationForPickup, Destination, - ModeOfTransportDistribution + ModeOfTransportDistribution, + ContainerDwellTimeDistribution, ]) mode_of_transport_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() self.manager = LargeScheduledVehicleForOnwardTransportationManager() self.manager.reload_properties( - minimum_dwell_time_of_import_containers_in_hours=3, - minimum_dwell_time_of_transshipment_containers_in_hours=3, - minimum_dwell_time_of_export_containers_in_hours=3, - maximum_dwell_time_of_import_containers_in_hours=3 * 24, - maximum_dwell_time_of_transshipment_containers_in_hours=5 * 24, - maximum_dwell_time_of_export_containers_in_hours=5 * 24, transportation_buffer=0 ) @@ -129,7 +129,6 @@ def _create_container_for_truck(truck: Truck): picked_up_by=ModeOfTransport.feeder, picked_up_by_initial=ModeOfTransport.feeder ) - container.save() return container @staticmethod @@ -145,7 +144,6 @@ def _create_container_for_large_scheduled_vehicle(vehicle: AbstractLargeSchedule picked_up_by=ModeOfTransport.feeder, picked_up_by_initial=ModeOfTransport.feeder ) - container.save() return container def test_no_exception_for_empty_database(self): @@ -183,7 +181,7 @@ def test_do_not_overload_feeder_with_truck_traffic(self): self.assertTrue(set(containers_reloaded).issubset(set(containers)), "Feeder must only load generated " "containers") teu_loaded = 0 - for container in containers_reloaded: + for container in containers_reloaded: # pylint: disable=E1133 self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) teu_loaded += ContainerLength.get_factor(container.length) self.assertLessEqual(teu_loaded, 10, "Feeder must not be loaded with more than 10 TEU") @@ -205,13 +203,13 @@ def test_do_not_overload_feeder_with_train_traffic(self): self.manager.choose_departing_vehicle_for_containers() - containers_reloaded = Container.select().where( + containers_reloaded: Iterable[Container] = 
Container.select().where( Container.picked_up_by_large_scheduled_vehicle == feeder ) self.assertTrue(set(containers_reloaded).issubset(set(containers)), "Feeder must only load generated " "containers") teu_loaded = 0 - for container in containers_reloaded: + for container in containers_reloaded: # pylint: disable=not-an-iterable self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) teu_loaded += ContainerLength.get_factor(container.length) self.assertLessEqual(teu_loaded, 80, "Feeder must not be loaded with more than what it can carry") @@ -239,13 +237,13 @@ def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles(self): self.manager.choose_departing_vehicle_for_containers() - containers_reloaded = Container.select().where( + containers_reloaded: Iterable[Container] = Container.select().where( Container.picked_up_by_large_scheduled_vehicle == feeder ) self.assertTrue(set(containers_reloaded).issubset(set(containers)), "Feeder must only load generated " "containers") teu_loaded = 0 - for container in containers_reloaded: + for container in containers_reloaded: # pylint: disable=not-an-iterable self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) teu_loaded += ContainerLength.get_factor(container.length) self.assertLessEqual(teu_loaded, 80, "Feeder must not be loaded with more than what it can carry") @@ -276,13 +274,13 @@ def test_do_not_overload_feeder_with_train_traffic_of_two_vehicles_and_changing_ self.manager.choose_departing_vehicle_for_containers() - containers_reloaded = Container.select().where( + containers_reloaded: Iterable[Container] = Container.select().where( Container.picked_up_by_large_scheduled_vehicle == feeder ) self.assertTrue(set(containers_reloaded).issubset(set(containers)), "Feeder must only load generated " "containers") teu_loaded = 0 - for container in containers_reloaded: + for container in containers_reloaded: # pylint: 
disable=not-an-iterable self.assertEqual(container.picked_up_by_large_scheduled_vehicle, feeder.large_scheduled_vehicle) teu_loaded += ContainerLength.get_factor(container.length) self.assertLessEqual(teu_loaded, 80, "Feeder must not be loaded with more than what it can carry") diff --git a/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py b/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py index 974eaad5..b58659bc 100644 --- a/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py +++ b/conflowgen/tests/flow_generator/test_truck_for_export_containers_manager.py @@ -1,14 +1,27 @@ +from __future__ import annotations + import datetime import unittest from collections import Counter import matplotlib.pyplot as plt +from conflowgen.domain_models.data_types.mode_of_transport import ModeOfTransport +from conflowgen.domain_models.data_types.storage_requirement import StorageRequirement +from conflowgen.domain_models.data_types.container_length import ContainerLength +from conflowgen.domain_models.container import Container +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution -from conflowgen.domain_models.distribution_seeders import truck_arrival_distribution_seeder +from conflowgen.domain_models.distribution_seeders import truck_arrival_distribution_seeder, \ + container_dwell_time_distribution_seeder +from conflowgen.domain_models.large_vehicle_schedule import Destination +from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck from conflowgen.flow_generator.truck_for_export_containers_manager import \ TruckForExportContainersManager from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db +from conflowgen.tools.continuous_distribution import ContinuousDistribution +from 
conflowgen.tools.weekly_distribution import WeeklyDistribution class TestTruckForExportContainersManager(unittest.TestCase): @@ -17,34 +30,57 @@ def setUp(self) -> None: """Create container database in memory""" sqlite_db = setup_sqlite_in_memory_db() sqlite_db.create_tables([ - TruckArrivalDistribution + TruckArrivalDistribution, + ContainerDwellTimeDistribution, + Container, + Destination, + Truck, + LargeScheduledVehicle ]) truck_arrival_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() # Enables visualisation, helpful for probability distributions # However, this blocks the execution of tests. self.debug = False self.manager = TruckForExportContainersManager() - self.manager.reload_distribution( - minimum_dwell_time_in_hours=3, # after ship arrival, at least 3h pass - maximum_dwell_time_in_hours=(3 * 24) # 3 days after ship arrival the container must have left the yard - ) + self.manager.reload_distributions() + + def visualize_probabilities(self, container: Container, drawn_times, container_departure_time): + import inspect # pylint: disable=import-outside-toplevel + import seaborn as sns # pylint: disable=import-outside-toplevel + container_dwell_time_distribution, _ = self._get_distributions(container) + sns.kdeplot(drawn_times, bw=0.01).set(title='Triggered from: ' + inspect.stack()[1].function) + plt.axvline(x=container_departure_time - datetime.timedelta(hours=container_dwell_time_distribution.minimum)) + plt.axvline(x=container_departure_time - datetime.timedelta(hours=container_dwell_time_distribution.maximum)) + plt.show(block=True) + + def _get_distributions(self, container: Container) -> tuple[ContinuousDistribution, WeeklyDistribution | None]: + + # pylint: disable=protected-access + container_dwell_time_distribution, truck_arrival_distribution = self.manager._get_distributions(container) + + return container_dwell_time_distribution, truck_arrival_distribution def test_delivery_time_in_required_time_range_weekday(self): 
container_departure_time = datetime.datetime( year=2021, month=7, day=30, hour=11, minute=55 ) - earliest_container_delivery = datetime.datetime( - year=2021, month=7, day=27, hour=11, minute=55 + container: Container = Container.create( + delivered_by=ModeOfTransport.truck, + picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, + storage_requirement=StorageRequirement.standard, + weight=23, + length=ContainerLength.twenty_feet ) delivery_times = [] for i in range(1000): - delivery_time = self.manager._get_container_delivery_time(container_departure_time) - self.assertGreaterEqual(delivery_time, earliest_container_delivery, - "container must not arrive earlier than three days before export, " - f"but here we had {delivery_time} in round {i + 1}") + # pylint: disable=protected-access + delivery_time = self.manager._get_container_delivery_time(container, container_departure_time) + self.assertLessEqual(delivery_time, container_departure_time, "container must not arrive later than their departure time " f"but here we had {delivery_time} in round {i + 1}") @@ -53,30 +89,35 @@ def test_delivery_time_in_required_time_range_weekday(self): delivery_times.append(delivery_time) if self.debug: - import seaborn as sns - sns.kdeplot(delivery_times, bw=0.01) - plt.show(block=True) + self.visualize_probabilities(container, delivery_times, container_departure_time) def test_delivery_time_in_required_time_range_with_sunday(self): container_departure_time = datetime.datetime( year=2021, month=8, day=2, hour=11, minute=30 # 11:30 -3h dwell time = 08:30 latest arrival ) - earliest_container_delivery = datetime.datetime( - year=2021, month=7, day=30, hour=11, minute=30 + container: Container = Container.create( + delivered_by=ModeOfTransport.truck, + picked_up_by=ModeOfTransport.deep_sea_vessel, + picked_up_by_initial=ModeOfTransport.deep_sea_vessel, + storage_requirement=StorageRequirement.standard, + weight=23, + 
length=ContainerLength.twenty_feet ) delivery_times = [] for i in range(1000): - delivery_time = self.manager._get_container_delivery_time(container_departure_time) + # pylint: disable=protected-access + delivery_time = self.manager._get_container_delivery_time(container, container_departure_time) + delivery_times.append(delivery_time) - self.assertGreaterEqual(delivery_time, earliest_container_delivery, - "container must not arrive earlier than three days before export, " - f"but here we had {delivery_time} in round {i + 1}") self.assertLessEqual(delivery_time, container_departure_time, "container must not arrive later than their departure time " f"but here we had {delivery_time} in round {i + 1}") self.assertTrue(delivery_time.weekday() != 6, f"containers do not arrive on Sundays, but here we had {delivery_time} in round {i + 1}") + if self.debug: + self.visualize_probabilities(container, delivery_times, container_departure_time) + weekday_counter = Counter([delivery_time.weekday() for delivery_time in delivery_times]) self.assertIn(4, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Friday must be counted (30.07.2021)") @@ -85,25 +126,24 @@ def test_delivery_time_in_required_time_range_with_sunday(self): self.assertIn(0, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Monday must be counted (02.08.2021)") - if self.debug: - import seaborn as sns # pylint: disable=import-outside-toplevel - sns.kdeplot(delivery_times, bw=0.01) - plt.show(block=True) - def test_delivery_time_in_required_time_range_with_sunday_and_at_different_day_times(self): container_departure_time = datetime.datetime( year=2021, month=8, day=2, hour=11, minute=2 ) - earliest_container_delivery = datetime.datetime( - year=2021, month=7, day=30, hour=5, minute=0 + container: Container = Container.create( + delivered_by=ModeOfTransport.truck, + picked_up_by=ModeOfTransport.deep_sea_vessel, + 
picked_up_by_initial=ModeOfTransport.deep_sea_vessel, + storage_requirement=StorageRequirement.standard, + weight=23, + length=ContainerLength.twenty_feet ) delivery_times = [] for i in range(1000): - delivery_time = self.manager._get_container_delivery_time(container_departure_time) + # pylint: disable=protected-access + delivery_time = self.manager._get_container_delivery_time(container, container_departure_time) + delivery_times.append(delivery_time) - self.assertGreaterEqual(delivery_time, earliest_container_delivery, - "container must not arrive earlier than three days before export, " - f"but here we had {delivery_time} in round {i + 1}") self.assertLessEqual(delivery_time, container_departure_time, "container must not arrive later than their departure time " f"but here we had {delivery_time} in round {i + 1}") @@ -111,6 +151,9 @@ def test_delivery_time_in_required_time_range_with_sunday_and_at_different_day_t f"containers do not arrive on Sundays, " f"but here we had {delivery_time} in round {i + 1}") + if self.debug: + self.visualize_probabilities(container, delivery_times, container_departure_time) + weekday_counter = Counter([delivery_time.weekday() for delivery_time in delivery_times]) self.assertIn(4, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Friday must be counted (30.07.2021)") @@ -118,8 +161,3 @@ def test_delivery_time_in_required_time_range_with_sunday_and_at_different_day_t "At least once a Saturday must be counted (31.07.2021)") self.assertIn(0, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Monday must be counted (02.08.2021)") - - if self.debug: - import seaborn as sns # pylint: disable=import-outside-toplevel - sns.kdeplot(delivery_times, bw=0.01) - plt.show(block=True) diff --git a/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py b/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py index 562ab998..36ef0b29 100644 --- 
a/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py +++ b/conflowgen/tests/flow_generator/test_truck_for_import_containers_manager.py @@ -1,14 +1,25 @@ +from __future__ import annotations + import datetime import unittest from collections import Counter import matplotlib.pyplot as plt +from conflowgen import ModeOfTransport, StorageRequirement, ContainerLength +from conflowgen.domain_models.container import Container +from conflowgen.domain_models.distribution_models.container_dwell_time_distribution import \ + ContainerDwellTimeDistribution from conflowgen.domain_models.distribution_models.truck_arrival_distribution import TruckArrivalDistribution -from conflowgen.domain_models.distribution_seeders import truck_arrival_distribution_seeder +from conflowgen.domain_models.distribution_seeders import truck_arrival_distribution_seeder, \ + container_dwell_time_distribution_seeder +from conflowgen.domain_models.large_vehicle_schedule import Destination +from conflowgen.domain_models.vehicle import LargeScheduledVehicle, Truck from conflowgen.flow_generator.truck_for_import_containers_manager import \ TruckForImportContainersManager from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db +from conflowgen.tools.continuous_distribution import ContinuousDistribution +from conflowgen.tools.weekly_distribution import WeeklyDistribution class TestTruckForImportContainersManager(unittest.TestCase): @@ -17,57 +28,123 @@ def setUp(self) -> None: """Create container database in memory""" sqlite_db = setup_sqlite_in_memory_db() sqlite_db.create_tables([ - TruckArrivalDistribution + TruckArrivalDistribution, + ContainerDwellTimeDistribution, + Container, + Truck, + LargeScheduledVehicle, + Destination ]) truck_arrival_distribution_seeder.seed() + container_dwell_time_distribution_seeder.seed() + + self.manager = TruckForImportContainersManager() + self.manager.reload_distributions() - # Enables visualisation, helpful for 
probability distributions + # Enables visualisation, helpful for visualizing the probability distributions. # However, this blocks the execution of tests. self.debug = False - def test_pickup_time_in_required_time_range_weekday(self): - manager = TruckForImportContainersManager() - manager.reload_distribution( - minimum_dwell_time_in_hours=3, - maximum_dwell_time_in_hours=(5 * 24) + def visualize_probabilities(self, container, drawn_times, container_arrival_time): + import inspect # pylint: disable=import-outside-toplevel + import seaborn as sns # pylint: disable=import-outside-toplevel + container_dwell_time_distribution, _ = self._get_distribution(container) + sns.kdeplot(drawn_times, bw=0.01).set(title='Triggered from: ' + inspect.stack()[1].function) + plt.axvline(x=container_arrival_time + datetime.timedelta(hours=container_dwell_time_distribution.minimum)) + plt.axvline(x=container_arrival_time + datetime.timedelta(hours=container_dwell_time_distribution.maximum)) + plt.show(block=True) + + def _get_distribution(self, container: Container) -> tuple[ContinuousDistribution, WeeklyDistribution | None]: + + # pylint: disable=protected-access + container_dwell_time_distribution, truck_arrival_distribution = self.manager._get_distributions( + container) + + return container_dwell_time_distribution, truck_arrival_distribution + + def test_container_dwell_time_and_truck_arrival_distributions_match(self): + container = Container.create( + weight=20, + delivered_by=ModeOfTransport.deep_sea_vessel, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + length=ContainerLength.twenty_feet, + storage_requirement=StorageRequirement.standard + ) + container_dwell_time_distribution, truck_arrival_distribution = self._get_distribution(container) + + self.assertEqual(3, int(container_dwell_time_distribution.minimum)) + self.assertEqual(3, int(truck_arrival_distribution.minimum_dwell_time_in_hours)) + + self.assertEqual(216, 
container_dwell_time_distribution.maximum) + + possible_hours_for_truck_arrival = truck_arrival_distribution.considered_time_window_in_hours + self.assertEqual( + 216 - 3 - 1, + possible_hours_for_truck_arrival, + "The truck might arrive 216h after the arrival of the container, but not within the first three hours. " + "Furthermore, the last hour is subtracted because up to 59 minutes are later added again and the maximum " + "should not be surpassed." + ) + + def test_not_reversed_distribution_is_used(self): + container = Container.create( + weight=20, + delivered_by=ModeOfTransport.deep_sea_vessel, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + length=ContainerLength.twenty_feet, + storage_requirement=StorageRequirement.standard ) - _datetime = datetime.datetime( + container_dwell_time_distribution, _ = self._get_distribution(container) + self.assertFalse(container_dwell_time_distribution.reversed_distribution) + + def test_pickup_time_in_required_time_range_weekday(self): + + container_arrival_time = datetime.datetime( year=2021, month=8, day=1 ) + container: Container = Container.create( + delivered_by=ModeOfTransport.deep_sea_vessel, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + storage_requirement=StorageRequirement.standard, + weight=23, + length=ContainerLength.twenty_feet + ) pickup_times = [] for _ in range(1000): - pickup_time = manager._get_container_pickup_time(_datetime) - self.assertGreaterEqual(pickup_time, _datetime) - self.assertLessEqual(pickup_time.date(), datetime.date( - year=2021, month=8, day=6 - )) + pickup_time = self.get_pickup_time(container, container_arrival_time) + self.assertGreaterEqual(pickup_time, container_arrival_time) pickup_times.append(pickup_time) if self.debug: - import seaborn as sns # pylint: disable=import-outside-toplevel - sns.kdeplot(pickup_times, bw=0.01) - plt.show(block=True) + self.visualize_probabilities(container, pickup_times, 
container_arrival_time) def test_pickup_time_in_required_time_range_with_sunday_starting_from_a_full_hour(self): - manager = TruckForImportContainersManager() - manager.reload_distribution( - minimum_dwell_time_in_hours=3, - maximum_dwell_time_in_hours=(5 * 24) - ) - _datetime = datetime.datetime( + container_arrival_time = datetime.datetime( year=2021, month=8, day=6 # a Monday ) + container: Container = Container.create( + delivered_by=ModeOfTransport.deep_sea_vessel, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + storage_requirement=StorageRequirement.standard, + weight=23, + length=ContainerLength.twenty_feet + ) pickup_times = [] for _ in range(1000): - pickup_time = manager._get_container_pickup_time(_datetime) + pickup_time = self.get_pickup_time(container, container_arrival_time) pickup_times.append(pickup_time) - self.assertGreaterEqual(pickup_time, _datetime) - self.assertLessEqual(pickup_time.date(), datetime.date( - year=2021, month=8, day=11 - )) + self.assertGreaterEqual(pickup_time, container_arrival_time, + "Container is picked up after it has arrived in the yard") self.assertTrue(pickup_time.weekday() != 6, f"containers are not picked up on Sundays but {pickup_time} was presented") + if self.debug: + self.visualize_probabilities(container, pickup_times, container_arrival_time) + weekday_counter = Counter([pickup_time.weekday() for pickup_time in pickup_times]) self.assertIn(4, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Friday was counted (30.07.2021)") @@ -75,31 +152,40 @@ def test_pickup_time_in_required_time_range_with_sunday_starting_from_a_full_hou "At least once a Saturday was counted (31.07.2021)") self.assertIn(0, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Monday was counted (02.08.2021)") - if self.debug: - import seaborn as sns # pylint: disable=import-outside-toplevel - sns.kdeplot(pickup_times, bw=0.01) - 
plt.show(block=True) - def test_pickup_time_in_required_time_range_with_sunday_starting_within_an_hour(self): - manager = TruckForImportContainersManager() - manager.reload_distribution( - minimum_dwell_time_in_hours=3, - maximum_dwell_time_in_hours=(5 * 24) + def get_pickup_time(self, container, container_arrival_time): + + # pylint: disable=protected-access + pickup_time = self.manager._get_container_pickup_time( + container, container_arrival_time ) - _datetime = datetime.datetime( + + return pickup_time + + def test_pickup_time_in_required_time_range_with_sunday_starting_within_an_hour(self): + container_arrival_time = datetime.datetime( year=2021, month=8, day=6, hour=12, minute=13 # a Monday ) + container: Container = Container.create( + delivered_by=ModeOfTransport.deep_sea_vessel, + picked_up_by=ModeOfTransport.truck, + picked_up_by_initial=ModeOfTransport.truck, + storage_requirement=StorageRequirement.standard, + weight=23, + length=ContainerLength.twenty_feet + ) pickup_times = [] for _ in range(1000): - pickup_time = manager._get_container_pickup_time(_datetime) + pickup_time = self.get_pickup_time(container, container_arrival_time) pickup_times.append(pickup_time) - self.assertGreaterEqual(pickup_time, _datetime) - self.assertLessEqual(pickup_time.date(), datetime.date( - year=2021, month=8, day=11 - )) + self.assertGreaterEqual(pickup_time, container_arrival_time, + "Container is picked up after it has arrived in the yard") self.assertTrue(pickup_time.weekday() != 6, f"containers are not picked up on Sundays but {pickup_time} was presented") + if self.debug: + self.visualize_probabilities(container, pickup_times, container_arrival_time) + weekday_counter = Counter([pickup_time.weekday() for pickup_time in pickup_times]) self.assertIn(4, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Friday was counted (30.07.2021)") @@ -107,7 +193,3 @@ def test_pickup_time_in_required_time_range_with_sunday_starting_within_an_hour( 
"At least once a Saturday was counted (31.07.2021)") self.assertIn(0, weekday_counter.keys(), "Probability (out of 1000 repetitions): " "At least once a Monday was counted (02.08.2021)") - if self.debug: - import seaborn as sns # pylint: disable=import-outside-toplevel - sns.kdeplot(pickup_times, bw=0.01) - plt.show(block=True) diff --git a/conflowgen/tests/notebooks/analyses_with_missing_data.ipynb b/conflowgen/tests/notebooks/analyses_with_missing_data.ipynb new file mode 100644 index 00000000..c2540508 --- /dev/null +++ b/conflowgen/tests/notebooks/analyses_with_missing_data.ipynb @@ -0,0 +1,147 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "568c22c5-73f1-4852-bc2d-699d379b1f2b", + "metadata": {}, + "source": [ + "# Analyses with missing data\n", + "\n", + "If there is no input data available, analyses need to reflect that.\n", + "This is not a default case so each visual does not need to be that elaborated as long as it does not harm the style guidelines and conveys the message." 
+ ] + }, + { + "cell_type": "markdown", + "id": "b7a81385-f85b-4e20-99da-9a940bf65436", + "metadata": {}, + "source": [ + "## Load dev version" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba0722b3-ed03-4442-af60-6f0e07ba3d0a", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import sys\n", + "import datetime\n", + "\n", + "path_to_conflowgen = os.path.abspath(\n", + " os.path.join(\n", + " os.pardir, # notebooks\n", + " os.pardir, # tests\n", + " os.pardir # conflowgen\n", + " )\n", + ")\n", + "\n", + "path_to_conflowgen" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5b21054-b0a1-4145-8b85-fd58bce84c3a", + "metadata": {}, + "outputs": [], + "source": [ + "sys.path.insert(\n", + " 0,\n", + " path_to_conflowgen\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aceb1765-ea2b-4a6e-96b9-6ab7fa49d351", + "metadata": {}, + "outputs": [], + "source": [ + "import conflowgen\n", + "\n", + "conflowgen.__file__" + ] + }, + { + "cell_type": "markdown", + "id": "55e957d4-1637-4dde-8ed8-7c690b0a9914", + "metadata": {}, + "source": [ + "## Initialization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c51d34e7-cb06-4a3e-9d3e-e836b01e1062", + "metadata": {}, + "outputs": [], + "source": [ + "database_chooser = conflowgen.DatabaseChooser()\n", + "database_chooser.create_new_sqlite_database(\":memory:\")\n", + "conflowgen.setup_logger(\n", + " logging_directory=\"./data/logger\"\n", + ")\n", + "now = datetime.datetime.now()\n", + "manager = conflowgen.ContainerFlowGenerationManager()\n", + "manager.set_properties(\n", + " start_date=now,\n", + " end_date=now + datetime.timedelta(days=30)\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "00947f71-f19e-4207-863a-64bbf2f47961", + "metadata": {}, + "source": [ + "## Run previews" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d9e73814-eebe-4da4-b6fb-11f0a5b1c4a9", + 
"metadata": {}, + "outputs": [], + "source": [ + "conflowgen.run_all_analyses(\n", + " as_text=True,\n", + " as_graph=True,\n", + " display_text_func=print,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "557a5934-f433-4455-8375-63bb6618f27c", + "metadata": {}, + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/conflowgen/tests/notebooks/data/logger/.gitkeep b/conflowgen/tests/notebooks/data/logger/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/conflowgen/tests/notebooks/index.ipynb b/conflowgen/tests/notebooks/index.ipynb new file mode 100644 index 00000000..8dba3c56 --- /dev/null +++ b/conflowgen/tests/notebooks/index.ipynb @@ -0,0 +1,37 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a763af19-c39d-41f5-8d7d-249e1bcd96d6", + "metadata": {}, + "source": [ + "# Index\n", + "\n", + "This directory contains tests that are not automated.\n", + "The Jupyter Notebooks generate graphics that need to be checked by invidiuals.\n", + "After any major change in previews or analyses, please run these." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/conflowgen/tests/notebooks/previews_with_missing_data.ipynb b/conflowgen/tests/notebooks/previews_with_missing_data.ipynb new file mode 100644 index 00000000..da588ae4 --- /dev/null +++ b/conflowgen/tests/notebooks/previews_with_missing_data.ipynb @@ -0,0 +1,147 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "568c22c5-73f1-4852-bc2d-699d379b1f2b", + "metadata": {}, + "source": [ + "# Previews with missing data\n", + "\n", + "If there is no input data available, previews need to reflect that.\n", + "This is not a default case so each visual does not need to be that elaborated as long as it does not harm the style guidelines and conveys the message." 
+ ] + }, + { + "cell_type": "markdown", + "id": "b7a81385-f85b-4e20-99da-9a940bf65436", + "metadata": {}, + "source": [ + "## Load dev version" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba0722b3-ed03-4442-af60-6f0e07ba3d0a", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import sys\n", + "import datetime\n", + "\n", + "path_to_conflowgen = os.path.abspath(\n", + " os.path.join(\n", + " os.pardir, # notebooks\n", + " os.pardir, # tests\n", + " os.pardir # conflowgen\n", + " )\n", + ")\n", + "\n", + "path_to_conflowgen" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5b21054-b0a1-4145-8b85-fd58bce84c3a", + "metadata": {}, + "outputs": [], + "source": [ + "sys.path.insert(\n", + " 0,\n", + " path_to_conflowgen\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aceb1765-ea2b-4a6e-96b9-6ab7fa49d351", + "metadata": {}, + "outputs": [], + "source": [ + "import conflowgen\n", + "\n", + "conflowgen.__file__" + ] + }, + { + "cell_type": "markdown", + "id": "55e957d4-1637-4dde-8ed8-7c690b0a9914", + "metadata": {}, + "source": [ + "## Initialization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c51d34e7-cb06-4a3e-9d3e-e836b01e1062", + "metadata": {}, + "outputs": [], + "source": [ + "database_chooser = conflowgen.DatabaseChooser()\n", + "database_chooser.create_new_sqlite_database(\":memory:\")\n", + "conflowgen.setup_logger(\n", + " logging_directory=\"./data/logger\"\n", + ")\n", + "now = datetime.datetime.now()\n", + "manager = conflowgen.ContainerFlowGenerationManager()\n", + "manager.set_properties(\n", + " start_date=now,\n", + " end_date=now + datetime.timedelta(days=30)\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "00947f71-f19e-4207-863a-64bbf2f47961", + "metadata": {}, + "source": [ + "## Run previews" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d9e73814-eebe-4da4-b6fb-11f0a5b1c4a9", + 
"metadata": {}, + "outputs": [], + "source": [ + "conflowgen.run_all_previews(\n", + " as_text=True,\n", + " as_graph=True,\n", + " display_text_func=print,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "5b3e3960-faf4-4ab2-83bf-5a0e1b63f7aa", + "metadata": {}, + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py b/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py index 209b69e3..48c0c1e6 100644 --- a/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py +++ b/conflowgen/tests/previews/test_container_flow_by_vehicle_type_preview_report.py @@ -102,7 +102,7 @@ def test_report_with_no_schedules(self): def test_inbound_with_single_arrival_schedules(self): one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( + Schedule.create( vehicle_type=ModeOfTransport.feeder, service_name="TestFeederService", vehicle_arrives_at=one_week_later.date(), @@ -111,7 +111,6 @@ def test_inbound_with_single_arrival_schedules(self): average_moved_capacity=300, vehicle_arrives_every_k_days=-1 ) - schedule.save() actual_report = self.preview_report.get_report_as_text() expected_report = """ vehicle type (from) vehicle type (to) required capacity (in TEU) @@ -143,3 +142,23 @@ def test_inbound_with_single_arrival_schedules(self): (rounding errors might exist) """ self.assertEqual(expected_report, actual_report) + + def test_report_with_no_schedules_as_graph(self): + """Not throwing an exception is 
sufficient""" + fig = self.preview_report.get_report_as_graph() + self.assertIsNotNone(fig) + + def test_report_with_schedules_as_graph(self): + """Not throwing an exception is sufficient for now""" + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=400, + average_moved_capacity=300, + vehicle_arrives_every_k_days=-1 + ) + fig = self.preview_report.get_report_as_graph() + self.assertIsNotNone(fig) diff --git a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py index 6b3e2eb6..edb7dc64 100644 --- a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py +++ b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview.py @@ -1,6 +1,8 @@ import datetime import unittest +import numpy as np + from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository from conflowgen.previews.inbound_and_outbound_vehicle_capacity_preview import \ @@ -162,7 +164,7 @@ def test_outbound_average_capacity_with_several_arrivals_schedules(self): def test_outbound_maximum_capacity_with_several_arrivals_schedules(self): """`_, capacity_with_one_feeder = self.preview.get_outbound_capacity_of_vehicles()` is the key difference!""" two_days_later = datetime.datetime.now() + datetime.timedelta(days=2) - schedule = Schedule.create( + Schedule.create( vehicle_type=ModeOfTransport.feeder, service_name="TestFeederService", vehicle_arrives_at=two_days_later.date(), @@ -170,7 +172,6 @@ def test_outbound_maximum_capacity_with_several_arrivals_schedules(self): average_vehicle_capacity=400, average_moved_capacity=300 ) - schedule.save() _, 
capacity_with_one_feeder = self.preview.get_outbound_capacity_of_vehicles() self.assertSetEqual(set(ModeOfTransport), set(capacity_with_one_feeder.keys())) @@ -187,5 +188,7 @@ def test_outbound_maximum_capacity_with_several_arrivals_schedules(self): # based on the seeded ModeOfTransportDistribution, this value might vary if not properly set truck_capacity_in_teu = capacity_with_one_feeder[ModeOfTransport.truck] - self.assertEqual(truck_capacity_in_teu, -1, "There is no maximum capacity for trucks, they are generated " - "as they are needed.") + self.assertTrue( + np.isnan(truck_capacity_in_teu), + "There is no maximum capacity for trucks, they are generated as they are needed." + ) diff --git a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py index 402091f2..468c30dd 100644 --- a/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py +++ b/conflowgen/tests/previews/test_inbound_and_outbound_vehicle_capacity_preview_report.py @@ -82,7 +82,7 @@ def test_report_with_no_schedules(self): def test_inbound_with_single_arrival_schedules(self): one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( + Schedule.create( vehicle_type=ModeOfTransport.feeder, service_name="TestFeederService", vehicle_arrives_at=one_week_later.date(), @@ -91,7 +91,6 @@ def test_inbound_with_single_arrival_schedules(self): average_moved_capacity=300, vehicle_arrives_every_k_days=-1 ) - schedule.save() actual_report = self.preview_report.get_report_as_text() expected_report = """ vehicle type inbound capacity (in TEU) outbound avg capacity (in TEU) outbound max capacity (in TEU) @@ -103,3 +102,23 @@ def test_inbound_with_single_arrival_schedules(self): (rounding errors might exist) """ self.assertEqual(expected_report, actual_report) + + def test_report_with_no_schedules_as_graph(self): + """Not 
throwing an exception is sufficient""" + ax = self.preview_report.get_report_as_graph() + self.assertIsNotNone(ax) + + def test_report_with_schedules_as_graph(self): + """Not throwing an exception is sufficient for now""" + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=400, + average_moved_capacity=300, + vehicle_arrives_every_k_days=-1 + ) + fig = self.preview_report.get_report_as_graph() + self.assertIsNotNone(fig) diff --git a/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py b/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py index dbb7ab3a..7166d38b 100644 --- a/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py +++ b/conflowgen/tests/previews/test_modal_split_preview__get_modal_split_for_hinterland.py @@ -10,7 +10,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestModalSplitPreview__get_modal_split_for_hinterland(unittest.TestCase): +class TestModalSplitPreview__get_modal_split_for_hinterland(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" self.sqlite_db = setup_sqlite_in_memory_db() diff --git a/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py b/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py index 3b8fdb5e..cbeaf1e3 100644 --- a/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py +++ b/conflowgen/tests/previews/test_modal_split_preview__get_transshipment.py @@ -10,7 +10,7 @@ from conflowgen.tests.substitute_peewee_database import setup_sqlite_in_memory_db -class TestModalSplitPreview__get_transshipment(unittest.TestCase): +class 
TestModalSplitPreview__get_transshipment(unittest.TestCase): # pylint: disable=invalid-name def setUp(self) -> None: """Create container database in memory""" self.sqlite_db = setup_sqlite_in_memory_db() @@ -64,14 +64,14 @@ def setUp(self) -> None: def test_with_no_schedules(self): """If no schedules are provided, this should not fail""" - empty_split = self.preview.get_transshipment_and_hinterland_share() + empty_split = self.preview.get_transshipment_and_hinterland_split() self.assertEqual(empty_split.hinterland_capacity, 0) self.assertEqual(empty_split.transshipment_capacity, 0) def test_with_single_arrival_schedules(self): """transshipment is 300 TEU inbound and 300TEU * (15% + 15%) = 90TEU outbound""" one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( + Schedule.create( vehicle_type=ModeOfTransport.feeder, service_name="TestFeederService", vehicle_arrives_at=one_week_later.date(), @@ -80,7 +80,6 @@ def test_with_single_arrival_schedules(self): average_moved_capacity=300, vehicle_arrives_every_k_days=-1 ) - schedule.save() - actual_split = self.preview.get_transshipment_and_hinterland_share() + actual_split = self.preview.get_transshipment_and_hinterland_split() self.assertAlmostEqual(actual_split.hinterland_capacity, 270) self.assertAlmostEqual(actual_split.transshipment_capacity, 90) diff --git a/conflowgen/tests/previews/test_modal_split_preview_report.py b/conflowgen/tests/previews/test_modal_split_preview_report.py index 8cb912e5..c4b0049c 100644 --- a/conflowgen/tests/previews/test_modal_split_preview_report.py +++ b/conflowgen/tests/previews/test_modal_split_preview_report.py @@ -68,31 +68,31 @@ def test_report_with_no_schedules(self): """If no schedules are provided, no flows exist""" actual_report = self.preview_report.get_report_as_text() expected_report = """ -Transshipment share -transshipment proportion (in TEU): 0.00 (-%) -hinterland proportion (in TEU): 0.00 (-%) +Role in network +transshipment 
traffic (in TEU): 0.00 (-%) +inland gateway traffic (in TEU): 0.00 (-%) -Inbound modal split -truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (only inbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) -Outbound modal split -truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (only outbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) -Absolute modal split (both inbound and outbound) -truck proportion (in TEU): 0.0 (-%) -barge proportion (in TEU): 0.0 (-%) -train proportion (in TEU): 0.0 (-%) +Modal split in hinterland traffic (both inbound and outbound traffic) +trucks (in TEU): 0.0 (-%) +barges (in TEU): 0.0 (-%) +trains (in TEU): 0.0 (-%) (rounding errors might exist) """ self.assertEqual(expected_report, actual_report) def test_inbound_with_single_arrival_schedules(self): one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( + Schedule.create( vehicle_type=ModeOfTransport.feeder, service_name="TestFeederService", vehicle_arrives_at=one_week_later.date(), @@ -101,27 +101,46 @@ def test_inbound_with_single_arrival_schedules(self): average_moved_capacity=300, vehicle_arrives_every_k_days=-1 ) - schedule.save() actual_report = self.preview_report.get_report_as_text() expected_report = """ -Transshipment share -transshipment proportion (in TEU): 90.00 (25.00%) -hinterland proportion (in TEU): 270.00 (75.00%) +Role in network +transshipment traffic (in TEU): 90.00 (25.00%) +inland gateway traffic (in TEU): 270.00 (75.00%) -Inbound modal split -truck proportion (in TEU): 60.0 (100.00%) -barge proportion (in TEU): 0.0 (0.00%) -train proportion (in TEU): 0.0 (0.00%) +Modal split in hinterland traffic (only inbound traffic) +trucks (in TEU): 60.0 
(100.00%) +barges (in TEU): 0.0 (0.00%) +trains (in TEU): 0.0 (0.00%) -Outbound modal split -truck proportion (in TEU): 60.0 (28.57%) -barge proportion (in TEU): 30.0 (14.29%) -train proportion (in TEU): 120.0 (57.14%) +Modal split in hinterland traffic (only outbound traffic) +trucks (in TEU): 60.0 (28.57%) +barges (in TEU): 30.0 (14.29%) +trains (in TEU): 120.0 (57.14%) -Absolute modal split (both inbound and outbound) -truck proportion (in TEU): 120.0 (44.44%) -barge proportion (in TEU): 30.0 (11.11%) -train proportion (in TEU): 120.0 (44.44%) +Modal split in hinterland traffic (both inbound and outbound traffic) +trucks (in TEU): 120.0 (44.44%) +barges (in TEU): 30.0 (11.11%) +trains (in TEU): 120.0 (44.44%) (rounding errors might exist) """ - self.assertEqual(expected_report, actual_report) + self.assertEqual(actual_report, expected_report) + + def test_report_with_no_schedules_as_graph(self): + """Not throwing an exception is sufficient""" + axes = self.preview_report.get_report_as_graph() + self.assertIsNotNone(axes) + + def test_report_with_schedules_as_graph(self): + """Not throwing an exception is sufficient for now""" + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=400, + average_moved_capacity=300, + vehicle_arrives_every_k_days=-1 + ) + axes = self.preview_report.get_report_as_graph() + self.assertIsNotNone(axes) diff --git a/conflowgen/tests/previews/test_run_all_previews.py b/conflowgen/tests/previews/test_run_all_previews.py index 592ac8b4..e66ae5c2 100644 --- a/conflowgen/tests/previews/test_run_all_previews.py +++ b/conflowgen/tests/previews/test_run_all_previews.py @@ -1,4 +1,5 @@ import unittest +import unittest.mock import datetime from conflowgen.api.container_flow_generation_manager import 
ContainerFlowGenerationManager @@ -21,22 +22,27 @@ def setUp(self) -> None: end_date=datetime.datetime.now().date() + datetime.timedelta(days=21) ) - def test_with_no_data(self): - with self.assertLogs('conflowgen', level='INFO') as cm: - run_all_previews() - self.maxDiff = None - self.assertEqual(len(cm.output), 14) + def test_with_no_data_as_text(self): + with self.assertLogs('conflowgen', level='INFO') as context: + run_all_previews(as_text=True) + self.assertEqual(len(context.output), 14) # Test only some entries. The detailed tests should be done in the unit test of the respective report. self.assertEqual( - cm.output[0], + context.output[0], "INFO:conflowgen:Run all previews for the input distributions in combination with the schedules." ) self.assertEqual( - cm.output[1], + context.output[1], "INFO:conflowgen:\nInbound And Outbound Vehicle Capacity Preview Report\n" ) self.assertEqual( - cm.output[-1], + context.output[-1], 'INFO:conflowgen:All previews have been presented.' ) + + def test_with_no_data_as_graph(self): + with unittest.mock.patch('matplotlib.pyplot.show'): + with self.assertLogs('conflowgen', level='INFO') as context: + run_all_previews(as_text=False, as_graph=True, static_graphs=True) + self.assertEqual(len(context.output), 11) diff --git a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py index 8f26791d..1785bad2 100644 --- a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py +++ b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview.py @@ -1,6 +1,8 @@ import datetime import unittest +import numpy as np + from conflowgen.domain_models.distribution_repositories.mode_of_transport_distribution_repository import \ ModeOfTransportDistributionRepository from conflowgen.previews.vehicle_capacity_exceeded_preview import VehicleCapacityExceededPreview @@ -85,7 +87,7 @@ def test_with_no_schedules(self): ) = 
no_excess_comparison[mode_of_transport_from] self.assertEqual(container_capacity_to_pick_up, 0, msg=f"mode_of_transport_from: {mode_of_transport_from}") - self.assertEqual(maximum_capacity, -1, msg=f"mode_of_transport_from: {mode_of_transport_from}") + self.assertTrue(np.isnan(maximum_capacity), msg=f"mode_of_transport_from: {mode_of_transport_from}") self.assertFalse(vehicle_type_capacity_is_exceeded, msg=f"mode_of_transport_from: {mode_of_transport_from}") def test_with_single_arrival_schedules(self): @@ -128,5 +130,5 @@ def test_with_single_arrival_schedules(self): ) = with_excess_comparison[mode_of_transport_from] self.assertAlmostEqual(container_capacity_to_pick_up, 60, msg="20% of 300 is 60") - self.assertEqual(maximum_capacity, -1, msg=f"mode_of_transport_from: {mode_of_transport_from}") + self.assertTrue(np.isnan(maximum_capacity), msg=f"mode_of_transport_from: {mode_of_transport_from}") self.assertFalse(vehicle_type_capacity_is_exceeded, msg=f"mode_of_transport_from: {mode_of_transport_from}") diff --git a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py index f728c540..ebb3b173 100644 --- a/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py +++ b/conflowgen/tests/previews/test_vehicle_capacity_exceeded_preview_report.py @@ -84,7 +84,7 @@ def test_inbound_with_single_arrival_schedules(self): """A feeder delivers containers for every vehicle type. 
For the types truck and feeder it is fine, deep sea vessels, barges and trains do not exist und thus their capacity is exceeded.""" one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) - schedule = Schedule.create( + Schedule.create( vehicle_type=ModeOfTransport.feeder, service_name="TestFeederService", vehicle_arrives_at=one_week_later.date(), @@ -93,7 +93,6 @@ def test_inbound_with_single_arrival_schedules(self): average_moved_capacity=300, vehicle_arrives_every_k_days=-1 ) - schedule.save() actual_report = self.preview_report.get_report_as_text() expected_report = """ vehicle type maximum capacity (in TEU) required capacity (in TEU) exceeded difference (in TEU) @@ -105,3 +104,23 @@ def test_inbound_with_single_arrival_schedules(self): (rounding errors might exist) """ self.assertEqual(expected_report, actual_report) + + def test_report_with_no_schedules_as_graph(self): + """Not throwing an exception is sufficient""" + fig = self.preview_report.get_report_as_graph() + self.assertIsNotNone(fig) + + def test_report_with_schedules_as_graph(self): + """Not throwing an exception is sufficient for now""" + one_week_later = datetime.datetime.now() + datetime.timedelta(weeks=1) + Schedule.create( + vehicle_type=ModeOfTransport.feeder, + service_name="TestFeederService", + vehicle_arrives_at=one_week_later.date(), + vehicle_arrives_at_time=one_week_later.time(), + average_vehicle_capacity=400, + average_moved_capacity=300, + vehicle_arrives_every_k_days=-1 + ) + fig = self.preview_report.get_report_as_graph() + self.assertIsNotNone(fig) diff --git a/conflowgen/tests/tools/test_theoretical_distribution__clipped_log_normal.py b/conflowgen/tests/tools/test_theoretical_distribution__clipped_log_normal.py new file mode 100644 index 00000000..acf32731 --- /dev/null +++ b/conflowgen/tests/tools/test_theoretical_distribution__clipped_log_normal.py @@ -0,0 +1,35 @@ +import unittest + +from conflowgen.tools.continuous_distribution import ClippedLogNormal + + 
+class TestClippedLogNormal(unittest.TestCase): + + def setUp(self) -> None: + self.cln = ClippedLogNormal(average=5, variance=2, minimum=1, maximum=15) + + def assertArrayEqual(self, array_1, array_2, msg=""): # pylint: disable=invalid-name + self.assertListEqual(list(array_1), list(array_2), msg=msg) + + def test_minimum_is_respected(self): + self.assertArrayEqual(self.cln.get_probabilities([0.5, 4]), [0, 1]) + self.assertArrayEqual(self.cln.get_probabilities([0, 6]), [0, 1]) + self.assertArrayEqual(self.cln.get_probabilities([-2, 5]), [0, 1]) + + def test_maximum_is_respected(self): + self.assertArrayEqual(self.cln.get_probabilities([14, 15.1]), [1, 0]) + self.assertArrayEqual(self.cln.get_probabilities([14, 16]), [1, 0]) + self.assertArrayEqual(self.cln.get_probabilities([14, 100]), [1, 0]) + + def test_lognorm_properties(self): + self.assertAlmostEqual(self.cln._lognorm.mean(), 5) # pylint: disable=protected-access + self.assertAlmostEqual(self.cln._lognorm.var(), 2) # pylint: disable=protected-access + + def test_reversed(self): + reversed_cln = self.cln.reversed() + self.assertAlmostEqual(reversed_cln._lognorm.mean(), 5) # pylint: disable=protected-access + self.assertAlmostEqual(reversed_cln._lognorm.var(), 2) # pylint: disable=protected-access + xs = [0, 2, 4, 10, 12, 15, 20] + probs = self.cln.get_probabilities(xs) + reversed_probs = reversed_cln.get_probabilities(xs) + self.assertListEqual(list(probs), list(reversed(reversed_probs))) diff --git a/conflowgen/tests/tools/test_theoretical_distribution__mutliply_discretized_probability_densities.py b/conflowgen/tests/tools/test_theoretical_distribution__mutliply_discretized_probability_densities.py new file mode 100644 index 00000000..d6b09e46 --- /dev/null +++ b/conflowgen/tests/tools/test_theoretical_distribution__mutliply_discretized_probability_densities.py @@ -0,0 +1,13 @@ +import unittest + +from conflowgen.tools.continuous_distribution import multiply_discretized_probability_densities + + +class 
TestMultiplyDiscretizedProbabilityDensities(unittest.TestCase): + + def test(self): + vector_a = [0.5, 0.25, 0.25] + vector_b = [0.3, 0.3, 0.4] + vector_c = multiply_discretized_probability_densities(vector_a, vector_b) + self.assertEqual(len(vector_c), 3) + self.assertAlmostEqual(sum(vector_c), 1) diff --git a/conflowgen/tools/continuous_distribution.py b/conflowgen/tools/continuous_distribution.py new file mode 100644 index 00000000..5d15cc09 --- /dev/null +++ b/conflowgen/tools/continuous_distribution.py @@ -0,0 +1,129 @@ +from __future__ import annotations + +import abc +import math +from typing import Collection, Sequence, Optional + +import numpy +import scipy.stats + + +class ContinuousDistribution(abc.ABC): + + average: float + minimum: float + maximum: float + + def __init__( + self, + average: float, + minimum: float, + maximum: float, + unit: Optional[str] = None, + reversed_distribution: bool = False + ): + assert minimum < average < maximum, f"The assertion {minimum} < {average} < {maximum} failed." 
+ self.average = average + self.minimum = minimum + self.maximum = maximum + self.reversed_distribution = reversed_distribution + + self.unit = unit + self.unit_repr, self.unit_repr_square = "", "" + if unit: + self.unit_repr = unit + self.unit_repr_square = unit + "²" + + @abc.abstractmethod + def _get_probabilities_based_on_distribution(self, xs: numpy.typing.ArrayLike) -> numpy.typing.ArrayLike: + pass + + def get_probabilities(self, xs: numpy.typing.ArrayLike) -> numpy.typing.ArrayLike: + """ + + Args: + xs: Elements that are on the same scale as average, variance, minimum, and maximum + + Returns: + The respective probability that element x as an element of xs is drawn from this distribution + """ + xs = numpy.array(xs) + densities = self._get_probabilities_based_on_distribution(xs) + densities[xs <= self.minimum] = 0 + densities[xs >= self.maximum] = 0 + densities = densities / densities.sum() + if self.reversed_distribution: + densities = numpy.flip(densities) + return densities + + @abc.abstractmethod + def reversed(self) -> ContinuousDistribution: + pass + + +class ClippedLogNormal(ContinuousDistribution): + + variance: float + + def __init__( + self, + average: float, + variance: float, + minimum: float, + maximum: float, + unit: Optional[str] = None, + reversed_distribution: bool = False + ): + super().__init__( + average=average, + minimum=minimum, + maximum=maximum, + unit=unit, + reversed_distribution=reversed_distribution + ) + self.variance = variance + self._lognorm = self._get_scipy_lognorm() + + def _get_scipy_lognorm(self) -> "scipy.stats.rv_frozen": + # See https://www.johndcook.com/blog/2022/02/24/find-log-normal-parameters/ for reference + sigma2 = math.log(self.variance / self.average ** 2 + 1) + mu = math.log(self.average) - sigma2 / 2 + + scipy_shape = sigma2 ** 0.5 + scipy_scale = math.exp(mu) + + frozen_lognorm = scipy.stats.lognorm(s=scipy_shape, scale=scipy_scale) + + return frozen_lognorm + + def 
_get_probabilities_based_on_distribution(self, xs: numpy.typing.ArrayLike) -> numpy.typing.ArrayLike: + return self._lognorm.pdf(xs) + + def __repr__(self): + return ( + f"<{self.__class__.__name__}: " + f"avg={self.average:.1f}{self.unit_repr}, " + f"min={self.minimum:.1f}{self.unit_repr}, " + f"max={self.maximum:.1f}{self.unit_repr}, " + f"var={self.variance:.1f}{self.unit_repr_square}, " + f"rev={self.reversed_distribution}" + f">" + ) + + def reversed(self) -> ClippedLogNormal: + return self.__class__( + average=self.average, + minimum=self.minimum, + maximum=self.maximum, + variance=self.variance, + unit=self.unit, + reversed_distribution=(not self.reversed_distribution) + ) + + +def multiply_discretized_probability_densities(*probabilities: Collection[float]) -> Sequence[float]: + assert len({len(p) for p in probabilities}) == 1, "All probability vectors have the same length" + np_probs = [numpy.array(probs, dtype=numpy.double) for probs in probabilities] + multiplied_probabilities = numpy.multiply(*np_probs) + normalized_probabilities = multiplied_probabilities / multiplied_probabilities.sum() + return normalized_probabilities diff --git a/conflowgen/tools/weekly_distribution.py b/conflowgen/tools/weekly_distribution.py index ac3edec2..77c9ff92 100644 --- a/conflowgen/tools/weekly_distribution.py +++ b/conflowgen/tools/weekly_distribution.py @@ -14,11 +14,16 @@ class WeeklyDistribution: def __init__( self, hour_fraction_pairs: List[Union[Tuple[int, float], Tuple[int, int]]], - considered_time_window_in_hours: float, - minimum_dwell_time_in_hours: float + considered_time_window_in_hours: int, + minimum_dwell_time_in_hours: int, + is_reversed: bool = False, + context: str = "" ): + self.is_reversed = is_reversed self.considered_time_window_in_hours = considered_time_window_in_hours self.minimum_dwell_time_in_hours = minimum_dwell_time_in_hours + self.context = context + self.hour_of_the_week_fraction_pairs = [] number_of_weeks_to_consider = 2 + 
int(considered_time_window_in_hours / 24 / 7) for week in range(number_of_weeks_to_consider): @@ -30,10 +35,6 @@ def __init__( fraction ) ) - self.time_window_length_in_hours = ( - self.hour_of_the_week_fraction_pairs[1][0] - - self.hour_of_the_week_fraction_pairs[0][0] - ) @classmethod def _get_hour_of_the_week_from_datetime(cls, point_in_time: datetime.datetime) -> int: @@ -50,9 +51,13 @@ def _get_hour_of_the_week_from_datetime(cls, point_in_time: datetime.datetime) - f"Time since Monday in completed hours: {completed_hours_since_monday}" return completed_hours_since_monday - def get_distribution_slice(self, _datetime: datetime.datetime) -> Dict[int, float]: - start_hour = self._get_hour_of_the_week_from_datetime(_datetime) + def get_distribution_slice(self, start_as_datetime: datetime.datetime) -> Dict[int, float]: + start_hour = self._get_hour_of_the_week_from_datetime(start_as_datetime) end_hour = start_hour + self.considered_time_window_in_hours + + if self.is_reversed: + end_hour -= self.minimum_dwell_time_in_hours + assert 0 <= start_hour <= self.HOURS_IN_WEEK, "Start hour must be in first week" assert start_hour < end_hour, "Start hour must be before end hour" @@ -72,14 +77,17 @@ def get_distribution_slice(self, _datetime: datetime.datetime) -> Dict[int, floa need_to_modify_first_entry = False previous_fraction = None for i, (hour_after_start, fraction) in enumerate(not_normalized_distribution_slice): - if hour_after_start > self.minimum_dwell_time_in_hours: - if need_to_modify_first_entry: - del not_normalized_distribution_slice[:i] - not_normalized_distribution_slice.insert( - 0, - (self.minimum_dwell_time_in_hours, previous_fraction) - ) - break + + # drop first entries if in forward mode + if not self.is_reversed: + if hour_after_start > self.minimum_dwell_time_in_hours: + if need_to_modify_first_entry: + del not_normalized_distribution_slice[:i] + not_normalized_distribution_slice.insert( + 0, + (self.minimum_dwell_time_in_hours, previous_fraction) + 
) + break need_to_modify_first_entry = True previous_fraction = fraction @@ -93,3 +101,14 @@ def get_distribution_slice(self, _datetime: datetime.datetime) -> Dict[int, floa for (hour_after_start, hour_fraction) in not_normalized_distribution_slice } return distribution_slice + + def __repr__(self): + maximum = self.minimum_dwell_time_in_hours + self.considered_time_window_in_hours + return ( + f"<{self.__class__.__name__}: " + f"min={self.minimum_dwell_time_in_hours:.1f}h, " + f"max={maximum:.1f}h={maximum / 24:.1f}d, " + f"rev={self.is_reversed}, " + f"context='{self.context}'" + f">" + ) diff --git a/docs/api.rst b/docs/api.rst index c95417bd..f7f32f1d 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -24,7 +24,7 @@ Domain datatypes .. autoenum:: conflowgen.StorageRequirement :members: -.. autonamedtuple:: conflowgen.TransshipmentAndHinterlandComparison +.. autonamedtuple:: conflowgen.TransshipmentAndHinterlandSplit .. autonamedtuple:: conflowgen.CompleteVehicleIdentifier @@ -58,6 +58,9 @@ These are all required for generating the synthetic data. .. autoclass:: conflowgen.ModeOfTransportDistributionManager :members: +.. autoclass:: conflowgen.ContainerDwellTimeDistributionManager + :members: + .. autoclass:: conflowgen.PortCallManager :members: @@ -65,8 +68,8 @@ These are all required for generating the synthetic data. :members: -Getting previews -================ +Generating previews +=================== .. autoclass:: conflowgen.ContainerFlowByVehicleTypePreview :members: @@ -94,9 +97,15 @@ Getting previews .. autoclass:: conflowgen.VehicleCapacityExceededPreviewReport :members: -Getting analyses +Running analyses ================ +.. autoclass:: conflowgen.ContainerDwellTimeAnalysis + :members: + +.. autoclass:: conflowgen.ContainerDwellTimeAnalysisReport + :members: + .. autoclass:: conflowgen.ContainerFlowAdjustmentByVehicleTypeAnalysis :members: @@ -153,6 +162,16 @@ Getting analyses .. 
autoclass:: conflowgen.YardCapacityAnalysisReport :members: +Using distributions +=================== + +Most of the distributions in ConFlowGen are discrete distributions and are just reflected by classic Python +dictionaries where the key refers to the element to be drawn and the value is the probability. +In some cases, such as container dwell times, continuous distributions are required. + +.. autoclass:: conflowgen.ContinuousDistribution + :members: + Working with reports ==================== diff --git a/docs/conf.py b/docs/conf.py index 73e2edd6..1d88c327 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -110,7 +110,8 @@ version_link = f"{sys.version_info.major}.{sys.version_info.minor}" intersphinx_mapping = { - 'python': (f'https://docs.python.org/{version_link}', None) # link to used Python version + 'python': (f'https://docs.python.org/{version_link}', None), # link to used Python version + 'numpy': ('https://numpy.org/doc/stable/', None), # link to numpy } # -- Options for Included Jupyter Notebooks ---------------------------------- diff --git a/docs/index.rst b/docs/index.rst index b2ff9d53..23010e3f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,6 +14,7 @@ Its source code hosted notebooks/input_distributions.ipynb notebooks/previews.ipynb notebooks/analyses.ipynb + notebooks/in_spotlight.ipynb examples api contributing diff --git a/docs/notebooks/analyses.ipynb b/docs/notebooks/analyses.ipynb index d67995ac..a751143d 100644 --- a/docs/notebooks/analyses.ipynb +++ b/docs/notebooks/analyses.ipynb @@ -144,7 +144,7 @@ "For more informaiton on this preview report, please check\n", ":class:`.InboundAndOutboundVehicleCapacityAnalysisReport`.\n", "A list of all analyses including their corresponding reports shipped with ConFlowGen is available at\n", - ":ref:`Getting analyses`." + ":ref:`Running analyses`." 
] }, { @@ -283,6 +283,32 @@ ")" ] }, + { + "cell_type": "markdown", + "id": "6849b3d4-4d3f-43bb-9900-d0a0b6538f24", + "metadata": {}, + "source": [ + "In some cases, even several filters can be combined." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27dccf0d-e7c6-42e4-899e-4ed45aeaa656", + "metadata": {}, + "outputs": [], + "source": [ + "container_dwell_time_report = conflowgen.ContainerDwellTimeAnalysisReport()\n", + "container_dwell_time_report.get_report_as_graph(\n", + " container_delivered_by_vehicle_type={\n", + " conflowgen.ModeOfTransport.deep_sea_vessel,\n", + " conflowgen.ModeOfTransport.feeder,\n", + " },\n", + " storage_requirement=conflowgen.StorageRequirement.empty\n", + ")\n", + "plt.show()" + ] + }, { "cell_type": "raw", "id": "44c27c8a-269b-4954-9b78-24b2f111c145", @@ -292,7 +318,7 @@ }, "source": [ "The same pattern of restricting the output to certain vehicle or container types is usable for other analyses in case you spot the corresponding keyword arguments in\n", - ":ref:`Getting analyses`." + ":ref:`Running analyses`." 
] }, { @@ -335,7 +361,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.8" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite b/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite index e535dca0..71877f69 100644 --- a/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite +++ b/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:76cdee6f1bc6fa9bd99133884bd242e57efa84894ee4fb6b3ccd60ab945090f5 -size 18337792 +oid sha256:9dfa9a4dd1bab8c7d63f78d858d40655cb608bfe8bbf6903d65ae3b9754861c9 +size 19890176 diff --git a/docs/notebooks/data/prepared_dbs/demo_poc.sqlite b/docs/notebooks/data/prepared_dbs/demo_poc.sqlite index 1e82876d..70e77aa3 100644 --- a/docs/notebooks/data/prepared_dbs/demo_poc.sqlite +++ b/docs/notebooks/data/prepared_dbs/demo_poc.sqlite @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f63c11346859f013784054bc644063c849056e084f5228ce6bb04e34a3f46d21 -size 282624 +oid sha256:3a31cc9401c7beab6a00bb25083a352c9c01157c7e4b619eb5ba1f112b077726 +size 299008 diff --git a/docs/notebooks/first_steps.ipynb b/docs/notebooks/first_steps.ipynb index 7a29739c..0edb7546 100644 --- a/docs/notebooks/first_steps.ipynb +++ b/docs/notebooks/first_steps.ipynb @@ -456,7 +456,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.8" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/docs/notebooks/in_spotlight.ipynb b/docs/notebooks/in_spotlight.ipynb new file mode 100644 index 00000000..60674b1f --- /dev/null +++ b/docs/notebooks/in_spotlight.ipynb @@ -0,0 +1,566 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d746cfe2-cfd6-4286-8ab2-872e45c1546f", + "metadata": {}, + "source": [ + "# In Spotlight\n", + "\n", + "In this chapter, selected aspects of the\n", + "[Data Generation 
Process](../background.rst#data-generation-process)\n", + "are explained on a more detailled level and supported by visuals.\n", + "In this scope, some internal functions and methods are imported that are not part of the official interface." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2822c10c-6094-4548-b16f-34a39fb55c4d", + "metadata": {}, + "outputs": [], + "source": [ + "import datetime\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import conflowgen" + ] + }, + { + "cell_type": "markdown", + "id": "29482d8a-18ff-4803-b5cd-ce77c15dd920", + "metadata": {}, + "source": [ + "Load some internal classes and functions that are not part of the regular API." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b362f514-570f-4145-87a9-e2eedf96d77f", + "metadata": {}, + "outputs": [], + "source": [ + "from conflowgen.domain_models.container import Container\n", + "from conflowgen.tools.continuous_distribution import multiply_discretized_probability_densities\n", + "from conflowgen.flow_generator.truck_for_export_containers_manager import TruckForExportContainersManager\n", + "from conflowgen.flow_generator.truck_for_import_containers_manager import TruckForImportContainersManager" + ] + }, + { + "cell_type": "markdown", + "id": "36fd2e90-caf6-4477-bb69-cbe42348a1cd", + "metadata": {}, + "source": [ + "Initialize ConFlowGen." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8733896f-a6cc-4cd9-9246-74e4f79c0d3e", + "metadata": {}, + "outputs": [], + "source": [ + "logger = conflowgen.setup_logger(\n", + " logging_directory=\"./data/logger\", # use subdirectory relative to Jupyter Notebook\n", + " format_string=\"%(message)s\" # only show log messages, discard timestamp etc.\n", + ")\n", + "database_chooser = conflowgen.DatabaseChooser()\n", + "database_chooser.create_new_sqlite_database(\":memory:\")" + ] + }, + { + "cell_type": "markdown", + "id": "cb0706b3-38ac-4f57-bda6-2900a966c154", + "metadata": {}, + "source": [ + "## Combining truck arrival and container dwell time distribution\n", + "\n", + "It is a challenge to synthetically generate container flows that take both the truck arrival distribution and the container dwell time distribution into account.\n", + "This is, however, necessary in two cases:\n", + "\n", + "- When a container is picked up by a truck\n", + "- When a container is delivered by a truck\n", + "\n", + "The approach chosen in ConFlowGen is presented in the following, first for the import and then for the export process." + ] + }, + { + "cell_type": "markdown", + "id": "edd69745-2384-4378-b347-9ea860804975", + "metadata": {}, + "source": [ + "### Picking up a container by truck\n", + "When a container is delivered to the container terminal by a vessel and a truck is to be generated to pick up the container, two naive approaches exist.\n", + "First, a truck arrival time might be drawn from the truck arrival distribution.\n", + "This, e.g., ensures that no truck arrivals happen on a Sunday.\n", + "However, only considering the truck arrival distribution means that the container dwell time distribution is ignored.\n", + "Second, the container dwell time distribution might be picked to draw the arrival of the truck.\n", + "This ensures that the container dwell times are realistic.\n", + "At the same time, the truck arrival patterns are ignored." 
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "5c0ffce8-669a-4821-8cde-149bb3f8b44c",
+   "metadata": {},
+   "source": [
+    "Prepare the container that arrives at the terminal with a deep sea vessel and departs with a truck"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "852dea38-b137-4171-a566-b7a05a7c1991",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "container = Container.create(\n",
+    "    weight=20,\n",
+    "    delivered_by=conflowgen.ModeOfTransport.deep_sea_vessel,\n",
+    "    picked_up_by=conflowgen.ModeOfTransport.truck,\n",
+    "    picked_up_by_initial=conflowgen.ModeOfTransport.truck,\n",
+    "    length=conflowgen.ContainerLength.twenty_feet,\n",
+    "    storage_requirement=conflowgen.StorageRequirement.standard\n",
+    ")\n",
+    "container_arrival_time = datetime.datetime.now().replace(second=0, microsecond=0)\n",
+    "container_arrival_time_hour = (\n",
+    "    container_arrival_time.replace(minute=0)\n",
+    "    + datetime.timedelta(hours=1)  # turn 8:45 into 09:00\n",
+    ")\n",
+    "\n",
+    "print(f\"The container arrives at the terminal at {container_arrival_time.isoformat()} \"\n",
+    "      f\"which is counted as {container_arrival_time_hour.isoformat()}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b818cb33-a570-4106-8746-28fb44539d95",
+   "metadata": {},
+   "source": [
+    "Load the two distributions that fit the container characteristics."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e7838f5b-3015-4324-a1ca-36347b4dbcc2", + "metadata": {}, + "outputs": [], + "source": [ + "manager = TruckForImportContainersManager()\n", + "manager.reload_distributions()\n", + "container_dwell_time_distribution, truck_arrival_distribution = manager._get_distributions(container)\n", + "\n", + "print(container_dwell_time_distribution)\n", + "print(truck_arrival_distribution)" + ] + }, + { + "cell_type": "markdown", + "id": "384619d0-fea3-4990-bd12-08e344ad7221", + "metadata": {}, + "source": [ + "Then, the earliest truck time slot is chosen, i.e., when it can arrive first on the terminal." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8fa88c99-85ae-4089-802a-423e7022ba7d", + "metadata": {}, + "outputs": [], + "source": [ + "earliest_truck_time_slot = (\n", + " container_arrival_time_hour\n", + " + datetime.timedelta(hours=container_dwell_time_distribution.minimum)\n", + ")\n", + "print(f\"The earliest available truck time slot is {earliest_truck_time_slot.isoformat()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "97bddf09-b5ee-4a9a-8054-387fb5dd4c5c", + "metadata": {}, + "source": [ + "Now the truck arrival distribution is converted to a distribution that reflects the probability that the container is picked up at a given time.\n", + "While the truck arrival distribution only covers a work week, the derived distribution must cover the whole time range from the time the container has arrived at the terminal until the point that is determined as the maximum dwell time.\n", + "This time range is often longer than a week." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7f55e947-17f4-4cef-9ee0-2be2e53b71e0", + "metadata": {}, + "outputs": [], + "source": [ + "truck_arrival_distribution_slice = truck_arrival_distribution.get_distribution_slice(earliest_truck_time_slot)\n", + "\n", + "truck_arrival_distribution_slice_as_dates = {\n", + " container_arrival_time_hour + datetime.timedelta(hours=hours_from_now): fraction * 100\n", + " for hours_from_now, fraction in truck_arrival_distribution_slice.items()\n", + "}\n", + "\n", + "df_truck_arrival_distribution = pd.Series(truck_arrival_distribution_slice_as_dates).to_frame(\"Truck Arrival Distribution\")\n", + "\n", + "df_truck_arrival_distribution.plot(legend=False)\n", + "plt.ylabel(\"Probability (as percentage overall)\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "1f2ee669-3777-4d75-bf5d-b72cbffc0b7f", + "metadata": {}, + "source": [ + "After having loaded the truck arrival distribution, now it is time to turn to the container dwell time distribution.\n", + "It assigns a probability of the container being picked up to any suggested time slot." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23cbf1b5-6df2-4bb6-ae5c-085d78c18288", + "metadata": {}, + "outputs": [], + "source": [ + "time_windows_for_truck_arrival = list(truck_arrival_distribution_slice.keys())\n", + "container_dwell_time_probabilities = container_dwell_time_distribution.get_probabilities(\n", + " time_windows_for_truck_arrival\n", + ")\n", + "\n", + "container_dwell_time_probabilities_as_dates = {\n", + " earliest_truck_time_slot + datetime.timedelta(hours=hours_from_now): fraction * 100\n", + " for hours_from_now, fraction in enumerate(container_dwell_time_probabilities)\n", + "}\n", + "\n", + "df_container_dwell_time_distribution = pd.Series(\n", + " container_dwell_time_probabilities_as_dates).to_frame(\"Container Dwell Time Distribution\")\n", + "\n", + "df_container_dwell_time_distribution.plot(legend=False)\n", + "plt.ylabel(\"Probability (as percentage overall)\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "d573c2ed-828d-41c4-85d4-d9cc2e9fd553", + "metadata": {}, + "source": [ + "In the last step, the two distributions are merged by multiplication." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc11a695-5cb4-4510-92b9-c475a235b433", + "metadata": {}, + "outputs": [], + "source": [ + "merged_distribution = multiply_discretized_probability_densities(\n", + " list(truck_arrival_distribution_slice.values()),\n", + " container_dwell_time_probabilities\n", + ")\n", + "\n", + "merged_distribution_as_dates = {\n", + " earliest_truck_time_slot + datetime.timedelta(hours=hours_from_now): fraction * 100\n", + " for hours_from_now, fraction in enumerate(merged_distribution)\n", + "}\n", + "\n", + "df_merged_distributions = \\\n", + " pd.Series(merged_distribution_as_dates).to_frame(\"Multiplication of Both Distributions\")\n", + "\n", + "df_merged_distributions.plot(legend=False)\n", + "plt.ylabel(\"Probability (as percentage overall)\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "d649aa32-e40e-49b2-bb90-bced610022ec", + "metadata": {}, + "source": [ + "Let's re-check how the multiplication of the two distributions affected the merged distribution." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "76a35107-5732-4a53-a2da-93475e76763e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "df_merged = pd.concat([\n",
+    "    df_truck_arrival_distribution,\n",
+    "    df_container_dwell_time_distribution,\n",
+    "    df_merged_distributions\n",
+    "], axis=1)\n",
+    "\n",
+    "ax = df_merged[[\"Container Dwell Time Distribution\", \"Truck Arrival Distribution\"]].plot(\n",
+    "    color={\n",
+    "        \"Truck Arrival Distribution\": \"navy\",\n",
+    "        \"Container Dwell Time Distribution\": \"dimgray\",\n",
+    "    },\n",
+    "    alpha=0.5,\n",
+    "    style=\"--\"\n",
+    ")\n",
+    "df_merged[[\"Multiplication of Both Distributions\"]].plot(ax=ax, alpha=1, color=\"k\")\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "60af3649-89da-4f30-b82b-79c6e1b8e8ba",
+   "metadata": {},
+   "source": [
+    "The multiplication of the two distributions clearly leads to a new distribution that will help to approximate both the container dwell time distribution and the truck arrival distribution likewise."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "ff97385b-b851-4988-ad1c-5b2a067319cb",
+   "metadata": {},
+   "source": [
+    "### Delivering a container by truck\n",
+    "When a container is delivered by truck, ConFlowGen actually first allocates the container on a vessel and only then decides on the truck arrival time.\n",
+    "The process is thus very similar to the previous case, only that both distributions need to be reversed.\n",
+    "This is because we look backwards:\n",
+    "Given the chosen vessel, how many hours before the truck most likely has arrived?"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "ea488811-09ad-4301-a137-43addb061de4",
+   "metadata": {},
+   "source": [
+    "Prepare the container that departs from the terminal with a deep sea vessel."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4f1c8b99-1a75-4a94-940e-73466a4d8c84",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "container = Container.create(\n",
+    "    weight=20,\n",
+    "    delivered_by=conflowgen.ModeOfTransport.truck,\n",
+    "    picked_up_by=conflowgen.ModeOfTransport.deep_sea_vessel,\n",
+    "    picked_up_by_initial=conflowgen.ModeOfTransport.deep_sea_vessel,\n",
+    "    length=conflowgen.ContainerLength.forty_feet,\n",
+    "    storage_requirement=conflowgen.StorageRequirement.standard\n",
+    ")\n",
+    "container_departure_time = datetime.datetime.now().replace(second=0, microsecond=0)\n",
+    "container_departure_time_hour = (\n",
+    "    container_departure_time.replace(minute=0)\n",
+    ")\n",
+    "\n",
+    "print(f\"The container departs from the terminal at {container_departure_time.isoformat()} \"\n",
+    "      f\"which is counted as {container_departure_time_hour.isoformat()}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "42f61c8b-eb3d-463d-8cdf-612965657502",
+   "metadata": {},
+   "source": [
+    "Load the two distributions that fit the container characteristics."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "402105cf-481f-44b1-b23b-074189c56434", + "metadata": {}, + "outputs": [], + "source": [ + "manager = TruckForExportContainersManager()\n", + "manager.reload_distributions()\n", + "container_dwell_time_distribution, truck_arrival_distribution = manager._get_distributions(container)\n", + "container_dwell_time_distribution = container_dwell_time_distribution.reversed()\n", + "\n", + "print(container_dwell_time_distribution)\n", + "print(truck_arrival_distribution)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "347f56c5-af90-409d-b773-07631b56d24f", + "metadata": {}, + "outputs": [], + "source": [ + "earliest_truck_time_slot = (\n", + " container_departure_time_hour\n", + " - datetime.timedelta(hours=container_dwell_time_distribution.maximum)\n", + ")\n", + "\n", + "print(f\"The earliest available truck time slot is {earliest_truck_time_slot.isoformat()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "fb447968-45ca-4cfd-b46c-cb2ebdd53345", + "metadata": {}, + "source": [ + "The truck arrival distribution is prepared like before." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15a198a2-297a-4ea1-9778-06c78c79414c", + "metadata": {}, + "outputs": [], + "source": [ + "truck_arrival_distribution_slice = truck_arrival_distribution.get_distribution_slice(earliest_truck_time_slot)\n", + "\n", + "truck_arrival_distribution_slice_as_dates = {\n", + " earliest_truck_time_slot + datetime.timedelta(hours=hours_from_now): fraction * 100\n", + " for hours_from_now, fraction in truck_arrival_distribution_slice.items()\n", + "}\n", + "\n", + "df_truck_arrival_distribution = pd.Series(\n", + " truck_arrival_distribution_slice_as_dates).to_frame(\"Truck Arrival Distribution\")\n", + "\n", + "df_truck_arrival_distribution.plot(legend=False)\n", + "plt.ylabel(\"Probability (as percentage overall)\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "676d008b-6e57-4043-8ff8-6a900780e9fd", + "metadata": {}, + "source": [ + "Likewise, the container dwell time distribution is prepared." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b76c75ec-0a57-4b1e-91a7-186fd6c02809", + "metadata": {}, + "outputs": [], + "source": [ + "time_windows_for_truck_arrival = list(truck_arrival_distribution_slice.keys())\n", + "container_dwell_time_probabilities = container_dwell_time_distribution.get_probabilities(\n", + " time_windows_for_truck_arrival\n", + ")\n", + "\n", + "container_dwell_time_probabilities_as_dates = {\n", + " earliest_truck_time_slot + datetime.timedelta(hours=hours_from_now): fraction * 100\n", + " for hours_from_now, fraction in enumerate(container_dwell_time_probabilities)\n", + "}\n", + "\n", + "df_container_dwell_time_distribution = pd.Series(\n", + " container_dwell_time_probabilities_as_dates).to_frame(\"Container Dwell Time Distribution\")\n", + "\n", + "df_container_dwell_time_distribution.plot(legend=False)\n", + "plt.ylabel(\"Probability (as percentage overall)\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": 
"31b292d2-d1f4-445c-bff6-40e8672ecb10", + "metadata": {}, + "source": [ + "In the last step, the two distributions are merged by multiplication." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4c9fb034-332a-4f80-b909-ef93c243ad25", + "metadata": {}, + "outputs": [], + "source": [ + "merged_distribution = multiply_discretized_probability_densities(\n", + " list(truck_arrival_distribution_slice.values()),\n", + " container_dwell_time_probabilities\n", + ")\n", + "\n", + "merged_distribution_as_dates = {\n", + " earliest_truck_time_slot + datetime.timedelta(hours=hours_from_now): fraction * 100\n", + " for hours_from_now, fraction in enumerate(merged_distribution)\n", + "}\n", + "\n", + "df_merged_distributions = pd.Series(\n", + " merged_distribution_as_dates).to_frame(\"Multiplication of Both Distributions\")\n", + "\n", + "df_merged_distributions.plot(legend=False)\n", + "plt.ylabel(\"Probability (as percentage overall)\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "14369777-cd39-4fe5-95a4-979355d6556f", + "metadata": {}, + "source": [ + "Let's re-check how the multiplication of the two distributions affected the merged distribution." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0ffbf265-38bc-4d6d-93cc-e7aa884d6e89", + "metadata": {}, + "outputs": [], + "source": [ + "df_merged = pd.concat([\n", + " df_truck_arrival_distribution,\n", + " df_container_dwell_time_distribution,\n", + " df_merged_distributions\n", + "], axis=1)\n", + "\n", + "ax = df_merged[[\"Container Dwell Time Distribution\", \"Truck Arrival Distribution\"]].plot(\n", + " color={\n", + " \"Truck Arrival Distribution\": \"navy\",\n", + " \"Container Dwell Time Distribution\": \"dimgray\",\n", + " },\n", + " alpha=0.5,\n", + " style=\"--\"\n", + ")\n", + "df_merged[[\"Multiplication of Both Distributions\"]].plot(ax=ax, alpha=1, color=\"k\")\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "f8d19d87-f903-4f31-a0de-7204387af67f", + "metadata": {}, + "source": [ + "## Further topics\n", + "\n", + "If you have a topic in mind that should be presented step-by-step like the previous one, please reach out to https://github.com/1kastner/conflowgen/issues or write a mail directly to marvin.kastner@tuhh.de." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/notebooks/input_distributions.ipynb b/docs/notebooks/input_distributions.ipynb index 1a7a0d2e..2e408476 100644 --- a/docs/notebooks/input_distributions.ipynb +++ b/docs/notebooks/input_distributions.ipynb @@ -32,7 +32,9 @@ "\n", "import matplotlib\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "import seaborn as sns\n", + "from IPython.display import Markdown\n", "\n", "import conflowgen" ] @@ -681,7 +683,8 @@ }, "source": [ ".. note::\n", - " .. autodata:: conflowgen.domain_models.distribution_seeders.truck_arrival_distribution_seeder.DEFAULT_TRUCK_ARRIVAL_DISTRIBUTION_WITH_NO_ARRIVAL_MANAGEMENT" + " .. autodata:: conflowgen.domain_models.distribution_seeders.truck_arrival_distribution_seeder.DEFAULT_TRUCK_ARRIVAL_DISTRIBUTION_WITH_NO_ARRIVAL_MANAGEMENT\n", + " :no-value:" ] }, { @@ -759,7 +762,8 @@ }, "source": [ ".. note::\n", - " .. autodata:: conflowgen.domain_models.distribution_seeders.truck_arrival_distribution_seeder.DEFAULT_TRUCK_ARRIVAL_DISTRIBUTION_WITH_SLOT_BOOKING" + " .. autodata:: conflowgen.domain_models.distribution_seeders.truck_arrival_distribution_seeder.DEFAULT_TRUCK_ARRIVAL_DISTRIBUTION_WITH_SLOT_BOOKING\n", + " :no-value:" ] }, { @@ -1008,6 +1012,106 @@ ":class:`.TruckArrivalDistributionManager`." 
] }, + { + "cell_type": "markdown", + "id": "21bbffb9-5e2c-4ff8-9759-fdef6afad68e", + "metadata": {}, + "source": [ + "## Container Dwell Time Distribution" + ] + }, + { + "cell_type": "markdown", + "id": "ddbfdcec-afcd-4542-8f65-991fd40484fc", + "metadata": {}, + "source": [ + "The container dwell time distribution is used in two cases.\n", + "First, the truck arrivals are made more likely whenever the container dwell time is close to its expected duration.\n", + "Second, a vehicle that arrives at the terminal close to the expected container dwell time is more likely to pick up the container." + ] + }, + { + "cell_type": "raw", + "id": "58b165fd-7541-46fc-9e4e-e46cbdbbbd45", + "metadata": { + "raw_mimetype": "text/restructuredtext", + "tags": [] + }, + "source": [ + ".. note::\n", + " .. autodata:: conflowgen.domain_models.distribution_seeders.container_dwell_time_distribution_seeder.DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS\n", + " .. autodata:: conflowgen.domain_models.distribution_seeders.container_dwell_time_distribution_seeder.DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS \n", + " .. autodata:: conflowgen.domain_models.distribution_seeders.container_dwell_time_distribution_seeder.DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS\n", + " .. autodata:: conflowgen.domain_models.distribution_seeders.container_dwell_time_distribution_seeder.DEFAULT_AVERAGE_CONTAINER_DWELL_TIMES\n", + " :no-value:\n", + "\n", + "The default values can be overwritten with the help of :meth:`.ContainerDwellTimeDistributionManager.set_container_dwell_time_distribution`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9dffd81e-453a-4020-be20-5ae486b7f196", + "metadata": {}, + "outputs": [], + "source": [ + "container_dwell_time_distribution_manager = conflowgen.ContainerDwellTimeDistributionManager()\n", + "\n", + "distributions = container_dwell_time_distribution_manager.get_container_dwell_time_distribution()\n", + "\n", + "number_days_in_hours = 30 * 24\n", + "x = np.linspace(0, number_days_in_hours, number_days_in_hours)\n", + "locator = matplotlib.ticker.MultipleLocator(24)\n", + "\n", + "for inbound_vehicle in conflowgen.ModeOfTransport:\n", + "\n", + " display(Markdown(f\"### {str(inbound_vehicle).replace('_', ' ').title()}\"))\n", + "\n", + " for outbound_vehicle in conflowgen.ModeOfTransport:\n", + " for storage_requirement in [\n", + " conflowgen.StorageRequirement.standard, conflowgen.StorageRequirement.empty\n", + " ]:\n", + " distribution = distributions[inbound_vehicle][outbound_vehicle][storage_requirement]\n", + "\n", + " plt.figure(figsize=(20, 5))\n", + " plt.axvline(distribution.minimum, color=\"dimgray\")\n", + " if distribution.maximum < number_days_in_hours:\n", + " plt.axvline(distribution.maximum, color=\"dimgray\")\n", + "\n", + " x_in_range = x[np.where((distribution.minimum < x) & (x < distribution.maximum))]\n", + "\n", + " plt.plot(\n", + " x_in_range,\n", + " distribution.get_probabilities(x_in_range),\n", + " color='gray',\n", + " lw=5,\n", + " alpha=0.6,\n", + " )\n", + " plt.xlabel(\"Container Dwell Time (h)\")\n", + " plt.gca().xaxis.set_major_locator(locator)\n", + " title = (\n", + " f'Container dwell time from {inbound_vehicle} to {outbound_vehicle} '\n", + " f'for a(n) {storage_requirement} container\\n'\n", + " f'avg={distribution.average:.1f}h, var={distribution.variance:.1f}h², '\n", + " f'{distribution.minimum:.1f}h ≤ x ≤ {distribution.maximum:.1f}h or in other terms '\n", + " f'{distribution.minimum / 24:.1f}d ≤ x ≤ {distribution.maximum / 24:.1f}d'\n", + " )\n", + 
" plt.title(title)\n", + " plt.show()" + ] + }, + { + "cell_type": "raw", + "id": "f3833069-6100-4a1a-bec5-d327b13c161f", + "metadata": { + "raw_mimetype": "text/restructuredtext", + "tags": [] + }, + "source": [ + "More information on setting and getting the distribution can be found at\n", + ":class:`.ContainerDwellTimeDistributionManager`." + ] + }, { "cell_type": "markdown", "id": "60afb2c6-a1c9-435a-92f7-b942bc072314", @@ -1031,28 +1135,11 @@ }, "source": [ ".. note::\n", - " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_MAXIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS\n", - " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_MINIMUM_DWELL_TIME_OF_IMPORT_CONTAINERS_IN_HOURS\n", - " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_MAXIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS\n", - " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_MINIMUM_DWELL_TIME_OF_EXPORT_CONTAINERS_IN_HOURS\n", - " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_MAXIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS\n", - " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_MINIMUM_DWELL_TIME_OF_TRANSSHIPMENT_CONTAINERS_IN_HOURS\n", " .. autodata:: conflowgen.domain_models.seeders.DEFAULT_TRANSPORTATION_BUFFER\n", "\n", "The default values can be overwritten with the help of :meth:`.ContainerFlowGenerationManager.set_properties`." ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "9dffd81e-453a-4020-be20-5ae486b7f196", - "metadata": {}, - "outputs": [], - "source": [ - "container_flow_generation_manager = conflowgen.ContainerFlowGenerationManager()\n", - "container_flow_generation_manager.get_properties()" - ] - }, { "cell_type": "markdown", "id": "92aa124e-eebc-463d-9701-817c0613829c", @@ -1060,7 +1147,7 @@ "source": [ "All default values are optional.\n", "They are only overwritten if provided.\n", - "The parameters `start_date` and `end_date` are obligatory though." 
+ "The parameters `start_date` and `end_date` are obligatory though and no default values are provided." ] }, { @@ -1070,10 +1157,12 @@ "metadata": {}, "outputs": [], "source": [ + "container_flow_generation_manager = conflowgen.ContainerFlowGenerationManager()\n", + "\n", "container_flow_generation_manager.set_properties(\n", " start_date=datetime.date(2021, 1, 15),\n", " end_date=datetime.date(2021, 1, 31),\n", - " maximum_dwell_time_of_export_containers_in_hours=10 * 24\n", + " transportation_buffer=0.4\n", ")\n", "container_flow_generation_manager.get_properties()" ] diff --git a/docs/notebooks/previews.ipynb b/docs/notebooks/previews.ipynb index 85b21672..fca84273 100644 --- a/docs/notebooks/previews.ipynb +++ b/docs/notebooks/previews.ipynb @@ -156,7 +156,7 @@ "For more informaiton on this preview report, please check\n", ":class:`.InboundAndOutboundVehicleCapacityPreviewReport`.\n", "A list of all previews including their corresponding reports shipped with ConFlowGen is available at\n", - ":ref:`Getting previews`." + ":ref:`Generating previews`." 
] }, { @@ -249,7 +249,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.8" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/docs/references.bib b/docs/references.bib index a4ecd1c9..263870d5 100644 --- a/docs/references.bib +++ b/docs/references.bib @@ -30,6 +30,12 @@ @online{hafenhamburg2020modalsplit year = 2021 } +@misc{cto2021interview, + author = {{A container terminal operator in the German Bight}}, + title = {Conversation on Container Dwell times}, + year = 2021 +} + @article{hartmann2004generating, title={Generating scenarios for simulation and optimization of container terminal logistics}, author={Hartmann, S{\"o}nke}, diff --git a/run_ci_light.bat b/run_ci_light.bat index 7f785b17..212a75ff 100644 --- a/run_ci_light.bat +++ b/run_ci_light.bat @@ -78,6 +78,11 @@ pylint setup.py || ( EXIT /B ) +pylint conflowgen.tests || ( + ECHO.While linting the conflowgen tests, pylint failed! + EXIT /B +) + REM build docs CALL docs/make clean || ( ECHO.Cleaning up the last built of the documentation failed! @@ -93,7 +98,7 @@ START "" ./docs/_build/html/index.html REM check the links in the docs CALL python -m sphinx -W --keep-going ./docs/ ./docs/_build/linkcheck/ -b linkcheck || ( - ECHO.At least one link in the docs is broken! + ECHO.The linkcheck has spotted an issue, please check! EXIT /B ) diff --git a/setup.py b/setup.py index 6024caee..55ea0296 100644 --- a/setup.py +++ b/setup.py @@ -23,6 +23,9 @@ url='https://github.com/1kastner/conflowgen', python_requires='>=3.8', install_requires=[ + # working with distributions and statistics + 'scipy', # used for, e.g., the lognorm distribution + # data export 'numpy', # used in combination with pandas for column types 'pandas >=1', # CSV/Excel import and export @@ -36,8 +39,9 @@ # for creating the visuals 'matplotlib', # default plots such as bar charts, pie charts, etc. - 'plotly', # useful for e.g. 
Sankey diagrams 'seaborn', # exchanges matplotlib color palletes + 'plotly', # useful for, e.g., Sankey diagrams + 'kaleido', # plotly depends on this package for exporting its figures, we got this as a present ], extras_require={ # Only needed to run the unittests and generate the documentation @@ -48,7 +52,7 @@ 'pytest-github-actions-annotate-failures', # turns pytest failures into action annotations # build documentation - 'sphinx', # build the documentation + 'sphinx <5', # build the documentation 'sphinx-rtd-theme', # adding the nice sphinx theme 'sphinx-toolbox', # dependency of enum_tools, we got this as a present 'myst-parser', # for Contributing.md @@ -59,13 +63,10 @@ 'ipykernel', # for allowing nbsphinx to execute the Jupyter Notebooks 'jupyterlab', # develop the Jupyter Notebooks - # usually optional dependencies that are used in the documentation - 'kaleido', # plotly depends on this package for SVG export, we got this as a present - # checking code quality 'pylint', # lint Python code 'flake8', # lint Python code - 'flake8_nb', # lint Jupyter Notebooks + 'flake8_nb >=0.4', # lint Jupyter Notebooks # publish at PyPI 'twine'