diff --git a/.github/workflows/installation-from-remote.yaml b/.github/workflows/installation-from-remote.yaml
index 1bc585d0..6435eb3f 100644
--- a/.github/workflows/installation-from-remote.yaml
+++ b/.github/workflows/installation-from-remote.yaml
@@ -10,7 +10,7 @@ jobs:
 
   build-conda-on-windows:
     name: ConFlowGen installation via conda on Windows
-    runs-on: windows-latest
+    runs-on: windows-2019
     env:
       MPLBACKEND: Agg
 
@@ -31,10 +31,15 @@ jobs:
           conda info
           conda create -n test-install-conflowgen -c conda-forge conflowgen pytest
 
+      - name: Prepare tests
+        run: |
+          conda activate test-install-conflowgen
+          conda install pillow>=9.0
+
       - name: Run tests
         run: |
           conda activate test-install-conflowgen
-          conda install pillow=9.0.0
+          python -c "import conflowgen; print('ConFlowGen version: ', conflowgen.__version__)"
           python -m pytest --pyargs conflowgen
 
   build-conda-on-linux:
@@ -67,13 +72,14 @@ jobs:
         run: |
           eval "$(conda shell.bash hook)"
           conda activate test-install-conflowgen
+          python -c "import conflowgen; print('ConFlowGen version: ', conflowgen.__version__)"
           python -m pytest --pyargs conflowgen
 
   build-pip:
 
     strategy:
       matrix:
-        os: [ubuntu-latest, windows-latest]
+        os: [ubuntu-latest, windows-2019]
 
     name: ConFlowGen installation via pip
     runs-on: ${{ matrix.os }}
@@ -96,4 +102,5 @@ jobs:
 
       - name: Run tests
        run: |
+          python -c "import conflowgen; print('ConFlowGen version: ', conflowgen.__version__)"
           python -m pytest --pyargs conflowgen
diff --git a/.gitignore b/.gitignore
index 37679273..f1eb5052 100644
--- a/.gitignore
+++ b/.gitignore
@@ -44,6 +44,7 @@ docs/_build/
 # Publishing new versions
 build/
 dist/
+.pypirc
 
 # The dirty Read the Docs hack - downloading git-lfs on-the-fly in case it is missing
 docs/.tools/
@@ -61,5 +62,5 @@ examples/Python_Script/export/
 examples/Python_Script/databases/
 
 # Ignore local changes as they happen with every execution. If something changes, the commit must be forced.
-docs/notebooks/data/prepared_dbs/demo_poc.sqlite
 conflowgen/data/tools/
+docs/notebooks/data/prepared_dbs/
diff --git a/CITATION.cff b/CITATION.cff
index a7477ecd..09703732 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -18,7 +18,7 @@ authors:
     email: ole.grasse@tuhh.de
     affiliation: Hamburg University of Technology (TUHH), Institute of Maritime Logistics
     orcid: 'https://orcid.org/0000-0003-1982-9436'
-version: 2.0.1
+version: 2.1.1
 repository-code: "https://github.com/1kastner/conflowgen"
 keywords:
   - logistics
diff --git a/Contributing.md b/Contributing.md
index a796b367..9f7f84b2 100644
--- a/Contributing.md
+++ b/Contributing.md
@@ -61,7 +61,21 @@
 For generating the documentation, sphinx is used - mostly the default settings are maintained.
 The documentation generation process is based on the sphinx boilerplate and the `make` process is unchanged.
 To generate the documentation, move to the directory `/docs`.
-Here, as a Windows user you run `.\make.bat html` from the PowerShell or CMD.
+First, please make sure that you have up-to-date prepared sqlite databases in
+`/docs/notebooks/data/prepared_dbs/`.
+The sqlite databases compatible with the latest version of ConFlowGen are available at
+https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/.
+In `./docs/download_prepared_sqlite_databases.ps1`, you find instructions on how to download the latest databases
+and where to store them.
+In case you have updated the sqlite schema, you might need to create these databases on your own with your latest
+adaptations.
+This is achieved by running the scripts stored in
+`/examples/Python_Script/`
+and copying the resulting sqlite databases into
+`/docs/notebooks/data/prepared_dbs/`.
+
+Once the prepared databases are in place, the documentation can be created.
+As a Windows user, you run `.\make.bat html` from the PowerShell or CMD.
 Linux users invoke `make html` instead.
 The landing page of the documentation is created at `/docs/_build/html/index.html`.
 It is advised to use a strict approach by using the additional argument `SPHINXOPTS="-W --keep-going`
diff --git a/conflowgen/metadata.py b/conflowgen/metadata.py
index 5464b875..4a94be33 100644
--- a/conflowgen/metadata.py
+++ b/conflowgen/metadata.py
@@ -1,4 +1,4 @@
-__version__ = "2.0.1"
+__version__ = "2.1.1"
 __license__ = "MIT"
 __description__ = """
 A generator for synthetic container flows at maritime container terminals with a focus on yard operations
diff --git a/conflowgen/tests/data_summaries/test_data_summaries_cache.py b/conflowgen/tests/data_summaries/test_data_summaries_cache.py
index 993cc97f..6448b8d9 100644
--- a/conflowgen/tests/data_summaries/test_data_summaries_cache.py
+++ b/conflowgen/tests/data_summaries/test_data_summaries_cache.py
@@ -137,8 +137,6 @@ def test_with_preview(self):
         preview = self.preview.get_weekly_truck_arrivals(True, True)
         self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect")
         self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
-        self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
-                        {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached")
         # pylint: disable=protected-access
         self.assertDictEqual(
             DataSummariesCache._hit_counter,
@@ -155,8 +153,7 @@ def test_with_preview(self):
         preview = self.preview.get_weekly_truck_arrivals(True, True)
         self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect")
         self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
-        self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
-                        {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached")
+
         # pylint: disable=protected-access
         self.assertDictEqual(
             DataSummariesCache._hit_counter,
@@ -187,8 +184,7 @@ def test_with_adjusted_preview(self):
         preview = self.preview.get_weekly_truck_arrivals(True, True)
         self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect")
         self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
-        self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
-                        {3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached")
+
         # pylint: disable=protected-access
         self.assertDictEqual(
             DataSummariesCache._hit_counter,
@@ -217,10 +213,7 @@ def test_with_adjusted_preview(self):
         self.assertEqual(preview, {3: 6, 4: 24, 5: 30}, "New result is incorrect")
         self.assertEqual(
             len(DataSummariesCache.cached_results), 10,
-            "There should be 9 cached results, because the preview was adjusted")
-        self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
-                        {3: 6, 4: 24, 5: 30} in list(DataSummariesCache.cached_results.values()),
-                        "Incorrect results cached")
+            "There should be 10 cached results, because the preview was adjusted")
         # pylint: disable=protected-access
         self.assertDictEqual(
             DataSummariesCache._hit_counter,
@@ -444,8 +437,9 @@ def return_dictionary():
 
         self.assertEqual(return_dictionary(), {"a": 1, "b": 2}, "Function should return dictionary")
         self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result")
-        self.assertTrue({"a": 1, "b": 2} in list(DataSummariesCache.cached_results.values()), "Dictionary should be "
-                                                                                              "cached")
+        self.assertTrue(
+            {"a": 1, "b": 2} in list(DataSummariesCache.cached_results.values()),
+            "Dictionary should be cached")
 
         # pylint: disable=protected-access
         self.assertEqual(DataSummariesCache._hit_counter, {'return_dictionary': 1})
diff --git a/docs/download_prepared_sqlite_databases.ps1 b/docs/download_prepared_sqlite_databases.ps1
new file mode 100644
index 00000000..9654e3fe
--- /dev/null
+++ b/docs/download_prepared_sqlite_databases.ps1
@@ -0,0 +1,15 @@
+# Do not show the progress bar during download
+$ProgressPreference = 'SilentlyContinue'
+
+# Describe metadata for download operation
+$sqlite_databases = "demo_continental_gateway", "demo_deham_cta", "demo_poc"
+$web_root_directory = "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/"
+$local_prepared_db_root_directory = "./notebooks/data/prepared_dbs"
+
+# Actually execute the download
+foreach ($db in $sqlite_databases) {
+    $source_url = "$($web_root_directory)$($db).sqlite"
+    $target_path = "$($local_prepared_db_root_directory)/$($db).sqlite"
+    Write-Output "Download $source_url and save it at $target_path"
+    Invoke-WebRequest $source_url -OutFile $target_path
+}
diff --git a/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite b/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite
deleted file mode 100644
index a375e91b..00000000
--- a/docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:afe66aad30052ed3f1883f7147fa0a302fc84f84f55ccbb52f40aa6ddb1d1a44
-size 18980864
diff --git a/docs/notebooks/data/prepared_dbs/demo_poc.sqlite b/docs/notebooks/data/prepared_dbs/demo_poc.sqlite
deleted file mode 100644
index 9622ba2e..00000000
--- a/docs/notebooks/data/prepared_dbs/demo_poc.sqlite
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1ea43c0a5ed2e40a7aae26e57ce0ef8d4e63bf1638396baacd173a3afa0f80ae
-size 323584
diff --git a/run_ci_light.bat b/run_ci_light.bat
index 5197cf0c..c06d2132 100644
--- a/run_ci_light.bat
+++ b/run_ci_light.bat
@@ -90,6 +90,9 @@ CALL docs/make clean || (
 )
 
 CALL docs/make html || (
     ECHO.Building the documentation failed!
+    ECHO.If you have issues with the SQLite databases, consider running ./docs/download_prepared_sqlite_databases.ps1
+    ECHO.If the issues remain, consider using your own up-to-date databases created by the demo scripts. You just need
+    ECHO.to copy those to the directories as indicated in the PowerShell script.
     EXIT /B
 )
diff --git a/setup.py b/setup.py
index 3d70c443..245d6576 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@
         'peewee >=3',  # ORM mapper
 
         # documentation - decorators used for sphinx but are part of the source code delivered to customers
-        'enum_tools >=0.7',  # used for documenting enums via decorators
+        'enum_tools >=0.7',  # used for documenting enums via decorators - previous versions are not compatible
 
         # for creating the visuals
         'matplotlib',  # default plots such as bar charts, pie charts, etc.
@@ -51,42 +51,36 @@
             'pytest-xdist',  # use several processes to speed up the testing process
             'pytest-github-actions-annotate-failures',  # turns pytest failures into action annotations
             'seaborn',  # some visuals in unittests are generated by seaborn
-            'nbformat',
-            'nbconvert',
+            'nbconvert',  # used to run tests in Jupyter notebooks, see ./test/notebooks/test_run_notebooks.py
 
             # build documentation
-            'sphinx',  # build the documentation
+            'sphinx >=6.2',  # build the documentation - restrict version to improve pip version resolution
             'sphinx-rtd-theme',  # adding the nice sphinx theme
-            'sphinx-toolbox',  # dependency of enum_tools, we got this as a present
+            'sphinx-toolbox >=3',  # additional dependency of enum_tools - restrict version to improve pip resolution
             'myst-parser',  # for Contributing.md
-            'sphinxcontrib-bibtex',  # a good help for citing
+            'sphinxcontrib-bibtex >=2.4',  # a good help for citing - restrict version to improve pip resolution
             'nbsphinx',  # use Jupyter Notebooks in the documentation
             'ipython',  # for setting up the pygments_lexer
             'ipykernel',  # for allowing nbsphinx to execute the Jupyter Notebooks
-            'jupyterlab',  # continue development on the Jupyter Notebooks included in this repository
 
             # checking code quality
             'pylint',  # lint Python code
-            'flake8 <6.0',  # lint Python code
-            'flake8_nb',  # lint Jupyter Notebooks
+            'flake8',  # lint Python code
+            'flake8_nb',  # lint code in Jupyter Notebooks
 
             # publish at PyPI
             'wheel',  # use command 'bdist_wheel'
             'twine',  # check and upload package to PyPI
+
+            # pip resolution issue - https://github.com/pypa/pip/issues/12430#issuecomment-1849059000
+            'sphinx-tabs'
         ],
         # a collection of nice-to-haves for working on Jupyter Notebooks - just a favorites list of the authors
         'jupyterlab': [
-            "jupyterlab-spellchecker",
-            "jupyterlab-lsp",
-            "python-lsp-server",
-            "pyls-flake8",
-            "autopep8",
-            "rope",
-            "yapf",
-            "pydocstyle",
-            "jupyterlab_code_formatter",
-            "black",
-            "isort"
+            'jupyterlab',  # continue development on the Jupyter Notebooks included in this repository
+            "jupyterlab-spellchecker",  # avoid typos in documentation
+            "jupyterlab-lsp",  # better autocomplete
+            "python-lsp-server[all]",  # better autocomplete
         ]
     },
     license=metadata['__license__'],
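
Note for contributors without PowerShell: the patch only ships the downloader docs/download_prepared_sqlite_databases.ps1. The Python sketch below mirrors it as a cross-platform alternative; it is illustrative and not part of this patch. The URL, database names, and target directory are taken from the PowerShell script above, while the idea of a separate Python helper (and its hypothetical file name) is an assumption. Run it from within the /docs directory.

# Illustrative cross-platform counterpart to docs/download_prepared_sqlite_databases.ps1 (not part of this patch).
# Downloads the prepared sqlite databases used by the documentation notebooks into docs/notebooks/data/prepared_dbs/.
import pathlib
import urllib.request

web_root_directory = "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/"
local_prepared_db_root_directory = pathlib.Path("./notebooks/data/prepared_dbs")  # relative to /docs
sqlite_databases = ("demo_continental_gateway", "demo_deham_cta", "demo_poc")

local_prepared_db_root_directory.mkdir(parents=True, exist_ok=True)
for db in sqlite_databases:
    source_url = f"{web_root_directory}{db}.sqlite"
    target_path = local_prepared_db_root_directory / f"{db}.sqlite"
    print(f"Download {source_url} and save it at {target_path}")
    urllib.request.urlretrieve(source_url, str(target_path))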