Merge branch 'main' into make-latex-docs-possible
# Conflicts:
#	setup.py
1kastner committed Dec 14, 2023
2 parents c585795 + 121b272 commit 1516ee8
Showing 11 changed files with 66 additions and 44 deletions.
13 changes: 10 additions & 3 deletions .github/workflows/installation-from-remote.yaml
@@ -10,7 +10,7 @@ jobs:
build-conda-on-windows:

name: ConFlowGen installation via conda on Windows
runs-on: windows-latest
runs-on: windows-2019
env:
MPLBACKEND: Agg

@@ -31,10 +31,15 @@ jobs:
conda info
conda create -n test-install-conflowgen -c conda-forge conflowgen pytest
- name: Prepare tests
run: |
conda activate test-install-conflowgen
conda install pillow>=9.0
- name: Run tests
run: |
conda activate test-install-conflowgen
conda install pillow=9.0.0
python -c "import conflowgen; print('ConFlowGen version: ', conflowgen.__version__)"
python -m pytest --pyargs conflowgen
build-conda-on-linux:
@@ -67,13 +72,14 @@ jobs:
run: |
eval "$(conda shell.bash hook)"
conda activate test-install-conflowgen
python -c "import conflowgen; print('ConFlowGen version: ', conflowgen.__version__)"
python -m pytest --pyargs conflowgen
build-pip:

strategy:
matrix:
os: [ubuntu-latest, windows-latest]
os: [ubuntu-latest, windows-2019]

name: ConFlowGen installation via pip
runs-on: ${{ matrix.os }}
@@ -96,4 +102,5 @@ jobs:
- name: Run tests
run: |
python -c "import conflowgen; print('ConFlowGen version: ', conflowgen.__version__)"
python -m pytest --pyargs conflowgen
3 changes: 2 additions & 1 deletion .gitignore
@@ -44,6 +44,7 @@ docs/_build/
# Publishing new versions
build/
dist/
.pypirc

# The dirty Read the Docs hack - downloading git-lfs on-the-fly in case it is missing
docs/.tools/
@@ -61,5 +62,5 @@ examples/Python_Script/export/
examples/Python_Script/databases/

# Ignore local changes as they happen with every execution. If something changes, the commit must be forced.
docs/notebooks/data/prepared_dbs/demo_poc.sqlite
conflowgen/data/tools/
docs/notebooks/data/prepared_dbs/
2 changes: 1 addition & 1 deletion CITATION.cff
@@ -18,7 +18,7 @@ authors:
email: [email protected]
affiliation: Hamburg University of Technology (TUHH), Institute of Maritime Logistics
orcid: 'https://orcid.org/0000-0003-1982-9436'
version: 2.0.1
version: 2.1.1
repository-code: "https://github.com/1kastner/conflowgen"
keywords:
- logistics
16 changes: 15 additions & 1 deletion Contributing.md
@@ -61,7 +61,21 @@ For generating the documentation,
is used - mostly the default settings are maintained.
The documentation generation process is based on the sphinx boilerplate and the `make` process is unchanged.
To generate the documentation, move to the directory `<project-root>/docs`.
Here, as a Windows user you run `.\make.bat html` from the PowerShell or CMD.
First, please make sure that you have up-to-date prepared sqlite databases in
`<project-root>/docs/notebooks/data/prepared_dbs/`.
The sqlite databases compatible with the latest version of ConFlowGen are available at
https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/.
In `./docs/download_prepared_sqlite_databases.ps1`, you find the instructions for how to download the latest databases
and where to store them.
In case you have updated the sqlite schema, you might need to create these databases on your own with your latest
adaptations.
This is achieved by running the scripts stored in
`<project-root>/examples/Python_Script/`
and copying the resulting sqlite databases into
`<project-root>/docs/notebooks/data/prepared_dbs/`.

Once the prepared databases are in place, the documentation can be created.
As a Windows user you run `.\make.bat html` from the PowerShell or CMD.
Linux users invoke `make html` instead.
The landing page of the documentation is created at `<project-root>/docs/_build/html/index.html`.
It is advised to use a strict approach by using the additional argument `SPHINXOPTS="-W --keep-going`
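For contributors who are not on Windows, the download described above can be scripted in Python as well. The following is a minimal sketch (not part of this commit); it merely mirrors the URLs, file names, and target directory of the PowerShell script added further down in this diff and assumes it is run from the project root:

import urllib.request
from pathlib import Path

web_root = "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/"
target_dir = Path("docs/notebooks/data/prepared_dbs")  # relative to the project root
target_dir.mkdir(parents=True, exist_ok=True)

for db in ("demo_continental_gateway", "demo_deham_cta", "demo_poc"):
    source_url = f"{web_root}{db}.sqlite"
    target_path = target_dir / f"{db}.sqlite"
    print(f"Download {source_url} and save it at {target_path}")
    urllib.request.urlretrieve(source_url, str(target_path))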
2 changes: 1 addition & 1 deletion conflowgen/metadata.py
@@ -1,4 +1,4 @@
__version__ = "2.0.1"
__version__ = "2.1.1"
__license__ = "MIT"
__description__ = """
A generator for synthetic container flows at maritime container terminals with a focus on yard operations
18 changes: 6 additions & 12 deletions conflowgen/tests/data_summaries/test_data_summaries_cache.py
@@ -137,8 +137,6 @@ def test_with_preview(self):
preview = self.preview.get_weekly_truck_arrivals(True, True)
self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect")
self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
{3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached")
# pylint: disable=protected-access
self.assertDictEqual(
DataSummariesCache._hit_counter,
@@ -155,8 +153,7 @@ def test_with_preview(self):
preview = self.preview.get_weekly_truck_arrivals(True, True)
self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect")
self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
{3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached")

# pylint: disable=protected-access
self.assertDictEqual(
DataSummariesCache._hit_counter,
@@ -187,8 +184,7 @@ def test_with_adjusted_preview(self):
preview = self.preview.get_weekly_truck_arrivals(True, True)
self.assertEqual(preview, {3: 12, 4: 48}, "Uncached result is incorrect")
self.assertEqual(len(DataSummariesCache.cached_results), 10, "There should be 10 cached results")
self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
{3: 12, 4: 48} in list(DataSummariesCache.cached_results.values()), "Incorrect results cached")

# pylint: disable=protected-access
self.assertDictEqual(
DataSummariesCache._hit_counter,
@@ -217,10 +213,7 @@ def test_with_adjusted_preview(self):
self.assertEqual(preview, {3: 6, 4: 24, 5: 30}, "New result is incorrect")
self.assertEqual(
len(DataSummariesCache.cached_results), 10,
"There should be 9 cached results, because the preview was adjusted")
self.assertTrue(59.999999999999986 in list(DataSummariesCache.cached_results.values()) and
{3: 6, 4: 24, 5: 30} in list(DataSummariesCache.cached_results.values()),
"Incorrect results cached")
"There should be 10 cached results, because the preview was adjusted")
# pylint: disable=protected-access
self.assertDictEqual(
DataSummariesCache._hit_counter,
@@ -444,8 +437,9 @@ def return_dictionary():

self.assertEqual(return_dictionary(), {"a": 1, "b": 2}, "Function should return dictionary")
self.assertEqual(len(DataSummariesCache.cached_results), 1, "Cache should have one result")
self.assertTrue({"a": 1, "b": 2} in list(DataSummariesCache.cached_results.values()), "Dictionary should be "
"cached")
self.assertTrue(
{"a": 1, "b": 2} in list(DataSummariesCache.cached_results.values()),
"Dictionary should be cached")
# pylint: disable=protected-access
self.assertEqual(DataSummariesCache._hit_counter, {'return_dictionary': 1})

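The assertions removed in this file pinned the exact floating-point value 59.999999999999986 among the cached results. If a similar check is ever reinstated, comparing with a tolerance is less brittle than exact equality; a minimal, generic pytest sketch (not ConFlowGen code, shown only for illustration):

import pytest

def test_cached_float_compares_with_tolerance():
    cached_value = 59.999999999999986  # value taken from the deleted assertions
    assert cached_value == pytest.approx(60.0)  # tolerant comparison passes
    assert cached_value != 60.0  # strict equality would not hold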
15 changes: 15 additions & 0 deletions docs/download_prepared_sqlite_databases.ps1
@@ -0,0 +1,15 @@
# Do not show the progress bar during download
$ProgressPreference = 'SilentlyContinue'

# Describe metadata for download operation
$sqlite_databases = "demo_continental_gateway", "demo_deham_cta", "demo_poc"
$web_root_directory = "https://media.tuhh.de/mls/software/conflowgen/docs/data/prepared_dbs/"
$local_prepared_db_root_directory = "./notebooks/data/prepared_dbs"

# Actually execute the download
foreach ($db in $sqlite_databases) {
$source_url = "$($web_root_directory)$($db).sqlite"
$target_path = "$($local_prepared_db_root_directory)/$($db).sqlite"
Write-Output "Download $source_url and save it at $target_path"
Invoke-WebRequest $source_url -OutFile $target_path
}
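Note that the target path in this script is relative (`./notebooks/data/prepared_dbs`), so it is presumably meant to be invoked from within the `docs` directory, e.g. by opening a PowerShell session at `<project-root>/docs` and running `.\download_prepared_sqlite_databases.ps1` (assumed invocation).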
3 changes: 0 additions & 3 deletions docs/notebooks/data/prepared_dbs/demo_deham_cta.sqlite

This file was deleted.

3 changes: 0 additions & 3 deletions docs/notebooks/data/prepared_dbs/demo_poc.sqlite

This file was deleted.

3 changes: 3 additions & 0 deletions run_ci_light.bat
@@ -90,6 +90,9 @@ CALL docs/make clean || (
)
CALL docs/make html || (
ECHO.Building the documentation failed!
ECHO.If you have issues with the SQLITE databases, consider running ./docs/download_prepared_sqlite_databases.ps1
ECHO.If the issues remain, consider using your own up-to-date databases created by the demo scripts. You just need
ECHO.to copy those to the directories as indicated in the PowerShell script.
EXIT /B
)

32 changes: 13 additions & 19 deletions setup.py
@@ -35,7 +35,7 @@
'peewee >=3', # ORM mapper

# documentation - decorators used for sphinx but are part of the source code delivered to customers
'enum_tools >=0.7', # used for documenting enums via decorators
'enum_tools >=0.7', # used for documenting enums via decorators - previous versions are not compatible

# for creating the visuals
'matplotlib', # default plots such as bar charts, pie charts, etc.
@@ -51,15 +51,14 @@
'pytest-xdist', # use several processes to speed up the testing process
'pytest-github-actions-annotate-failures', # turns pytest failures into action annotations
'seaborn', # some visuals in unittests are generated by seaborn
'nbformat',
'nbconvert',
'nbconvert', # used to run tests in Jupyter notebooks, see ./test/notebooks/test_run_notebooks.py

# build documentation
'sphinx', # build the documentation
'sphinx >=6.2', # build the documentation - restrict version to improve pip version resolution
'sphinx-rtd-theme', # adding the nice sphinx theme
'sphinx-toolbox', # dependency of enum_tools, we got this as a present
'sphinx-toolbox >=3', # additional dependency of enum_tools - restrict version to improve pip resolution
'myst-parser', # for Contributing.md
'sphinxcontrib-bibtex', # a good help for citing
'sphinxcontrib-bibtex >=2.4', # a good help for citing - restrict version to improve pip resolution
'sphinxcontrib-svg2pdfconverter[CairoSVG]', # creating a PDF
'nbsphinx', # use Jupyter Notebooks in the documentation
'ipython', # for setting up the pygments_lexer
@@ -68,27 +67,22 @@

# checking code quality
'pylint', # lint Python code
'flake8 <6.0', # lint Python code
'flake8_nb', # lint Jupyter Notebooks
'flake8', # lint Python code
'flake8_nb', # lint code in Jupyter Notebooks

# publish at PyPI
'wheel', # use command 'bdist_wheel'
'twine', # check and upload package to PyPI

# pip resolution issue - https://github.com/pypa/pip/issues/12430#issuecomment-1849059000
'sphinx-tabs'
],
# a collection of nice-to-haves for working on Jupyter Notebooks - just a favorites list of the authors
'jupyterlab': [
'jupyterlab', # continue development on the Jupyter Notebooks included in this repository
"jupyterlab-spellchecker",
"jupyterlab-lsp",
"python-lsp-server",
"pyls-flake8",
"autopep8",
"rope",
"yapf",
"pydocstyle",
"jupyterlab_code_formatter",
"black",
"isort"
"jupyterlab-spellchecker", # avoid typos in documentation
"jupyterlab-lsp", # better autocomplete
"python-lsp-server[all]", # better autocomplete
]
},
license=metadata['__license__'],
