tox.ini
[tox]
envlist =
{py311, py310, py39, py38, py37, py36}-{dev, sdist, wheel}-{linux, macos, windows}
isolated_build = true
skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True}
passenv = TOXENV CI CODECOV_*
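# Local usage (illustrative): a bare `tox` runs every environment in the matrix above,
# while a single environment can be selected with e.g. `tox -e py311-dev-linux`
# (pick the factors matching your interpreter and platform).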
[gh-actions]
python =
3.6: {py36}{, -path, -sdist, -wheel, -dev}
3.7: {py37}{, -path, -sdist, -wheel, -dev}
3.8: {py38}{, -path, -sdist, -wheel, -dev}
3.9: {py39}{, -path, -sdist, -wheel, -dev}
3.10: {py310}{, -path, -sdist, -wheel, -dev}
3.11: {py311}{, -path, -sdist, -wheel, -dev}
[gh-actions:env]
PLATFORM =
ubuntu-latest: linux
macos-latest: macos
windows-latest: windows
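# PLATFORM is expected to be exported by the GitHub Actions workflow (e.g. from the
# runner matrix); tox-gh-actions uses this mapping to select the matching
# -linux/-macos/-windows factor for the job.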
[testenv]
; basepython =
; {docs,spell,dev,dev-cov,notebook}: {env:TOXPYTHON:python3}
; {bootstrap,clean,check,report,codecov,coveralls,quality}: {env:TOXPYTHON:python3}
passenv =
YOUTUBE_DL
CI_*
setenv =
PYTHONPATH={toxinidir}/tests
PYTHONUNBUFFERED=yes
PIP_DISABLE_PIP_VERSION_CHECK=1
VIRTUALENV_NO_DOWNLOAD=1
DIST_DIR = dist
PY_PACKAGE = music_album_creation
black,lint,isort: LINT_ARGS = "src tests"
COVERAGE_FILE = {toxworkdir}{/}.coverage.{envname}
TEST_STATUS_DIR = {envtmpdir}
extras = test
commands =
{posargs:pytest --cov --cov-report=term-missing -vv}
; {posargs:pytest -vv --ignore=src}
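# Anything passed after `--` on the tox command line is substituted for {posargs} and
# replaces the default pytest invocation above, e.g. (illustrative):
#   tox -e py311-sdist-linux -- pytest -x -vv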
[testenv:dev]
description = Install in 'edit' mode and test
usedevelop = true
commands = pytest {posargs:-ra tests -vv -s}
[testenv:dev-cov]
description = Install in 'edit' mode, Test & measure Coverage
usedevelop = true
commands =
; add the '-n auto' flag for automatic distribution of unit tests across available CPUs
pytest -ra --cov --cov-report=term-missing \
--cov-report=html:{envdir}/htmlcov --cov-context=test \
--cov-report=xml:{toxworkdir}/coverage.{envname}.xml \
--junit-xml={toxworkdir}/xunit.{envname}.xml \
{posargs:{toxinidir}{/}tests}
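# Reports are written to the paths given in the flags above: an HTML report in
# {envdir}/htmlcov, Cobertura XML in {toxworkdir}/coverage.<envname>.xml and a
# JUnit file in {toxworkdir}/xunit.<envname>.xml.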
[testenv:download-test-data]
description = Install in 'edit' mode and download sample test data into tests/data
usedevelop = true
changedir = {toxinidir}
commands =
python -c 'from pytube import YouTube; from music_album_creation.downloading import CMDYoutubeDownloader as YD; YD().download_trials("https://www.youtube.com/watch?v=OvC-4BixxkY", "tests/data")'
## COVERAGE
[testenv:coverage]
description = Combine coverage from test environments
passenv =
DIFF_AGAINST
setenv =
COVERAGE_FILE = {toxworkdir}/.coverage
skip_install = true
deps =
coverage[toml]>=5.1
diff_cover>=6
parallel_show_output = true
commands =
coverage combine
coverage report --skip-covered --show-missing -i
coverage xml -o {toxworkdir}/coverage.xml -i
coverage html -d {toxworkdir}/htmlcov -i
depends = {py311, py310, py39, py38, py37, py36}{, -path, -sdist, -wheel, -dev}
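# Typical flow (illustrative): run one or more test environments first, then combine
# their per-env data files, e.g. `tox -e py311-dev,coverage`; the combined report,
# coverage.xml and htmlcov/ end up under {toxworkdir}.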
[testenv:clean]
deps = coverage
skip_install = true
commands = coverage erase
[testenv:notebook]
description = Run ML ops in a Jupyter notebook
extras = notebook
usedevelop = true
commands =
; poetry install -E notebook
jupyter notebook
; jupyter notebook
## PYTHON PACKAGING
[testenv:build]
description = Create a source and wheel distribution.
Creates .tar.gz and .whl files in the {env:DIST_DIR} folder, which can be uploaded to a PyPI index server.
basepython = {env:TOXPYTHON:python3}
deps = build
skip_install = true
changedir = {toxinidir}
commands_pre =
python -c 'import os; import shutil; d = "{env:DIST_DIR}"; exec("if os.path.exists(d):\n shutil.rmtree(d)");'
commands =
python -m build {toxinidir} --outdir {env:DIST_DIR}
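# Usage: `tox -e build` removes any existing {env:DIST_DIR} folder and recreates it
# with a fresh .tar.gz and .whl produced by the PEP 517 frontend `python -m build`.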
[testenv:check]
description = Check the code for compliance with best practices of the Python packaging ecosystem (PyPI, pip, Distribute, etc.).
deps =
poetry-core
pyroma
twine
skip_install = true
commands =
pyroma --directory {toxinidir}
pyroma --file {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PKG_VERSION}.tar.gz
python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PKG_VERSION}*
# TODO Improvement run 'pyroma --pypi' from some script/CI server after uploading to test-pypi
depends = build
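# This env relies on PKG_VERSION matching the version of the distribution just built,
# e.g. (illustrative): PKG_VERSION=1.0.0 tox -e build,check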
### DEPLOYMENT
[testenv:deploy]
# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy
# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy
description = Deploy the Python package to a PyPI server. Authenticating with the PyPI
server is required, so please set the TWINE_USERNAME and TWINE_PASSWORD environment variables.
Also requires the PACKAGE_DIST_VERSION variable to explicitly indicate which distribution
(semantic version: e.g. 0.5.3, 1.0.0) we intend to deploy/upload. That way we avoid unintentionally deploying
a wrong version and we make sure that the correct version is released to PyPI. By default, deploys to a
PyPI 'test server', currently at test.pypi.org. If you want to deploy to the "production" PyPI (at pypi.org),
then you have to set the PYPI_SERVER environment variable to 'pypi', like `export PYPI_SERVER=pypi`.
Before deploying, certain sanity checks are run on the distribution artefacts (i.e. the .tar.gz and .whl) to be uploaded.
passenv =
PACKAGE_DIST_VERSION
TWINE_USERNAME
TWINE_PASSWORD
deps =
keyring==21.3.0
twine==3.4.0
skip_install = true
commands_pre =
python -c 'import os; n = "TWINE_USERNAME"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");'
python -c 'import os; n = "TWINE_PASSWORD"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");'
python -c 'import os; n = "PACKAGE_DIST_VERSION"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");'
python -c 'import os; n = "PYPI_SERVER"; exec("if n in os.environ:\n v = os.environ[n]\n if v != \"pypi\":\n print(\"Environment variable PYPI_SERVER detected, but was not set to pypi. Please set to pypi or run tox -e deploy from an environment where the PYPI_SERVER variable is NOT present at all.\")\n exit(1)");'
python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}*
commands =
twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose
#####################################################################
[testenv:report]
deps = coverage
skip_install = true
commands =
coverage report
coverage html
############## DOCS ##############
[testenv:spell]
setenv =
SPELLCHECK=1
usedevelop = true
extras = docs
; deps =
; setuptools >= 40.0.0
; -r{toxinidir}/docs/requirements.txt
; pyenchant
; sphinxcontrib-spelling
commands =
sphinx-build -b spelling docs dist/docs
[testenv:docs]
; deps =
; setuptools >= 40.0.0
; -r{toxinidir}/docs/requirements.txt
usedevelop = true
extras = docs
commands =
sphinx-build {posargs:-E} -b doctest docs dist/docs
sphinx-build {posargs:-E} -b html docs dist/docs
sphinx-build -b linkcheck docs dist/docs
# SDIST
[testenv:{py311, py310, py39, py38, py37, py36, pypy3}-sdist{, -linux, -macos, -windows}]
description = Install as Source Distribution & Test
# WHEEL
[testenv:{py311, py310, py39, py38, py37, py36, pypy3}-wheel{, -linux, -macos, -windows}]
description = Install as Wheel & Test
skip_install = true
changedir = {toxinidir}
commands_pre =
python -c 'import os; d = "{env:DIST_DIR}"; import shutil; exec("if os.path.exists(d):\n shutil.rmtree(d)");'
commands =
pip wheel --wheel-dir {toxworkdir}{/}{env:DIST_DIR} --cache-dir {envdir} {toxinidir}
pip install --exists-action w --force-reinstall "{toxworkdir}{/}{env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PKG_VERSION}-py3-none-any.whl[test]"
{[testenv]commands}
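# Note: like the check env, this relies on PKG_VERSION matching the built wheel's
# version, e.g. (illustrative): PKG_VERSION=1.0.0 tox -e py311-wheel-linux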
# DEV
[testenv:{py311, py310, py39, py38, py37, py36, pypy3}-dev{, -linux, -macos, -windows}]
description = Install in 'edit' mode & Test
usedevelop = true
# Static Type Checking (MYPY)
[testenv:type]
description = Static Type Checking with mypy
basepython = {env:TOXPYTHON:python3}
extras = typing
usedevelop = true
changedir = {toxinidir}
commands = mypy --show-error-codes {posargs:src{/}{env:PY_PACKAGE} tests}
# CODE LINTING, STATIC (STYLE) CHECKING
[testenv:lint]
description = Check that the code conforms to our code style.
To check the code style (aka lint check) run: tox -e lint
To apply the code style (aka lint apply) run: APPLY_LINT= tox -e lint
deps =
black
isort >= 5.0.0
passenv = APPLY_LINT
skip_install = true
changedir = {toxinidir}
commands =
isort {posargs:{env:APPLY_LINT:--check}} "{env:LINT_ARGS:.}"
black {posargs:{env:APPLY_LINT:--check}} -S --config pyproject.toml "{env:LINT_ARGS:.}"
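# How APPLY_LINT works: when it is unset, {env:APPLY_LINT:--check} expands to --check
# (report only); exporting it empty (`APPLY_LINT= tox -e lint`) makes the substitution
# expand to nothing, so isort and black rewrite the files in place.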
[testenv:black]
description = black ops
deps = black
skip_install = true
changedir = {toxinidir}
commands = black {posargs:{env:APPLY_BLACK:--check}} --skip-string-normalization \
--config pyproject.toml "{env:LINT_ARGS:.}"
[testenv:isort]
description = isort
deps = isort >= 5.0.0
skip_install = true
changedir = {toxinidir}
commands = isort {posargs:{env:APPLY_ISORT:--check}} "{env:LINT_ARGS:.}"
[testenv:bandit]
description = static code security check
deps = bandit[toml]
skip_install = true
commands = bandit -r -c pyproject.toml {posargs:src tests}
[testenv:prospector]
description = Run multiple static code analysis tools defined in .prospector.yml
Runs the prospector tool, which brings together the functionality of other
Python analysis tools such as Pyflakes and the McCabe complexity checker.
We run the tools: Pyflakes, Pyroma, McCabe and Dodgy.
deps = prospector[with_pyroma] == 1.3.1
skip_install = true
changedir = {toxinidir}
commands_pre =
# We do not run pylint, since we have a dedicated pylint env for it.
# Prospector still tries to read .pylintrc, which causes a crash (because .pylintrc was generated with a pylint version higher than the one supported by prospector)
# So we temporarily "hide" .pylintrc from prospector
python -c 'import os; f = ".pylintrc"; exec("if os.path.exists(f):\n os.rename(f, \".pylintrc-bak\")")'
commands =
prospector src
prospector tests
commands_post =
# We "restore" .pylintrc (to be available to the pylint env command)
python -c 'import os; f = ".pylintrc-bak"; exec("if os.path.exists(f):\n os.rename(f, \".pylintrc\")")'
[testenv:pylint]
description = Run the Pylint tool to analyse the Python code and output
information about errors, potential problems and convention violations
deps =
pylint ; python_version == '3.11'
pylint == 2.7.4 ; python_version < '3.11'
usedevelop = true
changedir = {toxinidir}
commands =
- python -m pylint src{/}{env:PY_PACKAGE}
- python -m pylint tests
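# The leading '-' tells tox to ignore a non-zero pylint exit code, so both invocations
# always run and the env does not fail on findings.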
# GENERATE ARCHITECTURE GRAPHS
[testenv:pydeps]
description =
Visualise Python dependency graphs (roughly which module imports which) and store in .svg file(s).
E.g.: `tox -e pydeps`, `PYDEPS_DIR=my-destination-dir tox -e pydeps`.
PYDEPS_DIR controls the relative location (to your current working dir) of the target dir to store
the generated files. The default target dir is 'pydeps'; it is created if it doesn't exist.
Requires the 'dot' executable to be in your PATH. Installing the graphviz library should make
the dot executable available. Installing 'graphviz':
* For Linux, please run "sudo apt install graphviz"
* For MacOS, please run "brew install graphviz"
basepython = {env:TOXPYTHON:python3.10}
passenv =
HOME
PWD
PYDEPS_DIR
deps = pydeps==1.11.0
usedevelop = true
commands_pre =
python -c 'from pathlib import Path; import os; p = Path(os.environ["PWD"]) / os.getenv("PYDEPS_DIR", "pydeps"); p.mkdir(parents=True, exist_ok=True);'
commands =
pydeps --version
# --max-bacon : exclude nodes that are more than n hops away
# (default=2, 0 -> infinite)
# --min-cluster-size : the minimum number of nodes a dependency must have before being clustered (default=0)
# --max-cluster-size : the maximum number of nodes a dependency can have before the cluster is collapsed to a single node (default=0)
# --keep-target-cluster : draw target module as a cluster
# Draw only the source code package inner dependencies
pydeps src{/}{env:PY_PACKAGE} --only {env:PY_PACKAGE} --noshow -o {env:PWD}{/}{env:PYDEPS_DIR:pydeps}{/}deps_inner.svg
; # Draw the source code package inner and external dependencies
pydeps src{/}{env:PY_PACKAGE} --cluster --noshow -o {env:PWD}{/}{env:PYDEPS_DIR:pydeps}{/}deps_all.svg
; # Visualize the package inner dependencies and abstract the external (eg with numpy, pandas, etc) ones
; # Draw the source code package inner and minimum external dependencies
pydeps src{/}{env:PY_PACKAGE} --max-cluster-size=2 --keep-target-cluster --noshow -o {env:PWD}{/}{env:PYDEPS_DIR:pydeps}{/}deps_ktc-mcs_2.svg
; # Draw the source code package inner and all external dependencies
pydeps src{/}{env:PY_PACKAGE} --keep-target-cluster --noshow -o {env:PWD}{/}{env:PYDEPS_DIR:pydeps}{/}deps_ktc.svg
python -c 'from pathlib import Path; p = Path("{env:PWD}{/}{env:PYDEPS_DIR:pydeps}"); print(f"\nGenerated .svg files in \"\{str(p.absolute())\}\".");'