diff --git a/.codegen.json b/.codegen.json new file mode 100644 index 0000000..642f568 --- /dev/null +++ b/.codegen.json @@ -0,0 +1,14 @@ +{ + "version": { + "src/databricks/labs/pytester/__about__.py": "__version__ = \"$VERSION\"" + }, + "toolchain": { + "required": ["python3", "hatch"], + "pre_setup": ["hatch env create"], + "prepend_path": ".venv/bin", + "acceptance_path": "tests/integration", + "test": [ + "pytest -n 4 --cov src --cov-report=xml --timeout 30 tests/unit --durations 20" + ] + } +} diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b728efb --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..93d9a5a --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,16 @@ + +## Changes + + +### Linked issues + + +Resolves #.. 
+ +### Tests + + +- [ ] manually tested +- [ ] added unit tests +- [ ] added integration tests +- [ ] verified on staging environment (screenshot attached) diff --git a/.github/workflows/acceptance.yml b/.github/workflows/acceptance.yml new file mode 100644 index 0000000..99991b2 --- /dev/null +++ b/.github/workflows/acceptance.yml @@ -0,0 +1,55 @@ +name: acceptance + +on: + pull_request: + types: [ opened, synchronize, ready_for_review ] + merge_group: + types: [ checks_requested ] + push: + branches: + - main + +permissions: + id-token: write + contents: read + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + integration: + if: github.event_name == 'pull_request' && github.event.pull_request.draft == false + environment: account-admin + runs-on: larger + steps: + - name: Checkout Code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install Python + uses: actions/setup-python@v5 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: '3.10' + + - name: Install hatch + run: pip install hatch==1.9.4 + + - name: Fetch relevant branches + run: | + git fetch origin $GITHUB_BASE_REF:$GITHUB_BASE_REF + git fetch origin $GITHUB_HEAD_REF:$GITHUB_HEAD_REF + + - name: Run integration tests + uses: databrickslabs/sandbox/acceptance@acceptance/v0.2.2 + with: + vault_uri: ${{ secrets.VAULT_URI }} + timeout: 55m + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml new file mode 100644 index 0000000..a3cc40c --- /dev/null +++ b/.github/workflows/nightly.yml @@ -0,0 +1,47 @@ +name: nightly + +on: + workflow_dispatch: + schedule: + - cron: '0 5 * * *' + +permissions: + id-token: write + issues: write + contents: read + pull-requests: read + +concurrency: + group: single-acceptance-job-per-repo + 
+jobs: + integration: + environment: account-admin + runs-on: larger + steps: + - name: Checkout Code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install Python + uses: actions/setup-python@v5 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: '3.10' + + - name: Install hatch + run: pip install hatch==1.9.4 + + - name: Run nightly tests + uses: databrickslabs/sandbox/acceptance@acceptance/v0.2.2 + with: + vault_uri: ${{ secrets.VAULT_URI }} + timeout: 55m + create_issues: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ARM_CLIENT_ID: ${{ secrets.ARM_CLIENT_ID }} + ARM_TENANT_ID: ${{ secrets.ARM_TENANT_ID }} + TEST_NIGHTLY: true diff --git a/.github/workflows/no-cheat.yml b/.github/workflows/no-cheat.yml new file mode 100644 index 0000000..bd5edca --- /dev/null +++ b/.github/workflows/no-cheat.yml @@ -0,0 +1,32 @@ +name: no-cheat + +on: + pull_request: + types: [opened, synchronize] + merge_group: + types: [checks_requested] + push: + # required for merge queue to work. 
jobs.integration.if will mark it as skipped + branches: + - main + +jobs: + no-pylint-disable: + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' && (github.event.action == 'opened' || github.event.action == 'synchronize') + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Verify no additional disabled lint in the new code + run: | + git fetch origin $GITHUB_BASE_REF:$GITHUB_BASE_REF + git diff $GITHUB_BASE_REF...$(git branch --show-current) >> diff_data.txt + python tests/unit/no_cheat.py diff_data.txt >> cheats.txt + COUNT=$(cat cheats.txt | wc -c) + if [ ${COUNT} -gt 1 ]; then + cat cheats.txt + exit 1 + fi diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml new file mode 100644 index 0000000..6f699f5 --- /dev/null +++ b/.github/workflows/push.yml @@ -0,0 +1,75 @@ +name: build + +on: + pull_request: + types: [opened, synchronize] + merge_group: + types: [checks_requested] + push: + # Always run on push to main. The build cache can only be reused + # if it was saved by a run from the repository's default branch. + # The run result will be identical to that from the merge queue + # because the commit is identical, yet we need to perform it to + # seed the build cache. 
+ branches: + - main + +env: + HATCH_VERSION: 1.9.4 + +jobs: + ci: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + pyVersion: [ '3.10', '3.11', '3.12' ] + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install Python + uses: actions/setup-python@v5 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: ${{ matrix.pyVersion }} + + - name: Install hatch + run: pip install hatch==$HATCH_VERSION + + - name: Run unit tests + run: hatch run test + + - name: Publish test coverage + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + + fmt: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install Python + uses: actions/setup-python@v5 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: 3.10.x + + - name: Install hatch + run: pip install hatch==$HATCH_VERSION + + - name: Reformat code + run: make fmt + + - name: Fail on differences + run: | + # Exit with status code 1 if there are differences (i.e. unformatted files) + git diff --exit-code diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..88e2f97 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,47 @@ +name: Release + +on: + push: + tags: + - 'v*' + +jobs: + publish: + runs-on: ubuntu-latest + environment: release + permissions: + # Used to authenticate to PyPI via OIDC and sign the release's artifacts with sigstore-python. + id-token: write + # Used to attach signing artifacts to the published release. 
+ contents: write + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: '3.10' + + - name: Build wheels + run: | + pip install hatch==1.9.4 + hatch build + + - name: Draft release + uses: softprops/action-gh-release@v2 + with: + files: | + dist/databricks_*.whl + dist/databricks_*.tar.gz + + - uses: pypa/gh-action-pypi-publish@release/v1 + name: Publish package distributions to PyPI + + - name: Sign artifacts with Sigstore + uses: sigstore/gh-action-sigstore-python@v3.0.0 + with: + inputs: | + dist/databricks_*.whl + dist/databricks_*.tar.gz + release-signing-artifacts: true diff --git a/.gitignore b/.gitignore index b898780..579712d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,6 @@ .pytest_cache *.egg-info tests/**/build +.coverage +coverage.xml +pytester.iml \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..ed7d5fa --- /dev/null +++ b/LICENSE @@ -0,0 +1,69 @@ + Databricks License + Copyright (2023) Databricks, Inc. + + Definitions. + + Agreement: The agreement between Databricks, Inc., and you governing + the use of the Databricks Services, as that term is defined in + the Master Cloud Services Agreement (MCSA) located at + www.databricks.com/legal/mcsa. + + Licensed Materials: The source code, object code, data, and/or other + works to which this license applies. + + Scope of Use. You may not use the Licensed Materials except in + connection with your use of the Databricks Services pursuant to + the Agreement. Your use of the Licensed Materials must comply at all + times with any restrictions applicable to the Databricks Services, + generally, and must be used in accordance with any applicable + documentation. You may view, use, copy, modify, publish, and/or + distribute the Licensed Materials solely for the purposes of using + the Licensed Materials within or connecting to the Databricks Services. 
+ If you do not agree to these terms, you may not view, use, copy, + modify, publish, and/or distribute the Licensed Materials. + + Redistribution. You may redistribute and sublicense the Licensed + Materials so long as all use is in compliance with these terms. + In addition: + + - You must give any other recipients a copy of this License; + - You must cause any modified files to carry prominent notices + stating that you changed the files; + - You must retain, in any derivative works that you distribute, + all copyright, patent, trademark, and attribution notices, + excluding those notices that do not pertain to any part of + the derivative works; and + - If a "NOTICE" text file is provided as part of its + distribution, then any derivative works that you distribute + must include a readable copy of the attribution notices + contained within such NOTICE file, excluding those notices + that do not pertain to any part of the derivative works. + + You may add your own copyright statement to your modifications and may + provide additional license terms and conditions for use, reproduction, + or distribution of your modifications, or for any such derivative works + as a whole, provided your use, reproduction, and distribution of + the Licensed Materials otherwise complies with the conditions stated + in this License. + + Termination. This license terminates automatically upon your breach of + these terms or upon the termination of your Agreement. Additionally, + Databricks may terminate this license at any time on notice. Upon + termination, you must permanently delete the Licensed Materials and + all copies thereof. + + DISCLAIMER; LIMITATION OF LIABILITY. + + THE LICENSED MATERIALS ARE PROVIDED “AS-IS” AND WITH ALL FAULTS. 
+ DATABRICKS, ON BEHALF OF ITSELF AND ITS LICENSORS, SPECIFICALLY + DISCLAIMS ALL WARRANTIES RELATING TO THE LICENSED MATERIALS, EXPRESS + AND IMPLIED, INCLUDING, WITHOUT LIMITATION, IMPLIED WARRANTIES, + CONDITIONS AND OTHER TERMS OF MERCHANTABILITY, SATISFACTORY QUALITY OR + FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. DATABRICKS AND + ITS LICENSORS TOTAL AGGREGATE LIABILITY RELATING TO OR ARISING OUT OF + YOUR USE OF OR DATABRICKS’ PROVISIONING OF THE LICENSED MATERIALS SHALL + BE LIMITED TO ONE THOUSAND ($1,000) DOLLARS. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE LICENSED MATERIALS OR + THE USE OR OTHER DEALINGS IN THE LICENSED MATERIALS. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..73dd2dc --- /dev/null +++ b/Makefile @@ -0,0 +1,28 @@ +all: clean lint fmt test coverage + +clean: + rm -fr .venv clean htmlcov .mypy_cache .pytest_cache .ruff_cache .coverage coverage.xml + rm -fr **/*.pyc + +.venv/bin/python: + hatch env create + +dev: .venv/bin/python + @hatch run which python + +lint: + hatch run verify + +fmt: + hatch run fmt + +test: + hatch run test + +integration: + hatch run integration + +coverage: + hatch run coverage && open htmlcov/index.html + +.PHONY: all clean dev lint fmt test integration coverage diff --git a/README.md b/README.md new file mode 100644 index 0000000..25b3f76 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# pytester \ No newline at end of file diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 647a9a0..0000000 --- a/poetry.lock +++ /dev/null @@ -1,300 +0,0 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
- -[[package]] -name = "build" -version = "0.10.0" -description = "A simple, correct Python build frontend" -optional = false -python-versions = ">= 3.7" -files = [ - {file = "build-0.10.0-py3-none-any.whl", hash = "sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171"}, - {file = "build-0.10.0.tar.gz", hash = "sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -packaging = ">=19.0" -pyproject_hooks = "*" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2021.08.31)", "sphinx (>=4.0,<5.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=5.1)", "mypy (==0.991)", "tomli", "typing-extensions (>=3.7.4.3)"] -virtualenv = ["virtualenv (>=20.0.35)"] - -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.2.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "databricks-sdk" -version = "0.7.0" -description = "Databricks SDK for Python (Beta)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "databricks-sdk-0.7.0.tar.gz", hash = "sha256:a5705c000b86b89dd16da7ead0b6d354d27d84a2118aada48ede72ff1dfae443"}, - {file = "databricks_sdk-0.7.0-py3-none-any.whl", hash = "sha256:585846be31064459d901903a8c4f54f2ee3b69f52fbf6ea2eafe0f43f7aca5d7"}, -] - -[package.dependencies] -requests = ">=2.28.1,<3" - -[package.extras] -dev = ["autoflake", "ipython", "ipywidgets", "isort", "pycodestyle", "pytest", "pytest-cov", "pytest-mock", "pytest-xdist", "requests-mock", "wheel", "yapf"] -notebook = ["ipython (>=8,<9)", "ipywidgets (>=8,<9)"] - -[[package]] -name = "exceptiongroup" -version = "1.1.3" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = 
">=3.7" -files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] - -[[package]] -name = "pluggy" -version = "1.3.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, -] - -[package.extras] 
-dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pyproject-hooks" -version = "1.0.0" -description = "Wrappers to call pyproject.toml-based build backend hooks." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"}, - {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"}, -] - -[package.dependencies] -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "pytest" -version = "7.4.0" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "urllib3" -version = "2.0.4" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"},
-    {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"},
-]
-
-[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
-socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.9"
-content-hash = "29073d9c38265dbf3aeb9f13964f2fca094584bc6db1b266deae0886974c8904"
diff --git a/pyproject.toml b/pyproject.toml
index df4c96f..533be17 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,17 +1,764 @@
-[tool.poetry]
-name = "databricks-labs-pytester"
-version = "0.0.1"
-description = "Python Testing for Databricks"
-authors = ["Serge Smertin "]
-packages = [{include = "databricks", from = "src"}]
-
-[tool.poetry.dependencies]
-python = "^3.9"
-databricks-sdk = "^0.7.0"
-pytest = "^7.4.0"
-build = "^0.10.0"
-
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build]
+sources = ["src"]
+include = ["src"]
+
+[project]
+name = "databricks-labs-pytester"
+dynamic = ["version"]
+description = 'Python Testing for Databricks'
+readme = "README.md"
+license-files = { paths = ["LICENSE", "NOTICE"] }
+requires-python = ">=3.10"
+keywords = ["Databricks", "pytest"]
+maintainers = [
+    { name = "Serge Smertin", email = "serge.smertin@databricks.com" },
+]
+authors = [
+    { name = "Serge Smertin", email = "serge.smertin@databricks.com" },
+    { name = "Liran Bareket", email = "liran.bareket@databricks.com" },
+    { name = "Marcin Wojtyczka", email = "marcin.wojtyczka@databricks.com" },
+    { name = "Ziyuan Qin", email = 
"ziyuan.qin@databricks.com" }, + { name = "William Conti", email = "william.conti@databricks.com" }, + { name = "Hari Selvarajan", email = "hari.selvarajan@databricks.com" }, + { name = "Vuong Nguyen", email = "vuong.nguyen@databricks.com" }, +] +classifiers = [ + "Development Status :: 4 - Beta", + "License :: Other/Proprietary License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", + "Environment :: Console", + "Framework :: Pytest", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Operating System :: MacOS", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries", + "Topic :: Utilities", +] + +dependencies = [ + "databricks-sdk~=0.30", + "databricks-labs-blueprint>=0.8,<0.9", + "pytest~=8.1.0", +] + +[project.entry-points.pytest11] +databricks-labs-pytester = "databricks.labs.pytester.fixtures.plugin" + + +[project.urls] +Issues = "https://github.com/databricks/pytester/issues" +Source = "https://github.com/databricks/pytester" + +[tool.hatch.version] +path = "src/databricks/labs/pytester/__about__.py" + +[tool.hatch.envs.default] +dependencies = [ + "black~=24.3.0", + "coverage[toml]~=7.4.4", + "mypy~=1.9.0", + "pylint~=3.2.2", + "pylint-pytest==2.0.0a0", + "databricks-labs-pylint~=0.4.0", + "pytest-cov~=4.1.0", + "pytest-mock~=3.14.0", + "pytest-timeout~=2.3.1", + "pytest-xdist~=3.5.0", + "python-lsp-server>=1.9.0", + "ruff~=0.3.4", + "types-PyYAML~=6.0.12", + "types-requests~=2.31.0", +] + +# store virtual env as the child of this folder. 
Helps VSCode (and PyCharm) to run better +path = ".venv" + +[tool.hatch.envs.default.scripts] +test = "pytest -n 4 --cov src --cov-report=xml --timeout 30 tests/unit --durations 20" +coverage = "pytest -n auto --cov src tests/unit --timeout 30 --cov-report=html --durations 20" +integration = "pytest -n 10 --cov src tests/integration --durations 20" +fmt = ["black . --extend-exclude 'tests/unit/source_code/samples/'", + "ruff check . --fix", + "mypy --disable-error-code 'annotation-unchecked' --exclude 'tests/resources/*' --exclude dist .", + "pylint --output-format=colorized -j 0 src tests"] +verify = ["black --check . --extend-exclude 'tests/unit/source_code/samples/'", + "ruff check .", + "mypy --exclude 'tests/resources/*' --exclude dist .", + "pylint --output-format=colorized -j 0 src tests"] +lint = ["pylint --output-format=colorized -j 0 src tests"] + +[tool.pytest.ini_options] +# TODO: remove `-p no:warnings` +addopts = "--no-header -p no:warnings" +cache_dir = ".venv/pytest-cache" + +[tool.black] +target-version = ["py310"] +line-length = 120 +skip-string-normalization = true + +[tool.ruff] +cache-dir = ".venv/ruff-cache" +target-version = "py310" +line-length = 120 + +[tool.ruff.lint] +ignore = [ + # Allow boolean positional values in function calls, like `dict.get(... True)` + "FBT003", + # Ignore checks for possible passwords and SQL statement construction + "S105", "S106", "S107", "S603", "S608", + # Allow print statements + "T201", + # Allow asserts + "S101", + # Ignore Exception must not use a string literal, assign to variable first + "EM101", + # Ignore the error message will be duplicated in the traceback, which can make the traceback less readable. 
+ "EM102", +] + +[tool.ruff.lint.isort] +known-first-party = ["databricks.labs.pytester"] + +[tool.ruff.lint.flake8-tidy-imports] +ban-relative-imports = "all" + +[tool.ruff.lint.per-file-ignores] + +"src/databricks/labs/pytester/mixins/*" = ["S311"] + +"tests/**/*" = [ + "PLR2004", "S101", "TID252", # tests can use magic values, assertions, and relative imports + "ARG001" # tests may not use the provided fixtures +] + +[tool.coverage.run] +branch = true +parallel = true + +[tool.coverage.report] +omit = [ + "*/working-copy/*", + "*/fresh_wheel_file/*", + "*/__init__.py" +] +exclude_lines = [ + "no cov", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", +] + +[tool.pylint.main] +# PyLint configuration is adapted from Google Python Style Guide with modifications. +# Sources https://google.github.io/styleguide/pylintrc +# License: https://github.com/google/styleguide/blob/gh-pages/LICENSE + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint in +# a server-like mode. +# clear-cache-post-run = + +# Always return a 0 (non-error) status code, even if lint errors are found. This +# is primarily useful in continuous integration scripts. +# exit-zero = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +# extension-pkg-allow-list = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +# extension-pkg-whitelist = + +# Specify a score threshold under which the program will exit with error. +fail-under = 10.0 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. 
+# from-stdin = + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +ignore-paths='^tests/resources/.*$' + +# Files or directories matching the regular expression patterns are skipped. The +# regex matches against base names, not paths. The default value ignores Emacs +# file locks +ignore-patterns = ["^\\.#"] + +# List of module names for which member attributes should not be checked (useful +# for modules/projects where namespaces are manipulated during runtime and thus +# existing member attributes cannot be deduced by static analysis). It supports +# qualified module names, as well as Unix pattern matching. +# ignored-modules = + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook='' + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs = 0 + +# Control the amount of potential inferred values when inferring a single object. +# This can help the performance when dealing with large functions or complex, +# nested conditions. +limit-inference-results = 100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. 
+load-plugins = [ + "databricks.labs.pylint.mocking", + "databricks.labs.pylint.eradicate", + "pylint_pytest", + "pylint.extensions.bad_builtin", + "pylint.extensions.broad_try_clause", + "pylint.extensions.check_elif", + "pylint.extensions.code_style", + "pylint.extensions.confusing_elif", + "pylint.extensions.comparison_placement", + "pylint.extensions.consider_refactoring_into_while_condition", + "pylint.extensions.dict_init_mutate", + "pylint.extensions.docparams", + "pylint.extensions.dunder", + "pylint.extensions.for_any_all", + "pylint.extensions.mccabe", + "pylint.extensions.overlapping_exceptions", + "pylint.extensions.private_import", + "pylint.extensions.redefined_variable_type", + "pylint.extensions.set_membership", + "pylint.extensions.typing", +] + +# Pickle collected data for later comparisons. +persistent = true + +# Minimum Python version to use for version dependent checks. Will default to the +# version used to run pylint. +py-version = "3.10" + +# Discover python modules and packages in the file system subtree. +# recursive = + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +# source-roots = + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode = true + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +# unsafe-load-any-extension = + +[tool.pylint.basic] +# Naming style matching correct argument names. +argument-naming-style = "snake_case" + +# Regular expression matching correct argument names. Overrides argument-naming- +# style. If left empty, argument names will be checked with the set naming style. 
+argument-rgx = "[a-z_][a-z0-9_]{2,30}$" + +# Naming style matching correct attribute names. +attr-naming-style = "snake_case" + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +attr-rgx = "[a-z_][a-z0-9_]{1,}$" + +# Bad variable names which should always be refused, separated by a comma. +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +# bad-names-rgxs = + +# Naming style matching correct class attribute names. +class-attribute-naming-style = "any" + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +class-attribute-rgx = "([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$" + +# Naming style matching correct class constant names. +class-const-naming-style = "UPPER_CASE" + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +# class-const-rgx = + +# Naming style matching correct class names. +class-naming-style = "PascalCase" + +# Regular expression matching correct class names. Overrides class-naming-style. +# If left empty, class names will be checked with the set naming style. +class-rgx = "[A-Z_][a-zA-Z0-9]+$" + +# Naming style matching correct constant names. +const-naming-style = "UPPER_CASE" + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming style. +const-rgx = "(([A-Z_][A-Z0-9_]*)|(__.*__))$" + +# Minimum line length for functions/classes that require docstrings, shorter ones +# are exempt. 
+docstring-min-length = -1 + +# Naming style matching correct function names. +function-naming-style = "snake_case" + +# Regular expression matching correct function names. Overrides function-naming- +# style. If left empty, function names will be checked with the set naming style. +function-rgx = "[a-z_][a-z0-9_]{2,}$" + +# Good variable names which should always be accepted, separated by a comma. +good-names = [ + "f", # use for file handles + "i", "j", "k", # use for loops + "df", # use for pyspark.sql.DataFrame + "ex", "e", # use for exceptions + "fn", "cb", # use for callbacks + "_", # use for ignores + "a", # use for databricks.sdk.AccountClient + "w", "ws" # use for databricks.sdk.WorkspaceClient +] + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +# good-names-rgxs = + +# Include a hint for the correct naming format with invalid-name. +# include-naming-hint = + +# Naming style matching correct inline iteration names. +inlinevar-naming-style = "any" + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +inlinevar-rgx = "[A-Za-z_][A-Za-z0-9_]*$" + +# Naming style matching correct method names. +method-naming-style = "snake_case" + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +method-rgx = "(([a-z_][a-z0-9_]{2,})|(visit_.*))$" + +# Naming style matching correct module names. +module-naming-style = "snake_case" + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +module-rgx = "(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$" + +# Colon-delimited sets of names that determine each other's naming style when the +# name regexes allow several styles. 
+# name-group = + +# Regular expression which should only match function or class names that do not +# require a docstring. +no-docstring-rgx = "__.*__" + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. These +# decorators are taken in consideration only for invalid-name. +property-classes = ["abc.abstractproperty"] + +# Regular expression matching correct type alias names. If left empty, type alias +# names will be checked with the set naming style. +# typealias-rgx = + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +# typevar-rgx = + +# Naming style matching correct variable names. +variable-naming-style = "snake_case" + +# Regular expression matching correct variable names. Overrides variable-naming- +# style. If left empty, variable names will be checked with the set naming style. +variable-rgx = "[a-z_][a-z0-9_]{2,30}$" + +[tool.pylint.broad_try_clause] +# Maximum number of statements allowed in a try clause +max-try-statements = 7 + +[tool.pylint.classes] +# Warn about protected attribute access inside special methods +# check-protected-access-in-special-methods = + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods = ["__init__", "__new__", "setUp", "__post_init__"] + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make"] + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg = ["cls"] + +# List of valid names for the first argument in a metaclass class method. 
+valid-metaclass-classmethod-first-arg = ["mcs"]
+
+[tool.pylint.deprecated_builtins]
+# List of builtins function names that should not be used, separated by a comma
+bad-functions = ["map", "input"]
+
+[tool.pylint.design]
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+# exclude-too-few-public-methods =
+
+# List of qualified class names to ignore when counting class parents (see R0901)
+# ignored-parents =
+
+# Maximum number of arguments for function / method.
+max-args = 9
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes = 11
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr = 5
+
+# Maximum number of branch for function / method body.
+max-branches = 20
+
+# Maximum number of locals for function / method body.
+max-locals = 19
+
+# Maximum number of parents for a class (see R0901).
+max-parents = 7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods = 20
+
+# Maximum number of return / yield for function / method body.
+max-returns = 11
+
+# Maximum number of statements in function / method body.
+max-statements = 50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods = 2
+
+[tool.pylint.exceptions]
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions = ["builtins.Exception"]
+
+[tool.pylint.format]
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+# expected-line-ending-format =
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines = "^\\s*(# )?<?https?://\\S+>?$"
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren = 4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string = "    "
+
+# Maximum number of characters on a single line.
+max-line-length = 100
+
+# Maximum number of lines in a module.
+max-module-lines = 2000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +# single-line-class-stmt = + +# Allow the body of an if to be on the same line as the test if there is no else. +# single-line-if-stmt = + +[tool.pylint.imports] +# List of modules that can be imported at any level, not just the top level one. +# allow-any-import-level = + +# Allow explicit reexports by alias from a package __init__. +# allow-reexport-from-package = + +# Allow wildcard imports from modules that define __all__. +# allow-wildcard-with-all = + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules = ["regsub", "TERMIOS", "Bastion", "rexec"] + +# Output a graph (.gv or any supported image format) of external dependencies to +# the given file (report RP0402 must not be disabled). +# ext-import-graph = + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be disabled). +# import-graph = + +# Output a graph (.gv or any supported image format) of internal dependencies to +# the given file (report RP0402 must not be disabled). +# int-import-graph = + +# Force import order to recognize a module as part of the standard compatibility +# libraries. +# known-standard-library = + +# Force import order to recognize a module as part of a third party library. +known-third-party = ["enchant"] + +# Couples of modules and preferred modules, separated by a comma. +# preferred-modules = + +[tool.pylint.logging] +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style = "new" + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules = ["logging"] + +[tool.pylint."messages control"] +# Only show warnings with the listed confidence levels. 
Leave empty to show all. +# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] + +# Disable the message, report, category or checker with the given id(s). You can +# either give multiple identifiers separated by comma (,) or put this option +# multiple times (only on the command line, not in the configuration file where +# it should appear only once). You can also use "--disable=all" to disable +# everything first and then re-enable specific checks. For example, if you want +# to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable = [ + "prefer-typing-namedtuple", + "attribute-defined-outside-init", + "missing-module-docstring", + "missing-class-docstring", + "missing-function-docstring", + "too-few-public-methods", + "line-too-long", + "trailing-whitespace", + "missing-final-newline", + "trailing-newlines", + "unnecessary-semicolon", + "mixed-line-endings", + "unexpected-line-ending-format", + "fixme", + "consider-using-assignment-expr", + "logging-fstring-interpolation", + "consider-using-any-or-all" +] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where it +# should appear only once). See also the "--disable" option for examples. +enable = ["useless-suppression", "use-symbolic-message-instead"] + +[tool.pylint.method_args] +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 
'requests.api.get,requests.api.post' +timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] + +[tool.pylint.miscellaneous] +# List of note tags to take in consideration, separated by a comma. +notes = ["FIXME", "XXX", "TODO"] + +# Regular expression of note tags to take in consideration. +# notes-rgx = + +[tool.pylint.parameter_documentation] +# Whether to accept totally missing parameter documentation in the docstring of a +# function that has parameters. +accept-no-param-doc = true + +# Whether to accept totally missing raises documentation in the docstring of a +# function that raises an exception. +accept-no-raise-doc = true + +# Whether to accept totally missing return documentation in the docstring of a +# function that returns a statement. +accept-no-return-doc = true + +# Whether to accept totally missing yields documentation in the docstring of a +# generator. +accept-no-yields-doc = true + +# If the docstring type cannot be guessed the specified docstring type will be +# used. +default-docstring-type = "default" + +[tool.pylint.refactoring] +# Maximum number of nested blocks for function / method body +max-nested-blocks = 3 + +# Complete name of functions that never returns. When checking for inconsistent- +# return-statements if a never returning function is called then it will be +# considered as an explicit return statement and no message will be printed. +never-returning-functions = ["sys.exit", "argparse.parse_error"] + +[tool.pylint.reports] +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each category, +# as well as 'statement' which is the total number of statements analyzed. This +# score is used by the global evaluation report (RP0004). 
+evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +# msg-template = + +# Set the output format. Available formats are: text, parseable, colorized, json2 +# (improved json format), json (old json format) and msvs (visual studio). You +# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. +# output-format = + +# Tells whether to display a full report or only the messages. +# reports = + +# Activate the evaluation score. +score = true + +[tool.pylint.similarities] +# Comments are removed from the similarity computation +ignore-comments = true + +# Docstrings are removed from the similarity computation +ignore-docstrings = true + +# Imports are removed from the similarity computation +ignore-imports = true + +# Signatures are removed from the similarity computation +ignore-signatures = true + +# Minimum lines number of a similarity. +min-similarity-lines = 6 + +[tool.pylint.spelling] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions = 2 + +# Spelling dictionary name. No available dictionaries : You need to install both +# the python package and the system dependency for enchant to work. +# spelling-dict = + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:,pragma:,# noinspection" + +# List of comma separated words that should not be checked. +# spelling-ignore-words = + +# A path to a file that contains the private dictionary; one word per line. 
+spelling-private-dict-file = ".pyenchant_pylint_custom_dict.txt" + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +# spelling-store-unknown-words = + +[tool.pylint.typecheck] +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators = ["contextlib.contextmanager"] + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members = "REQUEST,acl_users,aq_parent,argparse.Namespace" + +# Tells whether missing members accessed in mixin class should be ignored. A +# class is considered mixin if its name matches the mixin-class-rgx option. +# Tells whether to warn about missing members when the owner of the attribute is +# inferred to be None. +ignore-none = true + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference can +# return multiple potential results while evaluating a Python object, but some +# branches might not be evaluated, which results in partial inference. In that +# case, it might be useful to still emit no-member and other checks for the rest +# of the inferred objects. +ignore-on-opaque-inference = true + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. 
+ignored-classes = ["SQLObject", "optparse.Values", "thread._local", "_thread._local"] + +# Show a hint with possible names when a member name was not found. The aspect of +# finding the hint is based on edit distance. +missing-member-hint = true + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance = 1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices = 1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx = ".*MixIn" + +# List of decorators that change the signature of a decorated function. +# signature-mutators = + +[tool.pylint.variables] +# List of additional names supposed to be defined in builtins. Remember that you +# should avoid defining new builtins when possible. +# additional-builtins = + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables = true + +# List of names allowed to shadow builtins +# allowed-redefined-builtins = + +# List of strings which can identify a callback function by name. A callback name +# must start or end with one of those strings. +callbacks = ["cb_", "_cb"] + +# A regular expression matching the name of dummy variables (i.e. expected to not +# be used). +dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" + +# Argument names that match this expression will be ignored. +ignored-argument-names = "_.*|^ignored_|^unused_" + +# Tells whether we should check for unused import in __init__ files. +# init-import = +# List of qualified module names which can have objects that can redefine +# builtins. 
+redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] diff --git a/src/databricks/labs/pytester/__about__.py b/src/databricks/labs/pytester/__about__.py new file mode 100644 index 0000000..6c8e6b9 --- /dev/null +++ b/src/databricks/labs/pytester/__about__.py @@ -0,0 +1 @@ +__version__ = "0.0.0" diff --git a/src/databricks/labs/pytester/environment.py b/src/databricks/labs/pytester/environment.py index 5565186..dc528e0 100644 --- a/src/databricks/labs/pytester/environment.py +++ b/src/databricks/labs/pytester/environment.py @@ -2,11 +2,10 @@ import os import pathlib import sys +import json _LOG = logging.getLogger(__name__) -import json - def load_debug_env_if_runs_from_ide(key) -> bool: """ @@ -47,13 +46,13 @@ def load_debug_env_if_runs_from_ide(key) -> bool: if key not in conf: msg = f"{key} not found in ~/.databricks/debug-env.json" raise KeyError(msg) - for k, v in conf[key].items(): - os.environ[k] = v + for env_key, value in conf[key].items(): + os.environ[env_key] = value return True def _is_in_debug() -> bool: - return os.path.basename(sys.argv[0]) in [ + return os.path.basename(sys.argv[0]) in { "_jb_pytest_runner.py", "testlauncher.py", - ] + } diff --git a/src/databricks/labs/pytester/fixtures/__init__.py b/src/databricks/labs/pytester/fixtures/__init__.py index f8a23ba..0f2aab9 100644 --- a/src/databricks/labs/pytester/fixtures/__init__.py +++ b/src/databricks/labs/pytester/fixtures/__init__.py @@ -6,10 +6,18 @@ from .wheel import workspace_library __all__ = [ - 'ws', 'make_random', - 'make_instance_pool', 'make_job', 'make_cluster', 'make_cluster_policy', - 'make_group', 'make_user', - 'make_notebook', 'make_directory', 'make_repo', - 'make_secret_scope', 'make_secret_scope_acl', - 'workspace_library' -] \ No newline at end of file + 'ws', + 'make_random', + 'make_instance_pool', + 'make_job', + 'make_cluster', + 'make_cluster_policy', + 'make_group', + 'make_user', + 'make_notebook', + 'make_directory', + 
'make_repo', + 'make_secret_scope', + 'make_secret_scope_acl', + 'workspace_library', +] diff --git a/src/databricks/labs/pytester/fixtures/baseline.py b/src/databricks/labs/pytester/fixtures/baseline.py index 1626ce2..abfc657 100644 --- a/src/databricks/labs/pytester/fixtures/baseline.py +++ b/src/databricks/labs/pytester/fixtures/baseline.py @@ -4,7 +4,7 @@ import pytest from databricks.sdk import WorkspaceClient -from databricks.sdk.core import DatabricksError +from databricks.sdk.errors import NotFound _LOG = logging.getLogger(__name__) @@ -39,6 +39,7 @@ def make_random(): random_string = make_random(k=8) assert len(random_string) == 8 """ + def inner(k=16) -> str: """ Generate a random string. @@ -102,22 +103,19 @@ def cleanup(scope): cleanup = [] def inner(**kwargs): - x = create(**kwargs) - _LOG.debug(f"added {name} fixture: {x}") - cleanup.append(x) - return x + some = create(**kwargs) + _LOG.debug(f"added {name} fixture: {some}") + cleanup.append(some) + return some yield inner _LOG.debug(f"clearing {len(cleanup)} {name} fixtures") - for x in cleanup: + for some in cleanup: try: - _LOG.debug(f"removing {name} fixture: {x}") - remove(x) - except DatabricksError as e: - if e.error_code in ("RESOURCE_DOES_NOT_EXIST",): - _LOG.debug(f"ignoring error while {name} {x} teardown: {e}") - continue - raise e + _LOG.debug(f"removing {name} fixture: {some}") + remove(some) + except NotFound as e: + _LOG.debug(f"ignoring error while {name} {some} teardown: {e}") @pytest.fixture(scope="session") diff --git a/src/databricks/labs/pytester/fixtures/compute.py b/src/databricks/labs/pytester/fixtures/compute.py index a8b7934..cb095ed 100644 --- a/src/databricks/labs/pytester/fixtures/compute.py +++ b/src/databricks/labs/pytester/fixtures/compute.py @@ -37,6 +37,7 @@ def test_cluster_policy_management(make_cluster_policy): policy_info = make_cluster_policy(name="my-policy") assert policy_info is not None """ + def create(*, name: str | None = None, **kwargs): if name is None: 
name = f"sdk-{make_random(4)}" @@ -44,7 +45,7 @@ def create(*, name: str | None = None, **kwargs): kwargs["definition"] = json.dumps( {"spark_conf.spark.databricks.delta.preview.enabled": {"type": "fixed", "value": True}} ) - return ws.cluster_policies.create(name, **kwargs) + return ws.cluster_policies.create(name, **kwargs) # type: ignore def cleanup_policy(policy_info): ws.cluster_policies.delete(policy_info.policy_id) @@ -82,13 +83,14 @@ def test_cluster_management(make_cluster): cluster_info = make_cluster(cluster_name="my-cluster", single_node=True) assert cluster_info is not None """ + def create( - *, - single_node: bool = False, - cluster_name: str | None = None, - spark_version: str | None = None, - autotermination_minutes=10, - **kwargs, + *, + single_node: bool = False, + cluster_name: str | None = None, + spark_version: str | None = None, + autotermination_minutes=10, + **kwargs, ): if cluster_name is None: cluster_name = f"sdk-{make_random(4)}" @@ -144,6 +146,7 @@ def test_instance_pool_management(make_instance_pool): instance_pool_info = make_instance_pool(instance_pool_name="my-pool") assert instance_pool_info is not None """ + def create(*, instance_pool_name=None, node_type_id=None, **kwargs): if instance_pool_name is None: instance_pool_name = f"sdk-{make_random(4)}" @@ -189,6 +192,7 @@ def test_job_management(make_job): job_info = make_job(name="my-job") assert job_info is not None """ + def create(**kwargs): if "name" not in kwargs: kwargs["name"] = f"sdk-{make_random(4)}" diff --git a/src/databricks/labs/pytester/fixtures/iam.py b/src/databricks/labs/pytester/fixtures/iam.py index 569823f..5feb764 100644 --- a/src/databricks/labs/pytester/fixtures/iam.py +++ b/src/databricks/labs/pytester/fixtures/iam.py @@ -35,6 +35,7 @@ def test_user_management(make_user): user_info = make_user() assert user_info is not None """ + def create_user(**kwargs): return ws.users.create(user_name=f"sdk-{make_random(4)}@example.com".lower(), **kwargs) @@ -78,8 +79,9 
@@ def test_group_management(make_group): group_info = make_group(members=["user@example.com"], roles=["viewer"]) assert group_info is not None """ + def create( - *, members: list[str] | None = None, roles: list[str] | None = None, display_name: str | None = None, **kwargs + *, members: list[str] | None = None, roles: list[str] | None = None, display_name: str | None = None, **kwargs ): kwargs["display_name"] = f"sdk-{make_random(4)}" if display_name is None else display_name if members is not None: diff --git a/src/databricks/labs/pytester/fixtures/notebooks.py b/src/databricks/labs/pytester/fixtures/notebooks.py index f7f057f..4c92cdd 100644 --- a/src/databricks/labs/pytester/fixtures/notebooks.py +++ b/src/databricks/labs/pytester/fixtures/notebooks.py @@ -37,6 +37,7 @@ def test_notebook_management(make_notebook): notebook_path = make_notebook() assert notebook_path.startswith("/Users/") and notebook_path.endswith(".py") """ + def create(*, path: str | None = None, content: typing.BinaryIO | None = None, **kwargs): if path is None: path = f"/Users/{ws.current_user.me().user_name}/sdk-{make_random(4)}.py" @@ -81,6 +82,7 @@ def test_directory_management(make_directory): directory_path = make_directory() assert directory_path.startswith("/Users/") and not directory_path.endswith(".py") """ + def create(*, path: str | None = None): if path is None: path = f"/Users/{ws.current_user.me().user_name}/sdk-{make_random(4)}" @@ -123,6 +125,7 @@ def test_repo_management(make_repo): repo_info = make_repo() assert repo_info is not None """ + def create(*, url=None, provider=None, path=None, **kwargs): if path is None: path = f"/Repos/{ws.current_user.me().user_name}/sdk-{make_random(4)}" diff --git a/src/databricks/labs/pytester/fixtures/plugin.py b/src/databricks/labs/pytester/fixtures/plugin.py new file mode 100644 index 0000000..c7bb155 --- /dev/null +++ b/src/databricks/labs/pytester/fixtures/plugin.py @@ -0,0 +1,25 @@ +import pytest + +# pylint: 
disable=wildcard-import,unused-wildcard-import +from databricks.labs.pytester.fixtures import * # noqa: F403, F401 + + +def pytest_addoption(parser): + group = parser.getgroup("helloworld") + group.addoption( + "--name", + action="store", + dest="name", + default="World", + help='Default "name" for hello().', + ) + + +@pytest.fixture +def hello(request): + def _hello(name=None): + if not name: + name = request.config.getoption("name") + return f"Hello {name}!" + + return _hello diff --git a/src/databricks/labs/pytester/fixtures/secrets.py b/src/databricks/labs/pytester/fixtures/secrets.py index 03e2dcd..b8c4863 100644 --- a/src/databricks/labs/pytester/fixtures/secrets.py +++ b/src/databricks/labs/pytester/fixtures/secrets.py @@ -36,12 +36,13 @@ def test_secret_scope_creation(make_secret_scope): secret_scope_name = make_secret_scope() assert secret_scope_name.startswith("sdk-") """ + def create(**kwargs): name = f"sdk-{make_random(4)}" ws.secrets.create_scope(name, **kwargs) return name - yield from factory("secret scope", create, lambda scope: ws.secrets.delete_scope(scope)) + yield from factory("secret scope", create, ws.secrets.delete_scope) @pytest.fixture @@ -77,6 +78,7 @@ def test_secret_scope_acl_management(make_secret_scope_acl): acl_info = make_secret_scope_acl(scope=scope_name, principal=principal_name, permission=permission) assert acl_info == (scope_name, principal_name) """ + def create(*, scope: str, principal: str, permission: workspace.AclPermission): ws.secrets.put_acl(scope, principal, permission) return scope, principal diff --git a/src/databricks/labs/pytester/fixtures/wheel.py b/src/databricks/labs/pytester/fixtures/wheel.py index ef4941f..41e4c3f 100644 --- a/src/databricks/labs/pytester/fixtures/wheel.py +++ b/src/databricks/labs/pytester/fixtures/wheel.py @@ -3,12 +3,13 @@ import subprocess import sys from pathlib import Path +from typing import Optional import pytest from databricks.sdk.service.workspace import ImportFormat -from pathlib 
import Path -from typing import Optional + +# pylint: disable=consider-alternative-union-syntax def find_dir_with_leaf(folder: Path, leaf: str) -> Optional[Path]: @@ -21,7 +22,7 @@ def find_dir_with_leaf(folder: Path, leaf: str) -> Optional[Path]: def find_project_root(folder: Path) -> Optional[Path]: - for leaf in ['pyproject.toml', 'setup.py']: + for leaf in ('pyproject.toml', 'setup.py'): root = find_dir_with_leaf(folder, leaf) if root is not None: return root @@ -31,8 +32,7 @@ def find_project_root(folder: Path) -> Optional[Path]: def build_wheel_in(project_path: Path, out_path: Path) -> Path: try: subprocess.run( - [sys.executable, "-m", "build", "--wheel", "--outdir", - out_path.absolute(), project_path.absolute()], + [sys.executable, "-m", "build", "--wheel", "--outdir", out_path.absolute(), project_path.absolute()], capture_output=True, check=True, ) @@ -41,7 +41,7 @@ def build_wheel_in(project_path: Path, out_path: Path) -> Path: sys.stderr.write(e.stderr.decode()) raise RuntimeError(e.output.decode().strip()) from None - found_wheels = list(out_path.glob(f"*.whl")) + found_wheels = list(out_path.glob("*.whl")) if not found_wheels: msg = f"cannot find *.whl in {out_path}" raise RuntimeError(msg) @@ -58,7 +58,7 @@ def build_wheel_in(project_path: Path, out_path: Path) -> Path: def fresh_local_wheel_file(tmp_path) -> Path: project_root = find_project_root(Path(os.getcwd())) build_root = tmp_path / fresh_local_wheel_file.__name__ - shutil.copytree(project_root, build_root) + shutil.copytree(project_root, build_root) # type: ignore return build_wheel_in(build_root, tmp_path / 'dist') diff --git a/tests/integration/test_fixtures.py b/tests/integration/test_fixtures.py index 483c8ea..2158eda 100644 --- a/tests/integration/test_fixtures.py +++ b/tests/integration/test_fixtures.py @@ -3,7 +3,6 @@ from databricks.sdk.service.workspace import AclPermission -from databricks.labs.pytester.fixtures import make_user from databricks.labs.pytester.environment import 
load_debug_env_if_runs_from_ide diff --git a/tests/resources/hatchling-whl/src/hatchling_whl/__init__.py b/tests/resources/hatchling-whl/src/hatchling_whl/__init__.py index f1188a7..c6a91b9 100644 --- a/tests/resources/hatchling-whl/src/hatchling_whl/__init__.py +++ b/tests/resources/hatchling-whl/src/hatchling_whl/__init__.py @@ -1,5 +1,6 @@ def main(): print('it works') + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/tests/resources/poetry-whl/poetry_whl/__init__.py b/tests/resources/poetry-whl/poetry_whl/__init__.py index f1188a7..c6a91b9 100644 --- a/tests/resources/poetry-whl/poetry_whl/__init__.py +++ b/tests/resources/poetry-whl/poetry_whl/__init__.py @@ -1,5 +1,6 @@ def main(): print('it works') + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/tests/resources/setuppy-whl/setup.py b/tests/resources/setuppy-whl/setup.py index e01ab03..802efcc 100644 --- a/tests/resources/setuppy-whl/setup.py +++ b/tests/resources/setuppy-whl/setup.py @@ -1,11 +1,10 @@ -import io -import pathlib - from setuptools import setup, find_packages -setup(name="setuppy-whl", - version="0.0.1", - packages=find_packages(exclude=["tests", "*tests.*", "*tests"]), - python_requires=">=3.7", - author="John Doe", - author_email="john@example.com") \ No newline at end of file +setup( + name="setuppy-whl", + version="0.0.1", + packages=find_packages(exclude=["tests", "*tests.*", "*tests"]), + python_requires=">=3.7", + author="John Doe", + author_email="john@example.com", +) diff --git a/tests/resources/setuppy-whl/setuppy_whl/__init__.py b/tests/resources/setuppy-whl/setuppy_whl/__init__.py index f1188a7..c6a91b9 100644 --- a/tests/resources/setuppy-whl/setuppy_whl/__init__.py +++ b/tests/resources/setuppy-whl/setuppy_whl/__init__.py @@ -1,5 +1,6 @@ def main(): print('it works') + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git 
a/tests/resources/setuptools-whl/setuptools_whl/__init__.py b/tests/resources/setuptools-whl/setuptools_whl/__init__.py index f1188a7..c6a91b9 100644 --- a/tests/resources/setuptools-whl/setuptools_whl/__init__.py +++ b/tests/resources/setuptools-whl/setuptools_whl/__init__.py @@ -1,5 +1,6 @@ def main(): print('it works') + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/tests/test_wheels.py b/tests/test_wheels.py deleted file mode 100644 index 0072f70..0000000 --- a/tests/test_wheels.py +++ /dev/null @@ -1,27 +0,0 @@ -from pathlib import Path - -import pytest - -from databricks.labs.pytester.fixtures.wheel import build_wheel_in, find_dir_with_leaf, fresh_local_wheel_file, workspace_library - -__folder__ = Path(__file__).parent - - -def test_find_dir_with_leaf(): - x = find_dir_with_leaf(Path(__file__), '.gitignore') - assert x is not None - - -def test_find_dir_with_leaf_missing(): - x = find_dir_with_leaf(Path(__file__), '.nothing') - assert x is None - - -@pytest.mark.parametrize("build_system", ['hatchling', 'poetry', 'setuppy', 'setuptools']) -def test_building_wheels(tmp_path, build_system): - whl = build_wheel_in(__folder__ / f'resources/{build_system}-whl', tmp_path) - assert whl.exists() - - -def test_fresh_wheel_file(fresh_local_wheel_file): - assert fresh_local_wheel_file is not None diff --git a/tests/__init__.py b/tests/unit/__init__.py similarity index 100% rename from tests/__init__.py rename to tests/unit/__init__.py diff --git a/tests/unit/test_some.py b/tests/unit/test_some.py new file mode 100644 index 0000000..363b3e2 --- /dev/null +++ b/tests/unit/test_some.py @@ -0,0 +1,2 @@ +def test_something(): + pass